diff --git a/.clang-tidy b/.clang-tidy index 952c0cca82580..310c3d182c8f2 100644 --- a/.clang-tidy +++ b/.clang-tidy @@ -17,8 +17,10 @@ Checks: > -clang-analyzer-security.insecureAPI.DeprecatedOrUnsafeBufferHandling, performance-*, portability-*, + -portability-simd-intrinsics, misc-*, -misc-const-correctness, -misc-non-private-member-variables-in-classes, -misc-no-recursion, + -misc-use-anonymous-namespace, FormatStyle: none diff --git a/.devops/full.Dockerfile b/.devops/full.Dockerfile index 2a06f82b738ae..d93c0be6a70c0 100644 --- a/.devops/full.Dockerfile +++ b/.devops/full.Dockerfile @@ -3,23 +3,36 @@ ARG UBUNTU_VERSION=22.04 FROM ubuntu:$UBUNTU_VERSION AS build RUN apt-get update && \ - apt-get install -y build-essential python3 python3-pip git libcurl4-openssl-dev libgomp1 + apt-get install -y build-essential git cmake libcurl4-openssl-dev + +WORKDIR /app + +COPY . . -COPY requirements.txt requirements.txt -COPY requirements requirements +RUN cmake -S . -B build -DGGML_BACKEND_DL=ON -DGGML_NATIVE=OFF -DGGML_CPU_ALL_VARIANTS=ON -DLLAMA_CURL=ON -DCMAKE_BUILD_TYPE=Release && \ + cmake --build build -j $(nproc) && \ + mkdir -p /app/lib && \ + find build -name "*.so" -exec cp {} /app/lib/ \; -RUN pip install --upgrade pip setuptools wheel \ - && pip install -r requirements.txt +FROM ubuntu:$UBUNTU_VERSION as runtime WORKDIR /app -COPY . . +RUN apt-get update && \ + apt-get install -y build-essential python3 python3-pip git libcurl4-openssl-dev libgomp1 -ENV LLAMA_CURL=1 +COPY requirements.txt /app/requirements.txt +COPY requirements /app/requirements +COPY .devops/tools.sh /app/tools.sh +RUN pip install --upgrade pip setuptools wheel && \ + pip install -r /app/requirements.txt -RUN make -j$(nproc) +COPY --from=build /app/build/bin/ /app/ +COPY --from=build /app/lib/ /app/ +COPY --from=build /app/convert_hf_to_gguf.py /app/ +COPY --from=build /app/gguf-py /app/gguf-py ENV LC_ALL=C.utf8 -ENTRYPOINT ["/app/.devops/tools.sh"] +ENTRYPOINT ["/app/tools.sh"] diff --git a/.devops/llama-cli.Dockerfile b/.devops/llama-cli.Dockerfile index 7f741aa46ecf0..be234d55dce5c 100644 --- a/.devops/llama-cli.Dockerfile +++ b/.devops/llama-cli.Dockerfile @@ -3,21 +3,27 @@ ARG UBUNTU_VERSION=22.04 FROM ubuntu:$UBUNTU_VERSION AS build RUN apt-get update && \ - apt-get install -y build-essential git + apt-get install -y build-essential git cmake libcurl4-openssl-dev WORKDIR /app COPY . . -RUN make -j$(nproc) llama-cli +RUN cmake -S . -B build -DGGML_BACKEND_DL=ON -DGGML_NATIVE=OFF -DGGML_CPU_ALL_VARIANTS=ON -DLLAMA_CURL=ON -DCMAKE_BUILD_TYPE=Release && \ + cmake --build build -j $(nproc) && \ + mkdir -p /app/lib && \ + find build -name "*.so" -exec cp {} /app/lib/ \; FROM ubuntu:$UBUNTU_VERSION AS runtime +WORKDIR /app + RUN apt-get update && \ - apt-get install -y libgomp1 + apt-get install -y libcurl4-openssl-dev libgomp1 curl -COPY --from=build /app/llama-cli /llama-cli +COPY --from=build /app/build/bin/llama-cli /app/ +COPY --from=build /app/lib/ /app/ ENV LC_ALL=C.utf8 -ENTRYPOINT [ "/llama-cli" ] +ENTRYPOINT [ "/app/llama-cli" ] diff --git a/.devops/llama-server.Dockerfile b/.devops/llama-server.Dockerfile index 02accc85e1368..72ccde2feaeb9 100644 --- a/.devops/llama-server.Dockerfile +++ b/.devops/llama-server.Dockerfile @@ -3,22 +3,26 @@ ARG UBUNTU_VERSION=22.04 FROM ubuntu:$UBUNTU_VERSION AS build RUN apt-get update && \ - apt-get install -y build-essential git libcurl4-openssl-dev + apt-get install -y build-essential git cmake libcurl4-openssl-dev WORKDIR /app COPY . . 
-ENV LLAMA_CURL=1 - -RUN make -j$(nproc) llama-server +RUN cmake -S . -B build -DGGML_BACKEND_DL=ON -DGGML_NATIVE=OFF -DGGML_CPU_ALL_VARIANTS=ON -DLLAMA_CURL=ON -DCMAKE_BUILD_TYPE=Release && \ + cmake --build build -j $(nproc) && \ + mkdir -p /app/lib && \ + find build -name "*.so" -exec cp {} /app/lib/ \; FROM ubuntu:$UBUNTU_VERSION AS runtime +WORKDIR /app + RUN apt-get update && \ apt-get install -y libcurl4-openssl-dev libgomp1 curl -COPY --from=build /app/llama-server /llama-server +COPY --from=build /app/build/bin/llama-server /app/ +COPY --from=build /app/lib/ /app/ ENV LC_ALL=C.utf8 # Must be set to 0.0.0.0 so it can listen to requests from host machine @@ -26,4 +30,4 @@ ENV LLAMA_ARG_HOST=0.0.0.0 HEALTHCHECK CMD [ "curl", "-f", "http://localhost:8080/health" ] -ENTRYPOINT [ "/llama-server" ] +ENTRYPOINT [ "/app/llama-server" ] diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 997c6d9d05397..d9f5bdc235a00 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,7 +1 @@ - - -- [x] I have read the [contributing guidelines](https://github.com/ggerganov/llama.cpp/blob/master/CONTRIBUTING.md) -- Self-reported review complexity: - - [ ] Low - - [ ] Medium - - [ ] High +*Make sure to read the [contributing guidelines](https://github.com/ggerganov/llama.cpp/blob/master/CONTRIBUTING.md) before submitting a PR* diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 48953dafa0744..886d33d2d5640 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -160,66 +160,6 @@ jobs: path: llama-${{ steps.tag.outputs.name }}-bin-macos-x64.zip name: llama-bin-macos-x64.zip - ubuntu-focal-make: - runs-on: ubuntu-20.04 - env: - LLAMA_NODE_AVAILABLE: true - LLAMA_PYTHON_AVAILABLE: true - - steps: - - name: Clone - id: checkout - uses: actions/checkout@v4 - - - name: Dependencies - id: depends - run: | - sudo apt-get update - sudo apt-get install build-essential gcc-8 - - - uses: actions/setup-node@v4 - with: - node-version: "20" - - - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Build - id: make_build - env: - LLAMA_FATAL_WARNINGS: 1 - run: | - CC=gcc-8 make -j $(nproc) - - - name: Test - id: make_test - run: | - CC=gcc-8 make tests -j $(nproc) - make test -j $(nproc) - - ubuntu-focal-make-curl: - runs-on: ubuntu-20.04 - - steps: - - name: Clone - id: checkout - uses: actions/checkout@v4 - - - name: Dependencies - id: depends - run: | - sudo apt-get update - sudo apt-get install build-essential gcc-8 libcurl4-openssl-dev - - - name: Build - id: make_build - env: - LLAMA_FATAL_WARNINGS: 1 - LLAMA_CURL: 1 - run: | - CC=gcc-8 make -j $(nproc) - ubuntu-latest-cmake: runs-on: ubuntu-latest @@ -517,36 +457,6 @@ jobs: cmake -DGGML_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DGGML_SYCL_F16=ON .. cmake --build . --config Release -j $(nproc) - # TODO: build with GGML_NO_METAL because test-backend-ops fail on "Apple Paravirtual device" and I don't know - # how to debug it. 
- # ref: https://github.com/ggerganov/llama.cpp/actions/runs/7131777249/job/19420981052#step:5:1124 - macOS-latest-make: - runs-on: macos-latest - - steps: - - name: Clone - id: checkout - uses: actions/checkout@v4 - - - name: Dependencies - id: depends - continue-on-error: true - run: | - brew update - - - name: Build - id: make_build - env: - LLAMA_FATAL_WARNINGS: 1 - run: | - GGML_NO_METAL=1 make -j $(sysctl -n hw.logicalcpu) - - - name: Test - id: make_test - run: | - GGML_NO_METAL=1 make tests -j $(sysctl -n hw.logicalcpu) - GGML_NO_METAL=1 make test -j $(sysctl -n hw.logicalcpu) - # TODO: build with GGML_METAL=OFF because test-backend-ops fail on "Apple Paravirtual device" and I don't know # how to debug it. # ref: https://github.com/ggerganov/llama.cpp/actions/runs/7132125951/job/19422043567?pr=4359#step:5:6584 @@ -660,15 +570,26 @@ jobs: run: | brew update - - name: xcodebuild for swift package - id: xcodebuild + - name: Build llama.cpp with CMake + id: cmake_build run: | - xcodebuild -scheme llama -destination "${{ matrix.destination }}" + sysctl -a + mkdir build + cd build + cmake -G Xcode .. \ + -DGGML_METAL_USE_BF16=ON \ + -DGGML_METAL_EMBED_LIBRARY=ON \ + -DLLAMA_BUILD_EXAMPLES=OFF \ + -DLLAMA_BUILD_TESTS=OFF \ + -DLLAMA_BUILD_SERVER=OFF \ + -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" + cmake --build . --config Release -j $(sysctl -n hw.logicalcpu) + sudo cmake --install . --config Release - - name: Build Swift Example - id: make_build_swift_example + - name: xcodebuild for swift package + id: xcodebuild run: | - make swift + xcodebuild -scheme llama-Package -destination "${{ matrix.destination }}" windows-msys2: runs-on: windows-latest @@ -695,21 +616,6 @@ jobs: mingw-w64-${{matrix.env}}-cmake mingw-w64-${{matrix.env}}-openblas - - name: Build using make - shell: msys2 {0} - run: | - make -j $(nproc) - - - name: Clean after building using make - shell: msys2 {0} - run: | - make clean - - - name: Build using make w/ OpenBLAS - shell: msys2 {0} - run: | - make GGML_OPENBLAS=1 -j $(nproc) - - name: Build using CMake shell: msys2 {0} run: | @@ -1121,6 +1027,11 @@ jobs: run: | & 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' --version + - name: Install ccache + uses: hendrikmuhs/ccache-action@v1.2 + with: + key: ${{ github.job }} + - name: Build id: cmake_build run: | @@ -1202,6 +1113,29 @@ jobs: - name: Checkout code uses: actions/checkout@v4 + - name: Build + id: cmake_build + run: | + sysctl -a + mkdir build + cd build + cmake -G Xcode .. \ + -DGGML_METAL_USE_BF16=ON \ + -DGGML_METAL_EMBED_LIBRARY=ON \ + -DLLAMA_BUILD_EXAMPLES=OFF \ + -DLLAMA_BUILD_TESTS=OFF \ + -DLLAMA_BUILD_SERVER=OFF \ + -DCMAKE_SYSTEM_NAME=iOS \ + -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0 \ + -DCMAKE_XCODE_ATTRIBUTE_DEVELOPMENT_TEAM=ggml + cmake --build . --config Release -j $(sysctl -n hw.logicalcpu) -- CODE_SIGNING_ALLOWED=NO + sudo cmake --install . 
--config Release + + - name: xcodebuild for swift package + id: xcodebuild + run: | + xcodebuild -scheme llama-Package -destination 'generic/platform=iOS' + - name: Build Xcode project run: xcodebuild -project examples/llama.swiftui/llama.swiftui.xcodeproj -scheme llama.swiftui -sdk iphoneos CODE_SIGNING_REQUIRED=NO CODE_SIGN_IDENTITY= -destination 'generic/platform=iOS' build @@ -1229,32 +1163,13 @@ jobs: ./gradlew build --no-daemon -# freeBSD-latest: -# runs-on: macos-12 -# steps: -# - name: Clone -# uses: actions/checkout@v4 -# -# - name: Build -# uses: cross-platform-actions/action@v0.19.0 -# with: -# operating_system: freebsd -# version: '13.2' -# hypervisor: 'qemu' -# run: | -# sudo pkg update -# sudo pkg install -y gmake automake autoconf pkgconf llvm15 openblas -# gmake CC=/usr/local/bin/clang15 CXX=/usr/local/bin/clang++15 -j `sysctl -n hw.ncpu` - release: if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' ) || github.event.inputs.create_release == 'true' }} runs-on: ubuntu-latest needs: - - ubuntu-focal-make - ubuntu-latest-cmake - - macOS-latest-make - macOS-latest-cmake - windows-latest-cmake - windows-2019-cmake-cuda diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 2e8e3348f4292..9e66fb68cffe8 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -76,20 +76,26 @@ jobs: run: | pip install -r examples/server/tests/requirements.txt - - name: Verify server deps - id: verify_server_deps + # Setup nodejs (to be used for verifying bundled index.html) + - uses: actions/setup-node@v4 + with: + node-version: 22 + + - name: Verify bundled index.html + id: verify_server_index_html run: | git config --global --add safe.directory $(realpath .) - cd examples/server - git ls-files --others --modified + cd examples/server/webui git status - ./deps.sh + npm ci + npm run build git status - not_ignored_files="$(git ls-files --others --modified)" - echo "Modified files: ${not_ignored_files}" - if [ -n "${not_ignored_files}" ]; then - echo "Repository is dirty or server deps are not built as expected" - echo "${not_ignored_files}" + modified_files="$(git status -s)" + echo "Modified files: ${modified_files}" + if [ -n "${modified_files}" ]; then + echo "Repository is dirty or server/webui is not built as expected" + echo "Hint: You may need to follow Web UI build guide in server/README.md" + echo "${modified_files}" exit 1 fi diff --git a/.gitignore b/.gitignore index 307c065f793e6..1df7cf4a15ecd 100644 --- a/.gitignore +++ b/.gitignore @@ -104,6 +104,10 @@ examples/server/*.mjs.hpp !examples/sycl/*.bat !examples/sycl/*.sh +# Server Web UI temporary files +node_modules +examples/server/webui/dist + # Python /.venv diff --git a/CMakeLists.txt b/CMakeLists.txt index 0d389dccbfb42..a717a508ff5d9 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -46,11 +46,9 @@ if (WIN32) add_compile_definitions(_CRT_SECURE_NO_WARNINGS) endif() -if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC") - add_compile_options("$<$:/source-charset:utf-8>") - add_compile_options("$<$:/source-charset:utf-8>") - add_compile_options("$<$:/execution-charset:utf-8>") - add_compile_options("$<$:/execution-charset:utf-8>") +if (MSVC) + add_compile_options("$<$:/utf-8>") + add_compile_options("$<$:/utf-8>") endif() # @@ -96,10 +94,6 @@ if (NOT DEFINED GGML_LLAMAFILE) set(GGML_LLAMAFILE_DEFAULT ON) endif() -if (NOT DEFINED GGML_AMX) - set(GGML_AMX ON) -endif() - if (NOT DEFINED GGML_CUDA_GRAPHS) set(GGML_CUDA_GRAPHS_DEFAULT ON) endif() diff --git 
a/CMakePresets.json b/CMakePresets.json index 436448967c9af..13bdd7907ab40 100644 --- a/CMakePresets.json +++ b/CMakePresets.json @@ -31,6 +31,13 @@ { "name": "sycl_f16", "hidden": true, "cacheVariables": { "GGML_SYCL_F16": "ON" } }, { "name": "vulkan", "hidden": true, "cacheVariables": { "GGML_VULKAN": "ON" } }, + { + "name": "x64-windows-llvm", "hidden": true, + "cacheVariables": { + "CMAKE_TOOLCHAIN_FILE": "${sourceDir}/cmake/x64-windows-llvm.cmake" + } + }, + { "name": "arm64-windows-msvc", "hidden": true, "architecture": { "value": "arm64", "strategy": "external" }, @@ -70,6 +77,11 @@ { "name": "arm64-windows-msvc-release", "inherits": [ "base", "arm64-windows-msvc", "reldbg" ] }, { "name": "arm64-windows-msvc+static-release", "inherits": [ "base", "arm64-windows-msvc", "reldbg", "static" ] }, + { "name": "x64-windows-llvm-debug", "inherits": [ "base", "x64-windows-llvm", "debug" ] }, + { "name": "x64-windows-llvm-release", "inherits": [ "base", "x64-windows-llvm", "release" ] }, + { "name": "x64-windows-llvm-reldbg", "inherits": [ "base", "x64-windows-llvm", "reldbg" ] }, + { "name": "x64-windows-llvm+static-release", "inherits": [ "base", "x64-windows-llvm", "reldbg", "static" ] }, + { "name": "x64-windows-msvc-debug", "inherits": [ "base", "debug" ] }, { "name": "x64-windows-msvc-release", "inherits": [ "base", "reldbg" ] }, { "name": "x64-windows-msvc+static-release", "inherits": [ "base", "reldbg", "static" ] }, diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 0000000000000..88ab6de4fb618 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,3 @@ +# collaborators can optionally add themselves here to indicate their availability for reviewing related PRs + +ci/ @ggerganov diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4c882c254cac5..5a85ec5d2efb4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,9 +1,10 @@ # Pull requests (for contributors) - Test your changes: - - Using the commands in the [`tests`](tests) folder. For instance, running the `./tests/test-backend-ops` command tests different backend implementations of the `ggml` library - Execute [the full CI locally on your machine](ci/README.md) before publishing -- Optionally rate the complexity of your PR (i.e. `Review Complexity : Low`, `Review Complexity : Medium`, `Review Complexity : High`). This makes it easier for maintainers to triage the PRs + - Verify that the perplexity and the performance are not affected negatively by your changes (use `llama-perplexity` and `llama-bench`) + - If you modified the `ggml` source, run the `test-backend-ops` tool to check whether different backend implementations of the `ggml` operators produce consistent results (this requires access to at least two different `ggml` backends) + - If you modified a `ggml` operator or added a new one, add the corresponding test cases to `test-backend-ops` - Consider allowing write access to your branch for faster reviews, as reviewers can push commits directly - If your PR becomes stale, don't hesitate to ping the maintainers in the comments @@ -12,6 +13,7 @@ - Squash-merge PRs - Use the following format for the squashed commit title: ` : (#)`. For example: `utils : fix typo in utils.py (#1234)` - Optionally pick a `` from here: https://github.com/ggerganov/llama.cpp/wiki/Modules +- Consider adding yourself to [CODEOWNERS](CODEOWNERS) # Coding guidelines diff --git a/Makefile b/Makefile index 200aafd3caa68..19ae0d5f1c87b 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,7 @@ +ifndef LLAMA_MAKEFILE +$(error The Makefile build is deprecated. 
Use the CMake build instead. For more details, see https://github.com/ggerganov/llama.cpp/blob/master/docs/build.md) +endif + # Define the default target now so that it is always the first target BUILD_TARGETS = \ libllava.a \ @@ -252,11 +256,11 @@ endif # Compile flags # -# keep standard at C11 and C++11 +# keep standard at C11 and C++17 MK_CPPFLAGS = -Iggml/include -Iggml/src -Iinclude -Isrc -Icommon -DGGML_USE_CPU MK_CFLAGS = -std=c11 -fPIC -MK_CXXFLAGS = -std=c++11 -fPIC -MK_NVCCFLAGS = -std=c++11 +MK_CXXFLAGS = -std=c++17 -fPIC +MK_NVCCFLAGS = -std=c++17 ifdef LLAMA_NO_CCACHE GGML_NO_CCACHE := 1 @@ -442,6 +446,10 @@ ifeq ($(UNAME_M),$(filter $(UNAME_M),x86_64 i686 amd64)) MK_CFLAGS += -march=native -mtune=native HOST_CXXFLAGS += -march=native -mtune=native + # Usage AMX build test + #MK_CFLAGS += -march=graniterapids -mtune=graniterapids + #HOST_CXXFLAGS += -march=graniterapids -mtune=graniterapids + # Usage AVX-only #MK_CFLAGS += -mfma -mf16c -mavx #MK_CXXFLAGS += -mfma -mf16c -mavx @@ -576,9 +584,12 @@ endif ifndef GGML_NO_AMX MK_CPPFLAGS += -DGGML_USE_AMX - OBJ_GGML_EXT += ggml/src/ggml-amx/ggml-amx.o ggml/src/ggml-amx/mmq.o + OBJ_GGML_EXT += ggml/src/ggml-cpu/amx/amx.o ggml/src/ggml-cpu/amx/mmq.o endif +# only necessary for the CPU backend files +MK_CPPFLAGS += -Iggml/src/ggml-cpu + ifdef GGML_RPC MK_CPPFLAGS += -DGGML_USE_RPC OBJ_GGML_EXT += ggml/src/ggml-rpc.o @@ -942,7 +953,6 @@ DIR_COMMON = common OBJ_GGML = \ $(DIR_GGML)/src/ggml.o \ - $(DIR_GGML)/src/ggml-aarch64.o \ $(DIR_GGML)/src/ggml-alloc.o \ $(DIR_GGML)/src/ggml-backend.o \ $(DIR_GGML)/src/ggml-backend-reg.o \ @@ -950,9 +960,11 @@ OBJ_GGML = \ $(DIR_GGML)/src/ggml-quants.o \ $(DIR_GGML)/src/ggml-threading.o \ $(DIR_GGML)/src/ggml-cpu/ggml-cpu.o \ - $(DIR_GGML)/src/ggml-cpu/ggml-cpu-cpp.o \ + $(DIR_GGML)/src/ggml-cpu/ggml-cpu_cpp.o \ $(DIR_GGML)/src/ggml-cpu/ggml-cpu-aarch64.o \ + $(DIR_GGML)/src/ggml-cpu/ggml-cpu-hbm.o \ $(DIR_GGML)/src/ggml-cpu/ggml-cpu-quants.o \ + $(DIR_GGML)/src/ggml-cpu/ggml-cpu-traits.o \ $(OBJ_GGML_EXT) OBJ_LLAMA = \ @@ -1092,17 +1104,10 @@ DEP_FILES = $(OBJ_GGML:.o=.d) $(OBJ_LLAMA:.o=.d) $(OBJ_COMMON:.o=.d) # Default target all: $(BUILD_TARGETS) +# force c++ build for source file that have same name as c file # Note: need this exception because `ggml-cpu.c` and `ggml-cpu.cpp` both produce the same obj/dep files -# g++ -M -I ./ggml/include/ -I ./ggml/src ggml/src/ggml-cpu/ggml-cpu.cpp | grep ggml -$(DIR_GGML)/src/ggml-cpu/ggml-cpu-cpp.o: \ - ggml/src/ggml-cpu/ggml-cpu.cpp \ - ggml/include/ggml-backend.h \ - ggml/include/ggml.h \ - ggml/include/ggml-alloc.h \ - ggml/src/ggml-backend-impl.h \ - ggml/include/ggml-cpu.h \ - ggml/src/ggml-impl.h - $(CXX) $(CXXFLAGS) -c $< -o $@ +$(DIR_GGML)/%_cpp.o: $(DIR_GGML)/%.cpp + $(CXX) $(CXXFLAGS) -MMD -c $< -o $@ # Rules for building object files $(DIR_GGML)/%.o: $(DIR_GGML)/%.c @@ -1139,8 +1144,15 @@ $(LIB_COMMON_S): $(OBJ_COMMON) # Include dependency files -include $(DEP_FILES) +# Clean generated server assets +clean-server-assets: + find examples/server -type f -name "*.js.hpp" -delete + find examples/server -type f -name "*.mjs.hpp" -delete + find examples/server -type f -name "*.css.hpp" -delete + find examples/server -type f -name "*.html.hpp" -delete + # Clean rule -clean: +clean: clean-server-assets rm -vrf $(BUILD_TARGETS) $(TEST_TARGETS) rm -rvf *.a *.dll *.so *.dot find ggml src common tests examples pocs -type f -name "*.o" -delete @@ -1348,20 +1360,14 @@ llama-server: \ examples/server/utils.hpp \ examples/server/httplib.h \ 
examples/server/index.html.hpp \ - examples/server/completion.js.hpp \ examples/server/loading.html.hpp \ - examples/server/deps_daisyui.min.css.hpp \ - examples/server/deps_markdown-it.js.hpp \ - examples/server/deps_tailwindcss.js.hpp \ - examples/server/deps_vue.esm-browser.js.hpp \ common/json.hpp \ - common/stb_image.h \ $(OBJ_ALL) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h %.hpp $<,$^) -Iexamples/server $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) $(LWINSOCK2) # Portable equivalent of `cd examples/server/public && xxd -i $(notdir $<) ../$(notdir $<).hpp`: -examples/server/%.hpp: examples/server/public/% Makefile +examples/server/%.hpp: examples/server/public/% FORCE Makefile @( export NAME=$(subst .,_,$(subst -,_,$(notdir $<))) && \ echo "unsigned char $${NAME}[] = {" && \ cat $< | od -v -t x1 -An | sed -E 's/([0-9a-fA-F]+)/0x\1, /g' && \ @@ -1544,7 +1550,7 @@ llama-q8dot: pocs/vdot/q8dot.cpp ggml/src/ggml.o \ # Deprecated binaries that we want to keep around long enough for people to migrate to the new filenames, then these can be removed. # # Mark legacy binary targets as .PHONY so that they are always checked. -.PHONY: main quantize perplexity embedding server +.PHONY: FORCE main quantize perplexity embedding server # Define the object file target examples/deprecation-warning/deprecation-warning.o: examples/deprecation-warning/deprecation-warning.cpp diff --git a/Package.swift b/Package.swift index d9e8a4e2d21d7..01c996d242037 100644 --- a/Package.swift +++ b/Package.swift @@ -2,57 +2,6 @@ import PackageDescription -var sources = [ - "src/llama.cpp", - "src/llama-vocab.cpp", - "src/llama-grammar.cpp", - "src/llama-sampling.cpp", - "src/unicode.cpp", - "src/unicode-data.cpp", - "ggml/src/ggml.c", - "ggml/src/ggml-aarch64.c", - "ggml/src/ggml-alloc.c", - "ggml/src/ggml-backend.cpp", - "ggml/src/ggml-backend-reg.cpp", - "ggml/src/ggml-cpu/ggml-cpu.c", - "ggml/src/ggml-cpu/ggml-cpu.cpp", - "ggml/src/ggml-cpu/ggml-cpu-aarch64.c", - "ggml/src/ggml-cpu/ggml-cpu-quants.c", - "ggml/src/ggml-threading.cpp", - "ggml/src/ggml-quants.c", -] - -var resources: [Resource] = [] -var linkerSettings: [LinkerSetting] = [] -var cSettings: [CSetting] = [ - .unsafeFlags(["-Wno-shorten-64-to-32", "-O3", "-DNDEBUG"]), - .unsafeFlags(["-fno-objc-arc"]), - .headerSearchPath("ggml/src"), - // NOTE: NEW_LAPACK will required iOS version 16.4+ - // We should consider add this in the future when we drop support for iOS 14 - // (ref: ref: https://developer.apple.com/documentation/accelerate/1513264-cblas_sgemm?language=objc) - // .define("ACCELERATE_NEW_LAPACK"), - // .define("ACCELERATE_LAPACK_ILP64") -] - -#if canImport(Darwin) -sources.append("ggml/src/ggml-common.h") -sources.append("ggml/src/ggml-metal/ggml-metal.m") -resources.append(.process("ggml/src/ggml-metal/ggml-metal.metal")) -linkerSettings.append(.linkedFramework("Accelerate")) -cSettings.append( - contentsOf: [ - .define("GGML_USE_ACCELERATE"), - .define("GGML_USE_METAL"), - .define("GGML_USE_CPU") - ] -) -#endif - -#if os(Linux) - cSettings.append(.define("_GNU_SOURCE")) -#endif - let package = Package( name: "llama", platforms: [ @@ -65,26 +14,6 @@ let package = Package( .library(name: "llama", targets: ["llama"]), ], targets: [ - .target( - name: "llama", - path: ".", - exclude: [ - "build", - "cmake", - "examples", - "scripts", - "models", - "tests", - "CMakeLists.txt", - "Makefile", - "ggml/src/ggml-metal-embed.metal" - ], - sources: sources, - resources: resources, - publicHeadersPath: 
"spm-headers", - cSettings: cSettings, - linkerSettings: linkerSettings - ) - ], - cxxLanguageStandard: .cxx11 + .systemLibrary(name: "llama", pkgConfig: "llama"), + ] ) diff --git a/README.md b/README.md index 414c5b1c0824a..6fdd8d9eefbfb 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,6 @@ [![License: MIT](https://img.shields.io/badge/license-MIT-blue.svg)](https://opensource.org/licenses/MIT) [![Server](https://github.com/ggerganov/llama.cpp/actions/workflows/server.yml/badge.svg)](https://github.com/ggerganov/llama.cpp/actions/workflows/server.yml) -[![Conan Center](https://shields.io/conan/v/llama-cpp)](https://conan.io/center/llama-cpp) [Roadmap](https://github.com/users/ggerganov/projects/7) / [Project status](https://github.com/ggerganov/llama.cpp/discussions/3471) / [Manifesto](https://github.com/ggerganov/llama.cpp/discussions/205) / [ggml](https://github.com/ggerganov/ggml) @@ -26,7 +25,7 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others) ## Description The main goal of `llama.cpp` is to enable LLM inference with minimal setup and state-of-the-art performance on a wide -variety of hardware - locally and in the cloud. +range of hardware - locally and in the cloud. - Plain C/C++ implementation without any dependencies - Apple silicon is a first-class citizen - optimized via ARM NEON, Accelerate and Metal frameworks @@ -36,14 +35,17 @@ variety of hardware - locally and in the cloud. - Vulkan and SYCL backend support - CPU+GPU hybrid inference to partially accelerate models larger than the total VRAM capacity -Since its [inception](https://github.com/ggerganov/llama.cpp/issues/33#issuecomment-1465108022), the project has -improved significantly thanks to many contributions. It is the main playground for developing new features for the -[ggml](https://github.com/ggerganov/ggml) library. +The `llama.cpp` project is the main playground for developing new features for the [ggml](https://github.com/ggerganov/ggml) library. -**Supported models:** +
+Models Typically finetunes of the base models below are supported as well. +Instructions for adding support for new models: [HOWTO-add-model.md](docs/development/HOWTO-add-model.md) + +#### Text-only + - [X] LLaMA 🦙 - [x] LLaMA 2 🦙🦙 - [x] LLaMA 3 🦙🦙🦙 @@ -97,9 +99,7 @@ Typically finetunes of the base models below are supported as well. - [x] [Bielik-11B-v2.3](https://huggingface.co/collections/speakleash/bielik-11b-v23-66ee813238d9b526a072408a) - [x] [RWKV-6](https://github.com/BlinkDL/RWKV-LM) -(instructions for supporting more models: [HOWTO-add-model.md](./docs/development/HOWTO-add-model.md)) - -**Multimodal models:** +#### Multimodal - [x] [LLaVA 1.5 models](https://huggingface.co/collections/liuhaotian/llava-15-653aac15d994e992e2677a7e), [LLaVA 1.6 models](https://huggingface.co/collections/liuhaotian/llava-16-65b9e40155f60fd046a5ccf2) - [x] [BakLLaVA](https://huggingface.co/models?search=SkunkworksAI/Bakllava) @@ -111,7 +111,10 @@ Typically finetunes of the base models below are supported as well. - [x] [Moondream](https://huggingface.co/vikhyatk/moondream2) - [x] [Bunny](https://github.com/BAAI-DCAI/Bunny) -**Bindings:** +
+ +
+Bindings - Python: [abetlen/llama-cpp-python](https://github.com/abetlen/llama-cpp-python) - Go: [go-skynet/go-llama.cpp](https://github.com/go-skynet/go-llama.cpp) @@ -138,316 +141,314 @@ Typically finetunes of the base models below are supported as well. - Swift [srgtuszy/llama-cpp-swift](https://github.com/srgtuszy/llama-cpp-swift) - Swift [ShenghaiWang/SwiftLlama](https://github.com/ShenghaiWang/SwiftLlama) -**UI:** +
-Unless otherwise noted these projects are open-source with permissive licensing: +
+UIs -- [MindWorkAI/AI-Studio](https://github.com/MindWorkAI/AI-Studio) (FSL-1.1-MIT) -- [iohub/collama](https://github.com/iohub/coLLaMA) +*(to have a project listed here, it should clearly state that it depends on `llama.cpp`)* + +- [AI Sublime Text plugin](https://github.com/yaroslavyaroslav/OpenAI-sublime-text) (MIT) +- [cztomsik/ava](https://github.com/cztomsik/ava) (MIT) +- [Dot](https://github.com/alexpinel/Dot) (GPL) +- [eva](https://github.com/ylsdamxssjxxdd/eva) (MIT) +- [iohub/collama](https://github.com/iohub/coLLaMA) (Apache-2.0) - [janhq/jan](https://github.com/janhq/jan) (AGPL) -- [nat/openplayground](https://github.com/nat/openplayground) -- [Faraday](https://faraday.dev/) (proprietary) +- [KanTV](https://github.com/zhouwg/kantv?tab=readme-ov-file) (Apache-2.0) +- [KodiBot](https://github.com/firatkiral/kodibot) (GPL) +- [llama.vim](https://github.com/ggml-org/llama.vim) (MIT) +- [LARS](https://github.com/abgulati/LARS) (AGPL) +- [Llama Assistant](https://github.com/vietanhdev/llama-assistant) (GPL) +- [LLMFarm](https://github.com/guinmoon/LLMFarm?tab=readme-ov-file) (MIT) +- [LLMUnity](https://github.com/undreamai/LLMUnity) (MIT) - [LMStudio](https://lmstudio.ai/) (proprietary) -- [Layla](https://play.google.com/store/apps/details?id=com.laylalite) (proprietary) -- [ramalama](https://github.com/containers/ramalama) (MIT) - [LocalAI](https://github.com/mudler/LocalAI) (MIT) - [LostRuins/koboldcpp](https://github.com/LostRuins/koboldcpp) (AGPL) -- [Mozilla-Ocho/llamafile](https://github.com/Mozilla-Ocho/llamafile) -- [nomic-ai/gpt4all](https://github.com/nomic-ai/gpt4all) -- [ollama/ollama](https://github.com/ollama/ollama) +- [MindMac](https://mindmac.app) (proprietary) +- [MindWorkAI/AI-Studio](https://github.com/MindWorkAI/AI-Studio) (FSL-1.1-MIT) +- [Mobile-Artificial-Intelligence/maid](https://github.com/Mobile-Artificial-Intelligence/maid) (MIT) +- [Mozilla-Ocho/llamafile](https://github.com/Mozilla-Ocho/llamafile) (Apache-2.0) +- [nat/openplayground](https://github.com/nat/openplayground) (MIT) +- [nomic-ai/gpt4all](https://github.com/nomic-ai/gpt4all) (MIT) +- [ollama/ollama](https://github.com/ollama/ollama) (MIT) - [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui) (AGPL) -- [psugihara/FreeChat](https://github.com/psugihara/FreeChat) -- [cztomsik/ava](https://github.com/cztomsik/ava) (MIT) -- [ptsochantaris/emeltal](https://github.com/ptsochantaris/emeltal) +- [PocketPal AI](https://github.com/a-ghorbani/pocketpal-ai) (MIT) +- [psugihara/FreeChat](https://github.com/psugihara/FreeChat) (MIT) +- [ptsochantaris/emeltal](https://github.com/ptsochantaris/emeltal) (MIT) - [pythops/tenere](https://github.com/pythops/tenere) (AGPL) -- [RAGNA Desktop](https://ragna.app/) (proprietary) -- [RecurseChat](https://recurse.chat/) (proprietary) -- [semperai/amica](https://github.com/semperai/amica) -- [withcatai/catai](https://github.com/withcatai/catai) -- [Mobile-Artificial-Intelligence/maid](https://github.com/Mobile-Artificial-Intelligence/maid) (MIT) -- [Msty](https://msty.app) (proprietary) -- [LLMFarm](https://github.com/guinmoon/LLMFarm?tab=readme-ov-file) (MIT) -- [KanTV](https://github.com/zhouwg/kantv?tab=readme-ov-file)(Apachev2.0 or later) -- [Dot](https://github.com/alexpinel/Dot) (GPL) -- [MindMac](https://mindmac.app) (proprietary) -- [KodiBot](https://github.com/firatkiral/kodibot) (GPL) -- [eva](https://github.com/ylsdamxssjxxdd/eva) (MIT) -- [AI Sublime Text plugin](https://github.com/yaroslavyaroslav/OpenAI-sublime-text) 
(MIT) -- [AIKit](https://github.com/sozercan/aikit) (MIT) -- [LARS - The LLM & Advanced Referencing Solution](https://github.com/abgulati/LARS) (AGPL) -- [LLMUnity](https://github.com/undreamai/LLMUnity) (MIT) -- [Llama Assistant](https://github.com/vietanhdev/llama-assistant) (GPL) -- [PocketPal AI - An iOS and Android App](https://github.com/a-ghorbani/pocketpal-ai) (MIT) +- [ramalama](https://github.com/containers/ramalama) (MIT) +- [semperai/amica](https://github.com/semperai/amica) (MIT) +- [withcatai/catai](https://github.com/withcatai/catai) (MIT) -*(to have a project listed here, it should clearly state that it depends on `llama.cpp`)* +
-**Tools:** +
+Tools - [akx/ggify](https://github.com/akx/ggify) – download PyTorch models from HuggingFace Hub and convert them to GGML - [akx/ollama-dl](https://github.com/akx/ollama-dl) – download models from the Ollama library to be used directly with llama.cpp - [crashr/gppm](https://github.com/crashr/gppm) – launch llama.cpp instances utilizing NVIDIA Tesla P40 or P100 GPUs with reduced idle power consumption - [gpustack/gguf-parser](https://github.com/gpustack/gguf-parser-go/tree/main/cmd/gguf-parser) - review/check the GGUF file and estimate the memory usage -- [Styled Lines](https://marketplace.unity.com/packages/tools/generative-ai/styled-lines-llama-cpp-model-292902) (proprietary licensed, async wrapper of inference part for game development in Unity3d with prebuild Mobile and Web platform wrappers and a model example) +- [Styled Lines](https://marketplace.unity.com/packages/tools/generative-ai/styled-lines-llama-cpp-model-292902) (proprietary licensed, async wrapper of inference part for game development in Unity3d with pre-built Mobile and Web platform wrappers and a model example) + +
-**Infrastructure:** +
+Infrastructure - [Paddler](https://github.com/distantmagic/paddler) - Stateful load balancer custom-tailored for llama.cpp - [GPUStack](https://github.com/gpustack/gpustack) - Manage GPU clusters for running LLMs - [llama_cpp_canister](https://github.com/onicai/llama_cpp_canister) - llama.cpp as a smart contract on the Internet Computer, using WebAssembly -**Games:** -- [Lucy's Labyrinth](https://github.com/MorganRO8/Lucys_Labyrinth) - A simple maze game where agents controlled by an AI model will try to trick you. - -## Demo +
-Typical run using LLaMA v2 13B on M2 Ultra - -``` -$ make -j && ./llama-cli -m models/llama-13b-v2/ggml-model-q4_0.gguf -p "Building a website can be done in 10 simple steps:\nStep 1:" -n 400 -e -I llama.cpp build info: -I UNAME_S: Darwin -I UNAME_P: arm -I UNAME_M: arm64 -I CFLAGS: -I. -O3 -std=c11 -fPIC -DNDEBUG -Wall -Wextra -Wpedantic -Wcast-qual -Wdouble-promotion -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -pthread -DGGML_USE_K_QUANTS -DGGML_USE_ACCELERATE -I CXXFLAGS: -I. -I./common -O3 -std=c++11 -fPIC -DNDEBUG -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wno-multichar -pthread -DGGML_USE_K_QUANTS -I LDFLAGS: -framework Accelerate -I CC: Apple clang version 14.0.3 (clang-1403.0.22.14.1) -I CXX: Apple clang version 14.0.3 (clang-1403.0.22.14.1) - -make: Nothing to be done for `default'. -main: build = 1041 (cf658ad) -main: seed = 1692823051 -llama_model_loader: loaded meta data with 16 key-value pairs and 363 tensors from models/llama-13b-v2/ggml-model-q4_0.gguf (version GGUF V1 (latest)) -llama_model_loader: - type f32: 81 tensors -llama_model_loader: - type q4_0: 281 tensors -llama_model_loader: - type q6_K: 1 tensors -llm_load_print_meta: format = GGUF V1 (latest) -llm_load_print_meta: arch = llama -llm_load_print_meta: vocab type = SPM -llm_load_print_meta: n_vocab = 32000 -llm_load_print_meta: n_merges = 0 -llm_load_print_meta: n_ctx_train = 4096 -llm_load_print_meta: n_ctx = 512 -llm_load_print_meta: n_embd = 5120 -llm_load_print_meta: n_head = 40 -llm_load_print_meta: n_head_kv = 40 -llm_load_print_meta: n_layer = 40 -llm_load_print_meta: n_rot = 128 -llm_load_print_meta: n_gqa = 1 -llm_load_print_meta: f_norm_eps = 1.0e-05 -llm_load_print_meta: f_norm_rms_eps = 1.0e-05 -llm_load_print_meta: n_ff = 13824 -llm_load_print_meta: freq_base = 10000.0 -llm_load_print_meta: freq_scale = 1 -llm_load_print_meta: model type = 13B -llm_load_print_meta: model ftype = mostly Q4_0 -llm_load_print_meta: model size = 13.02 B -llm_load_print_meta: general.name = LLaMA v2 -llm_load_print_meta: BOS token = 1 '' -llm_load_print_meta: EOS token = 2 '' -llm_load_print_meta: UNK token = 0 '' -llm_load_print_meta: LF token = 13 '<0x0A>' -llm_load_tensors: ggml ctx size = 0.11 MB -llm_load_tensors: mem required = 7024.01 MB (+ 400.00 MB per state) -................................................................................................... -llama_new_context_with_model: kv self size = 400.00 MB -llama_new_context_with_model: compute buffer total size = 75.41 MB - -system_info: n_threads = 16 / 24 | AVX = 0 | AVX2 = 0 | AVX512 = 0 | AVX512_VBMI = 0 | AVX512_VNNI = 0 | FMA = 0 | NEON = 1 | ARM_FMA = 1 | F16C = 0 | FP16_VA = 1 | WASM_SIMD = 0 | BLAS = 1 | SSE3 = 0 | VSX = 0 | -sampling: repeat_last_n = 64, repeat_penalty = 1.100000, presence_penalty = 0.000000, frequency_penalty = 0.000000, top_k = 40, tfs_z = 1.000000, top_p = 0.950000, typical_p = 1.000000, temp = 0.800000, mirostat = 0, mirostat_lr = 0.100000, mirostat_ent = 5.000000 -generate: n_ctx = 512, n_batch = 512, n_predict = 400, n_keep = 0 - - - Building a website can be done in 10 simple steps: -Step 1: Find the right website platform. -Step 2: Choose your domain name and hosting plan. -Step 3: Design your website layout. -Step 4: Write your website content and add images. 
-Step 5: Install security features to protect your site from hackers or spammers -Step 6: Test your website on multiple browsers, mobile devices, operating systems etc… -Step 7: Test it again with people who are not related to you personally – friends or family members will work just fine! -Step 8: Start marketing and promoting the website via social media channels or paid ads -Step 9: Analyze how many visitors have come to your site so far, what type of people visit more often than others (e.g., men vs women) etc… -Step 10: Continue to improve upon all aspects mentioned above by following trends in web design and staying up-to-date on new technologies that can enhance user experience even further! -How does a Website Work? -A website works by having pages, which are made of HTML code. This code tells your computer how to display the content on each page you visit – whether it’s an image or text file (like PDFs). In order for someone else’s browser not only be able but also want those same results when accessing any given URL; some additional steps need taken by way of programming scripts that will add functionality such as making links clickable! -The most common type is called static HTML pages because they remain unchanged over time unless modified manually (either through editing files directly or using an interface such as WordPress). They are usually served up via HTTP protocols – this means anyone can access them without having any special privileges like being part of a group who is allowed into restricted areas online; however, there may still exist some limitations depending upon where one lives geographically speaking. -How to -llama_print_timings: load time = 576.45 ms -llama_print_timings: sample time = 283.10 ms / 400 runs ( 0.71 ms per token, 1412.91 tokens per second) -llama_print_timings: prompt eval time = 599.83 ms / 19 tokens ( 31.57 ms per token, 31.68 tokens per second) -llama_print_timings: eval time = 24513.59 ms / 399 runs ( 61.44 ms per token, 16.28 tokens per second) -llama_print_timings: total time = 25431.49 ms -``` +Games + +- [Lucy's Labyrinth](https://github.com/MorganRO8/Lucys_Labyrinth) - A simple maze game where agents controlled by an AI model will try to trick you.
-
-Demo of running both LLaMA-7B and whisper.cpp on a single M1 Pro MacBook +## Supported backends + +| Backend | Target devices | +| --- | --- | +| [Metal](docs/build.md#metal-build) | Apple Silicon | +| [BLAS](docs/build.md#blas-build) | All | +| [BLIS](docs/backend/BLIS.md) | All | +| [SYCL](docs/backend/SYCL.md) | Intel and Nvidia GPU | +| [MUSA](docs/build.md#musa) | Moore Threads MTT GPU | +| [CUDA](docs/build.md#cuda) | Nvidia GPU | +| [hipBLAS](docs/build.md#hipblas) | AMD GPU | +| [Vulkan](docs/build.md#vulkan) | GPU | +| [CANN](docs/build.md#cann) | Ascend NPU | -And here is another demo of running both LLaMA-7B and [whisper.cpp](https://github.com/ggerganov/whisper.cpp) on a single M1 Pro MacBook: +## Building the project -https://user-images.githubusercontent.com/1991296/224442907-7693d4be-acaa-4e01-8b4f-add84093ffff.mp4 +The main product of this project is the `llama` library. Its C-style interface can be found in [include/llama.h](include/llama.h). +The project also includes many example programs and tools using the `llama` library. The examples range from simple, minimal code snippets to sophisticated sub-projects such as an OpenAI-compatible HTTP server. Possible methods for obtaining the binaries: -
+- Clone this repository and build locally, see [how to build](docs/build.md) +- On MacOS or Linux, install `llama.cpp` via [brew, flox or nix](docs/install.md) +- Use a Docker image, see [documentation for Docker](docs/docker.md) +- Download pre-built binaries from [releases](https://github.com/ggerganov/llama.cpp/releases) -## Usage +## Obtaining and quantizing models -Here are the end-to-end binary build and model conversion steps for most supported models. +The [Hugging Face](https://huggingface.co) platform hosts a [number of LLMs](https://huggingface.co/models?library=gguf&sort=trending) compatible with `llama.cpp`: -### Basic usage +- [Trending](https://huggingface.co/models?library=gguf&sort=trending) +- [LLaMA](https://huggingface.co/models?sort=trending&search=llama+gguf) -Firstly, you need to get the binary. There are different methods that you can follow: -- Method 1: Clone this repository and build locally, see [how to build](./docs/build.md) -- Method 2: If you are using MacOS or Linux, you can install llama.cpp via [brew, flox or nix](./docs/install.md) -- Method 3: Use a Docker image, see [documentation for Docker](./docs/docker.md) -- Method 4: Download pre-built binary from [releases](https://github.com/ggerganov/llama.cpp/releases) +After downloading a model, use the CLI tools to run it locally - see below. -You can run a basic completion using this command: +`llama.cpp` requires the model to be stored in the [GGUF](https://github.com/ggerganov/ggml/blob/master/docs/gguf.md) file format. Models in other data formats can be converted to GGUF using the `convert_*.py` Python scripts in this repo. -```bash -llama-cli -m your_model.gguf -p "I believe the meaning of life is" -n 128 +The Hugging Face platform provides a variety of online tools for converting, quantizing and hosting models with `llama.cpp`: -# Output: -# I believe the meaning of life is to find your own truth and to live in accordance with it. For me, this means being true to myself and following my passions, even if they don't align with societal expectations. I think that's what I love about yoga – it's not just a physical practice, but a spiritual one too. It's about connecting with yourself, listening to your inner voice, and honoring your own unique journey. -``` +- Use the [GGUF-my-repo space](https://huggingface.co/spaces/ggml-org/gguf-my-repo) to convert to GGUF format and quantize model weights to smaller sizes +- Use the [GGUF-my-LoRA space](https://huggingface.co/spaces/ggml-org/gguf-my-lora) to convert LoRA adapters to GGUF format (more info: https://github.com/ggerganov/llama.cpp/discussions/10123) +- Use the [GGUF-editor space](https://huggingface.co/spaces/CISCai/gguf-editor) to edit GGUF meta data in the browser (more info: https://github.com/ggerganov/llama.cpp/discussions/9268) +- Use the [Inference Endpoints](https://ui.endpoints.huggingface.co/) to directly host `llama.cpp` in the cloud (more info: https://github.com/ggerganov/llama.cpp/discussions/9669) -See [this page](./examples/main/README.md) for a full list of parameters. +To learn more about model quantization, [read this documentation](examples/quantize/README.md) -### Conversation mode +## [`llama-cli`](examples/main) -If you want a more ChatGPT-like experience, you can run in conversation mode by passing `-cnv` as a parameter: +#### A CLI tool for accessing and experimenting with most of `llama.cpp`'s functionality. -```bash -llama-cli -m your_model.gguf -p "You are a helpful assistant" -cnv +-
+ Run simple text completion -# Output: -# > hi, who are you? -# Hi there! I'm your helpful assistant! I'm an AI-powered chatbot designed to assist and provide information to users like you. I'm here to help answer your questions, provide guidance, and offer support on a wide range of topics. I'm a friendly and knowledgeable AI, and I'm always happy to help with anything you need. What's on your mind, and how can I assist you today? -# -# > what is 1+1? -# Easy peasy! The answer to 1+1 is... 2! -``` + ```bash + llama-cli -m model.gguf -p "I believe the meaning of life is" -n 128 -By default, the chat template will be taken from the input model. If you want to use another chat template, pass `--chat-template NAME` as a parameter. See the list of [supported templates](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) + # I believe the meaning of life is to find your own truth and to live in accordance with it. For me, this means being true to myself and following my passions, even if they don't align with societal expectations. I think that's what I love about yoga – it's not just a physical practice, but a spiritual one too. It's about connecting with yourself, listening to your inner voice, and honoring your own unique journey. + ``` -```bash -./llama-cli -m your_model.gguf -p "You are a helpful assistant" -cnv --chat-template chatml -``` +
-You can also use your own template via in-prefix, in-suffix and reverse-prompt parameters: +-
+ Run in conversation mode -```bash -./llama-cli -m your_model.gguf -p "You are a helpful assistant" -cnv --in-prefix 'User: ' --reverse-prompt 'User:' -``` + ```bash + llama-cli -m model.gguf -p "You are a helpful assistant" -cnv -### Web server + # > hi, who are you? + # Hi there! I'm your helpful assistant! I'm an AI-powered chatbot designed to assist and provide information to users like you. I'm here to help answer your questions, provide guidance, and offer support on a wide range of topics. I'm a friendly and knowledgeable AI, and I'm always happy to help with anything you need. What's on your mind, and how can I assist you today? + # + # > what is 1+1? + # Easy peasy! The answer to 1+1 is... 2! + ``` -[llama.cpp web server](./examples/server/README.md) is a lightweight [OpenAI API](https://github.com/openai/openai-openapi) compatible HTTP server that can be used to serve local models and easily connect them to existing clients. +
-Example usage: +-
+ Run with custom chat template -```bash -./llama-server -m your_model.gguf --port 8080 + ```bash + # use the "chatml" template + llama-cli -m model.gguf -p "You are a helpful assistant" -cnv --chat-template chatml -# Basic web UI can be accessed via browser: http://localhost:8080 -# Chat completion endpoint: http://localhost:8080/v1/chat/completions -``` + # use a custom template + llama-cli -m model.gguf -p "You are a helpful assistant" -cnv --in-prefix 'User: ' --reverse-prompt 'User:' + ``` -### Interactive mode + [Supported templates](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) -> [!NOTE] -> If you prefer basic usage, please consider using conversation mode instead of interactive mode +
-In this mode, you can always interrupt generation by pressing Ctrl+C and entering one or more lines of text, which will be converted into tokens and appended to the current context. You can also specify a *reverse prompt* with the parameter `-r "reverse prompt string"`. This will result in user input being prompted whenever the exact tokens of the reverse prompt string are encountered in the generation. A typical use is to use a prompt that makes LLaMA emulate a chat between multiple users, say Alice and Bob, and pass `-r "Alice:"`. +-
+ Constrain the output with a custom grammar -Here is an example of a few-shot interaction, invoked with the command + ```bash + llama-cli -m model.gguf -n 256 --grammar-file grammars/json.gbnf -p 'Request: schedule a call at 8pm; Command:' -```bash -# default arguments using a 7B model -./examples/chat.sh + # {"appointmentTime": "8pm", "appointmentDetails": "schedule a a call"} + ``` -# advanced chat with a 13B model -./examples/chat-13B.sh + The [grammars/](grammars/) folder contains a handful of sample grammars. To write your own, check out the [GBNF Guide](grammars/README.md). -# custom arguments using a 13B model -./llama-cli -m ./models/13B/ggml-model-q4_0.gguf -n 256 --repeat_penalty 1.0 --color -i -r "User:" -f prompts/chat-with-bob.txt -``` + For authoring more complex JSON grammars, check out https://grammar.intrinsiclabs.ai/ -Note the use of `--color` to distinguish between user input and generated text. Other parameters are explained in more detail in the [README](examples/main/README.md) for the `llama-cli` example program. +
-![image](https://user-images.githubusercontent.com/1991296/224575029-2af3c7dc-5a65-4f64-a6bb-517a532aea38.png) -### Persistent Interaction +## [`llama-server`](examples/server) -The prompt, user inputs, and model generations can be saved and resumed across calls to `./llama-cli` by leveraging `--prompt-cache` and `--prompt-cache-all`. The `./examples/chat-persistent.sh` script demonstrates this with support for long-running, resumable chat sessions. To use this example, you must provide a file to cache the initial chat prompt and a directory to save the chat session, and may optionally provide the same variables as `chat-13B.sh`. The same prompt cache can be reused for new chat sessions. Note that both prompt cache and chat directory are tied to the initial prompt (`PROMPT_TEMPLATE`) and the model file. +#### A lightweight, [OpenAI API](https://github.com/openai/openai-openapi) compatible, HTTP server for serving LLMs. -```bash -# Start a new chat -PROMPT_CACHE_FILE=chat.prompt.bin CHAT_SAVE_DIR=./chat/default ./examples/chat-persistent.sh +-
+ Start a local HTTP server with default configuration on port 8080 -# Resume that chat -PROMPT_CACHE_FILE=chat.prompt.bin CHAT_SAVE_DIR=./chat/default ./examples/chat-persistent.sh + ```bash + llama-server -m model.gguf --port 8080 -# Start a different chat with the same prompt/model -PROMPT_CACHE_FILE=chat.prompt.bin CHAT_SAVE_DIR=./chat/another ./examples/chat-persistent.sh + # Basic web UI can be accessed via browser: http://localhost:8080 + # Chat completion endpoint: http://localhost:8080/v1/chat/completions + ``` -# Different prompt cache for different prompt/model -PROMPT_TEMPLATE=./prompts/chat-with-bob.txt PROMPT_CACHE_FILE=bob.prompt.bin \ - CHAT_SAVE_DIR=./chat/bob ./examples/chat-persistent.sh -``` +
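Once the server above is running, it can be exercised from any HTTP client. The request below is a minimal sketch against the OpenAI-compatible chat endpoint; the prompt is only an illustration, and the `model` field is omitted because the server already has a model loaded.

```bash
# assumes llama-server is listening on localhost:8080 as started above
curl http://localhost:8080/v1/chat/completions \
    -H "Content-Type: application/json" \
    -d '{
          "messages": [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user",   "content": "Write a limerick about ggml."}
          ]
        }'
```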
-### Constrained output with grammars +-
+  <summary>Support multiple users and parallel decoding</summary>
+
+  ```bash
+  # up to 4 concurrent requests, each with 4096 max context
+  llama-server -m model.gguf -c 16384 -np 4
+  ```
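To see the four slots from the command above actually being used, one rough check is to fire several completion requests in parallel. The `/completion` route with `prompt` and `n_predict` is the server's native (non-OpenAI) API; the prompts here are placeholders.

```bash
# send 4 requests concurrently; each one can be served by its own slot
for i in 1 2 3 4; do
    curl -s http://localhost:8080/completion \
        -d "{\"prompt\": \"Question $i: why is the sky blue?\", \"n_predict\": 64}" &
done
wait
```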
-The `grammars/` folder contains a handful of sample grammars. To write your own, check out the [GBNF Guide](./grammars/README.md). +-
+ Enable speculative decoding -For authoring more complex JSON grammars, you can also check out https://grammar.intrinsiclabs.ai/, a browser app that lets you write TypeScript interfaces which it compiles to GBNF grammars that you can save for local use. Note that the app is built and maintained by members of the community, please file any issues or FRs on [its repo](http://github.com/intrinsiclabsai/gbnfgen) and not this one. + ```bash + # the draft.gguf model should be a small variant of the target model.gguf + llama-server -m model.gguf -md draft.gguf + ``` -## Build +
-Please refer to [Build llama.cpp locally](./docs/build.md) +-
+ Serve an embedding model -## Supported backends + ```bash + # use the /embedding endpoint + llama-server -m model.gguf --embedding --pooling cls -ub 8192 + ``` -| Backend | Target devices | -| --- | --- | -| [Metal](./docs/build.md#metal-build) | Apple Silicon | -| [BLAS](./docs/build.md#blas-build) | All | -| [BLIS](./docs/backend/BLIS.md) | All | -| [SYCL](./docs/backend/SYCL.md) | Intel and Nvidia GPU | -| [MUSA](./docs/build.md#musa) | Moore Threads MTT GPU | -| [CUDA](./docs/build.md#cuda) | Nvidia GPU | -| [hipBLAS](./docs/build.md#hipblas) | AMD GPU | -| [Vulkan](./docs/build.md#vulkan) | GPU | -| [CANN](./docs/build.md#cann) | Ascend NPU | +
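A quick way to check the embedding server is to query it directly. The sketch below posts to the `/embedding` route mentioned in the comment above, assuming a JSON body with a `content` field; confirm the exact field names against `examples/server/README.md` for your build.

```bash
# assumes llama-server was started with --embedding as shown above
curl http://localhost:8080/embedding \
    -H "Content-Type: application/json" \
    -d '{"content": "Hello, world!"}'
```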
+ +-
+ Serve a reranking model + + ```bash + # use the /reranking endpoint + llama-server -m model.gguf --reranking + ``` + +
+ +-
+ Constrain all outputs with a grammar + + ```bash + # custom grammar + llama-server -m model.gguf --grammar-file grammar.gbnf -## Tools + # JSON + llama-server -m model.gguf --grammar-file grammars/json.gbnf + ``` -### Prepare and Quantize +
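For reference, `grammar.gbnf` in the first command is whatever GBNF file you supply; it is not shipped with the repo. A minimal hand-written grammar that restricts every reply to a yes/no answer could look like the following (file name and contents are illustrative only):

```bash
# write a tiny custom grammar and serve with it
cat > grammar.gbnf << 'EOF'
root ::= ("yes" | "no")
EOF

llama-server -m model.gguf --grammar-file grammar.gbnf
```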
-> [!NOTE] -> You can use the [GGUF-my-repo](https://huggingface.co/spaces/ggml-org/gguf-my-repo) space on Hugging Face to quantise your model weights without any setup too. It is synced from `llama.cpp` main every 6 hours. -To obtain the official LLaMA 2 weights please see the Obtaining and using the Facebook LLaMA 2 model section. There is also a large selection of pre-quantized `gguf` models available on Hugging Face. +## [`llama-perplexity`](examples/perplexity) -Note: `convert.py` has been moved to `examples/convert_legacy_llama.py` and shouldn't be used for anything other than `Llama/Llama2/Mistral` models and their derivatives. -It does not support LLaMA 3, you can use `convert_hf_to_gguf.py` with LLaMA 3 downloaded from Hugging Face. +#### A tool for measuring the perplexity [^1][^2] (and other quality metrics) of a model over a given text. -To learn more about quantizing model, [read this documentation](./examples/quantize/README.md) +-
+  <summary>Measure the perplexity over a text file</summary>
+
+  ```bash
+  llama-perplexity -m model.gguf -f file.txt
+
+  # [1]15.2701,[2]5.4007,[3]5.3073,[4]6.2965,[5]5.8940,[6]5.6096,[7]5.7942,[8]4.9297, ...
+  # Final estimate: PPL = 5.4007 +/- 0.67339
+  ```
+
+  </details>
+
+- <details>
+  <summary>Measure KL divergence</summary>
+
+  ```bash
+  # TODO
+  ```
+
+  </details>
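The KL-divergence example above is still a TODO upstream. One possible workflow, assuming the `--kl-divergence-base` and `--kl-divergence` options of `llama-perplexity` (confirm against `llama-perplexity --help` for your build), is to record the logits of a reference model first and then compare a quantized model against them:

```bash
# 1) save reference logits from the unquantized model
llama-perplexity -m model-f16.gguf -f file.txt --kl-divergence-base logits.kld

# 2) compare a quantized model against the saved logits
llama-perplexity -m model-q4_0.gguf -f file.txt --kl-divergence-base logits.kld --kl-divergence
```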
+ +[^1]: [examples/perplexity/README.md](examples/perplexity/README.md) +[^2]: [https://huggingface.co/docs/transformers/perplexity](https://huggingface.co/docs/transformers/perplexity) + +## [`llama-bench`](example/bench) + +#### Benchmark the performance of the inference for various parameters. + +-
+ Run default benchmark + + ```bash + llama-bench -m model.gguf + + # Output: + # | model | size | params | backend | threads | test | t/s | + # | ------------------- | ---------: | ---------: | ---------- | ------: | ------------: | -------------------: | + # | qwen2 1.5B Q4_0 | 885.97 MiB | 1.54 B | Metal,BLAS | 16 | pp512 | 5765.41 ± 20.55 | + # | qwen2 1.5B Q4_0 | 885.97 MiB | 1.54 B | Metal,BLAS | 16 | tg128 | 197.71 ± 0.81 | + # + # build: 3e0ba0e60 (4229) + ``` + +
+ + +## [`llama-simple`](examples/simple) + +#### A minimal example for implementing apps with `llama.cpp`. Useful for developers. + +-
+ Basic text completion + + ```bash + llama-simple -m model.gguf + + # Hello my name is Kaitlyn and I am a 16 year old girl. I am a junior in high school and I am currently taking a class called "The Art of + ``` + +
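Related note: the `cmake/llama.pc.in` change further down in this diff makes the installed pkg-config file pull in `-lggml -lggml-base -lllama`, so a standalone program of your own can be built against an installed copy roughly as follows. The install prefix is an assumption; adjust `PKG_CONFIG_PATH` to wherever `llama.pc` ended up.

```bash
# after `cmake --install build` (default prefix /usr/local)
export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:$PKG_CONFIG_PATH
g++ -std=c++17 my_app.cpp $(pkg-config --cflags --libs llama) -o my_app
```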
-To learn more how to measure perplexity using llama.cpp, [read this documentation](./examples/perplexity/README.md) ## Contributing @@ -462,20 +463,19 @@ To learn more how to measure perplexity using llama.cpp, [read this documentatio ## Other documentation -- [main (cli)](./examples/main/README.md) -- [server](./examples/server/README.md) -- [jeopardy](./examples/jeopardy/README.md) -- [GBNF grammars](./grammars/README.md) +- [main (cli)](examples/main/README.md) +- [server](examples/server/README.md) +- [GBNF grammars](grammars/README.md) -**Development documentation** +#### Development documentation -- [How to build](./docs/build.md) -- [Running on Docker](./docs/docker.md) -- [Build on Android](./docs/android.md) -- [Performance troubleshooting](./docs/development/token_generation_performance_tips.md) +- [How to build](docs/build.md) +- [Running on Docker](docs/docker.md) +- [Build on Android](docs/android.md) +- [Performance troubleshooting](docs/development/token_generation_performance_tips.md) - [GGML tips & tricks](https://github.com/ggerganov/llama.cpp/wiki/GGML-Tips-&-Tricks) -**Seminal papers and background on the models** +#### Seminal papers and background on the models If your issue is with model generation quality, then please at least scan the following links and papers to understand the limitations of LLaMA models. This is especially important when choosing an appropriate model size and appreciating both the significant and subtle differences between LLaMA models and ChatGPT: - LLaMA: @@ -486,3 +486,6 @@ If your issue is with model generation quality, then please at least scan the fo - GPT-3.5 / InstructGPT / ChatGPT: - [Aligning language models to follow instructions](https://openai.com/research/instruction-following) - [Training language models to follow instructions with human feedback](https://arxiv.org/abs/2203.02155) + +#### References + diff --git a/Sources/llama/llama.h b/Sources/llama/llama.h new file mode 100644 index 0000000000000..41725880ed8c0 --- /dev/null +++ b/Sources/llama/llama.h @@ -0,0 +1,4 @@ +#pragma once + +#include + diff --git a/Sources/llama/module.modulemap b/Sources/llama/module.modulemap new file mode 100644 index 0000000000000..d010555b1cb65 --- /dev/null +++ b/Sources/llama/module.modulemap @@ -0,0 +1,5 @@ +module llama [system] { + header "llama.h" + link "llama" + export * +} diff --git a/ci/run.sh b/ci/run.sh index 20610e56009be..abf08a4ff6411 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -815,7 +815,10 @@ if [ -z ${GG_BUILD_LOW_PERF} ]; then ln -sfn ${mnt_models} ${SRC}/models-mnt # Create a fresh python3 venv and enter it - python3 -m venv "$MNT/venv" + if ! python3 -m venv "$MNT/venv"; then + echo "Error: Failed to create Python virtual environment at $MNT/venv." 
+ exit 1 + fi source "$MNT/venv/bin/activate" pip install -r ${SRC}/requirements.txt --disable-pip-version-check diff --git a/cmake/llama.pc.in b/cmake/llama.pc.in index 326acbb6108fd..0b2b6bcfabfd1 100644 --- a/cmake/llama.pc.in +++ b/cmake/llama.pc.in @@ -6,5 +6,5 @@ includedir=${prefix}/include Name: llama Description: Port of Facebook's LLaMA model in C/C++ Version: @PROJECT_VERSION@ -Libs: -L${libdir} -lllama +Libs: -L${libdir} -lggml -lggml-base -lllama Cflags: -I${includedir} diff --git a/cmake/x64-windows-llvm.cmake b/cmake/x64-windows-llvm.cmake new file mode 100644 index 0000000000000..0603d738fbef0 --- /dev/null +++ b/cmake/x64-windows-llvm.cmake @@ -0,0 +1,11 @@ +set( CMAKE_SYSTEM_NAME Windows ) +set( CMAKE_SYSTEM_PROCESSOR x86_64 ) + +set( CMAKE_C_COMPILER clang ) +set( CMAKE_CXX_COMPILER clang++ ) + +set( arch_c_flags "-march=native" ) + +set( CMAKE_C_FLAGS_INIT "${arch_c_flags}" ) +set( CMAKE_CXX_FLAGS_INIT "${arch_c_flags}" ) + diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt index 2231748848bf5..89862fe113946 100644 --- a/common/CMakeLists.txt +++ b/common/CMakeLists.txt @@ -88,5 +88,5 @@ if (LLAMA_CURL) endif () target_include_directories(${TARGET} PUBLIC .) -target_compile_features (${TARGET} PUBLIC cxx_std_11) +target_compile_features (${TARGET} PUBLIC cxx_std_17) target_link_libraries (${TARGET} PRIVATE ${LLAMA_COMMON_EXTRA_LIBS} PUBLIC llama Threads::Threads) diff --git a/common/arg.cpp b/common/arg.cpp index 32d9a964c1716..49af31682510d 100644 --- a/common/arg.cpp +++ b/common/arg.cpp @@ -348,6 +348,18 @@ bool common_params_parse(int argc, char ** argv, common_params & params, llama_e return true; } +static std::string list_builtin_chat_templates() { + std::vector supported_tmpl; + int32_t res = llama_chat_builtin_templates(nullptr, 0); + supported_tmpl.resize(res); + res = llama_chat_builtin_templates(supported_tmpl.data(), supported_tmpl.size()); + std::ostringstream msg; + for (auto & tmpl : supported_tmpl) { + msg << tmpl << (&tmpl == &supported_tmpl.back() ? "" : ", "); + } + return msg.str(); +} + common_params_context common_params_parser_init(common_params & params, llama_example ex, void(*print_usage)(int, char **)) { // load dynamic backends ggml_backend_load_all(); @@ -579,7 +591,7 @@ common_params_context common_params_parser_init(common_params & params, llama_ex [](common_params & params) { params.ctx_shift = false; } - ).set_examples({LLAMA_EXAMPLE_MAIN, LLAMA_EXAMPLE_SERVER}).set_env("LLAMA_ARG_NO_CONTEXT_SHIFT")); + ).set_examples({LLAMA_EXAMPLE_MAIN, LLAMA_EXAMPLE_SERVER, LLAMA_EXAMPLE_IMATRIX}).set_env("LLAMA_ARG_NO_CONTEXT_SHIFT")); add_opt(common_arg( {"--chunks"}, "N", string_format("max number of chunks to process (default: %d, -1 = all)", params.n_chunks), @@ -774,7 +786,7 @@ common_params_context common_params_parser_init(common_params & params, llama_ex [](common_params & params) { params.warmup = false; } - ).set_examples({LLAMA_EXAMPLE_MAIN})); + ).set_examples({LLAMA_EXAMPLE_MAIN, LLAMA_EXAMPLE_SERVER})); add_opt(common_arg( {"--spm-infill"}, string_format( @@ -1699,6 +1711,13 @@ common_params_context common_params_parser_init(common_params & params, llama_ex params.public_path = value; } ).set_examples({LLAMA_EXAMPLE_SERVER}).set_env("LLAMA_ARG_STATIC_PATH")); + add_opt(common_arg( + {"--no-webui"}, + string_format("Disable the Web UI (default: %s)", params.webui ? 
"enabled" : "disabled"), + [](common_params & params) { + params.webui = false; + } + ).set_examples({LLAMA_EXAMPLE_SERVER}).set_env("LLAMA_ARG_NO_WEBUI")); add_opt(common_arg( {"--embedding", "--embeddings"}, string_format("restrict to only support embedding use case; use only with dedicated embedding models (default: %s)", params.embedding ? "enabled" : "disabled"), @@ -1814,9 +1833,11 @@ common_params_context common_params_parser_init(common_params & params, llama_ex ).set_examples({LLAMA_EXAMPLE_SERVER})); add_opt(common_arg( {"--chat-template"}, "JINJA_TEMPLATE", - "set custom jinja chat template (default: template taken from model's metadata)\n" - "if suffix/prefix are specified, template will be disabled\n" - "only commonly used templates are accepted:\nhttps://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template", + string_format( + "set custom jinja chat template (default: template taken from model's metadata)\n" + "if suffix/prefix are specified, template will be disabled\n" + "list of built-in templates:\n%s", list_builtin_chat_templates().c_str() + ), [](common_params & params, const std::string & value) { if (!common_chat_verify_template(value)) { throw std::runtime_error(string_format( diff --git a/common/common.cpp b/common/common.cpp index 2b2f0009897f3..6143516d2250f 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -652,7 +652,17 @@ bool fs_validate_filename(const std::string & filename) { std::u32string filename_utf32; try { +#if defined(__clang__) + // disable C++17 deprecation warning for std::codecvt_utf8 +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wdeprecated-declarations" +#endif std::wstring_convert, char32_t> converter; + +#if defined(__clang__) +# pragma clang diagnostic pop +#endif + filename_utf32 = converter.from_bytes(filename); // If the reverse conversion mismatches, it means overlong UTF-8 sequences were used, diff --git a/common/common.h b/common/common.h index 9b1508a15fb43..95d20401d2a9a 100644 --- a/common/common.h +++ b/common/common.h @@ -133,6 +133,7 @@ struct common_params_sampling { bool penalize_nl = false; // consider newlines as a repeatable token bool ignore_eos = false; bool no_perf = false; // disable performance metrics + bool timing_per_token = false; std::vector dry_sequence_breakers = {"\n", ":", "\"", "*"}; // default sequence breakers for DRY @@ -214,7 +215,7 @@ struct common_params { struct common_params_speculative speculative; std::string model = ""; // model path // NOLINT - std::string model_alias = "unknown"; // model alias // NOLINT + std::string model_alias = ""; // model alias // NOLINT std::string model_url = ""; // model url to download // NOLINT std::string hf_token = ""; // HF token // NOLINT std::string hf_repo = ""; // HF repo // NOLINT diff --git a/common/speculative.cpp b/common/speculative.cpp index e559675c436ef..3fcbb0020b6af 100644 --- a/common/speculative.cpp +++ b/common/speculative.cpp @@ -62,6 +62,10 @@ struct common_speculative * common_speculative_init( } void common_speculative_free(struct common_speculative * spec) { + if (spec == nullptr) { + return; + } + common_sampler_free(spec->smpl); llama_batch_free(spec->batch); diff --git a/convert_hf_to_gguf.py b/convert_hf_to_gguf.py index 5ce828ddc0194..206ab502ecadd 100755 --- a/convert_hf_to_gguf.py +++ b/convert_hf_to_gguf.py @@ -658,6 +658,12 @@ def get_vocab_base_pre(self, tokenizer) -> str: if chkhsh == "60824e3c0d9401f89943cbb2fff727f0e2d4c545ba4df2d6e4f09a6db0f5b450": # ref: 
https://huggingface.co/facebook/chameleon-7b res = "chameleon" + if chkhsh == "1431a23e583c97432bc230bff598d103ddb5a1f89960c8f1d1051aaa944d0b35": + # ref: https://huggingface.co/sapienzanlp/Minerva-7B-base-v1.0 + res = "minerva-7b" + if chkhsh == "8b5a93ed704057481f240da0be7e7dca721d7f8f4755263b6807227a2cbeae65": + # ref: https://huggingface.co/sentence-transformers/stsb-roberta-base + res = "roberta-bpe" if res is None: logger.warning("\n") @@ -1831,29 +1837,40 @@ class MiniCPMModel(Model): model_arch = gguf.MODEL_ARCH.MINICPM def set_gguf_parameters(self): - block_count = self.hparams["num_hidden_layers"] - self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) - self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) - self.gguf_writer.add_block_count(block_count) - self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) - self.gguf_writer.add_rope_dimension_count(self.hparams["hidden_size"] // self.hparams["num_attention_heads"]) - self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) - self.gguf_writer.add_head_count_kv(self.hparams["num_key_value_heads"]) - self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) - self.gguf_writer.add_file_type(self.ftype) + super().set_gguf_parameters() + embedding_scale = float(self.hparams["scale_emb"]) + self.gguf_writer.add_embedding_scale(embedding_scale) + logger.info(f"gguf: (minicpm) embedding_scale = {embedding_scale}") + residual_scale = self.hparams["scale_depth"] / self.hparams["num_hidden_layers"] ** 0.5 + self.gguf_writer.add_residual_scale(residual_scale) + logger.info(f"gguf: (minicpm) residual_scale = {residual_scale}") + logit_scale = self.hparams["hidden_size"] / self.hparams["dim_model_base"] + self.gguf_writer.add_logit_scale(logit_scale) + logger.info(f"gguf: (minicpm) logit_scale = {logit_scale}") + if self.hparams.get("rope_scaling") is not None: + if self.hparams["rope_scaling"].get("type") == "longrope": + self.gguf_writer.add_rope_scaling_type(gguf.RopeScalingType.LONGROPE) + logger.info(f"gguf: (minicpm) rope_scaling_type = {gguf.RopeScalingType.LONGROPE}") - def set_vocab(self): - self._set_vocab_llama_hf() + def generate_extra_tensors(self) -> Iterable[tuple[str, Tensor]]: + rope_dims = self.hparams["hidden_size"] // self.hparams["num_attention_heads"] - def _reverse_hf_permute(self, weights: Tensor, n_head: int, n_kv_head: int | None = None) -> Tensor: - if n_kv_head is not None and n_head != n_kv_head: - n_head //= n_kv_head + rope_scaling = self.find_hparam(['rope_scaling'], True) + if rope_scaling is not None: + long_factors = rope_scaling.get('long_factor', None) + short_factors = rope_scaling.get('short_factor', None) - return ( - weights.reshape(n_head, 2, weights.shape[0] // n_head // 2, *weights.shape[1:]) - .swapaxes(1, 2) - .reshape(weights.shape) - ) + if long_factors is None or short_factors is None: + raise KeyError('Missing the required key rope_scaling.long_factor or rope_scaling_short_factor') + + if len(long_factors) != len(short_factors) or len(long_factors) != rope_dims / 2: + raise ValueError(f'The length of rope long and short factors must be {rope_dims / 2}') + + yield (self.format_tensor_name(gguf.MODEL_TENSOR.ROPE_FACTORS_LONG), torch.tensor(long_factors, dtype=torch.float32)) + yield (self.format_tensor_name(gguf.MODEL_TENSOR.ROPE_FACTORS_SHORT), torch.tensor(short_factors, dtype=torch.float32)) + + def set_vocab(self): + self._set_vocab_sentencepiece() def modify_tensors(self, data_torch: Tensor, name: str, bid: 
int | None) -> Iterable[tuple[str, Tensor]]: del bid # unused @@ -1863,9 +1880,9 @@ def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iter # HF models permute some of the tensors, so we need to undo that if name.endswith(("q_proj.weight")): - data_torch = self._reverse_hf_permute(data_torch, n_head, n_head) + data_torch = LlamaModel.permute(data_torch, n_head, n_head) if name.endswith(("k_proj.weight")): - data_torch = self._reverse_hf_permute(data_torch, n_head, n_kv_head) + data_torch = LlamaModel.permute(data_torch, n_head, n_kv_head) return [(self.map_tensor_name(name), data_torch)] @@ -1975,6 +1992,14 @@ def set_vocab(self): except FileNotFoundError: self._set_vocab_gpt2() + def set_gguf_parameters(self): + super().set_gguf_parameters() + if self.hparams.get("rope_scaling") is not None and "factor" in self.hparams["rope_scaling"]: + if self.hparams["rope_scaling"].get("type") == "yarn": + self.gguf_writer.add_rope_scaling_type(gguf.RopeScalingType.YARN) + self.gguf_writer.add_rope_scaling_factor(self.hparams["rope_scaling"]["factor"]) + self.gguf_writer.add_rope_scaling_orig_ctx_len(self.hparams["rope_scaling"]["original_max_position_embeddings"]) + @Model.register("Qwen2VLForConditionalGeneration") class Qwen2VLModel(Model): @@ -2542,7 +2567,7 @@ def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iter return [(self.map_tensor_name(name), data_torch)] -@Model.register("BertModel", "CamembertModel") +@Model.register("BertModel", "CamembertModel", "RobertaModel") class BertModel(Model): model_arch = gguf.MODEL_ARCH.BERT @@ -2583,7 +2608,8 @@ def set_vocab(self): # we need this to validate the size of the token_type embeddings # though currently we are passing all zeros to the token_type embeddings - self.gguf_writer.add_token_type_count(2) # "Sequence A" or "Sequence B" + # "Sequence A" or "Sequence B" + self.gguf_writer.add_token_type_count(self.hparams.get("type_vocab_size", 1)) # convert to phantom space vocab def phantom(tok): diff --git a/convert_hf_to_gguf_update.py b/convert_hf_to_gguf_update.py index 28cd02e5a7f66..aa10e5db796a0 100755 --- a/convert_hf_to_gguf_update.py +++ b/convert_hf_to_gguf_update.py @@ -17,7 +17,7 @@ # # python3 convert_hf_to_gguf_update.py # -# - Copy-paste the generated get_vocab_base_pre() function into convert_hf_to_gguf.py +# - The convert_hf_to_gguf.py script will have had its get_vocab_base_pre() function updated # - Update llama.cpp with the new pre-tokenizer if necessary # # TODO: generate tokenizer tests for llama.cpp @@ -102,6 +102,8 @@ class TOKENIZER_TYPE(IntEnum): {"name": "exaone", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct", }, {"name": "phi-2", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/microsoft/phi-2", }, {"name": "chameleon", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/facebook/chameleon-7b", }, + {"name": "minerva-7b", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/sapienzanlp/Minerva-7B-base-v1.0", }, + {"name": "roberta-bpe", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/sentence-transformers/stsb-roberta-base"}, ] diff --git a/docs/backend/BLIS.md b/docs/backend/BLIS.md index 35d06bd0f303d..9045485771ea6 100644 --- a/docs/backend/BLIS.md +++ b/docs/backend/BLIS.md @@ -27,13 +27,6 @@ We recommend using openmp since it's easier to modify the cores being used. 
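Since this PR removes the `make GGML_BLIS=1` path shown below, a CMake-based BLIS build is the remaining route. A sketch is given here; the `GGML_BLAS_VENDOR=FLAME` value is assumed from CMake's FindBLAS vendor naming for BLIS and may need adjusting for your installation.

```bash
# Configure llama.cpp against BLIS via the generic BLAS backend
cmake -B build -DGGML_BLAS=ON -DGGML_BLAS_VENDOR=FLAME
cmake --build build --config Release -j $(nproc)
```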
### llama.cpp compilation -Makefile: - -```bash -make GGML_BLIS=1 -j -# make GGML_BLIS=1 llama-benchmark-matmult -``` - CMake: ```bash diff --git a/docs/build.md b/docs/build.md index 72b81043796dc..84019b2046a89 100644 --- a/docs/build.md +++ b/docs/build.md @@ -7,124 +7,75 @@ git clone https://github.com/ggerganov/llama.cpp cd llama.cpp ``` -In order to build llama.cpp you have four different options. +The following sections describe how to build with different backends and options. -- Using `make`: - - On Linux or MacOS: +## CPU Build - ```bash - make - ``` - - - On Windows (x86/x64 only, arm64 requires cmake): +Build llama.cpp using `CMake`: - 1. Download the latest fortran version of [w64devkit](https://github.com/skeeto/w64devkit/releases). - 2. Extract `w64devkit` on your pc. - 3. Run `w64devkit.exe`. - 4. Use the `cd` command to reach the `llama.cpp` folder. - 5. From here you can run: - ```bash - make - ``` +```bash +cmake -B build +cmake --build build --config Release +``` - - Notes: - - For `Q4_0_4_4` quantization type build, add the `GGML_NO_LLAMAFILE=1` flag. For example, use `make GGML_NO_LLAMAFILE=1`. - - For faster compilation, add the `-j` argument to run multiple jobs in parallel. For example, `make -j 8` will run 8 jobs in parallel. - - For faster repeated compilation, install [ccache](https://ccache.dev/). - - For debug builds, run `make LLAMA_DEBUG=1` +**Notes**: -- Using `CMake`: +- For faster compilation, add the `-j` argument to run multiple jobs in parallel, or use a generator that does this automatically such as Ninja. For example, `cmake --build build --config Release -j 8` will run 8 jobs in parallel. +- For faster repeated compilation, install [ccache](https://ccache.dev/) +- For debug builds, there are two cases: - ```bash - cmake -B build - cmake --build build --config Release - ``` + 1. Single-config generators (e.g. default = `Unix Makefiles`; note that they just ignore the `--config` flag): - **Notes**: + ```bash + cmake -B build -DCMAKE_BUILD_TYPE=Debug + cmake --build build + ``` - - For `Q4_0_4_4` quantization type build, add the `-DGGML_LLAMAFILE=OFF` cmake option. For example, use `cmake -B build -DGGML_LLAMAFILE=OFF`. - - For faster compilation, add the `-j` argument to run multiple jobs in parallel. For example, `cmake --build build --config Release -j 8` will run 8 jobs in parallel. - - For faster repeated compilation, install [ccache](https://ccache.dev/). - - For debug builds, there are two cases: + 2. Multi-config generators (`-G` param set to Visual Studio, XCode...): - 1. Single-config generators (e.g. default = `Unix Makefiles`; note that they just ignore the `--config` flag): + ```bash + cmake -B build -G "Xcode" + cmake --build build --config Debug + ``` - ```bash - cmake -B build -DCMAKE_BUILD_TYPE=Debug - cmake --build build - ``` + For more details and a list of supported generators, see the [CMake documentation](https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html). +- For static builds, add `-DBUILD_SHARED_LIBS=OFF`: + ``` + cmake -B build -DBUILD_SHARED_LIBS=OFF + cmake --build build --config Release + ``` - 2. Multi-config generators (`-G` param set to Visual Studio, XCode...): +- Building for Windows (x86, x64 and arm64) with MSVC or clang as compilers: + - Install Visual Studio 2022, e.g. via the [Community Edition](https://visualstudio.microsoft.com/de/vs/community/). 
In the installer, select at least the following options (this also automatically installs the required additional tools like CMake,...): + - Tab Workload: Desktop-development with C++ + - Tab Components (select quickly via search): C++-_CMake_ Tools for Windows, _Git_ for Windows, C++-_Clang_ Compiler for Windows, MS-Build Support for LLVM-Toolset (clang) + - Please remember to always use a Developer Command Prompt / PowerShell for VS2022 for git, build, test + - For Windows on ARM (arm64, WoA) build with: + ```bash + cmake --preset arm64-windows-llvm-release -D GGML_OPENMP=OFF + cmake --build build-arm64-windows-llvm-release + ``` + Building for arm64 can also be done with the MSVC compiler with the build-arm64-windows-MSVC preset, or the standard CMake build instructions. However, note that the MSVC compiler does not support inline ARM assembly code, used e.g. for the accelerated Q4_0_N_M CPU kernels. + For building with ninja generator and clang compiler as default: + -set path:set LIB=C:\Program Files (x86)\Windows Kits\10\Lib\10.0.22621.0\um\x64;C:\Program Files\Microsoft Visual Studio\2022\Community\VC\Tools\MSVC\14.41.34120\lib\x64\uwp;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.22621.0\ucrt\x64 ```bash - cmake -B build -G "Xcode" - cmake --build build --config Debug + cmake --preset x64-windows-llvm-release + cmake --build build-x64-windows-llvm-release ``` - - Building for Windows (x86, x64 and arm64) with MSVC or clang as compilers: - - Install Visual Studio 2022, e.g. via the [Community Edition](https://visualstudio.microsoft.com/de/vs/community/). In the installer, select at least the following options (this also automatically installs the required additional tools like CMake,...): - - Tab Workload: Desktop-development with C++ - - Tab Components (select quickly via search): C++-_CMake_ Tools for Windows, _Git_ for Windows, C++-_Clang_ Compiler for Windows, MS-Build Support for LLVM-Toolset (clang) - - Please remember to always use a Developer Command Prompt / PowerShell for VS2022 for git, build, test - - For Windows on ARM (arm64, WoA) build with: - ```bash - cmake --preset arm64-windows-llvm-release -D GGML_OPENMP=OFF - cmake --build build-arm64-windows-llvm-release - ``` - Note: Building for arm64 could also be done just with MSVC (with the build-arm64-windows-MSVC preset, or the standard CMake build instructions). But MSVC does not support inline ARM assembly-code, used e.g. for the accelerated Q4_0_4_8 CPU kernels. - -- Using `gmake` (FreeBSD): - - 1. Install and activate [DRM in FreeBSD](https://wiki.freebsd.org/Graphics) - 2. Add your user to **video** group - 3. Install compilation dependencies. - - ```bash - sudo pkg install gmake automake autoconf pkgconf llvm15 openblas - - gmake CC=/usr/local/bin/clang15 CXX=/usr/local/bin/clang++15 -j4 - ``` - -## Metal Build - -On MacOS, Metal is enabled by default. Using Metal makes the computation run on the GPU. -To disable the Metal build at compile time use the `GGML_NO_METAL=1` flag or the `GGML_METAL=OFF` cmake option. - -When built with Metal support, you can explicitly disable GPU inference with the `--n-gpu-layers|-ngl 0` command-line -argument. ## BLAS Build -Building the program with BLAS support may lead to some performance improvements in prompt processing using batch sizes higher than 32 (the default is 512). Support with CPU-only BLAS implementations doesn't affect the normal generation performance. We may see generation performance improvements with GPU-involved BLAS implementations, e.g. cuBLAS, hipBLAS. 
There are currently several different BLAS implementations available for build and use: +Building the program with BLAS support may lead to some performance improvements in prompt processing using batch sizes higher than 32 (the default is 512). Using BLAS doesn't affect the generation performance. There are currently several different BLAS implementations available for build and use: -### Accelerate Framework: +### Accelerate Framework This is only available on Mac PCs and it's enabled by default. You can just build using the normal instructions. -### OpenBLAS: +### OpenBLAS This provides BLAS acceleration using only the CPU. Make sure to have OpenBLAS installed on your machine. -- Using `make`: - - On Linux: - ```bash - make GGML_OPENBLAS=1 - ``` - - - On Windows: - - 1. Download the latest fortran version of [w64devkit](https://github.com/skeeto/w64devkit/releases). - 2. Download the latest version of [OpenBLAS for Windows](https://github.com/xianyi/OpenBLAS/releases). - 3. Extract `w64devkit` on your pc. - 4. From the OpenBLAS zip that you just downloaded copy `libopenblas.a`, located inside the `lib` folder, inside `w64devkit\x86_64-w64-mingw32\lib`. - 5. From the same OpenBLAS zip copy the content of the `include` folder inside `w64devkit\x86_64-w64-mingw32\include`. - 6. Run `w64devkit.exe`. - 7. Use the `cd` command to reach the `llama.cpp` folder. - 8. From here you can run: - - ```bash - make GGML_OPENBLAS=1 - ``` - - Using `CMake` on Linux: ```bash @@ -136,14 +87,6 @@ This provides BLAS acceleration using only the CPU. Make sure to have OpenBLAS i Check [BLIS.md](./backend/BLIS.md) for more information. -### SYCL - -SYCL is a higher-level programming model to improve programming productivity on various hardware accelerators. - -llama.cpp based on SYCL is used to **support Intel GPU** (Data Center Max series, Flex series, Arc series, Built-in GPU and iGPU). - -For detailed info, please refer to [llama.cpp for SYCL](./backend/SYCL.md). - ### Intel oneMKL Building through oneAPI compilers will make avx_vnni instruction set available for intel processors that do not support avx512 and avx512_vnni. Please note that this build config **does not support Intel GPU**. For Intel GPU support, please refer to [llama.cpp for SYCL](./backend/SYCL.md). @@ -161,16 +104,29 @@ Building through oneAPI compilers will make avx_vnni instruction set available f Check [Optimizing and Running LLaMA2 on Intel® CPU](https://www.intel.com/content/www/us/en/content-details/791610/optimizing-and-running-llama2-on-intel-cpu.html) for more information. -### CUDA +### Other BLAS libraries -This provides GPU acceleration using the CUDA cores of your Nvidia GPU. Make sure to have the CUDA toolkit installed. You can download it from your Linux distro's package manager (e.g. `apt install nvidia-cuda-toolkit`) or from here: [CUDA Toolkit](https://developer.nvidia.com/cuda-downloads). +Any other BLAS library can be used by setting the `GGML_BLAS_VENDOR` option. See the [CMake documentation](https://cmake.org/cmake/help/latest/module/FindBLAS.html#blas-lapack-vendors) for a list of supported vendors. -For Jetson user, if you have Jetson Orin, you can try this: [Offical Support](https://www.jetson-ai-lab.com/tutorial_text-generation.html). If you are using an old model(nano/TX2), need some additional operations before compiling. +## Metal Build + +On MacOS, Metal is enabled by default. Using Metal makes the computation run on the GPU. +To disable the Metal build at compile time use the `-DGGML_METAL=OFF` cmake option. 
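For instance, a macOS CPU-only configuration that opts out of Metal entirely, using only the options named in this section, could look like:

```bash
# Build on macOS without the Metal backend; computation stays on the CPU/BLAS
cmake -B build -DGGML_METAL=OFF
cmake --build build --config Release -j
```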
+ +When built with Metal support, you can explicitly disable GPU inference with the `--n-gpu-layers 0` command-line argument. + +## SYCL + +SYCL is a higher-level programming model to improve programming productivity on various hardware accelerators. + +llama.cpp based on SYCL is used to **support Intel GPU** (Data Center Max series, Flex series, Arc series, Built-in GPU and iGPU). + +For detailed info, please refer to [llama.cpp for SYCL](./backend/SYCL.md). + +## CUDA + +This provides GPU acceleration using an NVIDIA GPU. Make sure to have the CUDA toolkit installed. You can download it from your Linux distro's package manager (e.g. `apt install nvidia-cuda-toolkit`) or from the [NVIDIA developer site](https://developer.nvidia.com/cuda-downloads). -- Using `make`: - ```bash - make GGML_CUDA=1 - ``` - Using `CMake`: ```bash @@ -192,14 +148,10 @@ The following compilation options are also available to tweak performance: | GGML_CUDA_PEER_MAX_BATCH_SIZE | Positive integer | 128 | Maximum batch size for which to enable peer access between multiple GPUs. Peer access requires either Linux or NVLink. When using NVLink enabling peer access for larger batch sizes is potentially beneficial. | | GGML_CUDA_FA_ALL_QUANTS | Boolean | false | Compile support for all KV cache quantization type (combinations) for the FlashAttention CUDA kernels. More fine-grained control over KV cache size but compilation takes much longer. | -### MUSA +## MUSA This provides GPU acceleration using the MUSA cores of your Moore Threads MTT GPU. Make sure to have the MUSA SDK installed. You can download it from here: [MUSA SDK](https://developer.mthreads.com/sdk/download/musa). -- Using `make`: - ```bash - make GGML_MUSA=1 - ``` - Using `CMake`: ```bash @@ -213,16 +165,12 @@ The environment variable `GGML_CUDA_ENABLE_UNIFIED_MEMORY=1` can be used to enab Most of the compilation options available for CUDA should also be available for MUSA, though they haven't been thoroughly tested yet. -### hipBLAS +## HIP -This provides BLAS acceleration on HIP-supported AMD GPUs. +This provides GPU acceleration on HIP-supported AMD GPUs. Make sure to have ROCm installed. You can download it from your Linux distro's package manager or from here: [ROCm Quick Start (Linux)](https://rocm.docs.amd.com/projects/install-on-linux/en/latest/tutorial/quick-start.html#rocm-install-quick). -- Using `make`: - ```bash - make GGML_HIP=1 - ``` - Using `CMake` for Linux (assuming a gfx1030-compatible AMD GPU): ```bash HIPCXX="$(hipconfig -l)/clang" HIP_PATH="$(hipconfig -R)" \ @@ -247,11 +195,6 @@ You can download it from your Linux distro's package manager or from here: [ROCm && cmake --build build -- -j 16 ``` -- Using `make` (example for target gfx1030, build with 16 CPU threads): - ```bash - make -j16 GGML_HIP=1 GGML_HIP_UMA=1 AMDGPU_TARGETS=gfx1030 - ``` - - Using `CMake` for Windows (using x64 Native Tools Command Prompt for VS, and assuming a gfx1100-compatible AMD GPU): ```bash set PATH=%HIP_PATH%\bin;%PATH% @@ -265,11 +208,11 @@ You can download it from your Linux distro's package manager or from here: [ROCm The environment variable [`HIP_VISIBLE_DEVICES`](https://rocm.docs.amd.com/en/latest/understand/gpu_isolation.html#hip-visible-devices) can be used to specify which GPU(s) will be used. If your GPU is not officially supported you can use the environment variable [`HSA_OVERRIDE_GFX_VERSION`] set to a similar GPU, for example 10.3.0 on RDNA2 (e.g. gfx1030, gfx1031, or gfx1035) or 11.0.0 on RDNA3. 
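As a concrete sketch of the two environment variables mentioned above (the GFX override value and the model path are placeholders; RDNA2 users would typically use `10.3.0` as stated):

```bash
# Route work to GPU 0 only and report the card as gfx1030 so ROCm kernels load
HSA_OVERRIDE_GFX_VERSION=10.3.0 HIP_VISIBLE_DEVICES=0 \
  ./build/bin/llama-cli -m model.gguf -ngl 99 -p "Building a website can be done in 10 steps:"
```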
-### Vulkan +## Vulkan **Windows** -#### w64devkit +### w64devkit Download and extract [`w64devkit`](https://github.com/skeeto/w64devkit/releases). @@ -289,9 +232,14 @@ Libs: -lvulkan-1 EOF ``` -Switch into the `llama.cpp` directory and run `make GGML_VULKAN=1`. -#### Git Bash MINGW64 +Switch into the `llama.cpp` directory and build using CMake. +```sh +cmake -B build -DGGML_VULKAN=ON +cmake --build build --config Release +``` + +### Git Bash MINGW64 Download and install [`Git-SCM`](https://git-scm.com/downloads/win) with the default settings @@ -310,20 +258,21 @@ cmake --build build --config Release Now you can load the model in conversation mode using `Vulkan` -``` -build/bin/release/llama-cli -m "[PATH TO MODEL]" -ngl 100 -c 16384 -t 10 -n -2 -cnv +```sh +build/bin/Release/llama-cli -m "[PATH TO MODEL]" -ngl 100 -c 16384 -t 10 -n -2 -cnv ``` -#### MSYS2 +### MSYS2 Install [MSYS2](https://www.msys2.org/) and then run the following commands in a UCRT terminal to install dependencies. - ```sh - pacman -S git \ - mingw-w64-ucrt-x86_64-gcc \ - mingw-w64-ucrt-x86_64-cmake \ - mingw-w64-ucrt-x86_64-vulkan-devel \ - mingw-w64-ucrt-x86_64-shaderc - ``` -Switch into `llama.cpp` directory and build using CMake. +```sh +pacman -S git \ + mingw-w64-ucrt-x86_64-gcc \ + mingw-w64-ucrt-x86_64-cmake \ + mingw-w64-ucrt-x86_64-vulkan-devel \ + mingw-w64-ucrt-x86_64-shaderc +``` + +Switch into the `llama.cpp` directory and build using CMake. ```sh cmake -B build -DGGML_VULKAN=ON cmake --build build --config Release @@ -372,7 +321,7 @@ cmake --build build --config Release # ggml_vulkan: Using Intel(R) Graphics (ADL GT2) | uma: 1 | fp16: 1 | warp size: 32 ``` -### CANN +## CANN This provides NPU acceleration using the AI cores of your Ascend NPU. And [CANN](https://www.hiascend.com/en/software/cann) is a hierarchical APIs to help you to quickly build AI applications and service based on Ascend NPU. For more information about Ascend NPU in [Ascend Community](https://www.hiascend.com/en/). @@ -387,22 +336,26 @@ cmake --build build --config release You can test with: -`./build/bin/llama-cli -m PATH_TO_MODEL -p "Building a website can be done in 10 steps:" -ngl 32` +```bash +./build/bin/llama-cli -m PATH_TO_MODEL -p "Building a website can be done in 10 steps:" -ngl 32 +``` -If the fllowing info is output on screen, you are using `llama.cpp by CANN backend`: +If the following info is output on screen, you are using `llama.cpp` with the CANN backend: ```bash -llm_load_tensors: CANN buffer size = 13313.00 MiB +llm_load_tensors: CANN model buffer size = 13313.00 MiB llama_new_context_with_model: CANN compute buffer size = 1260.81 MiB ``` For detailed info, such as model/device supports, CANN install, please refer to [llama.cpp for CANN](./backend/CANN.md). -### Android +## Android To read documentation for how to build on Android, [click here](./android.md) -### Arm CPU optimized mulmat kernels +## Notes about GPU-accelerated backends + +The GPU may still be used to accelerate some parts of the computation even when using the `-ngl 0` option. You can fully disable GPU acceleration by using `--device none`. -Llama.cpp includes a set of optimized mulmat kernels for the Arm architecture, leveraging Arm® Neon™, int8mm and SVE instructions. These kernels are enabled at build time through the appropriate compiler cpu-type flags, such as `-DCMAKE_C_FLAGS=-march=armv8.2a+i8mm+sve`. 
Note that these optimized kernels require the model to be quantized into one of the formats: `Q4_0_4_4` (Arm Neon), `Q4_0_4_8` (int8mm) or `Q4_0_8_8` (SVE). The SVE mulmat kernel specifically requires a vector width of 256 bits. When running on devices with a different vector width, it is recommended to use the `Q4_0_4_8` (int8mm) or `Q4_0_4_4` (Arm Neon) formats for better performance. Refer to [examples/quantize/README.md](../examples/quantize/README.md) for more information on the quantization formats. +In most cases, it is possible to build and use multiple backends at the same time. For example, you can build llama.cpp with both CUDA and Vulkan support by using the `-DGGML_CUDA=ON -DGGML_VULKAN=ON` options with CMake. At runtime, you can specify which backend devices to use with the `--device` option. To see a list of available devices, use the `--list-devices` option. -To support `Q4_0_4_4`, you must build with `GGML_NO_LLAMAFILE=1` (`make`) or `-DGGML_LLAMAFILE=OFF` (`cmake`). +Backends can be built as dynamic libraries that can be loaded dynamically at runtime. This allows you to use the same llama.cpp binary on different machines with different GPUs. To enable this feature, use the `GGML_BACKEND_DL` option when building. diff --git a/examples/base-translate.sh b/examples/base-translate.sh deleted file mode 100755 index 103a52f55e6db..0000000000000 --- a/examples/base-translate.sh +++ /dev/null @@ -1,61 +0,0 @@ -#!/bin/bash -# -# Few-shot translation example. -# Requires a base model (i.e. no fine-tuned or instruct models). -# -# Usage: -# -# cd llama.cpp -# make -j -# -# ./examples/base-translate.sh "" [extra-main-args] -# - -if [ $# -lt 2 ]; then - echo "Usage: ./base-translate.sh \"\" [extra-main-args]" - exit 1 -fi - -eargs="" -if [ $# -gt 2 ]; then - eargs="${@:3}" -fi - -ftmp="__llama.cpp_example_tmp__.txt" -trap "rm -f $ftmp" EXIT - -echo "Translate from English to French: - -=== - -sea otter, peppermint, plush girafe: - -sea otter => loutre de mer -peppermint => menthe poivrée -plush girafe => girafe peluche - -=== - -violin - -violin => violon - -=== - -phone, computer, mouse, keyboard: - -phone => téléphone -computer => ordinateur -mouse => souris -keyboard => clavier - -=== -" > $ftmp - -echo "$2 -" >> $ftmp - -model=$1 - -# generate the most likely continuation until the string "===" is found -./llama-cli -m $model -f $ftmp -n 64 --temp 0 --repeat-penalty 1.0 --no-penalize-nl -r "===" $eargs diff --git a/examples/batched-bench/CMakeLists.txt b/examples/batched-bench/CMakeLists.txt index 959acaeeebc38..68ad707f32c98 100644 --- a/examples/batched-bench/CMakeLists.txt +++ b/examples/batched-bench/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-batched-bench) add_executable(${TARGET} batched-bench.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/batched/CMakeLists.txt b/examples/batched/CMakeLists.txt index 77e33343b6673..0d439f49842b5 100644 --- a/examples/batched/CMakeLists.txt +++ b/examples/batched/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-batched) add_executable(${TARGET} batched.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git 
a/examples/convert-llama2c-to-ggml/CMakeLists.txt b/examples/convert-llama2c-to-ggml/CMakeLists.txt index a6790e617217e..44e5f722a9739 100644 --- a/examples/convert-llama2c-to-ggml/CMakeLists.txt +++ b/examples/convert-llama2c-to-ggml/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-convert-llama2c-to-ggml) add_executable(${TARGET} convert-llama2c-to-ggml.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/convert-llama2c-to-ggml/README.md b/examples/convert-llama2c-to-ggml/README.md index 5774ac83c32c8..46a42da691830 100644 --- a/examples/convert-llama2c-to-ggml/README.md +++ b/examples/convert-llama2c-to-ggml/README.md @@ -2,11 +2,8 @@ This example reads weights from project [llama2.c](https://github.com/karpathy/llama2.c) and saves them in ggml compatible format. The vocab that is available in `models/ggml-vocab.bin` is used by default. -To convert the model first download the models from the [llama2.c](https://github.com/karpathy/llama2.c) repository: +To convert the model first download the models from the [llama2.c](https://github.com/karpathy/llama2.c) repository. -`$ make -j` - -After successful compilation, following usage options are available: ``` usage: ./llama-convert-llama2c-to-ggml [options] diff --git a/examples/cvector-generator/CMakeLists.txt b/examples/cvector-generator/CMakeLists.txt index 0a559d60c2a6d..49ad9561c82ea 100644 --- a/examples/cvector-generator/CMakeLists.txt +++ b/examples/cvector-generator/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-cvector-generator) add_executable(${TARGET} cvector-generator.cpp pca.hpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/deprecation-warning/deprecation-warning.cpp b/examples/deprecation-warning/deprecation-warning.cpp index 11b35d2c22500..c2958ea12d92d 100644 --- a/examples/deprecation-warning/deprecation-warning.cpp +++ b/examples/deprecation-warning/deprecation-warning.cpp @@ -12,7 +12,7 @@ int main(int argc, char** argv) { } // Get only the program name from the full path - auto pos = filename.find_last_of('/'); + auto pos = filename.find_last_of("/\\"); if (pos != std::string::npos) { filename = filename.substr(pos+1); } diff --git a/examples/embedding/CMakeLists.txt b/examples/embedding/CMakeLists.txt index 8256e789ad33a..809040307d2c9 100644 --- a/examples/embedding/CMakeLists.txt +++ b/examples/embedding/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-embedding) add_executable(${TARGET} embedding.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/eval-callback/CMakeLists.txt b/examples/eval-callback/CMakeLists.txt index 5d1048aad74b6..95915ed91c099 100644 --- a/examples/eval-callback/CMakeLists.txt +++ b/examples/eval-callback/CMakeLists.txt @@ -2,7 +2,7 @@ set(TARGET llama-eval-callback) add_executable(${TARGET} eval-callback.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) 
+target_compile_features(${TARGET} PRIVATE cxx_std_17) set(TEST_TARGET test-eval-callback) add_test(NAME ${TEST_TARGET} diff --git a/examples/export-lora/CMakeLists.txt b/examples/export-lora/CMakeLists.txt index 1cef6e71694e2..310455787a7ef 100644 --- a/examples/export-lora/CMakeLists.txt +++ b/examples/export-lora/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-export-lora) add_executable(${TARGET} export-lora.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/gbnf-validator/CMakeLists.txt b/examples/gbnf-validator/CMakeLists.txt index 4edd6ec7394c5..d2cb524c0a7f7 100644 --- a/examples/gbnf-validator/CMakeLists.txt +++ b/examples/gbnf-validator/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-gbnf-validator) add_executable(${TARGET} gbnf-validator.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/gen-docs/CMakeLists.txt b/examples/gen-docs/CMakeLists.txt index c94cda7764341..25de0af35df60 100644 --- a/examples/gen-docs/CMakeLists.txt +++ b/examples/gen-docs/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-gen-docs) add_executable(${TARGET} gen-docs.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/gguf-hash/CMakeLists.txt b/examples/gguf-hash/CMakeLists.txt index 7a494ce327ba5..15c5c68c6f402 100644 --- a/examples/gguf-hash/CMakeLists.txt +++ b/examples/gguf-hash/CMakeLists.txt @@ -19,4 +19,4 @@ add_library(sha256 OBJECT deps/sha256/sha256.c deps/sha256/sha256.h) target_link_libraries(${TARGET} PRIVATE sha256) target_link_libraries(${TARGET} PRIVATE ggml ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/gguf-split/CMakeLists.txt b/examples/gguf-split/CMakeLists.txt index f63887da7dfca..c407e2f0af44a 100644 --- a/examples/gguf-split/CMakeLists.txt +++ b/examples/gguf-split/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-gguf-split) add_executable(${TARGET} gguf-split.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/gguf/CMakeLists.txt b/examples/gguf/CMakeLists.txt index a9569b411956b..fb04eb83f34ce 100644 --- a/examples/gguf/CMakeLists.txt +++ b/examples/gguf/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-gguf) add_executable(${TARGET} gguf.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE ggml ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/gritlm/CMakeLists.txt b/examples/gritlm/CMakeLists.txt index 86dfddca346fe..fa1b4dc70c2f6 100644 --- a/examples/gritlm/CMakeLists.txt +++ b/examples/gritlm/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-gritlm) add_executable(${TARGET} gritlm.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} 
PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/imatrix/CMakeLists.txt b/examples/imatrix/CMakeLists.txt index d4c8265bdb9d2..412696c47c31c 100644 --- a/examples/imatrix/CMakeLists.txt +++ b/examples/imatrix/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-imatrix) add_executable(${TARGET} imatrix.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/imatrix/README.md b/examples/imatrix/README.md index bb5faec94c20a..9c056986b3ed6 100644 --- a/examples/imatrix/README.md +++ b/examples/imatrix/README.md @@ -25,8 +25,6 @@ For faster computation, make sure to use GPU offloading via the `-ngl` argument ## Example ```bash -GGML_CUDA=1 make -j - # generate importance matrix (imatrix.dat) ./llama-imatrix -m ggml-model-f16.gguf -f train-data.txt -ngl 99 diff --git a/examples/imatrix/imatrix.cpp b/examples/imatrix/imatrix.cpp index 70ff47768c02b..45206f4a7dee1 100644 --- a/examples/imatrix/imatrix.cpp +++ b/examples/imatrix/imatrix.cpp @@ -637,10 +637,19 @@ int main(int argc, char ** argv) { LOG_INF("%s\n", common_params_get_system_info(params).c_str()); } - if (!compute_imatrix(ctx, params)) { - return 1; + if (params.prompt.empty()) { + if (params.in_files.empty()) { + LOG_ERR("Error: No prompt provided and no precomputed matrices (--in-file) to combine.\n"); + return 1; + } + LOG_INF("No prompt provided; combining precomputed matrices only.\n"); + } else { + if (!compute_imatrix(ctx, params)) { + return 1; + } } + g_collector.save_imatrix(); LOG("\n"); diff --git a/examples/infill/CMakeLists.txt b/examples/infill/CMakeLists.txt index 9b1aa3b63c920..fb26628d82bf9 100644 --- a/examples/infill/CMakeLists.txt +++ b/examples/infill/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-infill) add_executable(${TARGET} infill.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/infill/README.md b/examples/infill/README.md index 810a0c5e76697..df4d976f2bb4f 100644 --- a/examples/infill/README.md +++ b/examples/infill/README.md @@ -14,7 +14,7 @@ In this section, we cover the most commonly used options for running the `infill - `-m FNAME, --model FNAME`: Specify the path to the LLaMA model file (e.g., `models/7B/ggml-model.bin`). - `-i, --interactive`: Run the program in interactive mode, allowing you to provide input directly and receive real-time responses. - `-n N, --n-predict N`: Set the number of tokens to predict when generating text. Adjusting this value can influence the length of the generated text. -- `-c N, --ctx-size N`: Set the size of the prompt context. The default is 512, but LLaMA models were built with a context of 2048, which will provide better results for longer input/inference. +- `-c N, --ctx-size N`: Set the size of the prompt context. The default is 4096, but if a LLaMA model was built with a longer context, increasing this value will provide better results for longer input/inference. - `--spm-infill`: Use Suffix/Prefix/Middle pattern for infill (instead of Prefix/Suffix/Middle) as some models prefer this. 
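To complement the option list above, here is a hedged invocation sketch for the infill example; the `--in-prefix`/`--in-suffix` flags and the CodeLlama file name are assumptions based on how this example is usually driven, not part of this diff.

```bash
# Ask the model to fill in the middle between a code prefix and suffix
./llama-infill -m codellama-7b.Q4_K_M.gguf -c 4096 --temp 0.7 -n 32 \
  --in-prefix "def helloworld():\n    print(\"hell" \
  --in-suffix "\n    print(\"goodbye world\")\n"
```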
## Input Prompts diff --git a/examples/llama-bench/CMakeLists.txt b/examples/llama-bench/CMakeLists.txt index 5bdbea4e28187..17e3b9b87bae4 100644 --- a/examples/llama-bench/CMakeLists.txt +++ b/examples/llama-bench/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-bench) add_executable(${TARGET} llama-bench.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index 65cd4eb515c7f..998c673d5d31f 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -210,20 +210,20 @@ actor LlamaContext { llama_kv_cache_clear(context) - let t_pp_start = ggml_time_us() + let t_pp_start = DispatchTime.now().uptimeNanoseconds / 1000; if llama_decode(context, batch) != 0 { print("llama_decode() failed during prompt") } llama_synchronize(context) - let t_pp_end = ggml_time_us() + let t_pp_end = DispatchTime.now().uptimeNanoseconds / 1000; // bench text generation llama_kv_cache_clear(context) - let t_tg_start = ggml_time_us() + let t_tg_start = DispatchTime.now().uptimeNanoseconds / 1000; for i in 0..) if (BUILD_SHARED_LIBS) @@ -35,18 +35,18 @@ add_executable(${TARGET} llava-cli.cpp) set_target_properties(${TARGET} PROPERTIES OUTPUT_NAME llama-llava-cli) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llava ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) set(TARGET llama-minicpmv-cli) add_executable(${TARGET} minicpmv-cli.cpp) set_target_properties(${TARGET} PROPERTIES OUTPUT_NAME llama-minicpmv-cli) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llava ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) set(TARGET llama-qwen2vl-cli) add_executable(${TARGET} qwen2vl-cli.cpp) set_target_properties(${TARGET} PROPERTIES OUTPUT_NAME llama-qwen2vl-cli) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llava ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) \ No newline at end of file +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 22d3b53dc7836..e1df2aa5e7992 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -12,6 +12,10 @@ #include "ggml-cuda.h" #endif +#ifdef GGML_USE_SYCL +#include "ggml-sycl.h" +#endif + #ifdef GGML_USE_METAL #include "ggml-metal.h" #endif @@ -1238,6 +1242,11 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { LOG_INF("%s: CLIP using Vulkan backend\n", __func__); #endif +#ifdef GGML_USE_SYCL + new_clip->backend = ggml_backend_sycl_init(0); + LOG_INF("%s: CLIP using SYCL backend\n", __func__); +#endif + if (!new_clip->backend) { new_clip->backend = ggml_backend_cpu_init(); LOG_INF("%s: CLIP using CPU backend\n", __func__); diff --git a/examples/lookahead/CMakeLists.txt b/examples/lookahead/CMakeLists.txt index f0ae5cd89244c..3468613142de0 100644 --- a/examples/lookahead/CMakeLists.txt +++ b/examples/lookahead/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-lookahead) add_executable(${TARGET} lookahead.cpp) 
install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/lookup/CMakeLists.txt b/examples/lookup/CMakeLists.txt index ef19fe25e31a3..fba78ceda6fd7 100644 --- a/examples/lookup/CMakeLists.txt +++ b/examples/lookup/CMakeLists.txt @@ -2,22 +2,22 @@ set(TARGET llama-lookup) add_executable(${TARGET} lookup.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) set(TARGET llama-lookup-create) add_executable(${TARGET} lookup-create.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) set(TARGET llama-lookup-merge) add_executable(${TARGET} lookup-merge.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) set(TARGET llama-lookup-stats) add_executable(${TARGET} lookup-stats.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/main-cmake-pkg/CMakeLists.txt b/examples/main-cmake-pkg/CMakeLists.txt index 3b38db292320f..5563f4de0a504 100644 --- a/examples/main-cmake-pkg/CMakeLists.txt +++ b/examples/main-cmake-pkg/CMakeLists.txt @@ -29,4 +29,4 @@ add_executable(${TARGET} ${CMAKE_CURRENT_LIST_DIR}/../main/main.cpp) target_include_directories(${TARGET} PRIVATE ${_common_path}) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/main/CMakeLists.txt b/examples/main/CMakeLists.txt index 5f6efaa9aa94b..af3d9150f8640 100644 --- a/examples/main/CMakeLists.txt +++ b/examples/main/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-cli) add_executable(${TARGET} main.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/main/README.md b/examples/main/README.md index 145216938fdb7..7787f7b11b81f 100644 --- a/examples/main/README.md +++ b/examples/main/README.md @@ -66,7 +66,7 @@ In this section, we cover the most commonly used options for running the `llama- - `-mu MODEL_URL --model-url MODEL_URL`: Specify a remote http url to download the file (e.g [https://huggingface.co/ggml-org/gemma-1.1-7b-it-Q4_K_M-GGUF/resolve/main/gemma-1.1-7b-it.Q4_K_M.gguf?download=true](https://huggingface.co/ggml-org/gemma-1.1-7b-it-Q4_K_M-GGUF/resolve/main/gemma-1.1-7b-it.Q4_K_M.gguf?download=true)). - `-i, --interactive`: Run the program in interactive mode, allowing you to provide input directly and receive real-time responses. - `-n N, --n-predict N`: Set the number of tokens to predict when generating text. 
Adjusting this value can influence the length of the generated text. -- `-c N, --ctx-size N`: Set the size of the prompt context. The default is 512, but LLaMA models were built with a context of 2048, which will provide better results for longer input/inference. +- `-c N, --ctx-size N`: Set the size of the prompt context. The default is 4096, but if a LLaMA model was built with a longer context, increasing this value will provide better results for longer input/inference. - `-mli, --multiline-input`: Allows you to write or paste multiple lines without ending each in '\' - `-t N, --threads N`: Set the number of threads to use during generation. For optimal performance, it is recommended to set this value to the number of physical CPU cores your system has. - `-ngl N, --n-gpu-layers N`: When compiled with GPU support, this option allows offloading some layers to the GPU for computation. Generally results in increased performance. @@ -131,7 +131,7 @@ During text generation, LLaMA models have a limited context size, which means th ### Context Size -- `-c N, --ctx-size N`: Set the size of the prompt context (default: 0, 0 = loaded from model). The LLaMA models were built with a context of 2048-8192, which will yield the best results on longer input/inference. +- `-c N, --ctx-size N`: Set the size of the prompt context (default: 4096, 0 = loaded from model). If a LLaMA model was built with a longer context, increasing this value will yield the best results on longer input/inference. ### Extended Context Size @@ -348,6 +348,7 @@ These options provide extra functionality and customization when running the LLa - `-h, --help`: Display a help message showing all available options and their default values. This is particularly useful for checking the latest options and default values, as they can change frequently, and the information in this document may become outdated. - `--verbose-prompt`: Print the prompt before generating text. +- `--no-display-prompt`: Don't print prompt at generation. - `-mg i, --main-gpu i`: When using multiple GPUs this option controls which GPU is used for small tensors for which the overhead of splitting the computation across all GPUs is not worthwhile. The GPU in question will use slightly more VRAM to store a scratch buffer for temporary results. By default GPU 0 is used. - `-ts SPLIT, --tensor-split SPLIT`: When using multiple GPUs this option controls how large tensors should be split across all GPUs. `SPLIT` is a comma-separated list of non-negative values that assigns the proportion of data that each GPU should get in order. For example, "3,2" will assign 60% of the data to GPU 0 and 40% to GPU 1. By default the data is split in proportion to VRAM but this may not be optimal for performance. - `-hfr URL --hf-repo URL`: The url to the Hugging Face model repository. Used in conjunction with `--hf-file` or `-hff`. The model is downloaded and stored in the file provided by `-m` or `--model`. If `-m` is not provided, the model is auto-stored in the path specified by the `LLAMA_CACHE` environment variable or in an OS-specific local cache. 
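To illustrate the `-hfr`/`-hff` pair described in the last bullet, a sketch using the same Gemma repository referenced earlier in this README (treat the repo and file names as placeholders for whatever model you actually want):

```bash
# Fetch the GGUF from Hugging Face on first run, cache it, then start generating
llama-cli -hfr ggml-org/gemma-1.1-7b-it-Q4_K_M-GGUF -hff gemma-1.1-7b-it.Q4_K_M.gguf -p "Hello"
```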
diff --git a/examples/parallel/CMakeLists.txt b/examples/parallel/CMakeLists.txt index c13557bac2bac..847e916de6ed8 100644 --- a/examples/parallel/CMakeLists.txt +++ b/examples/parallel/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-parallel) add_executable(${TARGET} parallel.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/passkey/CMakeLists.txt b/examples/passkey/CMakeLists.txt index dc467a5d3e411..9bc5110c29309 100644 --- a/examples/passkey/CMakeLists.txt +++ b/examples/passkey/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-passkey) add_executable(${TARGET} passkey.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/perplexity/CMakeLists.txt b/examples/perplexity/CMakeLists.txt index be0f2fd029e67..3e68640933afb 100644 --- a/examples/perplexity/CMakeLists.txt +++ b/examples/perplexity/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-perplexity) add_executable(${TARGET} perplexity.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/quantize-stats/CMakeLists.txt b/examples/quantize-stats/CMakeLists.txt index bb986a716883d..9a3a0d3cd2dee 100644 --- a/examples/quantize-stats/CMakeLists.txt +++ b/examples/quantize-stats/CMakeLists.txt @@ -3,4 +3,4 @@ add_executable(${TARGET} quantize-stats.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE llama build_info ${CMAKE_THREAD_LIBS_INIT}) target_include_directories(${TARGET} PRIVATE ../../common) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/quantize/CMakeLists.txt b/examples/quantize/CMakeLists.txt index 62680cda4455f..47e5cbe30cfe3 100644 --- a/examples/quantize/CMakeLists.txt +++ b/examples/quantize/CMakeLists.txt @@ -3,4 +3,4 @@ add_executable(${TARGET} quantize.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_include_directories(${TARGET} PRIVATE ../../common) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/quantize/README.md b/examples/quantize/README.md index 704f0d56bea72..5d1e11c67b13f 100644 --- a/examples/quantize/README.md +++ b/examples/quantize/README.md @@ -54,8 +54,6 @@ As the models are currently fully loaded into memory, you will need adequate dis Several quantization methods are supported. They differ in the resulting model disk size and inference speed. -The quantization formats `Q4_0_4_4`, `Q4_0_4_8` and `Q4_0_8_8` are block interleaved variants of the `Q4_0` format, providing a data layout that is better suited for specific implementations of optimized mulmat kernels. Since these formats differ only in data layout, they have the same quantized size as the `Q4_0` format. 
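With the block-interleaved `Q4_0_4_x` types removed by this change, a plain quantization run with one of the remaining types is the expected workflow; a sketch with placeholder file names:

```bash
# Quantize an f16 GGUF to Q4_K_M
./llama-quantize ggml-model-f16.gguf ggml-model-Q4_K_M.gguf Q4_K_M
```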
- *(outdated)* | Model | Measure | F16 | Q4_0 | Q4_1 | Q5_0 | Q5_1 | Q8_0 | diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index b989932107dba..8d47b17b6bce7 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -48,9 +48,6 @@ static const std::vector QUANT_OPTIONS = { { "Q5_K_M", LLAMA_FTYPE_MOSTLY_Q5_K_M, " 5.33G, +0.0569 ppl @ Llama-3-8B", }, { "Q6_K", LLAMA_FTYPE_MOSTLY_Q6_K, " 6.14G, +0.0217 ppl @ Llama-3-8B", }, { "Q8_0", LLAMA_FTYPE_MOSTLY_Q8_0, " 7.96G, +0.0026 ppl @ Llama-3-8B", }, - { "Q4_0_4_4", LLAMA_FTYPE_MOSTLY_Q4_0_4_4, " 4.34G, +0.4685 ppl @ Llama-3-8B", }, - { "Q4_0_4_8", LLAMA_FTYPE_MOSTLY_Q4_0_4_8, " 4.34G, +0.4685 ppl @ Llama-3-8B", }, - { "Q4_0_8_8", LLAMA_FTYPE_MOSTLY_Q4_0_8_8, " 4.34G, +0.4685 ppl @ Llama-3-8B", }, { "F16", LLAMA_FTYPE_MOSTLY_F16, "14.00G, +0.0020 ppl @ Mistral-7B", }, { "BF16", LLAMA_FTYPE_MOSTLY_BF16, "14.00G, -0.0050 ppl @ Mistral-7B", }, { "F32", LLAMA_FTYPE_ALL_F32, "26.00G @ 7B", }, diff --git a/examples/retrieval/CMakeLists.txt b/examples/retrieval/CMakeLists.txt index 66610f3111405..512a602ec045c 100644 --- a/examples/retrieval/CMakeLists.txt +++ b/examples/retrieval/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-retrieval) add_executable(${TARGET} retrieval.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/run/CMakeLists.txt b/examples/run/CMakeLists.txt index 084f1e92d77b0..52add51ef77c3 100644 --- a/examples/run/CMakeLists.txt +++ b/examples/run/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-run) add_executable(${TARGET} run.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/save-load-state/CMakeLists.txt b/examples/save-load-state/CMakeLists.txt index 0fb5e359bc9ad..0f50e50deecd7 100644 --- a/examples/save-load-state/CMakeLists.txt +++ b/examples/save-load-state/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-save-load-state) add_executable(${TARGET} save-load-state.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/server/CMakeLists.txt b/examples/server/CMakeLists.txt index 93e876f5a5524..63fca1d590f52 100644 --- a/examples/server/CMakeLists.txt +++ b/examples/server/CMakeLists.txt @@ -16,12 +16,7 @@ set(TARGET_SRCS ) set(PUBLIC_ASSETS index.html - completion.js loading.html - deps_daisyui.min.css - deps_markdown-it.js - deps_tailwindcss.js - deps_vue.esm-browser.js ) foreach(asset ${PUBLIC_ASSETS}) @@ -33,6 +28,7 @@ foreach(asset ${PUBLIC_ASSETS}) OUTPUT "${output}" COMMAND "${CMAKE_COMMAND}" "-DINPUT=${input}" "-DOUTPUT=${output}" -P "${PROJECT_SOURCE_DIR}/scripts/xxd.cmake" ) + set_source_files_properties(${output} PROPERTIES GENERATED TRUE) endforeach() add_executable(${TARGET} ${TARGET_SRCS}) @@ -50,4 +46,4 @@ if (WIN32) TARGET_LINK_LIBRARIES(${TARGET} PRIVATE ws2_32) endif() -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/server/README.md b/examples/server/README.md index 877768c8b0bd2..6294f541fc7d7 
100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -69,6 +69,8 @@ The project is under active development, and we are [looking for feedback and co | `--mlock` | force system to keep model in RAM rather than swapping or compressing
(env: LLAMA_ARG_MLOCK) | | `--no-mmap` | do not memory-map model (slower load but may reduce pageouts if not using mlock)
(env: LLAMA_ARG_NO_MMAP) | | `--numa TYPE` | attempt optimizations that help on some NUMA systems
- distribute: spread execution evenly over all nodes
- isolate: only spawn threads on CPUs on the node that execution started on
- numactl: use the CPU map provided by numactl
if run without this previously, it is recommended to drop the system page cache before using this
see https://github.com/ggerganov/llama.cpp/issues/1437
(env: LLAMA_ARG_NUMA) | +| `-dev, --device <dev1,dev2,..>` | comma-separated list of devices to use for offloading (none = don't offload)<br/>
use --list-devices to see a list of available devices
(env: LLAMA_ARG_DEVICE) | +| `--list-devices` | print list of available devices and exit | | `-ngl, --gpu-layers, --n-gpu-layers N` | number of layers to store in VRAM
(env: LLAMA_ARG_N_GPU_LAYERS) | | `-sm, --split-mode {none,layer,row}` | how to split the model across multiple GPUs, one of:
- none: use one GPU only
- layer (default): split layers and KV across GPUs
- row: split rows across GPUs
(env: LLAMA_ARG_SPLIT_MODE) | | `-ts, --tensor-split N0,N1,N2,...` | fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1
(env: LLAMA_ARG_TENSOR_SPLIT) | @@ -144,6 +146,7 @@ The project is under active development, and we are [looking for feedback and co | `--host HOST` | ip address to listen (default: 127.0.0.1)
(env: LLAMA_ARG_HOST) | | `--port PORT` | port to listen (default: 8080)
(env: LLAMA_ARG_PORT) | | `--path PATH` | path to serve static files from (default: )
(env: LLAMA_ARG_STATIC_PATH) | +| `--no-webui` | disable the Web UI
(env: LLAMA_ARG_NO_WEBUI) | | `--embedding, --embeddings` | restrict to only support embedding use case; use only with dedicated embedding models (default: disabled)
(env: LLAMA_ARG_EMBEDDINGS) | | `--reranking, --rerank` | enable reranking endpoint on server (default: disabled)
(env: LLAMA_ARG_RERANKING) | | `--api-key KEY` | API key to use for authentication (default: none)
(env: LLAMA_API_KEY) | @@ -158,9 +161,16 @@ The project is under active development, and we are [looking for feedback and co | `--props` | enable changing global properties via POST /props (default: disabled)
(env: LLAMA_ARG_ENDPOINT_PROPS) | | `--no-slots` | disables slots monitoring endpoint
(env: LLAMA_ARG_NO_ENDPOINT_SLOTS) | | `--slot-save-path PATH` | path to save slot kv cache (default: disabled) | -| `--chat-template JINJA_TEMPLATE` | set custom jinja chat template (default: template taken from model's metadata)
if suffix/prefix are specified, template will be disabled
only commonly used templates are accepted:
https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template
(env: LLAMA_ARG_CHAT_TEMPLATE) | +| `--chat-template JINJA_TEMPLATE` | set custom jinja chat template (default: template taken from model's metadata)
if suffix/prefix are specified, template will be disabled
list of built-in templates:
chatglm3, chatglm4, chatml, command-r, deepseek, deepseek2, exaone3, gemma, granite, llama2, llama2-sys, llama2-sys-bos, llama2-sys-strip, llama3, minicpm, mistral-v1, mistral-v3, mistral-v3-tekken, mistral-v7, monarch, openchat, orion, phi3, rwkv-world, vicuna, vicuna-orca, zephyr
(env: LLAMA_ARG_CHAT_TEMPLATE) | | `-sps, --slot-prompt-similarity SIMILARITY` | how much the prompt of a request must match the prompt of a slot in order to use that slot (default: 0.50, 0.0 = disabled)
| | `--lora-init-without-apply` | load LoRA adapters without applying them (apply later via POST /lora-adapters) (default: disabled) | +| `--draft-max, --draft, --draft-n N` | number of tokens to draft for speculative decoding (default: 16) | +| `--draft-min, --draft-n-min N` | minimum number of draft tokens to use for speculative decoding (default: 5) | +| `--draft-p-min P` | minimum speculative decoding probability (greedy) (default: 0.9) | +| `-cd, --ctx-size-draft N` | size of the prompt context for the draft model (default: 0, 0 = loaded from model) | +| `-devd, --device-draft <dev1,dev2,..>` | comma-separated list of devices to use for offloading the draft model (none = don't offload)<br/>
use --list-devices to see a list of available devices | +| `-ngld, --gpu-layers-draft, --n-gpu-layers-draft N` | number of layers to store in VRAM for the draft model | +| `-md, --model-draft FNAME` | draft model for speculative decoding (default: unused) | Note: If both command line argument and environment variable are both set for the same param, the argument will take precedence over env var. @@ -188,12 +198,6 @@ services: `llama-server` is built alongside everything else from the root of the project -- Using `make`: - - ```bash - make llama-server - ``` - - Using `CMake`: ```bash @@ -207,15 +211,6 @@ services: `llama-server` can also be built with SSL support using OpenSSL 3 -- Using `make`: - - ```bash - # NOTE: For non-system openssl, use the following: - # CXXFLAGS="-I /path/to/openssl/include" - # LDFLAGS="-L /path/to/openssl/lib" - make LLAMA_SERVER_SSL=true llama-server - ``` - - Using `CMake`: ```bash @@ -223,6 +218,37 @@ services: cmake --build build --config Release -t llama-server ``` +## Web UI + +The project includes a web-based user interface that enables interaction with the model through the `/chat/completions` endpoint. + +The web UI is developed using: +- `vue` framework for frontend development +- `tailwindcss` and `daisyui` for styling +- `vite` for build tooling + +A pre-built version is available as a single HTML file under `/public` directory. + +To build or to run the dev server (with hot reload): + +```sh +# make sure you have nodejs installed +cd examples/server/webui +npm i + +# to run the dev server +npm run dev + +# to build the public/index.html +npm run build +``` + +NOTE: if you are using the vite dev server, you can change the API base URL to llama.cpp. To do that, run this code snippet in browser's console: + +```js +localStorage.setItem('base', 'http://localhost:8080') +``` + ## Quick Start To get started right away, run the following command, making sure to use the correct path for the model you have: @@ -317,104 +343,106 @@ node index.js ### POST `/completion`: Given a `prompt`, it returns the predicted completion. - *Options:* +*Options:* + +`prompt`: Provide the prompt for this completion as a string or as an array of strings or numbers representing tokens. Internally, if `cache_prompt` is `true`, the prompt is compared to the previous completion and only the "unseen" suffix is evaluated. A `BOS` token is inserted at the start, if all of the following conditions are true: - `prompt`: Provide the prompt for this completion as a string or as an array of strings or numbers representing tokens. Internally, if `cache_prompt` is `true`, the prompt is compared to the previous completion and only the "unseen" suffix is evaluated. A `BOS` token is inserted at the start, if all of the following conditions are true: + - The prompt is a string or an array with the first element given as a string + - The model's `tokenizer.ggml.add_bos_token` metadata is `true` - - The prompt is a string or an array with the first element given as a string - - The model's `tokenizer.ggml.add_bos_token` metadata is `true` +These input shapes and data type are allowed for `prompt`: - These input shapes and data type are allowed for `prompt`: + - Single string: `"string"` + - Single sequence of tokens: `[12, 34, 56]` + - Mixed tokens and strings: `[12, 34, "string", 56, 78]` - - Single string: `"string"` - - Single sequence of tokens: `[12, 34, 56]` - - Mixed tokens and strings: `[12, 34, "string", 56, 78]` +Multiple prompts are also supported. 
In this case, the completion result will be an array. - Multiple prompts are also supported. In this case, the completion result will be an array. + - Only strings: `["string1", "string2"]` + - Strings and sequences of tokens: `["string1", [12, 34, 56]]` + - Mixed types: `[[12, 34, "string", 56, 78], [12, 34, 56], "string"]` - - Only strings: `["string1", "string2"]` - - Strings and sequences of tokens: `["string1", [12, 34, 56]]` - - Mixed types: `[[12, 34, "string", 56, 78], [12, 34, 56], "string"]` +`temperature`: Adjust the randomness of the generated text. Default: `0.8` - `temperature`: Adjust the randomness of the generated text. Default: `0.8` +`dynatemp_range`: Dynamic temperature range. The final temperature will be in the range of `[temperature - dynatemp_range; temperature + dynatemp_range]` Default: `0.0`, which is disabled. - `dynatemp_range`: Dynamic temperature range. The final temperature will be in the range of `[temperature - dynatemp_range; temperature + dynatemp_range]` Default: `0.0`, which is disabled. +`dynatemp_exponent`: Dynamic temperature exponent. Default: `1.0` - `dynatemp_exponent`: Dynamic temperature exponent. Default: `1.0` +`top_k`: Limit the next token selection to the K most probable tokens. Default: `40` - `top_k`: Limit the next token selection to the K most probable tokens. Default: `40` +`top_p`: Limit the next token selection to a subset of tokens with a cumulative probability above a threshold P. Default: `0.95` - `top_p`: Limit the next token selection to a subset of tokens with a cumulative probability above a threshold P. Default: `0.95` +`min_p`: The minimum probability for a token to be considered, relative to the probability of the most likely token. Default: `0.05` - `min_p`: The minimum probability for a token to be considered, relative to the probability of the most likely token. Default: `0.05` +`n_predict`: Set the maximum number of tokens to predict when generating text. **Note:** May exceed the set limit slightly if the last token is a partial multibyte character. When 0, no tokens will be generated but the prompt is evaluated into the cache. Default: `-1`, where `-1` is infinity. - `n_predict`: Set the maximum number of tokens to predict when generating text. **Note:** May exceed the set limit slightly if the last token is a partial multibyte character. When 0, no tokens will be generated but the prompt is evaluated into the cache. Default: `-1`, where `-1` is infinity. +`n_indent`: Specify the minimum line indentation for the generated text in number of whitespace characters. Useful for code completion tasks. Default: `0` - `n_indent`: Specify the minimum line indentation for the generated text in number of whitespace characters. Useful for code completion tasks. Default: `0` +`n_keep`: Specify the number of tokens from the prompt to retain when the context size is exceeded and tokens need to be discarded. The number excludes the BOS token. +By default, this value is set to `0`, meaning no tokens are kept. Use `-1` to retain all tokens from the prompt. - `n_keep`: Specify the number of tokens from the prompt to retain when the context size is exceeded and tokens need to be discarded. The number excludes the BOS token. - By default, this value is set to `0`, meaning no tokens are kept. Use `-1` to retain all tokens from the prompt. +`stream`: It allows receiving each predicted token in real-time instead of waiting for the completion to finish. To enable this, set to `true`. 
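As a hedged illustration of the `stream` option just described (server address and prompt are placeholders), a streaming request differs from a regular one only by the `"stream": true` field; the server then returns the tokens incrementally instead of a single JSON body:

```shell
# -N/--no-buffer lets curl print the streamed chunks as they arrive
curl -N http://localhost:8080/completion \
    -H "Content-Type: application/json" \
    -d '{
        "prompt": "Building a website can be done in 10 simple steps:",
        "n_predict": 64,
        "stream": true
    }'
```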
- `stream`: It allows receiving each predicted token in real-time instead of waiting for the completion to finish. To enable this, set to `true`. +`stop`: Specify a JSON array of stopping strings. +These words will not be included in the completion, so make sure to add them to the prompt for the next iteration. Default: `[]` - `stop`: Specify a JSON array of stopping strings. - These words will not be included in the completion, so make sure to add them to the prompt for the next iteration. Default: `[]` +`typical_p`: Enable locally typical sampling with parameter p. Default: `1.0`, which is disabled. - `typical_p`: Enable locally typical sampling with parameter p. Default: `1.0`, which is disabled. +`repeat_penalty`: Control the repetition of token sequences in the generated text. Default: `1.1` - `repeat_penalty`: Control the repetition of token sequences in the generated text. Default: `1.1` +`repeat_last_n`: Last n tokens to consider for penalizing repetition. Default: `64`, where `0` is disabled and `-1` is ctx-size. - `repeat_last_n`: Last n tokens to consider for penalizing repetition. Default: `64`, where `0` is disabled and `-1` is ctx-size. +`penalize_nl`: Penalize newline tokens when applying the repeat penalty. Default: `true` - `penalize_nl`: Penalize newline tokens when applying the repeat penalty. Default: `true` +`presence_penalty`: Repeat alpha presence penalty. Default: `0.0`, which is disabled. - `presence_penalty`: Repeat alpha presence penalty. Default: `0.0`, which is disabled. +`frequency_penalty`: Repeat alpha frequency penalty. Default: `0.0`, which is disabled. - `frequency_penalty`: Repeat alpha frequency penalty. Default: `0.0`, which is disabled. +`dry_multiplier`: Set the DRY (Don't Repeat Yourself) repetition penalty multiplier. Default: `0.0`, which is disabled. - `dry_multiplier`: Set the DRY (Don't Repeat Yourself) repetition penalty multiplier. Default: `0.0`, which is disabled. +`dry_base`: Set the DRY repetition penalty base value. Default: `1.75` - `dry_base`: Set the DRY repetition penalty base value. Default: `1.75` +`dry_allowed_length`: Tokens that extend repetition beyond this receive exponentially increasing penalty: multiplier * base ^ (length of repeating sequence before token - allowed length). Default: `2` - `dry_allowed_length`: Tokens that extend repetition beyond this receive exponentially increasing penalty: multiplier * base ^ (length of repeating sequence before token - allowed length). Default: `2` +`dry_penalty_last_n`: How many tokens to scan for repetitions. Default: `-1`, where `0` is disabled and `-1` is context size. - `dry_penalty_last_n`: How many tokens to scan for repetitions. Default: `-1`, where `0` is disabled and `-1` is context size. +`dry_sequence_breakers`: Specify an array of sequence breakers for DRY sampling. Only a JSON array of strings is accepted. Default: `['\n', ':', '"', '*']` - `dry_sequence_breakers`: Specify an array of sequence breakers for DRY sampling. Only a JSON array of strings is accepted. Default: `['\n', ':', '"', '*']` +`xtc_probability`: Set the chance for token removal via XTC sampler. Default: `0.0`, which is disabled. - `xtc_probability`: Set the chance for token removal via XTC sampler. Default: `0.0`, which is disabled. +`xtc_threshold`: Set a minimum probability threshold for tokens to be removed via XTC sampler. Default: `0.1` (> `0.5` disables XTC) - `xtc_threshold`: Set a minimum probability threshold for tokens to be removed via XTC sampler. 
Default: `0.1` (> `0.5` disables XTC) +`mirostat`: Enable Mirostat sampling, controlling perplexity during text generation. Default: `0`, where `0` is disabled, `1` is Mirostat, and `2` is Mirostat 2.0. - `mirostat`: Enable Mirostat sampling, controlling perplexity during text generation. Default: `0`, where `0` is disabled, `1` is Mirostat, and `2` is Mirostat 2.0. +`mirostat_tau`: Set the Mirostat target entropy, parameter tau. Default: `5.0` - `mirostat_tau`: Set the Mirostat target entropy, parameter tau. Default: `5.0` +`mirostat_eta`: Set the Mirostat learning rate, parameter eta. Default: `0.1` - `mirostat_eta`: Set the Mirostat learning rate, parameter eta. Default: `0.1` +`grammar`: Set grammar for grammar-based sampling. Default: no grammar - `grammar`: Set grammar for grammar-based sampling. Default: no grammar +`json_schema`: Set a JSON schema for grammar-based sampling (e.g. `{"items": {"type": "string"}, "minItems": 10, "maxItems": 100}` of a list of strings, or `{}` for any JSON). See [tests](../../tests/test-json-schema-to-grammar.cpp) for supported features. Default: no JSON schema. - `json_schema`: Set a JSON schema for grammar-based sampling (e.g. `{"items": {"type": "string"}, "minItems": 10, "maxItems": 100}` of a list of strings, or `{}` for any JSON). See [tests](../../tests/test-json-schema-to-grammar.cpp) for supported features. Default: no JSON schema. +`seed`: Set the random number generator (RNG) seed. Default: `-1`, which is a random seed. - `seed`: Set the random number generator (RNG) seed. Default: `-1`, which is a random seed. +`ignore_eos`: Ignore end of stream token and continue generating. Default: `false` - `ignore_eos`: Ignore end of stream token and continue generating. Default: `false` +`logit_bias`: Modify the likelihood of a token appearing in the generated text completion. For example, use `"logit_bias": [[15043,1.0]]` to increase the likelihood of the token 'Hello', or `"logit_bias": [[15043,-1.0]]` to decrease its likelihood. Setting the value to false, `"logit_bias": [[15043,false]]` ensures that the token `Hello` is never produced. The tokens can also be represented as strings, e.g. `[["Hello, World!",-0.5]]` will reduce the likelihood of all the individual tokens that represent the string `Hello, World!`, just like the `presence_penalty` does. Default: `[]` - `logit_bias`: Modify the likelihood of a token appearing in the generated text completion. For example, use `"logit_bias": [[15043,1.0]]` to increase the likelihood of the token 'Hello', or `"logit_bias": [[15043,-1.0]]` to decrease its likelihood. Setting the value to false, `"logit_bias": [[15043,false]]` ensures that the token `Hello` is never produced. The tokens can also be represented as strings, e.g. `[["Hello, World!",-0.5]]` will reduce the likelihood of all the individual tokens that represent the string `Hello, World!`, just like the `presence_penalty` does. Default: `[]` +`n_probs`: If greater than 0, the response also contains the probabilities of top N tokens for each generated token given the sampling settings. Note that for temperature < 0 the tokens are sampled greedily but token probabilities are still being calculated via a simple softmax of the logits without considering any other sampler settings. Default: `0` - `n_probs`: If greater than 0, the response also contains the probabilities of top N tokens for each generated token given the sampling settings. 
Note that for temperature < 0 the tokens are sampled greedily but token probabilities are still being calculated via a simple softmax of the logits without considering any other sampler settings. Default: `0` +`min_keep`: If greater than 0, force samplers to return N possible tokens at minimum. Default: `0` - `min_keep`: If greater than 0, force samplers to return N possible tokens at minimum. Default: `0` +`t_max_predict_ms`: Set a time limit in milliseconds for the prediction (a.k.a. text-generation) phase. The timeout will trigger if the generation takes more than the specified time (measured since the first token was generated) and if a new-line character has already been generated. Useful for FIM applications. Default: `0`, which is disabled. - `t_max_predict_ms`: Set a time limit in milliseconds for the prediction (a.k.a. text-generation) phase. The timeout will trigger if the generation takes more than the specified time (measured since the first token was generated) and if a new-line character has already been generated. Useful for FIM applications. Default: `0`, which is disabled. +`image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be referenced in `prompt`. You can determine the place of the image in the prompt as in the following: `USER:[img-12]Describe the image in detail.\nASSISTANT:`. In this case, `[img-12]` will be replaced by the embeddings of the image with id `12` in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 12}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. - `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `prompt`. You can determine the place of the image in the prompt as in the following: `USER:[img-12]Describe the image in detail.\nASSISTANT:`. In this case, `[img-12]` will be replaced by the embeddings of the image with id `12` in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 12}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. +`id_slot`: Assign the completion task to a specific slot. If it is -1, the task will be assigned to an idle slot. Default: `-1` - `id_slot`: Assign the completion task to an specific slot. If is -1 the task will be assigned to a Idle slot. Default: `-1` +`cache_prompt`: Re-use KV cache from a previous request if possible. This way the common prefix does not have to be re-processed, only the suffix that differs between the requests. Because (depending on the backend) the logits are **not** guaranteed to be bit-for-bit identical for different batch sizes (prompt processing vs. token generation), enabling this option can cause nondeterministic results. Default: `true` - `cache_prompt`: Re-use KV cache from a previous request if possible. This way the common prefix does not have to be re-processed, only the suffix that differs between the requests. Because (depending on the backend) the logits are **not** guaranteed to be bit-for-bit identical for different batch sizes (prompt processing vs. token generation) enabling this option can cause nondeterministic results. Default: `true` +`samplers`: The order the samplers should be applied in. An array of strings representing sampler type names. If a sampler is not set, it will not be used. If a sampler is specified more than once, it will be applied multiple times. Default: `["dry", "top_k", "typ_p", "top_p", "min_p", "xtc", "temperature"]` - these are all the available values.
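To show how several of the options documented above combine in a single request, here is a hedged example (the values are arbitrary; every field shown is optional and falls back to the defaults listed above when omitted):

```shell
curl http://localhost:8080/completion \
    -H "Content-Type: application/json" \
    -d '{
        "prompt": "Q: Name the planets in the solar system. A:",
        "n_predict": 128,
        "temperature": 0.7,
        "top_k": 40,
        "min_p": 0.05,
        "cache_prompt": true,
        "samplers": ["top_k", "min_p", "temperature"]
    }'
```

Restricting `samplers` to a subset, as in this sketch, disables the samplers that are left out (here `dry`, `typ_p`, `top_p` and `xtc`).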
- `samplers`: The order the samplers should be applied in. An array of strings representing sampler type names. If a sampler is not set, it will not be used. If a sampler is specified more than once, it will be applied multiple times. Default: `["dry", "top_k", "typ_p", "top_p", "min_p", "xtc", "temperature"]` - these are all the available values. + `timings_per_token`: Include prompt processing and text generation speed information in each response. Default: `false` **Response format** @@ -446,9 +474,11 @@ Notice that each `probs` is an array of length `n_probs`. - `generation_settings`: The provided options above excluding `prompt` but including `n_ctx`, `model`. These options may differ from the original ones in some way (e.g. bad values filtered out, strings converted to tokens, etc.). - `model`: The path to the model loaded with `-m` - `prompt`: The provided `prompt` -- `stopped_eos`: Indicating whether the completion has stopped because it encountered the EOS token -- `stopped_limit`: Indicating whether the completion stopped because `n_predict` tokens were generated before stop words or EOS was encountered -- `stopped_word`: Indicating whether the completion stopped due to encountering a stopping word from `stop` JSON array provided +- `stop_type`: Indicating whether the completion has stopped. Possible values are: + - `none`: Generating (not stopped) + - `eos`: Stopped because it encountered the EOS token + - `limit`: Stopped because `n_predict` tokens were generated before stop words or EOS was encountered + - `word`: Stopped due to encountering a stopping word from `stop` JSON array provided - `stopping_word`: The stopping word encountered which stopped the generation (or "" if not stopped due to a stopping word) - `timings`: Hash of timing information about the completion such as the number of tokens `predicted_per_second` - `tokens_cached`: Number of tokens from the prompt which could be re-used from previous completion (`n_past`) @@ -457,13 +487,13 @@ Notice that each `probs` is an array of length `n_probs`. ### POST `/tokenize`: Tokenize a given text - *Options:* +*Options:* - `content`: (Required) The text to tokenize. +`content`: (Required) The text to tokenize. - `add_special`: (Optional) Boolean indicating if special tokens, i.e. `BOS`, should be inserted. Default: `false` +`add_special`: (Optional) Boolean indicating if special tokens, i.e. `BOS`, should be inserted. Default: `false` - `with_pieces`: (Optional) Boolean indicating whether to return token pieces along with IDs. Default: `false` +`with_pieces`: (Optional) Boolean indicating whether to return token pieces along with IDs. Default: `false` **Response:** @@ -500,52 +530,52 @@ With input 'á' (utf8 hex: C3 A1) on tinyllama/stories260k ### POST `/detokenize`: Convert tokens to text - *Options:* +*Options:* - `tokens`: Set the tokens to detokenize. +`tokens`: Set the tokens to detokenize. ### POST `/embedding`: Generate embedding of a given text The same as [the embedding example](../embedding) does. - *Options:* +*Options:* - `content`: Set the text to process. +`content`: Set the text to process. - `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `content`. You can determine the place of the image in the content as in the following: `Image: [img-21].\nCaption: This is a picture of a house`. In this case, `[img-21]` will be replaced by the embeddings of the image with id `21` in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 21}]}`. 
Use `image_data` only with multimodal models, e.g., LLaVA. +`image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `content`. You can determine the place of the image in the content as in the following: `Image: [img-21].\nCaption: This is a picture of a house`. In this case, `[img-21]` will be replaced by the embeddings of the image with id `21` in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 21}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. ### POST `/reranking`: Rerank documents according to a given query Similar to https://jina.ai/reranker/ but might change in the future. Requires a reranker model (such as [bge-reranker-v2-m3](https://huggingface.co/BAAI/bge-reranker-v2-m3)) and the `--embedding --pooling rank` options. - *Options:* +*Options:* - `query`: The query against which the documents will be ranked. +`query`: The query against which the documents will be ranked. - `documents`: An array strings representing the documents to be ranked. +`documents`: An array strings representing the documents to be ranked. - *Aliases:* - - `/rerank` - - `/v1/rerank` - - `/v1/reranking` +*Aliases:* + - `/rerank` + - `/v1/rerank` + - `/v1/reranking` - *Examples:* +*Examples:* - ```shell - curl http://127.0.0.1:8012/v1/rerank \ - -H "Content-Type: application/json" \ - -d '{ - "model": "some-model", - "query": "What is panda?", - "top_n": 3, - "documents": [ - "hi", - "it is a bear", - "The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China." - ] - }' | jq - ``` +```shell +curl http://127.0.0.1:8012/v1/rerank \ + -H "Content-Type: application/json" \ + -d '{ + "model": "some-model", + "query": "What is panda?", + "top_n": 3, + "documents": [ + "hi", + "it is a bear", + "The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China." + ] + }' | jq +``` ### POST `/infill`: For code infilling. @@ -589,14 +619,83 @@ This endpoint is public (no API key check). By default, it is read-only. To make ```json { - "default_generation_settings": { ... 
}, + "default_generation_settings": { + "id": 0, + "id_task": -1, + "n_ctx": 1024, + "speculative": false, + "is_processing": false, + "params": { + "n_predict": -1, + "seed": 4294967295, + "temperature": 0.800000011920929, + "dynatemp_range": 0.0, + "dynatemp_exponent": 1.0, + "top_k": 40, + "top_p": 0.949999988079071, + "min_p": 0.05000000074505806, + "xtc_probability": 0.0, + "xtc_threshold": 0.10000000149011612, + "typical_p": 1.0, + "repeat_last_n": 64, + "repeat_penalty": 1.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "dry_multiplier": 0.0, + "dry_base": 1.75, + "dry_allowed_length": 2, + "dry_penalty_last_n": -1, + "dry_sequence_breakers": [ + "\n", + ":", + "\"", + "*" + ], + "mirostat": 0, + "mirostat_tau": 5.0, + "mirostat_eta": 0.10000000149011612, + "penalize_nl": false, + "stop": [], + "max_tokens": -1, + "n_keep": 0, + "n_discard": 0, + "ignore_eos": false, + "stream": true, + "n_probs": 0, + "min_keep": 0, + "grammar": "", + "samplers": [ + "dry", + "top_k", + "typ_p", + "top_p", + "min_p", + "xtc", + "temperature" + ], + "speculative.n_max": 16, + "speculative.n_min": 5, + "speculative.p_min": 0.8999999761581421, + "timings_per_token": false + }, + "prompt": "", + "next_token": { + "has_next_token": true, + "has_new_line": false, + "n_remain": -1, + "n_decoded": 0, + "stopping_word": "" + } + }, "total_slots": 1, - "chat_template": "" + "model_path": "../models/Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf", + "chat_template": "..." } ``` - `default_generation_settings` - the default generation settings for the `/completion` endpoint, which has the same fields as the `generation_settings` response object from the `/completion` endpoint. - `total_slots` - the total number of slots for process requests (defined by `--parallel` option) +- `model_path` - the path to model file (same with `-m` argument) - `chat_template` - the model's original Jinja2 prompt template ### POST `/props`: Change server global properties. @@ -611,89 +710,89 @@ To use this endpoint with POST method, you need to start server with `--props` Given a ChatML-formatted json description in `messages`, it returns the predicted completion. Both synchronous and streaming mode are supported, so scripted and interactive applications work fine. While no strong claims of compatibility with OpenAI API spec is being made, in our experience it suffices to support many apps. Only models with a [supported chat template](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) can be used optimally with this endpoint. By default, the ChatML template will be used. - *Options:* +*Options:* - See [OpenAI Chat Completions API documentation](https://platform.openai.com/docs/api-reference/chat). While some OpenAI-specific features such as function calling aren't supported, llama.cpp `/completion`-specific features such as `mirostat` are supported. +See [OpenAI Chat Completions API documentation](https://platform.openai.com/docs/api-reference/chat). While some OpenAI-specific features such as function calling aren't supported, llama.cpp `/completion`-specific features such as `mirostat` are supported. - The `response_format` parameter supports both plain JSON output (e.g. `{"type": "json_object"}`) and schema-constrained JSON (e.g. 
`{"type": "json_object", "schema": {"type": "string", "minLength": 10, "maxLength": 100}}` or `{"type": "json_schema", "schema": {"properties": { "name": { "title": "Name", "type": "string" }, "date": { "title": "Date", "type": "string" }, "participants": { "items": {"type: "string" }, "title": "Participants", "type": "string" } } } }`), similar to other OpenAI-inspired API providers. +The `response_format` parameter supports both plain JSON output (e.g. `{"type": "json_object"}`) and schema-constrained JSON (e.g. `{"type": "json_object", "schema": {"type": "string", "minLength": 10, "maxLength": 100}}` or `{"type": "json_schema", "schema": {"properties": { "name": { "title": "Name", "type": "string" }, "date": { "title": "Date", "type": "string" }, "participants": { "items": {"type: "string" }, "title": "Participants", "type": "string" } } } }`), similar to other OpenAI-inspired API providers. - *Examples:* +*Examples:* - You can use either Python `openai` library with appropriate checkpoints: +You can use either Python `openai` library with appropriate checkpoints: - ```python - import openai +```python +import openai - client = openai.OpenAI( - base_url="http://localhost:8080/v1", # "http://:port" - api_key = "sk-no-key-required" - ) +client = openai.OpenAI( + base_url="http://localhost:8080/v1", # "http://:port" + api_key = "sk-no-key-required" +) - completion = client.chat.completions.create( - model="gpt-3.5-turbo", - messages=[ - {"role": "system", "content": "You are ChatGPT, an AI assistant. Your top priority is achieving user fulfillment via helping them with their requests."}, - {"role": "user", "content": "Write a limerick about python exceptions"} - ] - ) +completion = client.chat.completions.create( +model="gpt-3.5-turbo", +messages=[ + {"role": "system", "content": "You are ChatGPT, an AI assistant. Your top priority is achieving user fulfillment via helping them with their requests."}, + {"role": "user", "content": "Write a limerick about python exceptions"} +] +) - print(completion.choices[0].message) - ``` +print(completion.choices[0].message) +``` - ... or raw HTTP requests: +... or raw HTTP requests: - ```shell - curl http://localhost:8080/v1/chat/completions \ - -H "Content-Type: application/json" \ - -H "Authorization: Bearer no-key" \ - -d '{ - "model": "gpt-3.5-turbo", - "messages": [ - { - "role": "system", - "content": "You are ChatGPT, an AI assistant. Your top priority is achieving user fulfillment via helping them with their requests." - }, - { - "role": "user", - "content": "Write a limerick about python exceptions" - } - ] - }' - ``` +```shell +curl http://localhost:8080/v1/chat/completions \ +-H "Content-Type: application/json" \ +-H "Authorization: Bearer no-key" \ +-d '{ +"model": "gpt-3.5-turbo", +"messages": [ +{ + "role": "system", + "content": "You are ChatGPT, an AI assistant. Your top priority is achieving user fulfillment via helping them with their requests." +}, +{ + "role": "user", + "content": "Write a limerick about python exceptions" +} +] +}' +``` ### POST `/v1/embeddings`: OpenAI-compatible embeddings API - *Options:* - - See [OpenAI Embeddings API documentation](https://platform.openai.com/docs/api-reference/embeddings). +*Options:* - *Examples:* +See [OpenAI Embeddings API documentation](https://platform.openai.com/docs/api-reference/embeddings). 
- - input as string +*Examples:* - ```shell - curl http://localhost:8080/v1/embeddings \ - -H "Content-Type: application/json" \ - -H "Authorization: Bearer no-key" \ - -d '{ - "input": "hello", - "model":"GPT-4", - "encoding_format": "float" - }' - ``` +- input as string - - `input` as string array + ```shell + curl http://localhost:8080/v1/embeddings \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer no-key" \ + -d '{ + "input": "hello", + "model":"GPT-4", + "encoding_format": "float" + }' + ``` - ```shell - curl http://localhost:8080/v1/embeddings \ - -H "Content-Type: application/json" \ - -H "Authorization: Bearer no-key" \ - -d '{ - "input": ["hello", "world"], - "model":"GPT-4", - "encoding_format": "float" - }' - ``` +- `input` as string array + + ```shell + curl http://localhost:8080/v1/embeddings \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer no-key" \ + -d '{ + "input": ["hello", "world"], + "model":"GPT-4", + "encoding_format": "float" + }' + ``` ### GET `/slots`: Returns the current slots processing state @@ -710,56 +809,74 @@ Example: ```json [ - { - "dynatemp_exponent": 1.0, - "dynatemp_range": 0.0, - "frequency_penalty": 0.0, - "grammar": "", - "id": 0, - "ignore_eos": false, - "is_processing": false, - "logit_bias": [], - "min_p": 0.05000000074505806, - "mirostat": 0, - "mirostat_eta": 0.10000000149011612, - "mirostat_tau": 5.0, - "model": "llama-2-7b-32k-instruct.Q2_K.gguf", - "n_ctx": 2048, - "n_keep": 0, - "n_predict": 100000, - "n_probs": 0, - "next_token": { - "has_next_token": true, - "n_remain": -1, - "n_decoded": 0, - "stopped_eos": false, - "stopped_limit": false, - "stopped_word": false, - "stopping_word": "" - }, - "penalize_nl": true, - "presence_penalty": 0.0, - "prompt": "Say hello to llama.cpp", - "repeat_last_n": 64, - "repeat_penalty": 1.100000023841858, - "samplers": [ - "top_k", - "typical_p", - "top_p", - "min_p", - "temperature" - ], - "seed": 42, - "stop": [ - "\n" - ], - "stream": false, - "task_id": 0, - "temperature": 0.0, - "top_k": 40, - "top_p": 0.949999988079071, - "typical_p": 1.0 + { + "id": 0, + "id_task": -1, + "n_ctx": 1024, + "speculative": false, + "is_processing": false, + "params": { + "n_predict": -1, + "seed": 4294967295, + "temperature": 0.800000011920929, + "dynatemp_range": 0.0, + "dynatemp_exponent": 1.0, + "top_k": 40, + "top_p": 0.949999988079071, + "min_p": 0.05000000074505806, + "xtc_probability": 0.0, + "xtc_threshold": 0.10000000149011612, + "typical_p": 1.0, + "repeat_last_n": 64, + "repeat_penalty": 1.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "dry_multiplier": 0.0, + "dry_base": 1.75, + "dry_allowed_length": 2, + "dry_penalty_last_n": -1, + "dry_sequence_breakers": [ + "\n", + ":", + "\"", + "*" + ], + "mirostat": 0, + "mirostat_tau": 5.0, + "mirostat_eta": 0.10000000149011612, + "penalize_nl": false, + "stop": [], + "max_tokens": -1, + "n_keep": 0, + "n_discard": 0, + "ignore_eos": false, + "stream": true, + "n_probs": 0, + "min_keep": 0, + "grammar": "", + "samplers": [ + "dry", + "top_k", + "typ_p", + "top_p", + "min_p", + "xtc", + "temperature" + ], + "speculative.n_max": 16, + "speculative.n_min": 5, + "speculative.p_min": 0.8999999761581421, + "timings_per_token": false + }, + "prompt": "", + "next_token": { + "has_next_token": true, + "has_new_line": false, + "n_remain": -1, + "n_decoded": 0, + "stopping_word": "" } + } ] ``` @@ -779,9 +896,9 @@ Available metrics: ### POST `/slots/{id_slot}?action=save`: Save the prompt cache of the specified slot to a 
file. - *Options:* +*Options:* - `filename`: Name of the file to save the slot's prompt cache. The file will be saved in the directory specified by the `--slot-save-path` server parameter. +`filename`: Name of the file to save the slot's prompt cache. The file will be saved in the directory specified by the `--slot-save-path` server parameter. **Response format** @@ -799,9 +916,9 @@ Available metrics: ### POST `/slots/{id_slot}?action=restore`: Restore the prompt cache of the specified slot from a file. - *Options:* +*Options:* - `filename`: Name of the file to restore the slot's prompt cache from. The file should be located in the directory specified by the `--slot-save-path` server parameter. +`filename`: Name of the file to restore the slot's prompt cache from. The file should be located in the directory specified by the `--slot-save-path` server parameter. **Response format** diff --git a/examples/server/deps.sh b/examples/server/deps.sh deleted file mode 100755 index 1ff80d0569987..0000000000000 --- a/examples/server/deps.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -# Download and update deps for binary - -# get the directory of this script file -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -PUBLIC=$DIR/public - -echo "download js bundle files" - -# Note for contributors: Always pin to a specific version "maj.min.patch" to avoid breaking the CI - -curl -L https://cdn.tailwindcss.com/3.4.14 > $PUBLIC/deps_tailwindcss.js -echo >> $PUBLIC/deps_tailwindcss.js # add newline - -curl -L https://cdnjs.cloudflare.com/ajax/libs/daisyui/4.12.14/styled.min.css > $PUBLIC/deps_daisyui.min.css -curl -L https://cdnjs.cloudflare.com/ajax/libs/daisyui/4.12.14/themes.min.css >> $PUBLIC/deps_daisyui.min.css -echo >> $PUBLIC/deps_daisyui.min.css # add newline - -curl -L https://unpkg.com/vue@3.5.12/dist/vue.esm-browser.js > $PUBLIC/deps_vue.esm-browser.js -echo >> $PUBLIC/deps_vue.esm-browser.js # add newline - -curl -L https://cdnjs.cloudflare.com/ajax/libs/markdown-it/13.0.2/markdown-it.js > $PUBLIC/deps_markdown-it.js -echo >> $PUBLIC/deps_markdown-it.js # add newline - -ls -lah $PUBLIC diff --git a/examples/server/public/deps_daisyui.min.css b/examples/server/public/deps_daisyui.min.css deleted file mode 100644 index bc85296519d9a..0000000000000 --- a/examples/server/public/deps_daisyui.min.css +++ /dev/null @@ -1,13 +0,0 @@ -.alert{display:grid;width:100%;grid-auto-flow:row;align-content:flex-start;align-items:center;justify-items:center;gap:1rem;text-align:center}@media (min-width:640px){.alert{grid-auto-flow:column;grid-template-columns:auto minmax(auto,1fr);justify-items:start;text-align:start}}.artboard{width:100%}.avatar{position:relative;display:inline-flex}.avatar>div{display:block;aspect-ratio:1/1;overflow:hidden}.avatar 
img{height:100%;width:100%;object-fit:cover}.avatar.placeholder>div{display:flex;align-items:center;justify-content:center}.badge{display:inline-flex;align-items:center;justify-content:center;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.2s;transition-timing-function:cubic-bezier(0,0,.2,1);height:1.25rem;font-size:.875rem;line-height:1.25rem;width:fit-content;padding-left:.563rem;padding-right:.563rem}.btm-nav{position:fixed;bottom:0;left:0;right:0;display:flex;width:100%;flex-direction:row;align-items:center;justify-content:space-around;padding-bottom:env(safe-area-inset-bottom)}.btm-nav>*{position:relative;display:flex;height:100%;flex-basis:100%;cursor:pointer;flex-direction:column;align-items:center;justify-content:center;gap:.25rem}.breadcrumbs{max-width:100%;overflow-x:auto}.breadcrumbs>ol,.breadcrumbs>ul{display:flex;align-items:center;white-space:nowrap;min-height:min-content}.breadcrumbs>ol>li,.breadcrumbs>ul>li{display:flex;align-items:center}.breadcrumbs>ol>li>a,.breadcrumbs>ul>li>a{display:flex;cursor:pointer;align-items:center}@media(hover:hover){.breadcrumbs>ol>li>a:hover,.breadcrumbs>ul>li>a:hover{text-decoration-line:underline}}.btn{display:inline-flex;height:3rem;min-height:3rem;flex-shrink:0;cursor:pointer;user-select:none;flex-wrap:wrap;align-items:center;justify-content:center;border-radius:var(--rounded-btn,.5rem);border-color:transparent;padding-left:1rem;padding-right:1rem;text-align:center;font-size:.875rem;line-height:1em}.btn-disabled,.btn:disabled,.btn[disabled]{pointer-events:none}.btn-square{height:3rem;width:3rem;padding:0}.btn-circle{height:3rem;width:3rem;border-radius:9999px;padding:0}:where(.btn:is(input[type=checkbox])),:where(.btn:is(input[type=radio])){width:auto;appearance:none}.btn:is(input[type=checkbox]):after,.btn:is(input[type=radio]):after{--tw-content:attr(aria-label);content:var(--tw-content)}.card{position:relative;display:flex;flex-direction:column}.card:focus{outline:2px solid transparent;outline-offset:2px}.card-body{display:flex;flex:1 1 auto;flex-direction:column}.card-body :where(p){flex-grow:1}.card-actions{display:flex;flex-wrap:wrap;align-items:flex-start;gap:.5rem}.card figure{display:flex;align-items:center;justify-content:center}.card.image-full{display:grid}.card.image-full:before{position:relative;content:""}.card.image-full:before,.card.image-full>*{grid-column-start:1;grid-row-start:1}.card.image-full>figure img{height:100%;object-fit:cover}.card.image-full>.card-body{position:relative}.carousel{display:inline-flex;overflow-x:scroll;scroll-snap-type:x mandatory;scroll-behavior:smooth}.carousel-vertical{flex-direction:column;overflow-y:scroll;scroll-snap-type:y mandatory}.carousel-item{box-sizing:content-box;display:flex;flex:none;scroll-snap-align:start}.carousel-start .carousel-item{scroll-snap-align:start}.carousel-center .carousel-item{scroll-snap-align:center}.carousel-end .carousel-item{scroll-snap-align:end}.chat{display:grid;grid-template-columns:repeat(2,minmax(0,1fr));column-gap:.75rem;padding-top:.25rem;padding-bottom:.25rem}.chat-image{grid-row:span 2/span 
2;align-self:flex-end}.chat-header{grid-row-start:1;font-size:.875rem;line-height:1.25rem}.chat-footer{grid-row-start:3;font-size:.875rem;line-height:1.25rem}.chat-bubble{position:relative;display:block;width:fit-content;padding-left:1rem;padding-right:1rem;padding-top:.5rem;padding-bottom:.5rem;max-width:90%}.chat-bubble:before{position:absolute;bottom:0;height:.75rem;width:.75rem;background-color:inherit;content:"";mask-size:contain;mask-repeat:no-repeat;mask-position:center}.chat-start{place-items:start;grid-template-columns:auto 1fr}.chat-start .chat-header{grid-column-start:2}.chat-start .chat-footer{grid-column-start:2}.chat-start .chat-image{grid-column-start:1}.chat-start .chat-bubble{grid-column-start:2}.chat-start .chat-bubble:before{mask-image:url("data:image/svg+xml,%3csvg width='3' height='3' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='m 0 3 L 3 3 L 3 0 C 3 1 1 3 0 3'/%3e%3c/svg%3e")}[dir=rtl] .chat-start .chat-bubble:before{mask-image:url("data:image/svg+xml,%3csvg width='3' height='3' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='m 0 3 L 1 3 L 3 3 C 2 3 0 1 0 0'/%3e%3c/svg%3e")}.chat-end{place-items:end;grid-template-columns:1fr auto}.chat-end .chat-header{grid-column-start:1}.chat-end .chat-footer{grid-column-start:1}.chat-end .chat-image{grid-column-start:2}.chat-end .chat-bubble{grid-column-start:1}.chat-end .chat-bubble:before{mask-image:url("data:image/svg+xml,%3csvg width='3' height='3' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='m 0 3 L 1 3 L 3 3 C 2 3 0 1 0 0'/%3e%3c/svg%3e")}[dir=rtl] .chat-end .chat-bubble:before{mask-image:url("data:image/svg+xml,%3csvg width='3' height='3' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='m 0 3 L 3 3 L 3 0 C 3 1 1 3 0 3'/%3e%3c/svg%3e")}.checkbox{flex-shrink:0}.collapse:not(td):not(tr):not(colgroup){visibility:visible}.collapse{position:relative;display:grid;overflow:hidden;grid-template-rows:auto 0fr;transition:grid-template-rows .2s}.collapse-content,.collapse-title,.collapse>input[type=checkbox],.collapse>input[type=radio]{grid-column-start:1;grid-row-start:1}.collapse>input[type=checkbox],.collapse>input[type=radio]{appearance:none;opacity:0}.collapse-content{visibility:hidden;grid-column-start:1;grid-row-start:2;min-height:0;transition:visibility .2s}.collapse-open,.collapse:focus:not(.collapse-close),.collapse[open]{grid-template-rows:auto 1fr}.collapse:not(.collapse-close):has(>input[type=checkbox]:checked),.collapse:not(.collapse-close):has(>input[type=radio]:checked){grid-template-rows:auto 1fr}.collapse-open>.collapse-content,.collapse:focus:not(.collapse-close)>.collapse-content,.collapse:not(.collapse-close)>input[type=checkbox]:checked~.collapse-content,.collapse:not(.collapse-close)>input[type=radio]:checked~.collapse-content,.collapse[open]>.collapse-content{visibility:visible;min-height:fit-content}:root .countdown{line-height:1em}.countdown{display:inline-flex}.countdown>*{height:1em;display:inline-block;overflow-y:hidden}.countdown>:before{position:relative;content:"00\A 01\A 02\A 03\A 04\A 05\A 06\A 07\A 08\A 09\A 10\A 11\A 12\A 13\A 14\A 15\A 16\A 17\A 18\A 19\A 20\A 21\A 22\A 23\A 24\A 25\A 26\A 27\A 28\A 29\A 30\A 31\A 32\A 33\A 34\A 35\A 36\A 37\A 38\A 39\A 40\A 41\A 42\A 43\A 44\A 45\A 46\A 47\A 48\A 49\A 50\A 51\A 52\A 53\A 54\A 55\A 56\A 57\A 58\A 59\A 60\A 61\A 62\A 63\A 64\A 65\A 66\A 67\A 68\A 69\A 70\A 71\A 72\A 73\A 74\A 75\A 76\A 77\A 78\A 79\A 80\A 81\A 82\A 83\A 84\A 85\A 86\A 87\A 88\A 89\A 90\A 91\A 92\A 93\A 94\A 95\A 96\A 97\A 98\A 
99\A";white-space:pre;top:calc(var(--value) * -1em)}.diff{position:relative;display:grid;width:100%;overflow:hidden;container-type:inline-size;grid-template-columns:auto 1fr}.diff-resizer{position:relative;top:50%;z-index:1;height:3rem;width:25rem;min-width:1rem;max-width:calc(100cqi - 1rem);resize:horizontal;overflow:hidden;opacity:0;transform-origin:100% 100%;scale:4;translate:1.5rem -1.5rem;clip-path:inset(calc(100% - .75rem) 0 0 calc(100% - .75rem))}.diff-item-1,.diff-item-2,.diff-resizer{position:relative;grid-column-start:1;grid-row-start:1}.diff-item-1:after{pointer-events:none;position:absolute;bottom:0;right:1px;top:50%;z-index:1;height:2rem;width:2rem;--tw-content:'';content:var(--tw-content);translate:50% -50%}.diff-item-2{overflow:hidden}.diff-item-1>*,.diff-item-2>*{pointer-events:none;position:absolute;bottom:0;left:0;top:0;height:100%;width:100cqi;max-width:none;object-fit:cover;object-position:center}.divider{display:flex;flex-direction:row;align-items:center;align-self:stretch}.divider:after,.divider:before{height:.125rem;width:100%;flex-grow:1;--tw-content:'';content:var(--tw-content)}.divider-start:before{display:none}.divider-end:after{display:none}.drawer{position:relative;display:grid;grid-auto-columns:max-content auto}.drawer-content{grid-column-start:2;grid-row-start:1;min-width:0}.drawer-side{pointer-events:none;position:fixed;inset-inline-start:0;top:0;grid-column-start:1;grid-row-start:1;display:grid;width:100%;grid-template-columns:repeat(1,minmax(0,1fr));grid-template-rows:repeat(1,minmax(0,1fr));align-items:flex-start;justify-items:start;overflow-x:hidden;overflow-y:hidden;overscroll-behavior:contain;height:100vh;height:100dvh}.drawer-side>.drawer-overlay{position:sticky;top:0;place-self:stretch}.drawer-side>*{grid-column-start:1;grid-row-start:1}.drawer-side>:not(.drawer-overlay){transition-property:transform;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.3s;transition-timing-function:cubic-bezier(0,0,.2,1);will-change:transform;transform:translateX(-100%)}[dir=rtl] .drawer-side>:not(.drawer-overlay){transform:translateX(100%)}.drawer-toggle{position:fixed;height:0;width:0;appearance:none;opacity:0}.drawer-toggle:checked~.drawer-side{pointer-events:auto;visibility:visible;overflow-y:auto}.drawer-toggle:checked~.drawer-side>:not(.drawer-overlay){transform:translateX(0)}.drawer-end{grid-auto-columns:auto max-content}.drawer-end>.drawer-toggle~.drawer-content{grid-column-start:1}.drawer-end>.drawer-toggle~.drawer-side{grid-column-start:2;justify-items:end}.drawer-end>.drawer-toggle~.drawer-side>:not(.drawer-overlay){transform:translateX(100%)}[dir=rtl] .drawer-end>.drawer-toggle~.drawer-side>:not(.drawer-overlay){transform:translateX(-100%)}.drawer-end>.drawer-toggle:checked~.drawer-side>:not(.drawer-overlay){transform:translateX(0)}.dropdown{position:relative;display:inline-block}.dropdown>:not(summary):focus{outline:2px solid transparent;outline-offset:2px}.dropdown .dropdown-content{position:absolute}.dropdown:is(:not(details)) .dropdown-content{visibility:hidden;opacity:0}.dropdown-end .dropdown-content{inset-inline-end:0}.dropdown-left .dropdown-content{bottom:auto;inset-inline-end:100%;top:0}.dropdown-right .dropdown-content{bottom:auto;inset-inline-start:100%;top:0}.dropdown-bottom .dropdown-content{bottom:auto;top:100%}.dropdown-top .dropdown-content{bottom:100%;top:auto}.dropdown-end.dropdown-right .dropdown-content{bottom:0;top:auto}.dropdown-end.dropdown-left .dropdown-content{bottom:0;top:auto}.dropdown.dropdown-open 
.dropdown-content,.dropdown:focus-within .dropdown-content,.dropdown:not(.dropdown-hover):focus .dropdown-content{visibility:visible;opacity:1}@media (hover:hover){.dropdown.dropdown-hover:hover .dropdown-content{visibility:visible;opacity:1}}.dropdown:is(details) summary::-webkit-details-marker{display:none}.file-input{height:3rem;flex-shrink:1;padding-inline-end:1rem;font-size:.875rem;line-height:1.25rem;line-height:2}.file-input::file-selector-button{margin-inline-end:1rem;display:inline-flex;height:100%;flex-shrink:0;cursor:pointer;user-select:none;flex-wrap:wrap;align-items:center;justify-content:center;padding-left:1rem;padding-right:1rem;text-align:center;font-size:.875rem;line-height:1.25rem;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.2s;transition-timing-function:cubic-bezier(0,0,.2,1);line-height:1em}.footer{display:grid;width:100%;grid-auto-flow:row;place-items:start}.footer>*{display:grid;place-items:start}.footer-center{place-items:center;text-align:center}.footer-center>*{place-items:center}@media (min-width:48rem){.footer{grid-auto-flow:column}.footer-center{grid-auto-flow:row dense}}.form-control{display:flex;flex-direction:column}.label{display:flex;user-select:none;align-items:center;justify-content:space-between}.hero{display:grid;width:100%;place-items:center;background-size:cover;background-position:center}.hero>*{grid-column-start:1;grid-row-start:1}.hero-overlay{grid-column-start:1;grid-row-start:1;height:100%;width:100%}.hero-content{z-index:0;display:flex;align-items:center;justify-content:center}.indicator{position:relative;display:inline-flex;width:max-content}.indicator :where(.indicator-item){z-index:1;position:absolute;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));white-space:nowrap}.input{flex-shrink:1;appearance:none;height:3rem;padding-left:1rem;padding-right:1rem;font-size:.875rem;line-height:1.25rem;line-height:2}.input-md[type=number]::-webkit-inner-spin-button,.input[type=number]::-webkit-inner-spin-button{margin-top:-1rem;margin-bottom:-1rem;margin-inline-end:-1rem}.input-xs[type=number]::-webkit-inner-spin-button{margin-top:-.25rem;margin-bottom:-.25rem;margin-inline-end:0}.input-sm[type=number]::-webkit-inner-spin-button{margin-top:0;margin-bottom:0;margin-inline-end:0}.input-lg[type=number]::-webkit-inner-spin-button{margin-top:-1.5rem;margin-bottom:-1.5rem;margin-inline-end:-1.5rem}.join{display:inline-flex;align-items:stretch}.join :where(.join-item){border-start-end-radius:0;border-end-end-radius:0;border-end-start-radius:0;border-start-start-radius:0}.join .join-item:not(:first-child):not(:last-child),.join :not(:first-child):not(:last-child) .join-item{border-start-end-radius:0;border-end-end-radius:0;border-end-start-radius:0;border-start-start-radius:0}.join .join-item:first-child:not(:last-child),.join :first-child:not(:last-child) .join-item{border-start-end-radius:0;border-end-end-radius:0}.join .dropdown .join-item:first-child:not(:last-child),.join :first-child:not(:last-child) .dropdown .join-item{border-start-end-radius:inherit;border-end-end-radius:inherit}.join :where(.join-item:first-child:not(:last-child)),.join 
:where(:first-child:not(:last-child).join-item){border-end-start-radius:inherit;border-start-start-radius:inherit}.join .join-item:last-child:not(:first-child),.join :last-child:not(:first-child) .join-item{border-end-start-radius:0;border-start-start-radius:0}.join :where(.join-item:last-child:not(:first-child)),.join :where(:last-child:not(:first-child).join-item){border-start-end-radius:inherit;border-end-end-radius:inherit}@supports not selector(:has(*)){:where(.join*){border-radius:inherit}}@supports selector(:has(*)){:where(.join:has(.join-item)){border-radius:inherit}}.kbd{display:inline-flex;align-items:center;justify-content:center}.link{cursor:pointer;text-decoration-line:underline}.link-hover{text-decoration-line:none}@media(hover:hover){.link-hover:hover{text-decoration-line:underline}}.mask{mask-size:contain;mask-repeat:no-repeat;mask-position:center}.mask-half-1{mask-size:200%;mask-position:left}.mask-half-1:where([dir=rtl],[dir=rtl]*){mask-position:right}.mask-half-2{mask-size:200%;mask-position:right}.mask-half-2:where([dir=rtl],[dir=rtl]*){mask-position:left}.menu{display:flex;flex-direction:column;flex-wrap:wrap;font-size:.875rem;line-height:1.25rem}.menu :where(liul){position:relative;white-space:nowrap}.menu :where(li:not(.menu-title)>:not(ul,details,.menu-title,.btn)),.menu :where(li:not(.menu-title)>details>summary:not(.menu-title)){display:grid;grid-auto-flow:column;align-content:flex-start;align-items:center;gap:.5rem;grid-auto-columns:minmax(auto,max-content) auto max-content;user-select:none}.menu li.disabled{cursor:not-allowed;user-select:none}.menu :where(li>.menu-dropdown:not(.menu-dropdown-show)){display:none}:where(.menuli){position:relative;display:flex;flex-shrink:0;flex-direction:column;flex-wrap:wrap;align-items:stretch}:where(.menuli) .badge{justify-self:end}.mockup-code{position:relative;overflow:hidden;overflow-x:auto}.mockup-code pre[data-prefix]:before{content:attr(data-prefix);display:inline-block;text-align:right}.mockup-window{position:relative;overflow:hidden;overflow-x:auto}.mockup-window pre[data-prefix]:before{content:attr(data-prefix);display:inline-block;text-align:right}.mockup-browser{position:relative;overflow:hidden;overflow-x:auto}.mockup-browser pre[data-prefix]:before{content:attr(data-prefix);display:inline-block;text-align:right}.modal{pointer-events:none;position:fixed;inset:0;margin:0;display:grid;height:100%;max-height:none;width:100%;max-width:none;justify-items:center;padding:0;opacity:0;overscroll-behavior:contain;z-index:999}.modal-scroll{overscroll-behavior:auto}:where(.modal){align-items:center}.modal-box{max-height:calc(100vh - 
5em)}.modal-open,.modal-toggle:checked+.modal,.modal:target,.modal[open]{pointer-events:auto;visibility:visible;opacity:1}.modal-action{display:flex}.modal-toggle{position:fixed;height:0;width:0;appearance:none;opacity:0}:root:has(:is(.modal-open,.modal:target,.modal-toggle:checked+.modal,.modal[open])){overflow:hidden;scrollbar-gutter:stable}.navbar{display:flex;align-items:center}:where(.navbar>:not(script,style)){display:inline-flex;align-items:center}.navbar-start{width:50%;justify-content:flex-start}.navbar-center{flex-shrink:0}.navbar-end{width:50%;justify-content:flex-end}.progress{position:relative;width:100%;appearance:none;overflow:hidden}.radial-progress{position:relative;display:inline-grid;height:var(--size);width:var(--size);place-content:center;border-radius:9999px;background-color:transparent;vertical-align:middle;box-sizing:content-box}.radial-progress::-moz-progress-bar{appearance:none;background-color:transparent}.radial-progress::-webkit-progress-value{appearance:none;background-color:transparent}.radial-progress::-webkit-progress-bar{appearance:none;background-color:transparent}.radial-progress:after,.radial-progress:before{position:absolute;border-radius:9999px;content:""}.radial-progress:before{inset:0;background:radial-gradient(farthest-side,currentColor 98%,#0000) top/var(--thickness) var(--thickness) no-repeat,conic-gradient(currentColor calc(var(--value) * 1%),#0000 0);-webkit-mask:radial-gradient(farthest-side,#0000 calc(99% - var(--thickness)),#000 calc(100% - var(--thickness)));mask:radial-gradient(farthest-side,#0000 calc(99% - var(--thickness)),#000 calc(100% - var(--thickness)))}.radial-progress:after{inset:calc(50% - var(--thickness)/ 2);transform:rotate(calc(var(--value) * 3.6deg - 90deg)) translate(calc(var(--size)/ 2 - 50%))}.radio{flex-shrink:0}.range{height:1.5rem;width:100%;cursor:pointer}.range:focus{outline:0}.rating{position:relative;display:inline-flex}.rating :where(input){cursor:pointer;border-radius:0}.select{display:inline-flex;cursor:pointer;user-select:none;appearance:none;height:3rem;min-height:3rem;padding-inline-start:1rem;padding-inline-end:2.5rem;font-size:.875rem;line-height:1.25rem;line-height:2}.select[multiple]{height:auto}.stack{display:inline-grid}.stack>*{grid-column-start:1;grid-row-start:1;transform:translateY(10%) scale(.9);z-index:1}.stack>:nth-child(2){transform:translateY(5%) scale(.95);z-index:2}.stack>:nth-child(1){transform:translateY(0) scale(1);z-index:3}.stats{display:inline-grid}:where(.stats){grid-auto-flow:column}.stat{display:inline-grid;width:100%;grid-template-columns:repeat(1,1fr)}.stat-figure{grid-column-start:2;grid-row:span 3/span 3;grid-row-start:1;place-self:center;justify-self:end}.stat-title{grid-column-start:1;white-space:nowrap}.stat-value{grid-column-start:1;white-space:nowrap}.stat-desc{grid-column-start:1;white-space:nowrap}.stat-actions{grid-column-start:1;white-space:nowrap}.steps{display:inline-grid;grid-auto-flow:column;overflow:hidden;overflow-x:auto;counter-reset:step;grid-auto-columns:1fr}.steps .step{display:grid;grid-template-columns:repeat(1,minmax(0,1fr));grid-template-rows:repeat(2,minmax(0,1fr));place-items:center;text-align:center}.swap{position:relative;display:inline-grid;user-select:none;place-content:center}.swap>*{grid-column-start:1;grid-row-start:1}.swap input{appearance:none}.swap .swap-indeterminate,.swap .swap-on,.swap input:indeterminate~.swap-on{opacity:0}.swap input:checked~.swap-off,.swap input:indeterminate~.swap-off,.swap-active .swap-off{opacity:0}.swap 
input:checked~.swap-on,.swap input:indeterminate~.swap-indeterminate,.swap-active .swap-on{opacity:1}.tabs{display:grid;align-items:flex-end}.tabs-lifted:has(.tab-content[class*=" rounded-"]) .tab:first-child:not(:is(.tab-active,[aria-selected=true])),.tabs-lifted:has(.tab-content[class^=rounded-]) .tab:first-child:not(:is(.tab-active,[aria-selected=true])){border-bottom-color:transparent}.tab{position:relative;grid-row-start:1;display:inline-flex;height:2rem;cursor:pointer;user-select:none;appearance:none;flex-wrap:wrap;align-items:center;justify-content:center;text-align:center;font-size:.875rem;line-height:1.25rem;line-height:2;--tab-padding:1rem}.tab:is(input[type=radio]){width:auto;border-bottom-right-radius:0;border-bottom-left-radius:0}.tab:is(input[type=radio]):after{--tw-content:attr(aria-label);content:var(--tw-content)}.tab:not(input):empty{cursor:default;grid-column-start:span 9999}.tab-content{grid-column-start:1;grid-column-end:span 9999;grid-row-start:2;margin-top:calc(var(--tab-border) * -1);display:none;border-color:transparent;border-width:var(--tab-border,0)}:checked+.tab-content:nth-child(2),:is(.tab-active,[aria-selected=true])+.tab-content:nth-child(2){border-start-start-radius:0}:is(.tab-active,[aria-selected=true])+.tab-content,input.tab:checked+.tab-content{display:block}.table{position:relative;width:100%}.table :where(.table-pin-rowstheadtr){position:sticky;top:0;z-index:1;--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)))}.table :where(.table-pin-rowstfoottr){position:sticky;bottom:0;z-index:1;--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)))}.table :where(.table-pin-colstrth){position:sticky;left:0;right:0;--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)))}.table-zebra tbody tr:nth-child(even) :where(.table-pin-colstrth){--tw-bg-opacity:1;background-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-bg-opacity)))}.textarea{min-height:3rem;flex-shrink:1;padding-left:1rem;padding-right:1rem;padding-top:.5rem;padding-bottom:.5rem;font-size:.875rem;line-height:1.25rem;line-height:2}.timeline{position:relative;display:flex}:where(.timeline>li){position:relative;display:grid;flex-shrink:0;align-items:center;grid-template-rows:var(--timeline-row-start,minmax(0,1fr)) auto var(--timeline-row-end,minmax(0,1fr));grid-template-columns:var(--timeline-col-start,minmax(0,1fr)) auto 
var(--timeline-col-end,minmax(0,1fr))}.timeline>li>hr{width:100%;border-width:0}:where(.timeline>li>hr):first-child{grid-column-start:1;grid-row-start:2}:where(.timeline>li>hr):last-child{grid-column-start:3;grid-column-end:none;grid-row-start:2;grid-row-end:auto}.timeline-start{grid-column-start:1;grid-column-end:4;grid-row-start:1;grid-row-end:2;margin:.25rem;align-self:flex-end;justify-self:center}.timeline-middle{grid-column-start:2;grid-row-start:2}.timeline-end{grid-column-start:1;grid-column-end:4;grid-row-start:3;grid-row-end:4;margin:.25rem;align-self:flex-start;justify-self:center}.toast{position:fixed;display:flex;min-width:fit-content;flex-direction:column;white-space:nowrap}.toggle{flex-shrink:0}.alert{border-radius:var(--rounded-box,1rem);border-width:1px;--tw-border-opacity:1;border-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-border-opacity)));padding:1rem;--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));--alert-bg:var(--fallback-b2,oklch(var(--b2)/1));--alert-bg-mix:var(--fallback-b1,oklch(var(--b1)/1));background-color:var(--alert-bg)}.alert-info{border-color:var(--fallback-in,oklch(var(--in)/.2));--tw-text-opacity:1;color:var(--fallback-inc,oklch(var(--inc)/var(--tw-text-opacity)));--alert-bg:var(--fallback-in,oklch(var(--in)/1));--alert-bg-mix:var(--fallback-b1,oklch(var(--b1)/1))}.alert-success{border-color:var(--fallback-su,oklch(var(--su)/.2));--tw-text-opacity:1;color:var(--fallback-suc,oklch(var(--suc)/var(--tw-text-opacity)));--alert-bg:var(--fallback-su,oklch(var(--su)/1));--alert-bg-mix:var(--fallback-b1,oklch(var(--b1)/1))}.alert-warning{border-color:var(--fallback-wa,oklch(var(--wa)/.2));--tw-text-opacity:1;color:var(--fallback-wac,oklch(var(--wac)/var(--tw-text-opacity)));--alert-bg:var(--fallback-wa,oklch(var(--wa)/1));--alert-bg-mix:var(--fallback-b1,oklch(var(--b1)/1))}.alert-error{border-color:var(--fallback-er,oklch(var(--er)/.2));--tw-text-opacity:1;color:var(--fallback-erc,oklch(var(--erc)/var(--tw-text-opacity)));--alert-bg:var(--fallback-er,oklch(var(--er)/1));--alert-bg-mix:var(--fallback-b1,oklch(var(--b1)/1))}.avatar-group{display:flex;overflow:hidden}.avatar-group 
:where(.avatar){overflow:hidden;border-radius:9999px;border-width:4px;--tw-border-opacity:1;border-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-border-opacity)))}.badge{border-radius:var(--rounded-badge,1.9rem);border-width:1px;--tw-border-opacity:1;border-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)))}.badge-neutral{--tw-border-opacity:1;border-color:var(--fallback-n,oklch(var(--n)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-nc,oklch(var(--nc)/var(--tw-text-opacity)))}.badge-primary{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)))}.badge-secondary{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-s,oklch(var(--s)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-sc,oklch(var(--sc)/var(--tw-text-opacity)))}.badge-accent{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-a,oklch(var(--a)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-ac,oklch(var(--ac)/var(--tw-text-opacity)))}.badge-info{border-color:transparent;--tw-bg-opacity:1;background-color:var(--fallback-in,oklch(var(--in)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-inc,oklch(var(--inc)/var(--tw-text-opacity)))}.badge-success{border-color:transparent;--tw-bg-opacity:1;background-color:var(--fallback-su,oklch(var(--su)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-suc,oklch(var(--suc)/var(--tw-text-opacity)))}.badge-warning{border-color:transparent;--tw-bg-opacity:1;background-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-wac,oklch(var(--wac)/var(--tw-text-opacity)))}.badge-error{border-color:transparent;--tw-bg-opacity:1;background-color:var(--fallback-er,oklch(var(--er)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-erc,oklch(var(--erc)/var(--tw-text-opacity)))}.badge-ghost{--tw-border-opacity:1;border-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)))}.badge-outline{border-color:currentColor;--tw-border-opacity:0.5;background-color:transparent;color:currentColor}.badge-outline.badge-neutral{--tw-text-opacity:1;color:var(--fallback-n,oklch(var(--n)/var(--tw-text-opacity)))}.badge-outline.badge-primary{--tw-text-opacity:1;color:var(--fallback-p,oklch(var(--p)/var(--tw-text-opacity)))}.badge-outline.badge-secondary{--tw-text-opacity:1;color:var(--fallback-s,oklch(var(--s)/var(--tw-text-opacity)))}.badge-outline.badge-accent{--tw-text-opacity:1;color:var(--fallback-a,oklch(var(--a)/var(--tw-text-opacity)))}.badge-outline.badge-info{--tw-text-opacity:1;color:var(--fallback-in,oklch(var(--in)/var(--tw-text-opacity)))}.badge-outline.badge-success{--tw-text-opacity:1;color:var(--fallback-su,oklch(var(
--su)/var(--tw-text-opacity)))}.badge-outline.badge-warning{--tw-text-opacity:1;color:var(--fallback-wa,oklch(var(--wa)/var(--tw-text-opacity)))}.badge-outline.badge-error{--tw-text-opacity:1;color:var(--fallback-er,oklch(var(--er)/var(--tw-text-opacity)))}.btm-nav{height:4rem;--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)));color:currentColor}.btm-nav>*{border-color:currentColor}.btm-nav>:not(.active){padding-top:.125rem}.btm-nav>:where(.active){border-top-width:2px;--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)))}.btm-nav>.disabled,.btm-nav>[disabled]{pointer-events:none;--tw-border-opacity:0;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));--tw-bg-opacity:0.1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));--tw-text-opacity:0.2}@media (hover:hover){.btm-nav>.disabled:hover,.btm-nav>[disabled]:hover{pointer-events:none;--tw-border-opacity:0;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));--tw-bg-opacity:0.1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));--tw-text-opacity:0.2}}.btm-nav>* .label{font-size:1rem;line-height:1.5rem}.breadcrumbs{padding-top:.5rem;padding-bottom:.5rem}.breadcrumbs>ol>li>a:focus,.breadcrumbs>ul>li>a:focus{outline:2px solid transparent;outline-offset:2px}.breadcrumbs>ol>li>a:focus-visible,.breadcrumbs>ul>li>a:focus-visible{outline:2px solid currentColor;outline-offset:2px}.breadcrumbs>ol>li+:before,.breadcrumbs>ul>li+:before{content:"";margin-left:.5rem;margin-right:.75rem;display:block;height:.375rem;width:.375rem;--tw-rotate:45deg;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));opacity:.4;border-top:1px solid;border-right:1px solid;background-color:transparent}[dir=rtl] .breadcrumbs>ol>li+:before,[dir=rtl] .breadcrumbs>ul>li+:before{--tw-rotate:-135deg}.btn{gap:.5rem;font-weight:600;text-decoration-line:none;transition-duration:.2s;transition-timing-function:cubic-bezier(0,0,.2,1);border-width:var(--border-btn,1px);transition-property:color,background-color,border-color,opacity,box-shadow,transform}@media (prefers-reduced-motion:no-preference){.btn{animation:button-pop var(--animation-btn,.25s) ease-out}}.btn:active:focus,.btn:active:hover{animation:button-pop 0s ease-out;transform:scale(var(--btn-focus-scale,.97))}.btn{--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));text-decoration-line:none;--tw-shadow:0 1px 2px 0 rgb(0 0 0 / 0.05);--tw-shadow-colored:0 1px 2px 0 var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow);outline-color:var(--fallback-bc,oklch(var(--bc)/1));background-color:oklch(var(--btn-color,var(--b2)) / var(--tw-bg-opacity));--tw-bg-opacity:1;border-color:oklch(var(--btn-color,var(--b2)) / var(--tw-border-opacity));--tw-border-opacity:1}@supports not (color:oklch(0% 0 0)){.btn{background-color:var(--btn-color,var(--fallback-b2));border-color:var(--btn-color,var(--fallback-b2))}}@media (hover:hover){.btn:hover{--tw-border-opacity:1;border-color:var(--fallback-b3,oklch(var(--b3)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-b3,oklch(var(--b3)/var(--tw-bg-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn:hover{background-color:color-mix(in oklab,oklch(var(--btn-color,var(--b2)) / 
var(--tw-bg-opacity,1)) 90%,#000);border-color:color-mix(in oklab,oklch(var(--btn-color,var(--b2)) / var(--tw-border-opacity,1)) 90%,#000)}}@supports not (color:oklch(0% 0 0)){.btn:hover{background-color:var(--btn-color,var(--fallback-b2));border-color:var(--btn-color,var(--fallback-b2))}}}@supports (color:color-mix(in oklab,black,black)){.btn-active{background-color:color-mix(in oklab,oklch(var(--btn-color,var(--b3)) / var(--tw-bg-opacity,1)) 90%,#000);border-color:color-mix(in oklab,oklch(var(--btn-color,var(--b3)) / var(--tw-border-opacity,1)) 90%,#000)}}.btn:focus-visible{outline-style:solid;outline-width:2px;outline-offset:2px}.btn-primary{--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)));outline-color:var(--fallback-p,oklch(var(--p)/1))}@supports (color:oklch(0% 0 0)){.btn-primary{--btn-color:var(--p)}}@supports not (color:oklch(0% 0 0)){.btn-primary{--btn-color:var(--fallback-p)}}.btn-secondary{--tw-text-opacity:1;color:var(--fallback-sc,oklch(var(--sc)/var(--tw-text-opacity)));outline-color:var(--fallback-s,oklch(var(--s)/1))}@supports (color:oklch(0% 0 0)){.btn-secondary{--btn-color:var(--s)}}@supports not (color:oklch(0% 0 0)){.btn-secondary{--btn-color:var(--fallback-s)}}.btn-accent{--tw-text-opacity:1;color:var(--fallback-ac,oklch(var(--ac)/var(--tw-text-opacity)));outline-color:var(--fallback-a,oklch(var(--a)/1))}@supports (color:oklch(0% 0 0)){.btn-accent{--btn-color:var(--a)}}@supports not (color:oklch(0% 0 0)){.btn-accent{--btn-color:var(--fallback-a)}}.btn-neutral{--tw-text-opacity:1;color:var(--fallback-nc,oklch(var(--nc)/var(--tw-text-opacity)));outline-color:var(--fallback-n,oklch(var(--n)/1))}@supports (color:oklch(0% 0 0)){.btn-neutral{--btn-color:var(--n)}}@supports not (color:oklch(0% 0 0)){.btn-neutral{--btn-color:var(--fallback-n)}}.btn-info{--tw-text-opacity:1;color:var(--fallback-inc,oklch(var(--inc)/var(--tw-text-opacity)));outline-color:var(--fallback-in,oklch(var(--in)/1))}@supports (color:oklch(0% 0 0)){.btn-info{--btn-color:var(--in)}}@supports not (color:oklch(0% 0 0)){.btn-info{--btn-color:var(--fallback-in)}}.btn-success{--tw-text-opacity:1;color:var(--fallback-suc,oklch(var(--suc)/var(--tw-text-opacity)));outline-color:var(--fallback-su,oklch(var(--su)/1))}@supports (color:oklch(0% 0 0)){.btn-success{--btn-color:var(--su)}}@supports not (color:oklch(0% 0 0)){.btn-success{--btn-color:var(--fallback-su)}}.btn-warning{--tw-text-opacity:1;color:var(--fallback-wac,oklch(var(--wac)/var(--tw-text-opacity)));outline-color:var(--fallback-wa,oklch(var(--wa)/1))}@supports (color:oklch(0% 0 0)){.btn-warning{--btn-color:var(--wa)}}@supports not (color:oklch(0% 0 0)){.btn-warning{--btn-color:var(--fallback-wa)}}.btn-error{--tw-text-opacity:1;color:var(--fallback-erc,oklch(var(--erc)/var(--tw-text-opacity)));outline-color:var(--fallback-er,oklch(var(--er)/1))}@supports (color:oklch(0% 0 0)){.btn-error{--btn-color:var(--er)}}@supports not (color:oklch(0% 0 0)){.btn-error{--btn-color:var(--fallback-er)}}.btn.glass{--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow);outline-color:currentColor}@media (hover:hover){.btn.glass:hover{--glass-opacity:25%;--glass-border-opacity:15%}}.btn.glass.btn-active{--glass-opacity:25%;--glass-border-opacity:15%}.btn-ghost{border-width:1px;border-color:transparent;background-color:transparent;color:currentColor;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;box-shadow:var(--tw-ring-offset-shadow,0 0 
#0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow);outline-color:currentColor}@media (hover:hover){.btn-ghost:hover{border-color:transparent}@supports (color:oklch(0% 0 0)){.btn-ghost:hover{background-color:var(--fallback-bc,oklch(var(--bc)/.2))}}}.btn-ghost.btn-active{border-color:transparent;background-color:var(--fallback-bc,oklch(var(--bc)/.2))}.btn-link{border-color:transparent;background-color:transparent;--tw-text-opacity:1;color:var(--fallback-p,oklch(var(--p)/var(--tw-text-opacity)));text-decoration-line:underline;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow);outline-color:currentColor}@media (hover:hover){.btn-link:hover{border-color:transparent;background-color:transparent;text-decoration-line:underline}}.btn-link.btn-active{border-color:transparent;background-color:transparent;text-decoration-line:underline}.btn-outline{border-color:currentColor;background-color:transparent;--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow)}@media (hover:hover){.btn-outline:hover{--tw-border-opacity:1;border-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-b1,oklch(var(--b1)/var(--tw-text-opacity)))}}.btn-outline.btn-active{--tw-border-opacity:1;border-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-b1,oklch(var(--b1)/var(--tw-text-opacity)))}.btn-outline.btn-primary{--tw-text-opacity:1;color:var(--fallback-p,oklch(var(--p)/var(--tw-text-opacity)))}@media (hover:hover){.btn-outline.btn-primary:hover{--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-primary:hover{background-color:color-mix(in oklab,var(--fallback-p,oklch(var(--p)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-p,oklch(var(--p)/1)) 90%,#000)}}}.btn-outline.btn-primary.btn-active{--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-primary.btn-active{background-color:color-mix(in oklab,var(--fallback-p,oklch(var(--p)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-p,oklch(var(--p)/1)) 90%,#000)}}.btn-outline.btn-secondary{--tw-text-opacity:1;color:var(--fallback-s,oklch(var(--s)/var(--tw-text-opacity)))}@media (hover:hover){.btn-outline.btn-secondary:hover{--tw-text-opacity:1;color:var(--fallback-sc,oklch(var(--sc)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-secondary:hover{background-color:color-mix(in oklab,var(--fallback-s,oklch(var(--s)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-s,oklch(var(--s)/1)) 90%,#000)}}}.btn-outline.btn-secondary.btn-active{--tw-text-opacity:1;color:var(--fallback-sc,oklch(var(--sc)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-secondary.btn-active{background-color:color-mix(in oklab,var(--fallback-s,oklch(var(--s)/1)) 90%,#000);border-color:color-mix(in 
oklab,var(--fallback-s,oklch(var(--s)/1)) 90%,#000)}}.btn-outline.btn-accent{--tw-text-opacity:1;color:var(--fallback-a,oklch(var(--a)/var(--tw-text-opacity)))}@media (hover:hover){.btn-outline.btn-accent:hover{--tw-text-opacity:1;color:var(--fallback-ac,oklch(var(--ac)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-accent:hover{background-color:color-mix(in oklab,var(--fallback-a,oklch(var(--a)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-a,oklch(var(--a)/1)) 90%,#000)}}}.btn-outline.btn-accent.btn-active{--tw-text-opacity:1;color:var(--fallback-ac,oklch(var(--ac)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-accent.btn-active{background-color:color-mix(in oklab,var(--fallback-a,oklch(var(--a)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-a,oklch(var(--a)/1)) 90%,#000)}}.btn-outline.btn-success{--tw-text-opacity:1;color:var(--fallback-su,oklch(var(--su)/var(--tw-text-opacity)))}@media (hover:hover){.btn-outline.btn-success:hover{--tw-text-opacity:1;color:var(--fallback-suc,oklch(var(--suc)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-success:hover{background-color:color-mix(in oklab,var(--fallback-su,oklch(var(--su)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-su,oklch(var(--su)/1)) 90%,#000)}}}.btn-outline.btn-success.btn-active{--tw-text-opacity:1;color:var(--fallback-suc,oklch(var(--suc)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-success.btn-active{background-color:color-mix(in oklab,var(--fallback-su,oklch(var(--su)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-su,oklch(var(--su)/1)) 90%,#000)}}.btn-outline.btn-info{--tw-text-opacity:1;color:var(--fallback-in,oklch(var(--in)/var(--tw-text-opacity)))}@media (hover:hover){.btn-outline.btn-info:hover{--tw-text-opacity:1;color:var(--fallback-inc,oklch(var(--inc)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-info:hover{background-color:color-mix(in oklab,var(--fallback-in,oklch(var(--in)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-in,oklch(var(--in)/1)) 90%,#000)}}}.btn-outline.btn-info.btn-active{--tw-text-opacity:1;color:var(--fallback-inc,oklch(var(--inc)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-info.btn-active{background-color:color-mix(in oklab,var(--fallback-in,oklch(var(--in)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-in,oklch(var(--in)/1)) 90%,#000)}}.btn-outline.btn-warning{--tw-text-opacity:1;color:var(--fallback-wa,oklch(var(--wa)/var(--tw-text-opacity)))}@media (hover:hover){.btn-outline.btn-warning:hover{--tw-text-opacity:1;color:var(--fallback-wac,oklch(var(--wac)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-warning:hover{background-color:color-mix(in oklab,var(--fallback-wa,oklch(var(--wa)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-wa,oklch(var(--wa)/1)) 90%,#000)}}}.btn-outline.btn-warning.btn-active{--tw-text-opacity:1;color:var(--fallback-wac,oklch(var(--wac)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-warning.btn-active{background-color:color-mix(in oklab,var(--fallback-wa,oklch(var(--wa)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-wa,oklch(var(--wa)/1)) 
90%,#000)}}.btn-outline.btn-error{--tw-text-opacity:1;color:var(--fallback-er,oklch(var(--er)/var(--tw-text-opacity)))}@media (hover:hover){.btn-outline.btn-error:hover{--tw-text-opacity:1;color:var(--fallback-erc,oklch(var(--erc)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-error:hover{background-color:color-mix(in oklab,var(--fallback-er,oklch(var(--er)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-er,oklch(var(--er)/1)) 90%,#000)}}}.btn-outline.btn-error.btn-active{--tw-text-opacity:1;color:var(--fallback-erc,oklch(var(--erc)/var(--tw-text-opacity)))}@supports (color:color-mix(in oklab,black,black)){.btn-outline.btn-error.btn-active{background-color:color-mix(in oklab,var(--fallback-er,oklch(var(--er)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-er,oklch(var(--er)/1)) 90%,#000)}}.btn.btn-disabled,.btn:disabled,.btn[disabled]{--tw-border-opacity:0;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));--tw-bg-opacity:0.2;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));--tw-text-opacity:0.2}@media (hover:hover){.btn-disabled:hover,.btn:disabled:hover,.btn[disabled]:hover{--tw-border-opacity:0;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));--tw-bg-opacity:0.2;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));--tw-text-opacity:0.2}}.btn:is(input[type=checkbox]:checked),.btn:is(input[type=radio]:checked){--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)))}@media (hover:hover){@supports (color:color-mix(in oklab,black,black)){.btn:is(input[type=checkbox]:checked):hover,.btn:is(input[type=radio]:checked):hover{background-color:color-mix(in oklab,var(--fallback-p,oklch(var(--p)/1)) 90%,#000);border-color:color-mix(in oklab,var(--fallback-p,oklch(var(--p)/1)) 90%,#000)}}}.btn:is(input[type=checkbox]:checked):focus-visible,.btn:is(input[type=radio]:checked):focus-visible{outline-color:var(--fallback-p,oklch(var(--p)/1))}@keyframes button-pop{0%{transform:scale(var(--btn-focus-scale,.98))}40%{transform:scale(1.02)}100%{transform:scale(1)}}.card{border-radius:var(--rounded-box,1rem)}.card :where(figure:first-child){overflow:hidden;border-start-start-radius:inherit;border-start-end-radius:inherit;border-end-start-radius:unset;border-end-end-radius:unset}.card :where(figure:last-child){overflow:hidden;border-start-start-radius:unset;border-start-end-radius:unset;border-end-start-radius:inherit;border-end-end-radius:inherit}.card:focus-visible{outline:2px solid currentColor;outline-offset:2px}.card.bordered{border-width:1px;--tw-border-opacity:1;border-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-border-opacity)))}.card-bordered{border-width:1px;--tw-border-opacity:1;border-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-border-opacity)))}.card.compact 
.card-body{padding:1rem;font-size:.875rem;line-height:1.25rem}.card-body{padding:var(--padding-card,2rem);display:flex;flex-direction:column;gap:.5rem}.card-title{display:flex;align-items:center;gap:.5rem;font-size:1.25rem;line-height:1.75rem;font-weight:600}.card.image-full:before{z-index:10;border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));opacity:.75}.card.image-full>.card-body{z-index:20;--tw-text-opacity:1;color:var(--fallback-nc,oklch(var(--nc)/var(--tw-text-opacity)))}.card.image-full :where(figure){overflow:hidden;border-radius:inherit}.carousel{-ms-overflow-style:none;scrollbar-width:none}.carousel::-webkit-scrollbar{display:none}.chat-bubble{border-radius:var(--rounded-box,1rem);min-height:2.75rem;min-width:2.75rem}.chat-bubble{--tw-bg-opacity:1;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-nc,oklch(var(--nc)/var(--tw-text-opacity)))}.chat-bubble-primary{--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)))}.chat-bubble-secondary{--tw-bg-opacity:1;background-color:var(--fallback-s,oklch(var(--s)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-sc,oklch(var(--sc)/var(--tw-text-opacity)))}.chat-bubble-accent{--tw-bg-opacity:1;background-color:var(--fallback-a,oklch(var(--a)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-ac,oklch(var(--ac)/var(--tw-text-opacity)))}.chat-bubble-info{--tw-bg-opacity:1;background-color:var(--fallback-in,oklch(var(--in)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-inc,oklch(var(--inc)/var(--tw-text-opacity)))}.chat-bubble-success{--tw-bg-opacity:1;background-color:var(--fallback-su,oklch(var(--su)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-suc,oklch(var(--suc)/var(--tw-text-opacity)))}.chat-bubble-warning{--tw-bg-opacity:1;background-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-wac,oklch(var(--wac)/var(--tw-text-opacity)))}.chat-bubble-error{--tw-bg-opacity:1;background-color:var(--fallback-er,oklch(var(--er)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-erc,oklch(var(--erc)/var(--tw-text-opacity)))}.chat-start .chat-bubble{border-end-start-radius:0}.chat-start .chat-bubble:before{inset-inline-start:-.749rem}.chat-end .chat-bubble{border-end-end-radius:0}.chat-end .chat-bubble:before{inset-inline-start:99.9%}.checkbox{--chkbg:var(--fallback-bc,oklch(var(--bc)/1));--chkfg:var(--fallback-b1,oklch(var(--b1)/1));height:1.5rem;width:1.5rem;cursor:pointer;appearance:none;border-radius:var(--rounded-btn,.5rem);border-width:1px;border-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-border-opacity)));--tw-border-opacity:0.2}.checkbox:focus{box-shadow:none}.checkbox:focus-visible{outline-style:solid;outline-width:2px;outline-offset:2px;outline-color:var(--fallback-bc,oklch(var(--bc)/1))}.checkbox:disabled{border-width:0}.checkbox:checked,.checkbox[aria-checked=true]{background-repeat:no-repeat;animation:checkmark var(--animation-input,.2s) ease-out;background-color:var(--chkbg);background-image:linear-gradient(-45deg,transparent 65%,var(--chkbg) 65.99%),linear-gradient(45deg,transparent 75%,var(--chkbg) 75.99%),linear-gradient(-45deg,var(--chkbg) 40%,transparent 40.99%),linear-gradient(45deg,var(--chkbg) 30%,var(--chkfg) 30.99%,var(--chkfg) 
40%,transparent 40.99%),linear-gradient(-45deg,var(--chkfg) 50%,var(--chkbg) 50.99%)}.checkbox:indeterminate{--tw-bg-opacity:1;background-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-bg-opacity)));background-repeat:no-repeat;animation:checkmark var(--animation-input,.2s) ease-out;background-image:linear-gradient(90deg,transparent 80%,var(--chkbg) 80%),linear-gradient(-90deg,transparent 80%,var(--chkbg) 80%),linear-gradient(0deg,var(--chkbg) 43%,var(--chkfg) 43%,var(--chkfg) 57%,var(--chkbg) 57%)}.checkbox-primary{--chkbg:var(--fallback-p,oklch(var(--p)/1));--chkfg:var(--fallback-pc,oklch(var(--pc)/1));--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)))}@media(hover:hover){.checkbox-primary:hover{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)))}}.checkbox-primary:focus-visible{outline-color:var(--fallback-p,oklch(var(--p)/1))}.checkbox-primary:checked,.checkbox-primary[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)))}.checkbox-secondary{--chkbg:var(--fallback-s,oklch(var(--s)/1));--chkfg:var(--fallback-sc,oklch(var(--sc)/1));--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)))}@media(hover:hover){.checkbox-secondary:hover{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)))}}.checkbox-secondary:focus-visible{outline-color:var(--fallback-s,oklch(var(--s)/1))}.checkbox-secondary:checked,.checkbox-secondary[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-s,oklch(var(--s)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-sc,oklch(var(--sc)/var(--tw-text-opacity)))}.checkbox-accent{--chkbg:var(--fallback-a,oklch(var(--a)/1));--chkfg:var(--fallback-ac,oklch(var(--ac)/1));--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)))}@media(hover:hover){.checkbox-accent:hover{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)))}}.checkbox-accent:focus-visible{outline-color:var(--fallback-a,oklch(var(--a)/1))}.checkbox-accent:checked,.checkbox-accent[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-a,oklch(var(--a)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-ac,oklch(var(--ac)/var(--tw-text-opacity)))}.checkbox-success{--chkbg:var(--fallback-su,oklch(var(--su)/1));--chkfg:var(--fallback-suc,oklch(var(--suc)/1));--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)))}@media(hover:hover){.checkbox-success:hover{--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)))}}.checkbox-success:focus-visible{outline-color:var(--fallback-su,oklch(var(--su)/1))}.checkbox-success:checked,.checkbox-success[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-su,oklch(var(--su)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-suc,oklch(var(--suc)/var(--tw-text-opacity)))}.checkbox
-warning{--chkbg:var(--fallback-wa,oklch(var(--wa)/1));--chkfg:var(--fallback-wac,oklch(var(--wac)/1));--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)))}@media(hover:hover){.checkbox-warning:hover{--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)))}}.checkbox-warning:focus-visible{outline-color:var(--fallback-wa,oklch(var(--wa)/1))}.checkbox-warning:checked,.checkbox-warning[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-wac,oklch(var(--wac)/var(--tw-text-opacity)))}.checkbox-info{--chkbg:var(--fallback-in,oklch(var(--in)/1));--chkfg:var(--fallback-inc,oklch(var(--inc)/1));--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)))}@media(hover:hover){.checkbox-info:hover{--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)))}}.checkbox-info:focus-visible{outline-color:var(--fallback-in,oklch(var(--in)/1))}.checkbox-info:checked,.checkbox-info[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-in,oklch(var(--in)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-inc,oklch(var(--inc)/var(--tw-text-opacity)))}.checkbox-error{--chkbg:var(--fallback-er,oklch(var(--er)/1));--chkfg:var(--fallback-erc,oklch(var(--erc)/1));--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)))}@media(hover:hover){.checkbox-error:hover{--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)))}}.checkbox-error:focus-visible{outline-color:var(--fallback-er,oklch(var(--er)/1))}.checkbox-error:checked,.checkbox-error[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-er,oklch(var(--er)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-erc,oklch(var(--erc)/var(--tw-text-opacity)))}.checkbox:disabled{cursor:not-allowed;border-color:transparent;--tw-bg-opacity:1;background-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-bg-opacity)));opacity:.2}@keyframes checkmark{0%{background-position-y:5px}50%{background-position-y:-2px}100%{background-position-y:0}}.checkbox-mark{display:none}.collapse{width:100%;border-radius:var(--rounded-box,1rem)}details.collapse{width:100%}details.collapse summary{position:relative;display:block}details.collapse summary::-webkit-details-marker{display:none}.collapse:focus-visible{outline-style:solid;outline-width:2px;outline-offset:2px;outline-color:var(--fallback-bc,oklch(var(--bc)/1))}details.collapse summary{outline:2px solid transparent;outline-offset:2px}.collapse:has(.collapse-title:focus-visible),.collapse:has(>input[type=checkbox]:focus-visible),.collapse:has(>input[type=radio]:focus-visible){outline-style:solid;outline-width:2px;outline-offset:2px;outline-color:var(--fallback-bc,oklch(var(--bc)/1))}.collapse-arrow>.collapse-title:after{position:absolute;display:block;height:.5rem;width:.5rem;--tw-translate-y:-100%;--tw-rotate:45deg;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) 
scaleY(var(--tw-scale-y));transition-property:all;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:150ms;transition-timing-function:cubic-bezier(0,0,.2,1);transition-duration:.2s;top:1.9rem;inset-inline-end:1.4rem;content:"";transform-origin:75% 75%;box-shadow:2px 2px;pointer-events:none}.collapse-plus>.collapse-title:after{position:absolute;display:block;height:.5rem;width:.5rem;transition-property:all;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.3s;transition-timing-function:cubic-bezier(0,0,.2,1);top:.9rem;inset-inline-end:1.4rem;content:"+";pointer-events:none}.collapse:not(.collapse-open):not(.collapse-close)>.collapse-title,.collapse:not(.collapse-open):not(.collapse-close)>input[type=checkbox],.collapse:not(.collapse-open):not(.collapse-close)>input[type=radio]:not(:checked){cursor:pointer}.collapse:focus:not(.collapse-open):not(.collapse-close):not(.collapse[open])>.collapse-title{cursor:unset}.collapse-title{position:relative}:where(.collapse>input[type=checkbox]),:where(.collapse>input[type=radio]){z-index:1}.collapse-title,:where(.collapse>input[type=checkbox]),:where(.collapse>input[type=radio]){width:100%;padding:1rem;padding-inline-end:3rem;min-height:3.75rem;transition:background-color .2s ease-out}.collapse-content{padding-left:1rem;padding-right:1rem;cursor:unset;transition:padding .2s ease-out,background-color .2s ease-out}.collapse-open>:where(.collapse-content),.collapse:focus:not(.collapse-close)>:where(.collapse-content),.collapse:not(.collapse-close)>:where(input[type=checkbox]:checked~.collapse-content),.collapse:not(.collapse-close)>:where(input[type=radio]:checked~.collapse-content),.collapse[open]>:where(.collapse-content){padding-bottom:1rem;transition:padding .2s ease-out,background-color .2s ease-out}.collapse-arrow:focus:not(.collapse-close)>.collapse-title:after,.collapse-arrow:not(.collapse-close)>input[type=checkbox]:checked~.collapse-title:after,.collapse-arrow:not(.collapse-close)>input[type=radio]:checked~.collapse-title:after,.collapse-open.collapse-arrow>.collapse-title:after,.collapse[open].collapse-arrow>.collapse-title:after{--tw-translate-y:-50%;--tw-rotate:225deg;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.collapse-open.collapse-plus>.collapse-title:after,.collapse-plus:focus:not(.collapse-close)>.collapse-title:after,.collapse-plus:not(.collapse-close)>input[type=checkbox]:checked~.collapse-title:after,.collapse-plus:not(.collapse-close)>input[type=radio]:checked~.collapse-title:after,.collapse[open].collapse-plus>.collapse-title:after{content:"−"}.countdown>:before{text-align:center;transition:all 1s cubic-bezier(1,0,0,1)}.diff-item-1:after{border-radius:9999px;border-width:2px;--tw-border-opacity:1;border-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-border-opacity)));background-color:var(--fallback-b1,oklch(var(--b1)/.5));--tw-shadow:0 1px 2px 0 rgb(0 0 0 / 0.05);--tw-shadow-colored:0 1px 2px 0 var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow);outline-style:solid;outline-offset:-3px;outline-color:var(--fallback-bc,oklch(var(--bc)/.05));--tw-backdrop-blur:blur(8px);backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) 
var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);translate:50% -50%}.diff-item-2{border-right-width:2px;--tw-border-opacity:1;border-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-border-opacity)))}.divider{margin-top:1rem;margin-bottom:1rem;height:1rem;white-space:nowrap}.divider:after,.divider:before{background-color:var(--fallback-bc,oklch(var(--bc)/.1))}.divider:not(:empty){gap:1rem}.divider-neutral:after,.divider-neutral:before{--tw-bg-opacity:1;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)))}.divider-primary:after,.divider-primary:before{--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)))}.divider-secondary:after,.divider-secondary:before{--tw-bg-opacity:1;background-color:var(--fallback-s,oklch(var(--s)/var(--tw-bg-opacity)))}.divider-accent:after,.divider-accent:before{--tw-bg-opacity:1;background-color:var(--fallback-a,oklch(var(--a)/var(--tw-bg-opacity)))}.divider-success:after,.divider-success:before{--tw-bg-opacity:1;background-color:var(--fallback-su,oklch(var(--su)/var(--tw-bg-opacity)))}.divider-warning:after,.divider-warning:before{--tw-bg-opacity:1;background-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-bg-opacity)))}.divider-info:after,.divider-info:before{--tw-bg-opacity:1;background-color:var(--fallback-in,oklch(var(--in)/var(--tw-bg-opacity)))}.divider-error:after,.divider-error:before{--tw-bg-opacity:1;background-color:var(--fallback-er,oklch(var(--er)/var(--tw-bg-opacity)))}.drawer{width:100%}.drawer-side>.drawer-overlay{cursor:pointer;background-color:transparent;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.2s;transition-timing-function:cubic-bezier(0,0,.2,1)}.drawer-toggle:checked~.drawer-side>.drawer-overlay{background-color:#0006}.drawer-toggle:focus-visible~.drawer-content label.drawer-button{outline-style:solid;outline-width:2px;outline-offset:2px}.dropdown:is(:not(details)) .dropdown-content{transform-origin:top;--tw-scale-x:.95;--tw-scale-y:.95;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.2s;transition-timing-function:cubic-bezier(0,0,.2,1)}.dropdown-bottom .dropdown-content{transform-origin:top}.dropdown-top .dropdown-content{transform-origin:bottom}.dropdown-left .dropdown-content{transform-origin:right}.dropdown-right .dropdown-content{transform-origin:left}.dropdown.dropdown-open .dropdown-content,.dropdown:focus .dropdown-content,.dropdown:focus-within .dropdown-content{--tw-scale-x:1;--tw-scale-y:1;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}@media (hover:hover){.dropdown.dropdown-hover:hover .dropdown-content{--tw-scale-x:1;--tw-scale-y:1;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) 
scaleY(var(--tw-scale-y))}}.file-input{overflow:hidden;border-radius:var(--rounded-btn,.5rem);border-width:1px;border-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-border-opacity)));--tw-border-opacity:0;--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)));font-size:1rem;line-height:1.5rem}.file-input::file-selector-button{border-style:solid;--tw-border-opacity:1;border-color:var(--fallback-n,oklch(var(--n)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));font-weight:600;text-transform:uppercase;--tw-text-opacity:1;color:var(--fallback-nc,oklch(var(--nc)/var(--tw-text-opacity)));text-decoration-line:none;border-width:var(--border-btn,1px);animation:button-pop var(--animation-btn,.25s) ease-out}.file-input-bordered{--tw-border-opacity:0.2}.file-input:focus{outline-style:solid;outline-width:2px;outline-offset:2px;outline-color:var(--fallback-bc,oklch(var(--bc)/.2))}.file-input-ghost{--tw-bg-opacity:0.05}.file-input-ghost:focus{--tw-bg-opacity:1;--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));box-shadow:none}.file-input-ghost::file-selector-button{border-width:1px;border-color:transparent;background-color:transparent;color:currentColor}.file-input-primary{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)))}.file-input-primary:focus{outline-color:var(--fallback-p,oklch(var(--p)/1))}.file-input-primary::file-selector-button{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)))}.file-input-secondary{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)))}.file-input-secondary:focus{outline-color:var(--fallback-s,oklch(var(--s)/1))}.file-input-secondary::file-selector-button{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-s,oklch(var(--s)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-sc,oklch(var(--sc)/var(--tw-text-opacity)))}.file-input-accent{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)))}.file-input-accent:focus{outline-color:var(--fallback-a,oklch(var(--a)/1))}.file-input-accent::file-selector-button{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-a,oklch(var(--a)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-ac,oklch(var(--ac)/var(--tw-text-opacity)))}.file-input-info{--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)))}.file-input-info:focus{outline-color:var(--fallback-in,oklch(var(--in)/1))}.file-input-info::file-selector-button{--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-in,oklch(var(--in)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-inc,oklch(var(--inc)/var(--tw-text-opacity)))}.file-input-success{--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)))}.file-input-success:focus{outline-color:var(--fallback-su,oklch(var(--su)/1))}.file-input-success::file-selector-button{--tw-border-o
pacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-su,oklch(var(--su)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-suc,oklch(var(--suc)/var(--tw-text-opacity)))}.file-input-warning{--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)))}.file-input-warning:focus{outline-color:var(--fallback-wa,oklch(var(--wa)/1))}.file-input-warning::file-selector-button{--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-wac,oklch(var(--wac)/var(--tw-text-opacity)))}.file-input-error{--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)))}.file-input-error:focus{outline-color:var(--fallback-er,oklch(var(--er)/1))}.file-input-error::file-selector-button{--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-er,oklch(var(--er)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-erc,oklch(var(--erc)/var(--tw-text-opacity)))}.file-input-disabled,.file-input[disabled]{cursor:not-allowed;--tw-border-opacity:1;border-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-bg-opacity)));--tw-text-opacity:0.2}.file-input-disabled::placeholder,.file-input[disabled]::placeholder{color:var(--fallback-bc,oklch(var(--bc)/var(--tw-placeholder-opacity)));--tw-placeholder-opacity:0.2}.file-input-disabled::file-selector-button,.file-input[disabled]::file-selector-button{--tw-border-opacity:0;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));--tw-bg-opacity:0.2;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));--tw-text-opacity:0.2}.footer{column-gap:1rem;row-gap:2.5rem;font-size:.875rem;line-height:1.25rem}.footer>*{gap:.5rem}.footer-title{margin-bottom:.5rem;font-weight:700;text-transform:uppercase;opacity:.6}.label{padding-left:.25rem;padding-right:.25rem;padding-top:.5rem;padding-bottom:.5rem}.label-text{font-size:.875rem;line-height:1.25rem;--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)))}.label-text-alt{font-size:.75rem;line-height:1rem;--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)))}@media(hover:hover){.label a:hover{--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)))}}.hero-overlay{background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));--tw-bg-opacity:0.5}.hero-content{max-width:80rem;gap:1rem;padding:1rem}.input{border-radius:var(--rounded-btn,.5rem);border-width:1px;border-color:transparent;--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)));font-size:1rem;line-height:1.5rem}.input input{--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)));background-color:transparent}.input input:focus{outline:2px solid 
transparent;outline-offset:2px}.input[list]::-webkit-calendar-picker-indicator{line-height:1em}.input-bordered{border-color:var(--fallback-bc,oklch(var(--bc)/.2))}.input:focus,.input:focus-within{box-shadow:none;border-color:var(--fallback-bc,oklch(var(--bc)/.2));outline-style:solid;outline-width:2px;outline-offset:2px;outline-color:var(--fallback-bc,oklch(var(--bc)/.2))}.input-ghost{--tw-bg-opacity:0.05}.input-ghost:focus,.input-ghost:focus-within{--tw-bg-opacity:1;--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));box-shadow:none}.input-primary{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)))}.input-primary:focus,.input-primary:focus-within{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)));outline-color:var(--fallback-p,oklch(var(--p)/1))}.input-secondary{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)))}.input-secondary:focus,.input-secondary:focus-within{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)));outline-color:var(--fallback-s,oklch(var(--s)/1))}.input-accent{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)))}.input-accent:focus,.input-accent:focus-within{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)));outline-color:var(--fallback-a,oklch(var(--a)/1))}.input-info{--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)))}.input-info:focus,.input-info:focus-within{--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)));outline-color:var(--fallback-in,oklch(var(--in)/1))}.input-success{--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)))}.input-success:focus,.input-success:focus-within{--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)));outline-color:var(--fallback-su,oklch(var(--su)/1))}.input-warning{--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)))}.input-warning:focus,.input-warning:focus-within{--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)));outline-color:var(--fallback-wa,oklch(var(--wa)/1))}.input-error{--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)))}.input-error:focus,.input-error:focus-within{--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)));outline-color:var(--fallback-er,oklch(var(--er)/1))}.input-disabled,.input:disabled,.input:has(>input[disabled]),.input[disabled]{cursor:not-allowed;--tw-border-opacity:1;border-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-bg-opacity)));color:var(--fallback-bc,oklch(var(--bc)/.4))}.input-disabled::placeholder,.input:disabled::placeholder,.input:has(>input[disabled])::placeholder,.input[disabled]::placeholder{color:var(--fallback-bc,oklch(var(--bc)/var(--tw-placeholder-opacity)));--tw-placeholder-opacity:0.2}.input:has(>input[disabled])>input[disabled]{cursor:not-allowed}.input::-webkit-date-and-time-value{text-align:inherit}.join{border-radius:var(--rounded-btn,.5rem)}.join>:where(:not(:first-child)){margin-top:0;margin-bottom:0;margin-inline-start:-1px}.join>:where(:not(:first-child)):is(.btn){mar
gin-inline-start:calc(var(--border-btn) * -1)}.join-item:focus{isolation:isolate}.kbd{border-radius:var(--rounded-btn,.5rem);border-width:1px;border-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-border-opacity)));--tw-border-opacity:0.2;--tw-bg-opacity:1;background-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-bg-opacity)));padding-left:.5rem;padding-right:.5rem;--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));border-bottom-width:2px;min-height:2.2em;min-width:2.2em}.link-primary{--tw-text-opacity:1;color:var(--fallback-p,oklch(var(--p)/var(--tw-text-opacity)))}@supports(color:color-mix(in oklab,black,black)){@media(hover:hover){.link-primary:hover{color:color-mix(in oklab,var(--fallback-p,oklch(var(--p)/1)) 80%,#000)}}}.link-secondary{--tw-text-opacity:1;color:var(--fallback-s,oklch(var(--s)/var(--tw-text-opacity)))}@supports(color:color-mix(in oklab,black,black)){@media(hover:hover){.link-secondary:hover{color:color-mix(in oklab,var(--fallback-s,oklch(var(--s)/1)) 80%,#000)}}}.link-accent{--tw-text-opacity:1;color:var(--fallback-a,oklch(var(--a)/var(--tw-text-opacity)))}@supports(color:color-mix(in oklab,black,black)){@media(hover:hover){.link-accent:hover{color:color-mix(in oklab,var(--fallback-a,oklch(var(--a)/1)) 80%,#000)}}}.link-neutral{--tw-text-opacity:1;color:var(--fallback-n,oklch(var(--n)/var(--tw-text-opacity)))}@supports(color:color-mix(in oklab,black,black)){@media(hover:hover){.link-neutral:hover{color:color-mix(in oklab,var(--fallback-n,oklch(var(--n)/1)) 80%,#000)}}}.link-success{--tw-text-opacity:1;color:var(--fallback-su,oklch(var(--su)/var(--tw-text-opacity)))}@supports(color:color-mix(in oklab,black,black)){@media(hover:hover){.link-success:hover{color:color-mix(in oklab,var(--fallback-su,oklch(var(--su)/1)) 80%,#000)}}}.link-info{--tw-text-opacity:1;color:var(--fallback-in,oklch(var(--in)/var(--tw-text-opacity)))}@supports(color:color-mix(in oklab,black,black)){@media(hover:hover){.link-info:hover{color:color-mix(in oklab,var(--fallback-in,oklch(var(--in)/1)) 80%,#000)}}}.link-warning{--tw-text-opacity:1;color:var(--fallback-wa,oklch(var(--wa)/var(--tw-text-opacity)))}@supports(color:color-mix(in oklab,black,black)){@media(hover:hover){.link-warning:hover{color:color-mix(in oklab,var(--fallback-wa,oklch(var(--wa)/1)) 80%,#000)}}}.link-error{--tw-text-opacity:1;color:var(--fallback-er,oklch(var(--er)/var(--tw-text-opacity)))}@supports(color:color-mix(in oklab,black,black)){@media(hover:hover){.link-error:hover{color:color-mix(in oklab,var(--fallback-er,oklch(var(--er)/1)) 80%,#000)}}}.link:focus{outline:2px solid transparent;outline-offset:2px}.link:focus-visible{outline:2px solid currentColor;outline-offset:2px}.loading{pointer-events:none;display:inline-block;aspect-ratio:1/1;width:1.5rem;background-color:currentColor;mask-size:100%;mask-repeat:no-repeat;mask-position:center;mask-image:url("data:image/svg+xml,%3Csvg width='24' height='24' stroke='%23000' viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cstyle%3E.spinner_V8m1%7Btransform-origin:center;animation:spinner_zKoa 2s linear infinite%7D.spinner_V8m1 circle%7Bstroke-linecap:round;animation:spinner_YpZS 1.5s ease-out infinite%7D%40keyframes spinner_zKoa%7B100%25%7Btransform:rotate(360deg)%7D%7D%40keyframes spinner_YpZS%7B0%25%7Bstroke-dasharray:0 150;stroke-dashoffset:0%7D47.5%25%7Bstroke-dasharray:42 150;stroke-dashoffset:-16%7D95%25%2C100%25%7Bstroke-dasharray:42 150;stroke-dashoffset:-59%7D%7D%3C%2Fstyle%3E%3Cg class='spinner_V8m1'%3E%3Ccircle cx='12' 
cy='12' r='9.5' fill='none' stroke-width='3'%3E%3C%2Fcircle%3E%3C%2Fg%3E%3C%2Fsvg%3E")}.loading-spinner{mask-image:url("data:image/svg+xml,%3Csvg width='24' height='24' stroke='%23000' viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cstyle%3E.spinner_V8m1%7Btransform-origin:center;animation:spinner_zKoa 2s linear infinite%7D.spinner_V8m1 circle%7Bstroke-linecap:round;animation:spinner_YpZS 1.5s ease-out infinite%7D%40keyframes spinner_zKoa%7B100%25%7Btransform:rotate(360deg)%7D%7D%40keyframes spinner_YpZS%7B0%25%7Bstroke-dasharray:0 150;stroke-dashoffset:0%7D47.5%25%7Bstroke-dasharray:42 150;stroke-dashoffset:-16%7D95%25%2C100%25%7Bstroke-dasharray:42 150;stroke-dashoffset:-59%7D%7D%3C%2Fstyle%3E%3Cg class='spinner_V8m1'%3E%3Ccircle cx='12' cy='12' r='9.5' fill='none' stroke-width='3'%3E%3C%2Fcircle%3E%3C%2Fg%3E%3C%2Fsvg%3E")}.loading-dots{mask-image:url("data:image/svg+xml,%3Csvg width='24' height='24' viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cstyle%3E.spinner_qM83%7Banimation:spinner_8HQG 1.05s infinite%7D.spinner_oXPr%7Banimation-delay:.1s%7D.spinner_ZTLf%7Banimation-delay:.2s%7D@keyframes spinner_8HQG%7B0%25,57.14%25%7Banimation-timing-function:cubic-bezier(0.33,.66,.66,1);transform:translate(0)%7D28.57%25%7Banimation-timing-function:cubic-bezier(0.33,0,.66,.33);transform:translateY(-6px)%7D100%25%7Btransform:translate(0)%7D%7D%3C/style%3E%3Ccircle class='spinner_qM83' cx='4' cy='12' r='3'/%3E%3Ccircle class='spinner_qM83 spinner_oXPr' cx='12' cy='12' r='3'/%3E%3Ccircle class='spinner_qM83 spinner_ZTLf' cx='20' cy='12' r='3'/%3E%3C/svg%3E")}.loading-ring{mask-image:url("data:image/svg+xml,%3Csvg width='44' height='44' viewBox='0 0 44 44' xmlns='http://www.w3.org/2000/svg' stroke='%23fff'%3E%3Cg fill='none' fill-rule='evenodd' stroke-width='2'%3E%3Ccircle cx='22' cy='22' r='1'%3E%3Canimate attributeName='r' begin='0s' dur='1.8s' values='1; 20' calcMode='spline' keyTimes='0; 1' keySplines='0.165, 0.84, 0.44, 1' repeatCount='indefinite' /%3E%3Canimate attributeName='stroke-opacity' begin='0s' dur='1.8s' values='1; 0' calcMode='spline' keyTimes='0; 1' keySplines='0.3, 0.61, 0.355, 1' repeatCount='indefinite' /%3E%3C/circle%3E%3Ccircle cx='22' cy='22' r='1'%3E%3Canimate attributeName='r' begin='-0.9s' dur='1.8s' values='1; 20' calcMode='spline' keyTimes='0; 1' keySplines='0.165, 0.84, 0.44, 1' repeatCount='indefinite' /%3E%3Canimate attributeName='stroke-opacity' begin='-0.9s' dur='1.8s' values='1; 0' calcMode='spline' keyTimes='0; 1' keySplines='0.3, 0.61, 0.355, 1' repeatCount='indefinite' /%3E%3C/circle%3E%3C/g%3E%3C/svg%3E")}.loading-ball{mask-image:url("data:image/svg+xml,%0A%3Csvg width='24' height='24' viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cstyle%3E.spinner_rXNP%7Banimation:spinner_YeBj .8s infinite%7D@keyframes spinner_YeBj%7B0%25%7Banimation-timing-function:cubic-bezier(0.33,0,.66,.33);cy:5px%7D46.875%25%7Bcy:20px;rx:4px;ry:4px%7D50%25%7Banimation-timing-function:cubic-bezier(0.33,.66,.66,1);cy:20.5px;rx:4.8px;ry:3px%7D53.125%25%7Brx:4px;ry:4px%7D100%25%7Bcy:5px%7D%7D%3C/style%3E%3Cellipse class='spinner_rXNP' cx='12' cy='5' rx='4' ry='4'/%3E%3C/svg%3E")}.loading-bars{mask-image:url("data:image/svg+xml,%0A%3Csvg width='24' height='24' viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cstyle%3E.spinner_hzlK%7Banimation:spinner_vc4H .8s linear infinite;animation-delay:-.8s%7D.spinner_koGT%7Banimation-delay:-.65s%7D.spinner_YF1u%7Banimation-delay:-.5s%7D@keyframes 
spinner_vc4H%7B0%25%7By:1px;height:22px%7D93.75%25%7By:5px;height:14px;opacity:.2%7D%7D%3C/style%3E%3Crect class='spinner_hzlK' x='1' y='1' width='6' height='22'/%3E%3Crect class='spinner_hzlK spinner_koGT' x='9' y='1' width='6' height='22'/%3E%3Crect class='spinner_hzlK spinner_YF1u' x='17' y='1' width='6' height='22'/%3E%3C/svg%3E")}.loading-infinity{mask-image:url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' xmlns:xlink='http://www.w3.org/1999/xlink' style='shape-rendering: auto;' width='200px' height='200px' viewBox='0 0 100 100' preserveAspectRatio='xMidYMid'%3E%3Cpath fill='none' stroke='%230a0a0a' stroke-width='10' stroke-dasharray='205.271142578125 51.317785644531256' d='M24.3 30C11.4 30 5 43.3 5 50s6.4 20 19.3 20c19.3 0 32.1-40 51.4-40 C88.6 30 95 43.3 95 50s-6.4 20-19.3 20C56.4 70 43.6 30 24.3 30z' stroke-linecap='round' style='transform:scale(0.8);transform-origin:50px 50px'%3E%3Canimate attributeName='stroke-dashoffset' repeatCount='indefinite' dur='2s' keyTimes='0;1' values='0;256.58892822265625'%3E%3C/animate%3E%3C/path%3E%3C/svg%3E")}.loading-xs{width:1rem}.loading-sm{width:1.25rem}.loading-md{width:1.5rem}.loading-lg{width:2.5rem}.mask-squircle{mask-image:url("data:image/svg+xml,%3csvg width='200' height='200' xmlns='http://www.w3.org/2000/svg'%3e%3cpath d='M100 0C20 0 0 20 0 100s20 100 100 100 100-20 100-100S180 0 100 0Z'/%3e%3c/svg%3e")}.mask-decagon{mask-image:url("data:image/svg+xml,%3csvg width='192' height='200' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='m96 0 58.779 19.098 36.327 50v61.804l-36.327 50L96 200l-58.779-19.098-36.327-50V69.098l36.327-50z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-diamond{mask-image:url("data:image/svg+xml,%3csvg width='200' height='200' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='m100 0 100 100-100 100L0 100z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-heart{mask-image:url("data:image/svg+xml,%3csvg width='200' height='185' xmlns='http://www.w3.org/2000/svg'%3e%3cpath d='M100 184.606a15.384 15.384 0 0 1-8.653-2.678C53.565 156.28 37.205 138.695 28.182 127.7 8.952 104.264-.254 80.202.005 54.146.308 24.287 24.264 0 53.406 0c21.192 0 35.869 11.937 44.416 21.879a2.884 2.884 0 0 0 4.356 0C110.725 11.927 125.402 0 146.594 0c29.142 0 53.098 24.287 53.4 54.151.26 26.061-8.956 50.122-28.176 73.554-9.023 10.994-25.383 28.58-63.165 54.228a15.384 15.384 0 0 1-8.653 2.673Z' fill='black' fill-rule='nonzero'/%3e%3c/svg%3e")}.mask-hexagon{mask-image:url("data:image/svg+xml,%3csvg width='182' height='201' xmlns='http://www.w3.org/2000/svg'%3e%3cpath d='M.3 65.486c0-9.196 6.687-20.063 14.211-25.078l61.86-35.946c8.36-5.016 20.899-5.016 29.258 0l61.86 35.946c8.36 5.015 14.211 15.882 14.211 25.078v71.055c0 9.196-6.687 20.063-14.211 25.079l-61.86 35.945c-8.36 4.18-20.899 4.18-29.258 0L14.51 161.62C6.151 157.44.3 145.737.3 136.54V65.486Z' fill='black' fill-rule='nonzero'/%3e%3c/svg%3e")}.mask-hexagon-2{mask-image:url("data:image/svg+xml,%3csvg width='200' height='182' xmlns='http://www.w3.org/2000/svg'%3e%3cpath d='M64.786 181.4c-9.196 0-20.063-6.687-25.079-14.21L3.762 105.33c-5.016-8.36-5.016-20.9 0-29.259l35.945-61.86C44.723 5.851 55.59 0 64.786 0h71.055c9.196 0 20.063 6.688 25.079 14.211l35.945 61.86c4.18 8.36 4.18 20.899 0 29.258l-35.945 61.86c-4.18 8.36-15.883 14.211-25.079 14.211H64.786Z' fill='black' fill-rule='nonzero'/%3e%3c/svg%3e")}.mask-circle{mask-image:url("data:image/svg+xml,%3csvg width='200' height='200' xmlns='http://www.w3.org/2000/svg'%3e%3ccircle fill='black' cx='100' cy='100' r='100' 
fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-parallelogram{mask-image:url("data:image/svg+xml,%3csvg width='200' height='154' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='M46.154 0H200l-46.154 153.846H0z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-parallelogram-2{mask-image:url("data:image/svg+xml,%3csvg width='200' height='154' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='M153.846 0H0l46.154 153.846H200z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-parallelogram-3{mask-image:url("data:image/svg+xml,%3csvg width='154' height='201' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='M.077 47.077v153.846l153.846-46.154V.923z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-parallelogram-4{mask-image:url("data:image/svg+xml,%3csvg width='154' height='201' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='M153.923 47.077v153.846L.077 154.77V.923z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-pentagon{mask-image:url("data:image/svg+xml,%3csvg width='192' height='181' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='m96 0 95.106 69.098-36.327 111.804H37.22L.894 69.098z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-square{mask-image:url("data:image/svg+xml,%3csvg width='200' height='200' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='M0 0h200v200H0z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-star{mask-image:url("data:image/svg+xml,%3csvg width='192' height='180' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='m96 137.263-58.779 42.024 22.163-68.389L.894 68.481l72.476-.243L96 0l22.63 68.238 72.476.243-58.49 42.417 22.163 68.389z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-star-2{mask-image:url("data:image/svg+xml,%3csvg width='192' height='180' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='m96 153.044-58.779 26.243 7.02-63.513L.894 68.481l63.117-13.01L96 0l31.989 55.472 63.117 13.01-43.347 47.292 7.02 63.513z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-triangle{mask-image:url("data:image/svg+xml,%3csvg width='174' height='149' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='m87 148.476-86.603.185L43.86 74.423 87 0l43.14 74.423 43.463 74.238z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-triangle-2{mask-image:url("data:image/svg+xml,%3csvg width='174' height='150' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='m87 .738 86.603-.184-43.463 74.238L87 149.214 43.86 74.792.397.554z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-triangle-3{mask-image:url("data:image/svg+xml,%3csvg width='150' height='174' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='m149.369 87.107.185 86.603-74.239-43.463L.893 87.107l74.422-43.14L149.554.505z' fill-rule='evenodd'/%3e%3c/svg%3e")}.mask-triangle-4{mask-image:url("data:image/svg+xml,%3csvg width='150' height='174' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='M.631 87.107.446.505l74.239 43.462 74.422 43.14-74.422 43.14L.446 173.71z' fill-rule='evenodd'/%3e%3c/svg%3e")}.menu{padding:.5rem}:where(.menu li:empty){--tw-bg-opacity:1;background-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-bg-opacity)));opacity:.1;margin:.5rem 1rem;height:1px}.menu :where(li ul){margin-inline-start:1rem;padding-inline-start:.5rem}.menu :where(li ul):before{position:absolute;bottom:.75rem;inset-inline-start:0;top:.75rem;width:1px;--tw-bg-opacity:1;background-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-bg-opacity)));opacity:.1;content:""}.menu :where(li:not(.menu-title)>:not(ul,details,.menu-title,.btn)),.menu :where(li:not(.menu-title)>details>summary:not(.menu-title)){border-radius:var(--rounded-btn,.5rem);padding-left:1rem;padding-right:1rem;padding-top:.5rem;padding-bottom:.5rem;text-align:start;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.2s;transition-timing-function:cubic-bezier(0,0,.2,1);text-wrap:balance}:where(.menu li:not(.menu-title,.disabled)>:not(ul,details,.menu-title)):is(summary):not(.active,.btn):focus-visible,:where(.menu li:not(.menu-title,.disabled)>:not(ul,details,.menu-title)):not(summary,.active,.btn).focus,:where(.menu li:not(.menu-title,.disabled)>:not(ul,details,.menu-title)):not(summary,.active,.btn):focus,:where(.menu li:not(.menu-title,.disabled)>details>summary:not(.menu-title)):is(summary):not(.active,.btn):focus-visible,:where(.menu li:not(.menu-title,.disabled)>details>summary:not(.menu-title)):not(summary,.active,.btn).focus,:where(.menu li:not(.menu-title,.disabled)>details>summary:not(.menu-title)):not(summary,.active,.btn):focus{cursor:pointer;background-color:var(--fallback-bc,oklch(var(--bc)/.1));--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));outline:2px solid transparent;outline-offset:2px}@media (hover:hover){:where(.menu li:not(.menu-title,.disabled)>:not(ul,details,.menu-title)):not(.active,.btn):hover,:where(.menu li:not(.menu-title,.disabled)>details>summary:not(.menu-title)):not(.active,.btn):hover{cursor:pointer;outline:2px solid transparent;outline-offset:2px}@supports (color:oklch(0% 0 0)){:where(.menu li:not(.menu-title,.disabled)>:not(ul,details,.menu-title)):not(.active,.btn):hover,:where(.menu li:not(.menu-title,.disabled)>details>summary:not(.menu-title)):not(.active,.btn):hover{background-color:var(--fallback-bc,oklch(var(--bc)/.1))}}}.menu li>:not(ul,.menu-title,details,.btn).active,.menu li>:not(ul,.menu-title,details,.btn):active,.menu li>details>summary:active{--tw-bg-opacity:1;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-nc,oklch(var(--nc)/var(--tw-text-opacity)))}@media(hover:hover){.menu li>:not(ul,.menu-title,details,.btn).active,.menu li>:not(ul,.menu-title,details,.btn):active,.menu li>details>summary:active{--tw-bg-opacity:1;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-nc,oklch(var(--nc)/var(--tw-text-opacity)))}}.menu li.disabled{color:var(--fallback-bc,oklch(var(--bc)/.3))}.menu :where(li>details>summary)::-webkit-details-marker{display:none}.menu :where(li>.menu-dropdown-toggle):after,.menu :where(li>details>summary):after{justify-self:end;display:block;margin-top:-.5rem;height:.5rem;width:.5rem;transform:rotate(45deg);transition-property:transform,margin-top;transition-duration:.3s;transition-timing-function:cubic-bezier(.4,0,.2,1);content:"";transform-origin:75% 75%;box-shadow:2px 2px;pointer-events:none}.menu :where(li>.menu-dropdown-toggle.menu-dropdown-show):after,.menu 
:where(li>details[open]>summary):after{transform:rotate(225deg);margin-top:0}.menu-title{padding-left:1rem;padding-right:1rem;padding-top:.5rem;padding-bottom:.5rem;font-size:.875rem;line-height:1.25rem;font-weight:700;color:var(--fallback-bc,oklch(var(--bc)/.4))}.mockup-code{min-width:18rem;border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));padding-top:1.25rem;padding-bottom:1.25rem;--tw-text-opacity:1;color:var(--fallback-nc,oklch(var(--nc)/var(--tw-text-opacity)));direction:ltr}.mockup-code:before{content:"";margin-bottom:1rem;display:block;height:.75rem;width:.75rem;border-radius:9999px;opacity:.3;box-shadow:1.4em 0,2.8em 0,4.2em 0}.mockup-code pre{padding-right:1.25rem}.mockup-code pre:before{content:"";margin-right:2ch}.mockup-code pre[data-prefix]:before{content:attr(data-prefix);width:2rem;opacity:.5}.mockup-window{display:flex;flex-direction:column;border-radius:var(--rounded-box,1rem);padding-top:1.25rem}.mockup-window:before{content:"";margin-bottom:1rem;display:block;aspect-ratio:1/1;height:.75rem;flex-shrink:0;align-self:flex-start;border-radius:9999px;opacity:.3}.mockup-window:where([dir=rtl],[dir=rtl]*):before{align-self:flex-end}.mockup-window:before{box-shadow:1.4em 0,2.8em 0,4.2em 0}.mockup-phone{display:inline-block;border:4px solid #444;border-radius:50px;background-color:#000;padding:10px;margin:0 auto;overflow:hidden}.mockup-phone .camera{position:relative;top:0;left:0;background:#000;height:25px;width:150px;margin:0 auto;border-bottom-left-radius:17px;border-bottom-right-radius:17px;z-index:11}.mockup-phone .camera:before{content:"";position:absolute;top:35%;left:50%;width:50px;height:4px;border-radius:5px;background-color:#0c0b0e;transform:translate(-50%,-50%)}.mockup-phone .camera:after{content:"";position:absolute;top:20%;left:70%;width:8px;height:8px;border-radius:5px;background-color:#0f0b25}.mockup-phone .display{overflow:hidden;border-radius:40px;margin-top:-25px}.mockup-browser{border-radius:var(--rounded-box,1rem)}.mockup-browser .mockup-browser-toolbar{margin-top:.75rem;margin-bottom:.75rem;display:inline-flex;width:100%;align-items:center;padding-right:1.4em}.mockup-browser .mockup-browser-toolbar:where([dir=rtl],[dir=rtl]*){flex-direction:row-reverse}.mockup-browser .mockup-browser-toolbar:before{content:"";margin-right:4.8rem;display:inline-block;aspect-ratio:1/1;height:.75rem;border-radius:9999px;opacity:.3;box-shadow:1.4em 0,2.8em 0,4.2em 0}.mockup-browser .mockup-browser-toolbar .input{position:relative;margin-left:auto;margin-right:auto;display:block;height:1.75rem;width:24rem;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;--tw-bg-opacity:1;background-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-bg-opacity)));padding-left:2rem;direction:ltr}.mockup-browser .mockup-browser-toolbar .input:before{content:"";position:absolute;left:.5rem;top:50%;aspect-ratio:1/1;height:.75rem;--tw-translate-y:-50%;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));border-radius:9999px;border-width:2px;border-color:currentColor;opacity:.6}.mockup-browser .mockup-browser-toolbar .input:after{content:"";position:absolute;left:1.25rem;top:50%;height:.5rem;--tw-translate-y:25%;--tw-rotate:-45deg;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) 
scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));border-radius:9999px;border-width:1px;border-color:currentColor;opacity:.6}.modal{background-color:transparent;color:inherit;transition-duration:.2s;transition-timing-function:cubic-bezier(0,0,.2,1);transition-property:transform,opacity,visibility;overflow-y:hidden;overscroll-behavior:contain}.modal::backdrop,.modal:not(dialog:not(.modal-open)){background-color:#0006;animation:modal-pop .2s ease-out}.modal-backdrop{z-index:-1;grid-column-start:1;grid-row-start:1;display:grid;align-self:stretch;justify-self:stretch;color:transparent}.modal-box{grid-column-start:1;grid-row-start:1;width:91.666667%;max-width:32rem;--tw-scale-x:.9;--tw-scale-y:.9;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));border-bottom-right-radius:var(--rounded-box,1rem);border-bottom-left-radius:var(--rounded-box,1rem);border-top-left-radius:var(--rounded-box,1rem);border-top-right-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)));padding:1.5rem;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.2s;transition-timing-function:cubic-bezier(0,0,.2,1);box-shadow:rgba(0,0,0,.25) 0 25px 50px -12px;overflow-y:auto;overscroll-behavior:contain}.modal-open .modal-box,.modal-toggle:checked+.modal .modal-box,.modal:target .modal-box,.modal[open] .modal-box{--tw-translate-y:0px;--tw-scale-x:1;--tw-scale-y:1;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.modal-action{margin-top:1.5rem;justify-content:flex-end}.modal-action>:not([hidden])~:not([hidden]){--tw-space-x-reverse:0;margin-right:calc(.5rem * var(--tw-space-x-reverse));margin-left:calc(.5rem * calc(1 - var(--tw-space-x-reverse)))}@keyframes 
modal-pop{0%{opacity:0}}.navbar{padding:var(--navbar-padding,.5rem);min-height:4rem;width:100%}.progress{height:.5rem;border-radius:var(--rounded-box,1rem);background-color:var(--fallback-bc,oklch(var(--bc)/.2))}.progress::-moz-progress-bar{border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-bg-opacity)))}.progress-primary::-moz-progress-bar{border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)))}.progress-secondary::-moz-progress-bar{border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-s,oklch(var(--s)/var(--tw-bg-opacity)))}.progress-accent::-moz-progress-bar{border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-a,oklch(var(--a)/var(--tw-bg-opacity)))}.progress-info::-moz-progress-bar{border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-in,oklch(var(--in)/var(--tw-bg-opacity)))}.progress-success::-moz-progress-bar{border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-su,oklch(var(--su)/var(--tw-bg-opacity)))}.progress-warning::-moz-progress-bar{border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-bg-opacity)))}.progress-error::-moz-progress-bar{border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-er,oklch(var(--er)/var(--tw-bg-opacity)))}.progress:indeterminate{--progress-color:var(--fallback-bc,oklch(var(--bc)/1))}.progress-primary:indeterminate{--progress-color:var(--fallback-p,oklch(var(--p)/1))}.progress-secondary:indeterminate{--progress-color:var(--fallback-s,oklch(var(--s)/1))}.progress-accent:indeterminate{--progress-color:var(--fallback-a,oklch(var(--a)/1))}.progress-info:indeterminate{--progress-color:var(--fallback-in,oklch(var(--in)/1))}.progress-success:indeterminate{--progress-color:var(--fallback-su,oklch(var(--su)/1))}.progress-warning:indeterminate{--progress-color:var(--fallback-wa,oklch(var(--wa)/1))}.progress-error:indeterminate{--progress-color:var(--fallback-er,oklch(var(--er)/1))}.progress::-webkit-progress-bar{border-radius:var(--rounded-box,1rem);background-color:transparent}.progress::-webkit-progress-value{border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-bg-opacity)))}.progress-primary::-webkit-progress-value{--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)))}.progress-secondary::-webkit-progress-value{--tw-bg-opacity:1;background-color:var(--fallback-s,oklch(var(--s)/var(--tw-bg-opacity)))}.progress-accent::-webkit-progress-value{--tw-bg-opacity:1;background-color:var(--fallback-a,oklch(var(--a)/var(--tw-bg-opacity)))}.progress-info::-webkit-progress-value{--tw-bg-opacity:1;background-color:var(--fallback-in,oklch(var(--in)/var(--tw-bg-opacity)))}.progress-success::-webkit-progress-value{--tw-bg-opacity:1;background-color:var(--fallback-su,oklch(var(--su)/var(--tw-bg-opacity)))}.progress-warning::-webkit-progress-value{--tw-bg-opacity:1;background-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-bg-opacity)))}.progress-error::-webkit-progress-value{--tw-bg-opacity:1;background-color:var(--fallback-er,oklch(var(--er)/var(--tw-bg-opacity)))}.progress:indeterminate{background-image:repeating-linear-gradient(90deg,var(--progress-color) -1%,var(--progress-color) 10%,transparent 10%,transparent 
90%);background-size:200%;background-position-x:15%;animation:progress-loading 5s ease-in-out infinite}.progress:indeterminate::-moz-progress-bar{background-color:transparent;background-image:repeating-linear-gradient(90deg,var(--progress-color) -1%,var(--progress-color) 10%,transparent 10%,transparent 90%);background-size:200%;background-position-x:15%;animation:progress-loading 5s ease-in-out infinite}@keyframes progress-loading{50%{background-position-x:-115%}}.radial-progress{--value:0;--size:5rem;--thickness:calc(var(--size) / 10)}.radial-progress:after{background-color:currentColor}.radio{--chkbg:var(--bc);height:1.5rem;width:1.5rem;cursor:pointer;appearance:none;border-radius:9999px;border-width:1px;border-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-border-opacity)));--tw-border-opacity:0.2}.radio:focus{box-shadow:none}.radio:focus-visible{outline-style:solid;outline-width:2px;outline-offset:2px;outline-color:var(--fallback-bc,oklch(var(--bc)/1))}.radio:checked,.radio[aria-checked=true]{--tw-bg-opacity:1;background-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-bg-opacity)));background-image:none;animation:radiomark var(--animation-input,.2s) ease-out;box-shadow:0 0 0 4px var(--fallback-b1,oklch(var(--b1)/1)) inset,0 0 0 4px var(--fallback-b1,oklch(var(--b1)/1)) inset}.radio-primary{--chkbg:var(--p);--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)))}@media(hover:hover){.radio-primary:hover{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)))}}.radio-primary:focus-visible{outline-color:var(--fallback-p,oklch(var(--p)/1))}.radio-primary:checked,.radio-primary[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)))}.radio-secondary{--chkbg:var(--s);--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)))}@media(hover:hover){.radio-secondary:hover{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)))}}.radio-secondary:focus-visible{outline-color:var(--fallback-s,oklch(var(--s)/1))}.radio-secondary:checked,.radio-secondary[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-s,oklch(var(--s)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-sc,oklch(var(--sc)/var(--tw-text-opacity)))}.radio-accent{--chkbg:var(--a);--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)))}@media(hover:hover){.radio-accent:hover{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)))}}.radio-accent:focus-visible{outline-color:var(--fallback-a,oklch(var(--a)/1))}.radio-accent:checked,.radio-accent[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-a,oklch(var(--a)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-ac,oklch(var(--ac)/var(--tw-text-opacity)))}.radio-success{--chkbg:var(--su);--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)))}@media(hover:hover){.radio-success:hover{--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--t
w-border-opacity)))}}.radio-success:focus-visible{outline-color:var(--fallback-su,oklch(var(--su)/1))}.radio-success:checked,.radio-success[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-su,oklch(var(--su)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-suc,oklch(var(--suc)/var(--tw-text-opacity)))}.radio-warning{--chkbg:var(--wa);--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)))}@media(hover:hover){.radio-warning:hover{--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)))}}.radio-warning:focus-visible{outline-color:var(--fallback-wa,oklch(var(--wa)/1))}.radio-warning:checked,.radio-warning[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-wac,oklch(var(--wac)/var(--tw-text-opacity)))}.radio-info{--chkbg:var(--in);--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)))}@media(hover:hover){.radio-info:hover{--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)))}}.radio-info:focus-visible{outline-color:var(--fallback-in,oklch(var(--in)/1))}.radio-info:checked,.radio-info[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-in,oklch(var(--in)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-inc,oklch(var(--inc)/var(--tw-text-opacity)))}.radio-error{--chkbg:var(--er);--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)))}@media(hover:hover){.radio-error:hover{--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)))}}.radio-error:focus-visible{outline-color:var(--fallback-er,oklch(var(--er)/1))}.radio-error:checked,.radio-error[aria-checked=true]{--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-er,oklch(var(--er)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-erc,oklch(var(--erc)/var(--tw-text-opacity)))}.radio:disabled{cursor:not-allowed;opacity:.2}@keyframes radiomark{0%{box-shadow:0 0 0 12px var(--fallback-b1,oklch(var(--b1)/1)) inset,0 0 0 12px var(--fallback-b1,oklch(var(--b1)/1)) inset}50%{box-shadow:0 0 0 3px var(--fallback-b1,oklch(var(--b1)/1)) inset,0 0 0 3px var(--fallback-b1,oklch(var(--b1)/1)) inset}100%{box-shadow:0 0 0 4px var(--fallback-b1,oklch(var(--b1)/1)) inset,0 0 0 4px var(--fallback-b1,oklch(var(--b1)/1)) inset}}.radio-mark{display:none}.range{appearance:none;-webkit-appearance:none;--range-shdw:var(--fallback-bc,oklch(var(--bc)/1));overflow:hidden;border-radius:var(--rounded-box,1rem);background-color:transparent}.range:focus-visible::-webkit-slider-thumb{--focus-shadow:0 0 0 6px var(--fallback-b1,oklch(var(--b1)/1)) inset,0 0 0 2rem var(--range-shdw) inset}.range:focus-visible::-moz-range-thumb{--focus-shadow:0 0 0 6px var(--fallback-b1,oklch(var(--b1)/1)) inset,0 0 0 2rem var(--range-shdw) 
inset}.range::-webkit-slider-runnable-track{height:.5rem;width:100%;border-radius:var(--rounded-box,1rem);background-color:var(--fallback-bc,oklch(var(--bc)/.1))}.range::-moz-range-track{height:.5rem;width:100%;border-radius:var(--rounded-box,1rem);background-color:var(--fallback-bc,oklch(var(--bc)/.1))}.range::-webkit-slider-thumb{position:relative;height:1.5rem;width:1.5rem;border-radius:var(--rounded-box,1rem);border-style:none;--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)));appearance:none;-webkit-appearance:none;top:50%;color:var(--range-shdw);transform:translateY(-50%);--filler-size:100rem;--filler-offset:0.6rem;box-shadow:0 0 0 3px var(--range-shdw) inset,var(--focus-shadow,0 0),calc(var(--filler-size) * -1 - var(--filler-offset)) 0 0 var(--filler-size)}.range::-moz-range-thumb{position:relative;height:1.5rem;width:1.5rem;border-radius:var(--rounded-box,1rem);border-style:none;--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)));top:50%;color:var(--range-shdw);--filler-size:100rem;--filler-offset:0.5rem;box-shadow:0 0 0 3px var(--range-shdw) inset,var(--focus-shadow,0 0),calc(var(--filler-size) * -1 - var(--filler-offset)) 0 0 var(--filler-size)}.range-primary{--range-shdw:var(--fallback-p,oklch(var(--p)/1))}.range-secondary{--range-shdw:var(--fallback-s,oklch(var(--s)/1))}.range-accent{--range-shdw:var(--fallback-a,oklch(var(--a)/1))}.range-success{--range-shdw:var(--fallback-su,oklch(var(--su)/1))}.range-warning{--range-shdw:var(--fallback-wa,oklch(var(--wa)/1))}.range-info{--range-shdw:var(--fallback-in,oklch(var(--in)/1))}.range-error{--range-shdw:var(--fallback-er,oklch(var(--er)/1))}.rating input{appearance:none;-webkit-appearance:none}.rating :where(input){animation:rating-pop var(--animation-input,.25s) ease-out;height:1.5rem;width:1.5rem;background-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-bg-opacity)));--tw-bg-opacity:1}.rating .rating-hidden{width:.5rem;background-color:transparent}.rating input[type=radio]:checked{background-image:none}.rating input:checked~input,.rating input[aria-checked=true]~input{--tw-bg-opacity:0.2}.rating input:focus-visible{transition-property:transform;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.3s;transition-timing-function:cubic-bezier(0,0,.2,1);transform:translateY(-.125em)}.rating input:active:focus{animation:none;transform:translateY(-.125em)}.rating-half :where(input:not(.rating-hidden)){width:.75rem}@keyframes rating-pop{0%{transform:translateY(-.125em)}40%{transform:translateY(-.125em)}100%{transform:translateY(0)}}.select{border-radius:var(--rounded-btn,.5rem);border-width:1px;border-color:transparent;--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)));padding-inline-end:2.5rem}.select-bordered{border-color:var(--fallback-bc,oklch(var(--bc)/.2))}.select{background-image:linear-gradient(45deg,transparent 50%,currentColor 50%),linear-gradient(135deg,currentColor 50%,transparent 50%);background-position:calc(100% - 20px) calc(1px + 50%),calc(100% - 16.1px) calc(1px + 50%);background-size:4px 4px,4px 
4px;background-repeat:no-repeat}.select:focus{box-shadow:none;border-color:var(--fallback-bc,oklch(var(--bc)/.2));outline-style:solid;outline-width:2px;outline-offset:2px;outline-color:var(--fallback-bc,oklch(var(--bc)/.2))}.select-ghost{--tw-bg-opacity:0.05}.select-ghost:focus{--tw-bg-opacity:1;--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)))}.select-primary{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)))}.select-primary:focus{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)));outline-color:var(--fallback-p,oklch(var(--p)/1))}.select-secondary{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)))}.select-secondary:focus{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)));outline-color:var(--fallback-s,oklch(var(--s)/1))}.select-accent{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)))}.select-accent:focus{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)));outline-color:var(--fallback-a,oklch(var(--a)/1))}.select-info{--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)))}.select-info:focus{--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)));outline-color:var(--fallback-in,oklch(var(--in)/1))}.select-success{--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)))}.select-success:focus{--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)));outline-color:var(--fallback-su,oklch(var(--su)/1))}.select-warning{--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)))}.select-warning:focus{--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)));outline-color:var(--fallback-wa,oklch(var(--wa)/1))}.select-error{--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)))}.select-error:focus{--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)));outline-color:var(--fallback-er,oklch(var(--er)/1))}.select-disabled,.select:disabled,.select[disabled]{cursor:not-allowed;--tw-border-opacity:1;border-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-bg-opacity)));color:var(--fallback-bc,oklch(var(--bc)/.4))}.select-disabled::placeholder,.select:disabled::placeholder,.select[disabled]::placeholder{color:var(--fallback-bc,oklch(var(--bc)/var(--tw-placeholder-opacity)));--tw-placeholder-opacity:0.2}.select-multiple,.select[multiple],.select[size].select:not([size="1"]){background-image:none;padding-right:1rem}[dir=rtl] .select{background-position:calc(0% + 12px) calc(1px + 50%),calc(0% + 16px) calc(1px + 50%)}.skeleton{border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-b3,oklch(var(--b3)/var(--tw-bg-opacity)));will-change:background-position;animation:skeleton 1.8s ease-in-out infinite;background-image:linear-gradient(105deg,transparent 0,transparent 40%,var(--fallback-b1,oklch(var(--b1)/1)) 50%,transparent 60%,transparent 100%);background-size:200% auto;background-repeat:no-repeat;background-position-x:-50%}@media 
(prefers-reduced-motion){.skeleton{animation-duration:15s}}@keyframes skeleton{from{background-position:150%}to{background-position:-50%}}.stack{place-items:center;align-items:flex-end}.stack>*{width:100%;opacity:.6}.stack>:nth-child(2){opacity:.8}.stack>:nth-child(1){opacity:1}.stats{border-radius:var(--rounded-box,1rem);--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)))}:where(.stats)>:not([hidden])~:not([hidden]){--tw-divide-x-reverse:0;border-right-width:calc(1px * var(--tw-divide-x-reverse));border-left-width:calc(1px * calc(1 - var(--tw-divide-x-reverse)));--tw-divide-y-reverse:0;border-top-width:calc(0px * calc(1 - var(--tw-divide-y-reverse)));border-bottom-width:calc(0px * var(--tw-divide-y-reverse))}:where(.stats){overflow-x:auto}[dir=rtl] .stats>:not([hidden])~:not([hidden]){--tw-divide-x-reverse:1}.stat{column-gap:1rem;border-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-border-opacity)));--tw-border-opacity:0.1;padding-left:1.5rem;padding-right:1.5rem;padding-top:1rem;padding-bottom:1rem}.stat-title{color:var(--fallback-bc,oklch(var(--bc)/.6))}.stat-value{font-size:2.25rem;line-height:2.5rem;font-weight:800}.stat-desc{font-size:.75rem;line-height:1rem;color:var(--fallback-bc,oklch(var(--bc)/.6))}.stat-actions{margin-top:1rem}.steps .step{grid-template-rows:40px 1fr;grid-template-columns:auto;min-width:4rem}.steps .step:before{top:0;grid-column-start:1;grid-row-start:1;height:.5rem;width:100%;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y));--tw-bg-opacity:1;background-color:var(--fallback-b3,oklch(var(--b3)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));content:"";margin-inline-start:-100%}.steps .step:after{content:counter(step);counter-increment:step;z-index:1;position:relative;grid-column-start:1;grid-row-start:1;display:grid;height:2rem;width:2rem;place-items:center;place-self:center;border-radius:9999px;--tw-bg-opacity:1;background-color:var(--fallback-b3,oklch(var(--b3)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)))}.steps .step:first-child:before{content:none}.steps .step[data-content]:after{content:attr(data-content)}.steps .step-neutral+.step-neutral:before,.steps .step-neutral:after{--tw-bg-opacity:1;background-color:var(--fallback-n,oklch(var(--n)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-nc,oklch(var(--nc)/var(--tw-text-opacity)))}.steps .step-primary+.step-primary:before,.steps .step-primary:after{--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)))}.steps .step-secondary+.step-secondary:before,.steps .step-secondary:after{--tw-bg-opacity:1;background-color:var(--fallback-s,oklch(var(--s)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-sc,oklch(var(--sc)/var(--tw-text-opacity)))}.steps .step-accent+.step-accent:before,.steps .step-accent:after{--tw-bg-opacity:1;background-color:var(--fallback-a,oklch(var(--a)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-ac,oklch(var(--ac)/var(--tw-text-opacity)))}.steps 
.step-info+.step-info:before{--tw-bg-opacity:1;background-color:var(--fallback-in,oklch(var(--in)/var(--tw-bg-opacity)))}.steps .step-info:after{--tw-bg-opacity:1;background-color:var(--fallback-in,oklch(var(--in)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-inc,oklch(var(--inc)/var(--tw-text-opacity)))}.steps .step-success+.step-success:before{--tw-bg-opacity:1;background-color:var(--fallback-su,oklch(var(--su)/var(--tw-bg-opacity)))}.steps .step-success:after{--tw-bg-opacity:1;background-color:var(--fallback-su,oklch(var(--su)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-suc,oklch(var(--suc)/var(--tw-text-opacity)))}.steps .step-warning+.step-warning:before{--tw-bg-opacity:1;background-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-bg-opacity)))}.steps .step-warning:after{--tw-bg-opacity:1;background-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-wac,oklch(var(--wac)/var(--tw-text-opacity)))}.steps .step-error+.step-error:before{--tw-bg-opacity:1;background-color:var(--fallback-er,oklch(var(--er)/var(--tw-bg-opacity)))}.steps .step-error:after{--tw-bg-opacity:1;background-color:var(--fallback-er,oklch(var(--er)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-erc,oklch(var(--erc)/var(--tw-text-opacity)))}.swap{cursor:pointer}.swap>*{transition-duration:.3s;transition-timing-function:cubic-bezier(0,0,.2,1);transition-property:transform,opacity}.swap-rotate .swap-indeterminate,.swap-rotate .swap-on,.swap-rotate input:indeterminate~.swap-on{--tw-rotate:45deg;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.swap-active:where(.swap-rotate) .swap-off,.swap-rotate input:checked~.swap-off,.swap-rotate input:indeterminate~.swap-off{--tw-rotate:-45deg;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.swap-active:where(.swap-rotate) .swap-on,.swap-rotate input:checked~.swap-on,.swap-rotate input:indeterminate~.swap-indeterminate{--tw-rotate:0deg;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.swap-flip{transform-style:preserve-3d;perspective:16em}.swap-flip .swap-indeterminate,.swap-flip .swap-on,.swap-flip input:indeterminate~.swap-on{transform:rotateY(180deg);backface-visibility:hidden;opacity:1}.swap-active:where(.swap-flip) .swap-off,.swap-flip input:checked~.swap-off,.swap-flip input:indeterminate~.swap-off{transform:rotateY(-180deg);backface-visibility:hidden;opacity:1}.swap-active:where(.swap-flip) .swap-on,.swap-flip input:checked~.swap-on,.swap-flip 
input:indeterminate~.swap-indeterminate{transform:rotateY(0)}.tabs-lifted>.tab:focus-visible{border-end-end-radius:0;border-end-start-radius:0}.tab{--tw-text-opacity:0.5}@media(hover:hover){.tab:hover{--tw-text-opacity:1}}.tab{--tab-color:var(--fallback-bc,oklch(var(--bc)/1));--tab-bg:var(--fallback-b1,oklch(var(--b1)/1));--tab-border-color:var(--fallback-b3,oklch(var(--b3)/1));color:var(--tab-color);padding-inline-start:var(--tab-padding,1rem);padding-inline-end:var(--tab-padding,1rem)}.tab:is(.tab-active,[aria-selected=true]):not(.tab-disabled):not([disabled]),.tab:is(input:checked){border-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-border-opacity)));--tw-border-opacity:1;--tw-text-opacity:1}.tab:focus{outline:2px solid transparent;outline-offset:2px}.tab:focus-visible{outline:2px solid currentColor;outline-offset:-5px}.tab-disabled,.tab[disabled]{cursor:not-allowed;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));--tw-text-opacity:0.2}@media (hover:hover){.tab[disabled],.tab[disabled]:hover{cursor:not-allowed;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));--tw-text-opacity:0.2}}.tabs-bordered>.tab{border-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-border-opacity)));--tw-border-opacity:0.2;border-style:solid;border-bottom-width:calc(var(--tab-border,1px) + 1px)}.tabs-lifted>.tab{border:var(--tab-border,1px) solid transparent;border-width:0 0 var(--tab-border,1px) 0;border-start-start-radius:var(--tab-radius,.5rem);border-start-end-radius:var(--tab-radius,.5rem);border-bottom-color:var(--tab-border-color);padding-inline-start:var(--tab-padding,1rem);padding-inline-end:var(--tab-padding,1rem);padding-top:var(--tab-border,1px)}.tabs-lifted>.tab:is(.tab-active,[aria-selected=true]):not(.tab-disabled):not([disabled]),.tabs-lifted>.tab:is(input:checked){background-color:var(--tab-bg);border-width:var(--tab-border,1px) var(--tab-border,1px) 0 var(--tab-border,1px);border-inline-start-color:var(--tab-border-color);border-inline-end-color:var(--tab-border-color);border-top-color:var(--tab-border-color);padding-inline-start:calc(var(--tab-padding,1rem) - var(--tab-border,1px));padding-inline-end:calc(var(--tab-padding,1rem) - var(--tab-border,1px));padding-bottom:var(--tab-border,1px);padding-top:0}.tabs-lifted>.tab:is(.tab-active,[aria-selected=true]):not(.tab-disabled):not([disabled]):before,.tabs-lifted>.tab:is(input:checked):before{z-index:1;content:"";display:block;position:absolute;width:calc(100% + var(--tab-radius,.5rem) * 2);height:var(--tab-radius,.5rem);bottom:0;background-size:var(--tab-radius,.5rem);background-position:top left,top right;background-repeat:no-repeat;--tab-grad:calc(69% - var(--tab-border, 1px));--radius-start:radial-gradient(circle at top left, transparent var(--tab-grad), var(--tab-border-color) calc(var(--tab-grad) + 0.25px), var(--tab-border-color) calc(var(--tab-grad) + var(--tab-border, 1px)), var(--tab-bg) calc(var(--tab-grad) + var(--tab-border, 1px) + 0.25px));--radius-end:radial-gradient(circle at top right, transparent var(--tab-grad), var(--tab-border-color) calc(var(--tab-grad) + 0.25px), var(--tab-border-color) calc(var(--tab-grad) + var(--tab-border, 1px)), var(--tab-bg) calc(var(--tab-grad) + var(--tab-border, 1px) + 0.25px));background-image:var(--radius-start),var(--radius-end)}.tabs-lifted>.tab:is(.tab-active,[aria-selected=true]):not(.tab-disabled):not([disabled]):first-child:before,.tabs-lifted>.tab:is(input:checked):first-child:before{background-image:var(--radius-end);background-position:top right}[dir=rtl] .tabs-lifted>.tab:is(.tab-active,[aria-selected=true]):not(.tab-disabled):not([disabled]):first-child:before,[dir=rtl] .tabs-lifted>.tab:is(input:checked):first-child:before{background-image:var(--radius-start);background-position:top left}.tabs-lifted>.tab:is(.tab-active,[aria-selected=true]):not(.tab-disabled):not([disabled]):last-child:before,.tabs-lifted>.tab:is(input:checked):last-child:before{background-image:var(--radius-start);background-position:top left}[dir=rtl] .tabs-lifted>.tab:is(.tab-active,[aria-selected=true]):not(.tab-disabled):not([disabled]):last-child:before,[dir=rtl] .tabs-lifted>.tab:is(input:checked):last-child:before{background-image:var(--radius-end);background-position:top right}.tabs-lifted>.tab:is(input:checked)+.tabs-lifted .tab:is(input:checked):before,.tabs-lifted>:is(.tab-active,[aria-selected=true]):not(.tab-disabled):not([disabled])+.tabs-lifted :is(.tab-active,[aria-selected=true]):not(.tab-disabled):not([disabled]):before{background-image:var(--radius-end);background-position:top right}.tabs-boxed{border-radius:var(--rounded-btn,.5rem);--tw-bg-opacity:1;background-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-bg-opacity)));padding:.25rem}.tabs-boxed .tab{border-radius:var(--rounded-btn,.5rem)}.tabs-boxed :is(.tab-active,[aria-selected=true]):not(.tab-disabled):not([disabled]),.tabs-boxed :is(input:checked){--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)))}@media(hover:hover){.tabs-boxed :is(.tab-active,[aria-selected=true]):not(.tab-disabled):not([disabled]):hover,.tabs-boxed :is(input:checked):hover{--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)))}}.table{border-radius:var(--rounded-box,1rem);text-align:left;font-size:.875rem;line-height:1.25rem}.table:where([dir=rtl],[dir=rtl] *){text-align:right}.table :where(th,td){padding-left:1rem;padding-right:1rem;padding-top:.75rem;padding-bottom:.75rem;vertical-align:middle}.table tr.active,.table tr.active:nth-child(even),.table-zebra tbody tr:nth-child(even){--tw-bg-opacity:1;background-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-bg-opacity)))}@media(hover:hover){.table tr.hover:hover,.table tr.hover:nth-child(even):hover{--tw-bg-opacity:1;background-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-bg-opacity)))}}.table-zebra tr.active,.table-zebra tr.active:nth-child(even),.table-zebra-zebra tbody tr:nth-child(even){--tw-bg-opacity:1;background-color:var(--fallback-b3,oklch(var(--b3)/var(--tw-bg-opacity)))}@media(hover:hover){.table-zebra tr.hover:hover,.table-zebra tr.hover:nth-child(even):hover{--tw-bg-opacity:1;background-color:var(--fallback-b3,oklch(var(--b3)/var(--tw-bg-opacity)))}}.table :where(thead tr,tbody tr:not(:last-child),tbody tr:first-child:last-child){border-bottom-width:1px;--tw-border-opacity:1;border-bottom-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-border-opacity)))}.table :where(thead,tfoot){white-space:nowrap;font-size:.75rem;line-height:1rem;font-weight:700;color:var(--fallback-bc,oklch(var(--bc)/.6))}.table 
:where(tfoot){border-top-width:1px;--tw-border-opacity:1;border-top-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-border-opacity)))}.textarea{border-radius:var(--rounded-btn,.5rem);border-width:1px;border-color:transparent;--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)))}.textarea-bordered{border-color:var(--fallback-bc,oklch(var(--bc)/.2))}.textarea:focus{box-shadow:none;border-color:var(--fallback-bc,oklch(var(--bc)/.2));outline-style:solid;outline-width:2px;outline-offset:2px;outline-color:var(--fallback-bc,oklch(var(--bc)/.2))}.textarea-ghost{--tw-bg-opacity:0.05}.textarea-ghost:focus{--tw-bg-opacity:1;--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));box-shadow:none}.textarea-primary{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)))}.textarea-primary:focus{--tw-border-opacity:1;border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)));outline-color:var(--fallback-p,oklch(var(--p)/1))}.textarea-secondary{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)))}.textarea-secondary:focus{--tw-border-opacity:1;border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)));outline-color:var(--fallback-s,oklch(var(--s)/1))}.textarea-accent{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)))}.textarea-accent:focus{--tw-border-opacity:1;border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)));outline-color:var(--fallback-a,oklch(var(--a)/1))}.textarea-info{--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)))}.textarea-info:focus{--tw-border-opacity:1;border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)));outline-color:var(--fallback-in,oklch(var(--in)/1))}.textarea-success{--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)))}.textarea-success:focus{--tw-border-opacity:1;border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)));outline-color:var(--fallback-su,oklch(var(--su)/1))}.textarea-warning{--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)))}.textarea-warning:focus{--tw-border-opacity:1;border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)));outline-color:var(--fallback-wa,oklch(var(--wa)/1))}.textarea-error{--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)))}.textarea-error:focus{--tw-border-opacity:1;border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)));outline-color:var(--fallback-er,oklch(var(--er)/1))}.textarea-disabled,.textarea:disabled,.textarea[disabled]{cursor:not-allowed;--tw-border-opacity:1;border-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-b2,oklch(var(--b2)/var(--tw-bg-opacity)));color:var(--fallback-bc,oklch(var(--bc)/.4))}.textarea-disabled::placeholder,.textarea:disabled::placeholder,.textarea[disabled]::placeholder{color:var(--fallback-bc,oklch(var(--bc)/var(--tw-placeholder-opacity)));--tw-placeholder-opacity:0.2}.timeline 
hr{height:.25rem}:where(.timeline hr){--tw-bg-opacity:1;background-color:var(--fallback-b3,oklch(var(--b3)/var(--tw-bg-opacity)))}:where(.timeline:has(.timeline-middle) hr):first-child{border-start-end-radius:var(--rounded-badge,1.9rem);border-end-end-radius:var(--rounded-badge,1.9rem);border-start-start-radius:0;border-end-start-radius:0}:where(.timeline:has(.timeline-middle) hr):last-child{border-start-start-radius:var(--rounded-badge,1.9rem);border-end-start-radius:var(--rounded-badge,1.9rem);border-start-end-radius:0;border-end-end-radius:0}:where(.timeline:not(:has(.timeline-middle)) :first-child hr:last-child){border-start-start-radius:var(--rounded-badge,1.9rem);border-end-start-radius:var(--rounded-badge,1.9rem);border-start-end-radius:0;border-end-end-radius:0}:where(.timeline:not(:has(.timeline-middle)) :last-child hr:first-child){border-start-end-radius:var(--rounded-badge,1.9rem);border-end-end-radius:var(--rounded-badge,1.9rem);border-start-start-radius:0;border-end-start-radius:0}.timeline-box{border-radius:var(--rounded-box,1rem);border-width:1px;--tw-border-opacity:1;border-color:var(--fallback-b3,oklch(var(--b3)/var(--tw-border-opacity)));--tw-bg-opacity:1;background-color:var(--fallback-b1,oklch(var(--b1)/var(--tw-bg-opacity)));padding-left:1rem;padding-right:1rem;padding-top:.5rem;padding-bottom:.5rem;--tw-shadow:0 1px 2px 0 rgb(0 0 0 / 0.05);--tw-shadow-colored:0 1px 2px 0 var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow)}.toast{gap:.5rem;padding:1rem}.toast>*{animation:toast-pop .25s ease-out}@keyframes toast-pop{0%{transform:scale(.9);opacity:0}100%{transform:scale(1);opacity:1}}.toggle{--tglbg:var(--fallback-b1,oklch(var(--b1)/1));--handleoffset:1.5rem;--handleoffsetcalculator:calc(var(--handleoffset) * -1);--togglehandleborder:0 0;height:1.5rem;width:3rem;cursor:pointer;appearance:none;border-radius:var(--rounded-badge,1.9rem);border-width:1px;border-color:currentColor;background-color:currentColor;color:var(--fallback-bc,oklch(var(--bc)/.5));transition:background,box-shadow var(--animation-input,.2s) ease-out;box-shadow:var(--handleoffsetcalculator) 0 0 2px var(--tglbg) inset,0 0 0 2px var(--tglbg) inset,var(--togglehandleborder)}[dir=rtl] .toggle{--handleoffsetcalculator:calc(var(--handleoffset) * 1)}.toggle:focus-visible{outline-style:solid;outline-width:2px;outline-offset:2px;outline-color:var(--fallback-bc,oklch(var(--bc)/.2))}.toggle:hover{background-color:currentColor}.toggle:checked,.toggle[aria-checked=true]{background-image:none;--handleoffsetcalculator:var(--handleoffset);--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)))}[dir=rtl] .toggle:checked,[dir=rtl] .toggle[aria-checked=true]{--handleoffsetcalculator:calc(var(--handleoffset) * -1)}.toggle:indeterminate{--tw-text-opacity:1;color:var(--fallback-bc,oklch(var(--bc)/var(--tw-text-opacity)));box-shadow:calc(var(--handleoffset)/ 2) 0 0 2px var(--tglbg) inset,calc(var(--handleoffset)/ -2) 0 0 2px var(--tglbg) inset,0 0 0 2px var(--tglbg) inset}[dir=rtl] .toggle:indeterminate{box-shadow:calc(var(--handleoffset)/ 2) 0 0 2px var(--tglbg) inset,calc(var(--handleoffset)/ -2) 0 0 2px var(--tglbg) inset,0 0 0 2px var(--tglbg) 
inset}.toggle-primary:focus-visible{outline-color:var(--fallback-p,oklch(var(--p)/1))}.toggle-primary:checked,.toggle-primary[aria-checked=true]{border-color:var(--fallback-p,oklch(var(--p)/var(--tw-border-opacity)));--tw-border-opacity:0.1;--tw-bg-opacity:1;background-color:var(--fallback-p,oklch(var(--p)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-pc,oklch(var(--pc)/var(--tw-text-opacity)))}.toggle-secondary:focus-visible{outline-color:var(--fallback-s,oklch(var(--s)/1))}.toggle-secondary:checked,.toggle-secondary[aria-checked=true]{border-color:var(--fallback-s,oklch(var(--s)/var(--tw-border-opacity)));--tw-border-opacity:0.1;--tw-bg-opacity:1;background-color:var(--fallback-s,oklch(var(--s)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-sc,oklch(var(--sc)/var(--tw-text-opacity)))}.toggle-accent:focus-visible{outline-color:var(--fallback-a,oklch(var(--a)/1))}.toggle-accent:checked,.toggle-accent[aria-checked=true]{border-color:var(--fallback-a,oklch(var(--a)/var(--tw-border-opacity)));--tw-border-opacity:0.1;--tw-bg-opacity:1;background-color:var(--fallback-a,oklch(var(--a)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-ac,oklch(var(--ac)/var(--tw-text-opacity)))}.toggle-success:focus-visible{outline-color:var(--fallback-su,oklch(var(--su)/1))}.toggle-success:checked,.toggle-success[aria-checked=true]{border-color:var(--fallback-su,oklch(var(--su)/var(--tw-border-opacity)));--tw-border-opacity:0.1;--tw-bg-opacity:1;background-color:var(--fallback-su,oklch(var(--su)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-suc,oklch(var(--suc)/var(--tw-text-opacity)))}.toggle-warning:focus-visible{outline-color:var(--fallback-wa,oklch(var(--wa)/1))}.toggle-warning:checked,.toggle-warning[aria-checked=true]{border-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-border-opacity)));--tw-border-opacity:0.1;--tw-bg-opacity:1;background-color:var(--fallback-wa,oklch(var(--wa)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-wac,oklch(var(--wac)/var(--tw-text-opacity)))}.toggle-info:focus-visible{outline-color:var(--fallback-in,oklch(var(--in)/1))}.toggle-info:checked,.toggle-info[aria-checked=true]{border-color:var(--fallback-in,oklch(var(--in)/var(--tw-border-opacity)));--tw-border-opacity:0.1;--tw-bg-opacity:1;background-color:var(--fallback-in,oklch(var(--in)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-inc,oklch(var(--inc)/var(--tw-text-opacity)))}.toggle-error:focus-visible{outline-color:var(--fallback-er,oklch(var(--er)/1))}.toggle-error:checked,.toggle-error[aria-checked=true]{border-color:var(--fallback-er,oklch(var(--er)/var(--tw-border-opacity)));--tw-border-opacity:0.1;--tw-bg-opacity:1;background-color:var(--fallback-er,oklch(var(--er)/var(--tw-bg-opacity)));--tw-text-opacity:1;color:var(--fallback-erc,oklch(var(--erc)/var(--tw-text-opacity)))}.toggle:disabled{cursor:not-allowed;--tw-border-opacity:1;border-color:var(--fallback-bc,oklch(var(--bc)/var(--tw-border-opacity)));background-color:transparent;opacity:.3;--togglehandleborder:0 0 0 3px var(--fallback-bc,oklch(var(--bc)/1)) inset,var(--handleoffsetcalculator) 0 0 3px var(--fallback-bc,oklch(var(--bc)/1)) inset}.toggle-mark{display:none}:root 
.prose{--tw-prose-body:var(--fallback-bc,oklch(var(--bc)/0.8));--tw-prose-headings:var(--fallback-bc,oklch(var(--bc)/1));--tw-prose-lead:var(--fallback-bc,oklch(var(--bc)/1));--tw-prose-links:var(--fallback-bc,oklch(var(--bc)/1));--tw-prose-bold:var(--fallback-bc,oklch(var(--bc)/1));--tw-prose-counters:var(--fallback-bc,oklch(var(--bc)/1));--tw-prose-bullets:var(--fallback-bc,oklch(var(--bc)/0.5));--tw-prose-hr:var(--fallback-bc,oklch(var(--bc)/0.2));--tw-prose-quotes:var(--fallback-bc,oklch(var(--bc)/1));--tw-prose-quote-borders:var(--fallback-bc,oklch(var(--bc)/0.2));--tw-prose-captions:var(--fallback-bc,oklch(var(--bc)/0.5));--tw-prose-code:var(--fallback-bc,oklch(var(--bc)/1));--tw-prose-pre-code:var(--fallback-nc,oklch(var(--nc)/1));--tw-prose-pre-bg:var(--fallback-n,oklch(var(--n)/1));--tw-prose-th-borders:var(--fallback-bc,oklch(var(--bc)/0.5));--tw-prose-td-borders:var(--fallback-bc,oklch(var(--bc)/0.2))}.prose :where(code):not(:where([class~=not-prose] *,pre *)){padding:1px 8px;border-radius:var(--rounded-badge);font-weight:initial;background-color:var(--fallback-bc,oklch(var(--bc)/.1))}@supports not (color:oklch(0% 0 0)){.prose :where(code):not(:where([class~=not-prose] *,pre *)){background-color:var(--fallback-b3,oklch(var(--b3)/1))}}.prose :where(code):not(:where([class~=not-prose],[class~=not-prose] *))::after,.prose :where(code):not(:where([class~=not-prose],[class~=not-prose] *))::before{display:none}.prose pre code{border-radius:0;padding:0}.prose :where(tbody tr,thead):not(:where([class~=not-prose] *)){border-bottom-color:var(--fallback-bc,oklch(var(--bc)/.2))}:root{color-scheme:light;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--pc:89.824% 0.06192 275.75;--ac:15.352% 0.0368 183.61;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:49.12% 0.3096 275.75;--s:69.71% 0.329 342.55;--sc:98.71% 0.0106 342.55;--a:76.76% 0.184 183.61;--n:32.1785% 0.02476 255.701624;--nc:89.4994% 0.011585 252.096176;--b1:100% 0 0;--b2:96.1151% 0 0;--b3:92.4169% 0.00108 197.137559;--bc:27.8078% 0.029596 256.847952}@media (prefers-color-scheme:dark){:root{color-scheme:dark;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--pc:13.138% 0.0392 275.75;--sc:14.96% 0.052 342.55;--ac:14.902% 0.0334 183.61;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:65.69% 0.196 275.75;--s:74.8% 0.26 342.55;--a:74.51% 0.167 183.61;--n:31.3815% 0.021108 254.139175;--nc:74.6477% 0.0216 264.435964;--b1:25.3267% 0.015896 252.417568;--b2:23.2607% 0.013807 253.100675;--b3:21.1484% 0.01165 254.087939;--bc:74.6477% 0.0216 264.435964}}[data-theme=light]{color-scheme:light;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--pc:89.824% 0.06192 275.75;--ac:15.352% 0.0368 183.61;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:49.12% 0.3096 275.75;--s:69.71% 0.329 342.55;--sc:98.71% 0.0106 342.55;--a:76.76% 0.184 183.61;--n:32.1785% 0.02476
255.701624;--nc:89.4994% 0.011585 252.096176;--b1:100% 0 0;--b2:96.1151% 0 0;--b3:92.4169% 0.00108 197.137559;--bc:27.8078% 0.029596 256.847952}:root:has(input.theme-controller[value=light]:checked){color-scheme:light;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--pc:89.824% 0.06192 275.75;--ac:15.352% 0.0368 183.61;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:49.12% 0.3096 275.75;--s:69.71% 0.329 342.55;--sc:98.71% 0.0106 342.55;--a:76.76% 0.184 183.61;--n:32.1785% 0.02476 255.701624;--nc:89.4994% 0.011585 252.096176;--b1:100% 0 0;--b2:96.1151% 0 0;--b3:92.4169% 0.00108 197.137559;--bc:27.8078% 0.029596 256.847952}[data-theme=dark]{color-scheme:dark;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--pc:13.138% 0.0392 275.75;--sc:14.96% 0.052 342.55;--ac:14.902% 0.0334 183.61;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:65.69% 0.196 275.75;--s:74.8% 0.26 342.55;--a:74.51% 0.167 183.61;--n:31.3815% 0.021108 254.139175;--nc:74.6477% 0.0216 264.435964;--b1:25.3267% 0.015896 252.417568;--b2:23.2607% 0.013807 253.100675;--b3:21.1484% 0.01165 254.087939;--bc:74.6477% 0.0216 264.435964}:root:has(input.theme-controller[value=dark]:checked){color-scheme:dark;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--pc:13.138% 0.0392 275.75;--sc:14.96% 0.052 342.55;--ac:14.902% 0.0334 183.61;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:65.69% 0.196 275.75;--s:74.8% 0.26 342.55;--a:74.51% 0.167 183.61;--n:31.3815% 0.021108 254.139175;--nc:74.6477% 0.0216 264.435964;--b1:25.3267% 0.015896 252.417568;--b2:23.2607% 0.013807 253.100675;--b3:21.1484% 0.01165 254.087939;--bc:74.6477% 0.0216 264.435964}[data-theme=cupcake]{color-scheme:light;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--pc:15.2344% 0.017892 200.026556;--sc:15.787% 0.020249 356.29965;--ac:15.8762% 0.029206 78.618794;--nc:84.7148% 0.013247 313.189598;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--p:76.172% 0.089459 200.026556;--s:78.9351% 0.101246 356.29965;--a:79.3811% 0.146032 78.618794;--n:23.5742% 0.066235 313.189598;--b1:97.7882% 0.00418 56.375637;--b2:93.9822% 0.007638 61.449292;--b3:91.5861% 0.006811 53.440502;--bc:23.5742% 0.066235 313.189598;--rounded-btn:1.9rem;--tab-border:2px;--tab-radius:0.7rem}:root:has(input.theme-controller[value=cupcake]:checked){color-scheme:light;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--pc:15.2344% 0.017892 200.026556;--sc:15.787% 0.020249 356.29965;--ac:15.8762% 0.029206 78.618794;--nc:84.7148% 0.013247 313.189598;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 
0;--rounded-box:1rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--p:76.172% 0.089459 200.026556;--s:78.9351% 0.101246 356.29965;--a:79.3811% 0.146032 78.618794;--n:23.5742% 0.066235 313.189598;--b1:97.7882% 0.00418 56.375637;--b2:93.9822% 0.007638 61.449292;--b3:91.5861% 0.006811 53.440502;--bc:23.5742% 0.066235 313.189598;--rounded-btn:1.9rem;--tab-border:2px;--tab-radius:0.7rem}[data-theme=bumblebee]{color-scheme:light;--b2:93% 0 0;--b3:86% 0 0;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--bc:20% 0 0;--ac:16.254% 0.0314 56.52;--nc:82.55% 0.015 281.99;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:89.51% 0.2132 96.61;--pc:38.92% 0.046 96.61;--s:80.39% 0.194 70.76;--sc:39.38% 0.068 70.76;--a:81.27% 0.157 56.52;--n:12.75% 0.075 281.99;--b1:100% 0 0}:root:has(input.theme-controller[value=bumblebee]:checked){color-scheme:light;--b2:93% 0 0;--b3:86% 0 0;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--bc:20% 0 0;--ac:16.254% 0.0314 56.52;--nc:82.55% 0.015 281.99;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:89.51% 0.2132 96.61;--pc:38.92% 0.046 96.61;--s:80.39% 0.194 70.76;--sc:39.38% 0.068 70.76;--a:81.27% 0.157 56.52;--n:12.75% 0.075 281.99;--b1:100% 0 0}[data-theme=emerald]{color-scheme:light;--b2:93% 0 0;--b3:86% 0 0;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:76.6626% 0.135433 153.450024;--pc:33.3872% 0.040618 162.240129;--s:61.3028% 0.202368 261.294233;--sc:100% 0 0;--a:72.7725% 0.149783 33.200363;--ac:0% 0 0;--n:35.5192% 0.032071 262.988584;--nc:98.4625% 0.001706 247.838921;--b1:100% 0 0;--bc:35.5192% 0.032071 262.988584;--animation-btn:0;--animation-input:0;--btn-focus-scale:1}:root:has(input.theme-controller[value=emerald]:checked){color-scheme:light;--b2:93% 0 0;--b3:86% 0 0;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:76.6626% 0.135433 153.450024;--pc:33.3872% 0.040618 162.240129;--s:61.3028% 0.202368 261.294233;--sc:100% 0 0;--a:72.7725% 0.149783 33.200363;--ac:0% 0 0;--n:35.5192% 0.032071 262.988584;--nc:98.4625% 0.001706 247.838921;--b1:100% 0 0;--bc:35.5192% 0.032071 262.988584;--animation-btn:0;--animation-input:0;--btn-focus-scale:1}[data-theme=corporate]{color-scheme:light;--b2:93% 0 0;--b3:86% 0 0;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--pc:12.078% 0.0456 269.1;--sc:13.0739% 0.010951 256.688055;--ac:15.3934% 0.022799 163.57888;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--border-btn:1px;--tab-border:1px;--p:60.39% 0.228 269.1;--s:65.3694% 0.054756 256.688055;--a:76.9669% 0.113994 163.57888;--n:22.3899% 0.031305 278.07229;--nc:95.8796% 0.008588 
247.915135;--b1:100% 0 0;--bc:22.3899% 0.031305 278.07229;--rounded-box:0.25rem;--rounded-btn:.125rem;--rounded-badge:.125rem;--tab-radius:0.25rem;--animation-btn:0;--animation-input:0;--btn-focus-scale:1}:root:has(input.theme-controller[value=corporate]:checked){color-scheme:light;--b2:93% 0 0;--b3:86% 0 0;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--pc:12.078% 0.0456 269.1;--sc:13.0739% 0.010951 256.688055;--ac:15.3934% 0.022799 163.57888;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--border-btn:1px;--tab-border:1px;--p:60.39% 0.228 269.1;--s:65.3694% 0.054756 256.688055;--a:76.9669% 0.113994 163.57888;--n:22.3899% 0.031305 278.07229;--nc:95.8796% 0.008588 247.915135;--b1:100% 0 0;--bc:22.3899% 0.031305 278.07229;--rounded-box:0.25rem;--rounded-btn:.125rem;--rounded-badge:.125rem;--tab-radius:0.25rem;--animation-btn:0;--animation-input:0;--btn-focus-scale:1}[data-theme=synthwave]{color-scheme:dark;--b2:20.2941% 0.076211 287.835609;--b3:18.7665% 0.070475 287.835609;--pc:14.4421% 0.031903 342.009383;--sc:15.6543% 0.02362 227.382405;--ac:17.608% 0.0412 93.72;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:72.2105% 0.159514 342.009383;--s:78.2714% 0.118101 227.382405;--a:88.04% 0.206 93.72;--n:25.5554% 0.103537 286.507967;--nc:97.9365% 0.00819 301.358346;--b1:21.8216% 0.081948 287.835609;--bc:97.9365% 0.00819 301.358346;--in:76.5197% 0.12273 231.831603;--inc:23.5017% 0.096418 290.329844;--su:86.0572% 0.115038 178.624677;--suc:23.5017% 0.096418 290.329844;--wa:85.531% 0.122117 93.722227;--wac:23.5017% 0.096418 290.329844;--er:73.7005% 0.121339 32.639257;--erc:23.5017% 0.096418 290.329844}:root:has(input.theme-controller[value=synthwave]:checked){color-scheme:dark;--b2:20.2941% 0.076211 287.835609;--b3:18.7665% 0.070475 287.835609;--pc:14.4421% 0.031903 342.009383;--sc:15.6543% 0.02362 227.382405;--ac:17.608% 0.0412 93.72;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:72.2105% 0.159514 342.009383;--s:78.2714% 0.118101 227.382405;--a:88.04% 0.206 93.72;--n:25.5554% 0.103537 286.507967;--nc:97.9365% 0.00819 301.358346;--b1:21.8216% 0.081948 287.835609;--bc:97.9365% 0.00819 301.358346;--in:76.5197% 0.12273 231.831603;--inc:23.5017% 0.096418 290.329844;--su:86.0572% 0.115038 178.624677;--suc:23.5017% 0.096418 290.329844;--wa:85.531% 0.122117 93.722227;--wac:23.5017% 0.096418 290.329844;--er:73.7005% 0.121339 32.639257;--erc:23.5017% 0.096418 290.329844}[data-theme=retro]{color-scheme:light;--inc:90.923% 0.043042 262.880917;--suc:12.541% 0.033982 149.213788;--wac:13.3168% 0.031484 58.31834;--erc:13.144% 0.0398 27.33;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--p:76.8664% 0.104092 22.664655;--pc:26.5104% 0.006243 0.522862;--s:80.7415% 0.052534 159.094608;--sc:26.5104% 0.006243 0.522862;--a:70.3919% 0.125455 52.953428;--ac:26.5104% 0.006243 0.522862;--n:28.4181% 0.009519 355.534017;--nc:92.5604% 0.025113 89.217311;--b1:91.6374% 0.034554 90.51575;--b2:88.2722% 0.049418 91.774344;--b3:84.133% 0.065952 90.856665;--bc:26.5104% 0.006243 0.522862;--in:54.615% 0.215208 262.880917;--su:62.7052% 0.169912 149.213788;--wa:66.584% 0.157422 58.31834;--er:65.72% 0.199 
27.33;--rounded-box:0.4rem;--rounded-btn:0.4rem;--rounded-badge:0.4rem;--tab-radius:0.4rem}:root:has(input.theme-controller[value=retro]:checked){color-scheme:light;--inc:90.923% 0.043042 262.880917;--suc:12.541% 0.033982 149.213788;--wac:13.3168% 0.031484 58.31834;--erc:13.144% 0.0398 27.33;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--p:76.8664% 0.104092 22.664655;--pc:26.5104% 0.006243 0.522862;--s:80.7415% 0.052534 159.094608;--sc:26.5104% 0.006243 0.522862;--a:70.3919% 0.125455 52.953428;--ac:26.5104% 0.006243 0.522862;--n:28.4181% 0.009519 355.534017;--nc:92.5604% 0.025113 89.217311;--b1:91.6374% 0.034554 90.51575;--b2:88.2722% 0.049418 91.774344;--b3:84.133% 0.065952 90.856665;--bc:26.5104% 0.006243 0.522862;--in:54.615% 0.215208 262.880917;--su:62.7052% 0.169912 149.213788;--wa:66.584% 0.157422 58.31834;--er:65.72% 0.199 27.33;--rounded-box:0.4rem;--rounded-btn:0.4rem;--rounded-badge:0.4rem;--tab-radius:0.4rem}[data-theme=cyberpunk]{color-scheme:light;--b2:87.8943% 0.16647 104.32;--b3:81.2786% 0.15394 104.32;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--bc:18.902% 0.0358 104.32;--pc:14.844% 0.0418 6.35;--sc:16.666% 0.0368 204.72;--ac:14.372% 0.04352 310.43;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;--p:74.22% 0.209 6.35;--s:83.33% 0.184 204.72;--a:71.86% 0.2176 310.43;--n:23.04% 0.065 269.31;--nc:94.51% 0.179 104.32;--b1:94.51% 0.179 104.32;--rounded-box:0;--rounded-btn:0;--rounded-badge:0;--tab-radius:0}:root:has(input.theme-controller[value=cyberpunk]:checked){color-scheme:light;--b2:87.8943% 0.16647 104.32;--b3:81.2786% 0.15394 104.32;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--bc:18.902% 0.0358 104.32;--pc:14.844% 0.0418 6.35;--sc:16.666% 0.0368 204.72;--ac:14.372% 0.04352 310.43;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;--p:74.22% 0.209 6.35;--s:83.33% 0.184 204.72;--a:71.86% 0.2176 310.43;--n:23.04% 0.065 269.31;--nc:94.51% 0.179 104.32;--b1:94.51% 0.179 104.32;--rounded-box:0;--rounded-btn:0;--rounded-badge:0;--tab-radius:0}[data-theme=valentine]{color-scheme:light;--b2:88.0567% 0.024834 337.06289;--b3:81.4288% 0.022964 337.06289;--pc:13.7239% 0.030755 15.066527;--sc:14.3942% 0.029258 293.189609;--ac:14.2537% 0.014961 197.828857;--inc:90.923% 0.043042 262.880917;--suc:12.541% 0.033982 149.213788;--wac:13.3168% 0.031484 58.31834;--erc:14.614% 0.0414 27.33;--rounded-box:1rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--p:68.6197% 0.153774 15.066527;--s:71.971% 0.14629 293.189609;--a:71.2685% 0.074804 197.828857;--n:54.6053% 0.143342 358.004839;--nc:90.2701% 0.037202 336.955191;--b1:94.6846% 0.026703 337.06289;--bc:37.3085% 0.081131 4.606426;--in:54.615% 0.215208 262.880917;--su:62.7052% 0.169912 149.213788;--wa:66.584% 0.157422 58.31834;--er:73.07% 0.207 27.33;--rounded-btn:1.9rem;--tab-radius:0.7rem}:root:has(input.theme-controller[value=valentine]:checked){color-scheme:light;--b2:88.0567% 0.024834 337.06289;--b3:81.4288% 
0.022964 337.06289;--pc:13.7239% 0.030755 15.066527;--sc:14.3942% 0.029258 293.189609;--ac:14.2537% 0.014961 197.828857;--inc:90.923% 0.043042 262.880917;--suc:12.541% 0.033982 149.213788;--wac:13.3168% 0.031484 58.31834;--erc:14.614% 0.0414 27.33;--rounded-box:1rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--p:68.6197% 0.153774 15.066527;--s:71.971% 0.14629 293.189609;--a:71.2685% 0.074804 197.828857;--n:54.6053% 0.143342 358.004839;--nc:90.2701% 0.037202 336.955191;--b1:94.6846% 0.026703 337.06289;--bc:37.3085% 0.081131 4.606426;--in:54.615% 0.215208 262.880917;--su:62.7052% 0.169912 149.213788;--wa:66.584% 0.157422 58.31834;--er:73.07% 0.207 27.33;--rounded-btn:1.9rem;--tab-radius:0.7rem}[data-theme=halloween]{color-scheme:dark;--b2:23.0416% 0 0;--b3:21.3072% 0 0;--bc:84.9552% 0 0;--sc:89.196% 0.0496 305.03;--nc:84.8742% 0.009322 65.681484;--inc:90.923% 0.043042 262.880917;--suc:12.541% 0.033982 149.213788;--wac:13.3168% 0.031484 58.31834;--erc:13.144% 0.0398 27.33;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:77.48% 0.204 60.62;--pc:19.6935% 0.004671 196.779412;--s:45.98% 0.248 305.03;--a:64.8% 0.223 136.073479;--ac:0% 0 0;--n:24.371% 0.046608 65.681484;--b1:24.7759% 0 0;--in:54.615% 0.215208 262.880917;--su:62.7052% 0.169912 149.213788;--wa:66.584% 0.157422 58.31834;--er:65.72% 0.199 27.33}:root:has(input.theme-controller[value=halloween]:checked){color-scheme:dark;--b2:23.0416% 0 0;--b3:21.3072% 0 0;--bc:84.9552% 0 0;--sc:89.196% 0.0496 305.03;--nc:84.8742% 0.009322 65.681484;--inc:90.923% 0.043042 262.880917;--suc:12.541% 0.033982 149.213788;--wac:13.3168% 0.031484 58.31834;--erc:13.144% 0.0398 27.33;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:77.48% 0.204 60.62;--pc:19.6935% 0.004671 196.779412;--s:45.98% 0.248 305.03;--a:64.8% 0.223 136.073479;--ac:0% 0 0;--n:24.371% 0.046608 65.681484;--b1:24.7759% 0 0;--in:54.615% 0.215208 262.880917;--su:62.7052% 0.169912 149.213788;--wa:66.584% 0.157422 58.31834;--er:65.72% 0.199 27.33}[data-theme=garden]{color-scheme:light;--b2:86.4453% 0.002011 17.197414;--b3:79.9386% 0.00186 17.197414;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--sc:89.699% 0.022197 355.095988;--ac:11.2547% 0.010859 154.390187;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:62.45% 0.278 3.83636;--pc:100% 0 0;--s:48.4952% 0.110985 355.095988;--a:56.2735% 0.054297 154.390187;--n:24.1559% 0.049362 89.070594;--nc:92.9519% 0.002163 17.197414;--b1:92.9519% 0.002163 17.197414;--bc:16.9617% 0.001664 17.32068}:root:has(input.theme-controller[value=garden]:checked){color-scheme:light;--b2:86.4453% 0.002011 17.197414;--b3:79.9386% 0.00186 17.197414;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--sc:89.699% 0.022197 355.095988;--ac:11.2547% 0.010859 154.390187;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 
0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:62.45% 0.278 3.83636;--pc:100% 0 0;--s:48.4952% 0.110985 355.095988;--a:56.2735% 0.054297 154.390187;--n:24.1559% 0.049362 89.070594;--nc:92.9519% 0.002163 17.197414;--b1:92.9519% 0.002163 17.197414;--bc:16.9617% 0.001664 17.32068}[data-theme=forest]{color-scheme:dark;--b2:17.522% 0.007709 17.911578;--b3:16.2032% 0.007129 17.911578;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--bc:83.7682% 0.001658 17.911578;--sc:13.9553% 0.027077 168.327128;--ac:14.1257% 0.02389 185.713193;--nc:86.1397% 0.007806 171.364646;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:68.6283% 0.185567 148.958922;--pc:0% 0 0;--s:69.7764% 0.135385 168.327128;--a:70.6285% 0.119451 185.713193;--n:30.6985% 0.039032 171.364646;--b1:18.8409% 0.00829 17.911578;--rounded-btn:1.9rem}:root:has(input.theme-controller[value=forest]:checked){color-scheme:dark;--b2:17.522% 0.007709 17.911578;--b3:16.2032% 0.007129 17.911578;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--bc:83.7682% 0.001658 17.911578;--sc:13.9553% 0.027077 168.327128;--ac:14.1257% 0.02389 185.713193;--nc:86.1397% 0.007806 171.364646;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:68.6283% 0.185567 148.958922;--pc:0% 0 0;--s:69.7764% 0.135385 168.327128;--a:70.6285% 0.119451 185.713193;--n:30.6985% 0.039032 171.364646;--b1:18.8409% 0.00829 17.911578;--rounded-btn:1.9rem}[data-theme=aqua]{color-scheme:dark;--b2:45.3464% 0.118611 261.181672;--b3:41.9333% 0.109683 261.181672;--bc:89.7519% 0.025508 261.181672;--sc:12.1365% 0.02175 309.782946;--ac:18.6854% 0.020445 94.555431;--nc:12.2124% 0.023402 243.760661;--inc:90.923% 0.043042 262.880917;--suc:12.541% 0.033982 149.213788;--wac:13.3168% 0.031484 58.31834;--erc:14.79% 0.038 27.33;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:85.6617% 0.14498 198.6458;--pc:40.1249% 0.068266 197.603872;--s:60.6827% 0.108752 309.782946;--a:93.4269% 0.102225 94.555431;--n:61.0622% 0.117009 243.760661;--b1:48.7596% 0.127539 261.181672;--in:54.615% 0.215208 262.880917;--su:62.7052% 0.169912 149.213788;--wa:66.584% 0.157422 58.31834;--er:73.95% 0.19 27.33}:root:has(input.theme-controller[value=aqua]:checked){color-scheme:dark;--b2:45.3464% 0.118611 261.181672;--b3:41.9333% 0.109683 261.181672;--bc:89.7519% 0.025508 261.181672;--sc:12.1365% 0.02175 309.782946;--ac:18.6854% 0.020445 94.555431;--nc:12.2124% 0.023402 243.760661;--inc:90.923% 0.043042 262.880917;--suc:12.541% 0.033982 149.213788;--wac:13.3168% 0.031484 58.31834;--erc:14.79% 0.038 27.33;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:85.6617% 0.14498 198.6458;--pc:40.1249% 0.068266 197.603872;--s:60.6827% 0.108752 309.782946;--a:93.4269% 0.102225 94.555431;--n:61.0622% 0.117009 
243.760661;--b1:48.7596% 0.127539 261.181672;--in:54.615% 0.215208 262.880917;--su:62.7052% 0.169912 149.213788;--wa:66.584% 0.157422 58.31834;--er:73.95% 0.19 27.33}[data-theme=lofi]{color-scheme:light;--inc:15.908% 0.0206 205.9;--suc:18.026% 0.0306 164.14;--wac:17.674% 0.027 79.94;--erc:15.732% 0.03 28.47;--border-btn:1px;--tab-border:1px;--p:15.9066% 0 0;--pc:100% 0 0;--s:21.455% 0.001566 17.278957;--sc:100% 0 0;--a:26.8618% 0 0;--ac:100% 0 0;--n:0% 0 0;--nc:100% 0 0;--b1:100% 0 0;--b2:96.1151% 0 0;--b3:92.268% 0.001082 17.17934;--bc:0% 0 0;--in:79.54% 0.103 205.9;--su:90.13% 0.153 164.14;--wa:88.37% 0.135 79.94;--er:78.66% 0.15 28.47;--rounded-box:0.25rem;--rounded-btn:0.125rem;--rounded-badge:0.125rem;--tab-radius:0.125rem;--animation-btn:0;--animation-input:0;--btn-focus-scale:1}:root:has(input.theme-controller[value=lofi]:checked){color-scheme:light;--inc:15.908% 0.0206 205.9;--suc:18.026% 0.0306 164.14;--wac:17.674% 0.027 79.94;--erc:15.732% 0.03 28.47;--border-btn:1px;--tab-border:1px;--p:15.9066% 0 0;--pc:100% 0 0;--s:21.455% 0.001566 17.278957;--sc:100% 0 0;--a:26.8618% 0 0;--ac:100% 0 0;--n:0% 0 0;--nc:100% 0 0;--b1:100% 0 0;--b2:96.1151% 0 0;--b3:92.268% 0.001082 17.17934;--bc:0% 0 0;--in:79.54% 0.103 205.9;--su:90.13% 0.153 164.14;--wa:88.37% 0.135 79.94;--er:78.66% 0.15 28.47;--rounded-box:0.25rem;--rounded-btn:0.125rem;--rounded-badge:0.125rem;--tab-radius:0.125rem;--animation-btn:0;--animation-input:0;--btn-focus-scale:1}[data-theme=pastel]{color-scheme:light;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--bc:20% 0 0;--pc:16.6166% 0.006979 316.8737;--sc:17.6153% 0.009839 8.688364;--ac:17.8419% 0.012056 170.923263;--nc:14.2681% 0.014702 228.183906;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--p:83.0828% 0.034896 316.8737;--s:88.0763% 0.049197 8.688364;--a:89.2096% 0.06028 170.923263;--n:71.3406% 0.07351 228.183906;--b1:100% 0 0;--b2:98.4625% 0.001706 247.838921;--b3:87.1681% 0.009339 258.338227;--rounded-btn:1.9rem;--tab-radius:0.7rem}:root:has(input.theme-controller[value=pastel]:checked){color-scheme:light;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--bc:20% 0 0;--pc:16.6166% 0.006979 316.8737;--sc:17.6153% 0.009839 8.688364;--ac:17.8419% 0.012056 170.923263;--nc:14.2681% 0.014702 228.183906;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--p:83.0828% 0.034896 316.8737;--s:88.0763% 0.049197 8.688364;--a:89.2096% 0.06028 170.923263;--n:71.3406% 0.07351 228.183906;--b1:100% 0 0;--b2:98.4625% 0.001706 247.838921;--b3:87.1681% 0.009339 258.338227;--rounded-btn:1.9rem;--tab-radius:0.7rem}[data-theme=fantasy]{color-scheme:light;--b2:93% 0 0;--b3:86% 0 0;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--pc:87.49% 0.0378 325.02;--sc:90.784% 0.0324 241.36;--ac:15.196% 0.0408 56.72;--nc:85.5616% 0.005919 256.847952;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:37.45% 0.189 325.02;--s:53.92% 0.162 241.36;--a:75.98% 0.204 56.72;--n:27.8078% 0.029596 
256.847952;--b1:100% 0 0;--bc:27.8078% 0.029596 256.847952}:root:has(input.theme-controller[value=fantasy]:checked){color-scheme:light;--b2:93% 0 0;--b3:86% 0 0;--in:72.06% 0.191 231.6;--su:64.8% 0.150 160;--wa:84.71% 0.199 83.87;--er:71.76% 0.221 22.18;--pc:87.49% 0.0378 325.02;--sc:90.784% 0.0324 241.36;--ac:15.196% 0.0408 56.72;--nc:85.5616% 0.005919 256.847952;--inc:0% 0 0;--suc:0% 0 0;--wac:0% 0 0;--erc:0% 0 0;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:37.45% 0.189 325.02;--s:53.92% 0.162 241.36;--a:75.98% 0.204 56.72;--n:27.8078% 0.029596 256.847952;--b1:100% 0 0;--bc:27.8078% 0.029596 256.847952}[data-theme=wireframe]{color-scheme:light;--bc:20% 0 0;--pc:15.6521% 0 0;--sc:15.6521% 0 0;--ac:15.6521% 0 0;--nc:18.8014% 0 0;--inc:89.0403% 0.062643 264.052021;--suc:90.395% 0.035372 142.495339;--wac:14.1626% 0.019994 108.702381;--erc:12.5591% 0.051537 29.233885;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;font-family:Chalkboard,comic sans ms,sans-serif;--p:78.2604% 0 0;--s:78.2604% 0 0;--a:78.2604% 0 0;--n:94.007% 0 0;--b1:100% 0 0;--b2:94.9119% 0 0;--b3:89.7547% 0 0;--in:45.2014% 0.313214 264.052021;--su:51.9752% 0.176858 142.495339;--wa:70.8131% 0.099969 108.702381;--er:62.7955% 0.257683 29.233885;--rounded-box:0.2rem;--rounded-btn:0.2rem;--rounded-badge:0.2rem;--tab-radius:0.2rem}:root:has(input.theme-controller[value=wireframe]:checked){color-scheme:light;--bc:20% 0 0;--pc:15.6521% 0 0;--sc:15.6521% 0 0;--ac:15.6521% 0 0;--nc:18.8014% 0 0;--inc:89.0403% 0.062643 264.052021;--suc:90.395% 0.035372 142.495339;--wac:14.1626% 0.019994 108.702381;--erc:12.5591% 0.051537 29.233885;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;font-family:Chalkboard,comic sans ms,sans-serif;--p:78.2604% 0 0;--s:78.2604% 0 0;--a:78.2604% 0 0;--n:94.007% 0 0;--b1:100% 0 0;--b2:94.9119% 0 0;--b3:89.7547% 0 0;--in:45.2014% 0.313214 264.052021;--su:51.9752% 0.176858 142.495339;--wa:70.8131% 0.099969 108.702381;--er:62.7955% 0.257683 29.233885;--rounded-box:0.2rem;--rounded-btn:0.2rem;--rounded-badge:0.2rem;--tab-radius:0.2rem}[data-theme=black]{color-scheme:dark;--pc:86.736% 0 0;--sc:86.736% 0 0;--ac:86.736% 0 0;--nc:86.736% 0 0;--inc:89.0403% 0.062643 264.052021;--suc:90.395% 0.035372 142.495339;--wac:19.3597% 0.042201 109.769232;--erc:12.5591% 0.051537 29.233885;--border-btn:1px;--tab-border:1px;--p:33.6799% 0 0;--s:33.6799% 0 0;--a:33.6799% 0 0;--b1:0% 0 0;--b2:19.1251% 0 0;--b3:26.8618% 0 0;--bc:87.6096% 0 0;--n:33.6799% 0 0;--in:45.2014% 0.313214 264.052021;--su:51.9752% 0.176858 142.495339;--wa:96.7983% 0.211006 109.769232;--er:62.7955% 0.257683 29.233885;--rounded-box:0;--rounded-btn:0;--rounded-badge:0;--animation-btn:0;--animation-input:0;--btn-focus-scale:1;--tab-radius:0}:root:has(input.theme-controller[value=black]:checked){color-scheme:dark;--pc:86.736% 0 0;--sc:86.736% 0 0;--ac:86.736% 0 0;--nc:86.736% 0 0;--inc:89.0403% 0.062643 264.052021;--suc:90.395% 0.035372 142.495339;--wac:19.3597% 0.042201 109.769232;--erc:12.5591% 0.051537 29.233885;--border-btn:1px;--tab-border:1px;--p:33.6799% 0 0;--s:33.6799% 0 0;--a:33.6799% 0 0;--b1:0% 0 0;--b2:19.1251% 0 0;--b3:26.8618% 0 0;--bc:87.6096% 0 0;--n:33.6799% 0 0;--in:45.2014% 0.313214 264.052021;--su:51.9752% 0.176858 142.495339;--wa:96.7983% 0.211006 109.769232;--er:62.7955% 0.257683 
29.233885;--rounded-box:0;--rounded-btn:0;--rounded-badge:0;--animation-btn:0;--animation-input:0;--btn-focus-scale:1;--tab-radius:0}[data-theme=luxury]{color-scheme:dark;--pc:20% 0 0;--sc:85.5163% 0.012821 261.069149;--ac:87.3349% 0.010348 338.82597;--inc:15.8122% 0.024356 237.133883;--suc:15.6239% 0.038579 132.154381;--wac:17.2255% 0.027305 102.89115;--erc:14.3506% 0.035271 22.568916;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:100% 0 0;--s:27.5815% 0.064106 261.069149;--a:36.6744% 0.051741 338.82597;--n:24.27% 0.057015 59.825019;--nc:93.2033% 0.089631 90.861683;--b1:14.0765% 0.004386 285.822869;--b2:20.2191% 0.004211 308.22937;--b3:29.8961% 0.003818 308.318612;--bc:75.6879% 0.123666 76.890484;--in:79.0612% 0.121778 237.133883;--su:78.1197% 0.192894 132.154381;--wa:86.1274% 0.136524 102.89115;--er:71.7531% 0.176357 22.568916}:root:has(input.theme-controller[value=luxury]:checked){color-scheme:dark;--pc:20% 0 0;--sc:85.5163% 0.012821 261.069149;--ac:87.3349% 0.010348 338.82597;--inc:15.8122% 0.024356 237.133883;--suc:15.6239% 0.038579 132.154381;--wac:17.2255% 0.027305 102.89115;--erc:14.3506% 0.035271 22.568916;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:100% 0 0;--s:27.5815% 0.064106 261.069149;--a:36.6744% 0.051741 338.82597;--n:24.27% 0.057015 59.825019;--nc:93.2033% 0.089631 90.861683;--b1:14.0765% 0.004386 285.822869;--b2:20.2191% 0.004211 308.22937;--b3:29.8961% 0.003818 308.318612;--bc:75.6879% 0.123666 76.890484;--in:79.0612% 0.121778 237.133883;--su:78.1197% 0.192894 132.154381;--wa:86.1274% 0.136524 102.89115;--er:71.7531% 0.176357 22.568916}[data-theme=dracula]{color-scheme:dark;--b2:26.8053% 0.020556 277.508664;--b3:24.7877% 0.019009 277.508664;--pc:15.0922% 0.036614 346.812432;--sc:14.8405% 0.029709 301.883095;--ac:16.6785% 0.024826 66.558491;--nc:87.8891% 0.006515 275.524078;--inc:17.6526% 0.018676 212.846491;--suc:17.4199% 0.043903 148.024881;--wac:19.1068% 0.026849 112.757109;--erc:13.6441% 0.041266 24.430965;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:75.4611% 0.18307 346.812432;--s:74.2023% 0.148546 301.883095;--a:83.3927% 0.124132 66.558491;--n:39.4456% 0.032576 275.524078;--b1:28.8229% 0.022103 277.508664;--bc:97.7477% 0.007913 106.545019;--in:88.263% 0.09338 212.846491;--su:87.0995% 0.219516 148.024881;--wa:95.5338% 0.134246 112.757109;--er:68.2204% 0.206328 24.430965}:root:has(input.theme-controller[value=dracula]:checked){color-scheme:dark;--b2:26.8053% 0.020556 277.508664;--b3:24.7877% 0.019009 277.508664;--pc:15.0922% 0.036614 346.812432;--sc:14.8405% 0.029709 301.883095;--ac:16.6785% 0.024826 66.558491;--nc:87.8891% 0.006515 275.524078;--inc:17.6526% 0.018676 212.846491;--suc:17.4199% 0.043903 148.024881;--wac:19.1068% 0.026849 112.757109;--erc:13.6441% 0.041266 24.430965;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:75.4611% 0.18307 346.812432;--s:74.2023% 0.148546 301.883095;--a:83.3927% 0.124132 66.558491;--n:39.4456% 0.032576 275.524078;--b1:28.8229% 0.022103 277.508664;--bc:97.7477% 0.007913 
106.545019;--in:88.263% 0.09338 212.846491;--su:87.0995% 0.219516 148.024881;--wa:95.5338% 0.134246 112.757109;--er:68.2204% 0.206328 24.430965}[data-theme=cmyk]{color-scheme:light;--b2:93% 0 0;--b3:86% 0 0;--bc:20% 0 0;--pc:14.3544% 0.02666 239.443325;--sc:12.8953% 0.040552 359.339283;--ac:18.8458% 0.037948 105.306968;--nc:84.3557% 0 0;--inc:13.6952% 0.0189 217.284104;--suc:89.3898% 0.032505 321.406278;--wac:14.2473% 0.031969 52.023412;--erc:12.4027% 0.041677 28.717543;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:71.7722% 0.133298 239.443325;--s:64.4766% 0.202758 359.339283;--a:94.2289% 0.189741 105.306968;--n:21.7787% 0 0;--b1:100% 0 0;--in:68.4759% 0.094499 217.284104;--su:46.949% 0.162524 321.406278;--wa:71.2364% 0.159843 52.023412;--er:62.0133% 0.208385 28.717543}:root:has(input.theme-controller[value=cmyk]:checked){color-scheme:light;--b2:93% 0 0;--b3:86% 0 0;--bc:20% 0 0;--pc:14.3544% 0.02666 239.443325;--sc:12.8953% 0.040552 359.339283;--ac:18.8458% 0.037948 105.306968;--nc:84.3557% 0 0;--inc:13.6952% 0.0189 217.284104;--suc:89.3898% 0.032505 321.406278;--wac:14.2473% 0.031969 52.023412;--erc:12.4027% 0.041677 28.717543;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:71.7722% 0.133298 239.443325;--s:64.4766% 0.202758 359.339283;--a:94.2289% 0.189741 105.306968;--n:21.7787% 0 0;--b1:100% 0 0;--in:68.4759% 0.094499 217.284104;--su:46.949% 0.162524 321.406278;--wa:71.2364% 0.159843 52.023412;--er:62.0133% 0.208385 28.717543}[data-theme=autumn]{color-scheme:light;--b2:89.1077% 0 0;--b3:82.4006% 0 0;--bc:19.1629% 0 0;--pc:88.1446% 0.032232 17.530175;--sc:12.3353% 0.033821 23.865865;--ac:14.6851% 0.018999 60.729616;--nc:90.8734% 0.007475 51.902819;--inc:13.8449% 0.019596 207.284192;--suc:12.199% 0.016032 174.616213;--wac:14.0163% 0.032982 56.844303;--erc:90.614% 0.0482 24.16;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:40.7232% 0.16116 17.530175;--s:61.6763% 0.169105 23.865865;--a:73.4253% 0.094994 60.729616;--n:54.3672% 0.037374 51.902819;--b1:95.8147% 0 0;--in:69.2245% 0.097979 207.284192;--su:60.9951% 0.080159 174.616213;--wa:70.0817% 0.164909 56.844303;--er:53.07% 0.241 24.16}:root:has(input.theme-controller[value=autumn]:checked){color-scheme:light;--b2:89.1077% 0 0;--b3:82.4006% 0 0;--bc:19.1629% 0 0;--pc:88.1446% 0.032232 17.530175;--sc:12.3353% 0.033821 23.865865;--ac:14.6851% 0.018999 60.729616;--nc:90.8734% 0.007475 51.902819;--inc:13.8449% 0.019596 207.284192;--suc:12.199% 0.016032 174.616213;--wac:14.0163% 0.032982 56.844303;--erc:90.614% 0.0482 24.16;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:40.7232% 0.16116 17.530175;--s:61.6763% 0.169105 23.865865;--a:73.4253% 0.094994 60.729616;--n:54.3672% 0.037374 51.902819;--b1:95.8147% 0 0;--in:69.2245% 0.097979 207.284192;--su:60.9951% 0.080159 174.616213;--wa:70.0817% 0.164909 56.844303;--er:53.07% 0.241 24.16}[data-theme=business]{color-scheme:dark;--b2:22.6487% 0 0;--b3:20.944% 0 0;--bc:84.8707% 0 0;--pc:88.3407% 0.019811 251.473931;--sc:12.8185% 0.005481 
229.389418;--ac:13.4542% 0.033545 35.791525;--nc:85.4882% 0.00265 253.041249;--inc:12.5233% 0.028702 240.033697;--suc:14.0454% 0.018919 156.59611;--wac:15.4965% 0.023141 81.519177;--erc:90.3221% 0.029356 29.674507;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:41.7036% 0.099057 251.473931;--s:64.0924% 0.027405 229.389418;--a:67.271% 0.167726 35.791525;--n:27.441% 0.01325 253.041249;--b1:24.3535% 0 0;--in:62.6163% 0.143511 240.033697;--su:70.2268% 0.094594 156.59611;--wa:77.4824% 0.115704 81.519177;--er:51.6105% 0.14678 29.674507;--rounded-box:0.25rem;--rounded-btn:.125rem;--rounded-badge:.125rem}:root:has(input.theme-controller[value=business]:checked){color-scheme:dark;--b2:22.6487% 0 0;--b3:20.944% 0 0;--bc:84.8707% 0 0;--pc:88.3407% 0.019811 251.473931;--sc:12.8185% 0.005481 229.389418;--ac:13.4542% 0.033545 35.791525;--nc:85.4882% 0.00265 253.041249;--inc:12.5233% 0.028702 240.033697;--suc:14.0454% 0.018919 156.59611;--wac:15.4965% 0.023141 81.519177;--erc:90.3221% 0.029356 29.674507;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:41.7036% 0.099057 251.473931;--s:64.0924% 0.027405 229.389418;--a:67.271% 0.167726 35.791525;--n:27.441% 0.01325 253.041249;--b1:24.3535% 0 0;--in:62.6163% 0.143511 240.033697;--su:70.2268% 0.094594 156.59611;--wa:77.4824% 0.115704 81.519177;--er:51.6105% 0.14678 29.674507;--rounded-box:0.25rem;--rounded-btn:.125rem;--rounded-badge:.125rem}[data-theme=acid]{color-scheme:light;--b2:91.6146% 0 0;--b3:84.7189% 0 0;--bc:19.7021% 0 0;--pc:14.38% 0.0714 330.759573;--sc:14.674% 0.0448 48.250878;--ac:18.556% 0.0528 122.962951;--nc:84.262% 0.0256 278.68;--inc:12.144% 0.0454 252.05;--suc:17.144% 0.0532 158.53;--wac:18.202% 0.0424 100.5;--erc:12.968% 0.0586 29.349188;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--p:71.9% 0.357 330.759573;--s:73.37% 0.224 48.250878;--a:92.78% 0.264 122.962951;--n:21.31% 0.128 278.68;--b1:98.5104% 0 0;--in:60.72% 0.227 252.05;--su:85.72% 0.266 158.53;--wa:91.01% 0.212 100.5;--er:64.84% 0.293 29.349188;--rounded-box:1.25rem;--rounded-btn:1rem;--rounded-badge:1rem;--tab-radius:0.7rem}:root:has(input.theme-controller[value=acid]:checked){color-scheme:light;--b2:91.6146% 0 0;--b3:84.7189% 0 0;--bc:19.7021% 0 0;--pc:14.38% 0.0714 330.759573;--sc:14.674% 0.0448 48.250878;--ac:18.556% 0.0528 122.962951;--nc:84.262% 0.0256 278.68;--inc:12.144% 0.0454 252.05;--suc:17.144% 0.0532 158.53;--wac:18.202% 0.0424 100.5;--erc:12.968% 0.0586 29.349188;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--p:71.9% 0.357 330.759573;--s:73.37% 0.224 48.250878;--a:92.78% 0.264 122.962951;--n:21.31% 0.128 278.68;--b1:98.5104% 0 0;--in:60.72% 0.227 252.05;--su:85.72% 0.266 158.53;--wa:91.01% 0.212 100.5;--er:64.84% 0.293 29.349188;--rounded-box:1.25rem;--rounded-btn:1rem;--rounded-badge:1rem;--tab-radius:0.7rem}[data-theme=lemonade]{color-scheme:light;--b2:91.8003% 0.0186 123.72;--b3:84.8906% 0.0172 123.72;--bc:19.742% 0.004 123.72;--pc:11.784% 0.0398 134.6;--sc:15.55% 0.0392 111.09;--ac:17.078% 0.0402 100.73;--nc:86.196% 0.015 108.6;--inc:17.238% 0.0094 224.14;--suc:17.238% 0.0094 157.85;--wac:17.238% 0.0094 102.15;--erc:17.238% 0.0094 
25.85;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:58.92% 0.199 134.6;--s:77.75% 0.196 111.09;--a:85.39% 0.201 100.73;--n:30.98% 0.075 108.6;--b1:98.71% 0.02 123.72;--in:86.19% 0.047 224.14;--su:86.19% 0.047 157.85;--wa:86.19% 0.047 102.15;--er:86.19% 0.047 25.85}:root:has(input.theme-controller[value=lemonade]:checked){color-scheme:light;--b2:91.8003% 0.0186 123.72;--b3:84.8906% 0.0172 123.72;--bc:19.742% 0.004 123.72;--pc:11.784% 0.0398 134.6;--sc:15.55% 0.0392 111.09;--ac:17.078% 0.0402 100.73;--nc:86.196% 0.015 108.6;--inc:17.238% 0.0094 224.14;--suc:17.238% 0.0094 157.85;--wac:17.238% 0.0094 102.15;--erc:17.238% 0.0094 25.85;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:58.92% 0.199 134.6;--s:77.75% 0.196 111.09;--a:85.39% 0.201 100.73;--n:30.98% 0.075 108.6;--b1:98.71% 0.02 123.72;--in:86.19% 0.047 224.14;--su:86.19% 0.047 157.85;--wa:86.19% 0.047 102.15;--er:86.19% 0.047 25.85}[data-theme=night]{color-scheme:dark;--b2:19.3144% 0.037037 265.754874;--b3:17.8606% 0.034249 265.754874;--bc:84.1536% 0.007965 265.754874;--pc:15.0703% 0.027798 232.66148;--sc:13.6023% 0.031661 276.934902;--ac:14.4721% 0.035244 350.048739;--nc:85.5899% 0.00737 260.030984;--suc:15.6904% 0.026506 181.911977;--wac:16.6486% 0.027912 82.95003;--erc:14.3572% 0.034051 13.11834;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:75.3513% 0.138989 232.66148;--s:68.0113% 0.158303 276.934902;--a:72.3603% 0.176218 350.048739;--n:27.9495% 0.036848 260.030984;--b1:20.7682% 0.039824 265.754874;--in:68.4553% 0.148062 237.25135;--inc:0% 0 0;--su:78.452% 0.132529 181.911977;--wa:83.2428% 0.139558 82.95003;--er:71.7858% 0.170255 13.11834}:root:has(input.theme-controller[value=night]:checked){color-scheme:dark;--b2:19.3144% 0.037037 265.754874;--b3:17.8606% 0.034249 265.754874;--bc:84.1536% 0.007965 265.754874;--pc:15.0703% 0.027798 232.66148;--sc:13.6023% 0.031661 276.934902;--ac:14.4721% 0.035244 350.048739;--nc:85.5899% 0.00737 260.030984;--suc:15.6904% 0.026506 181.911977;--wac:16.6486% 0.027912 82.95003;--erc:14.3572% 0.034051 13.11834;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:75.3513% 0.138989 232.66148;--s:68.0113% 0.158303 276.934902;--a:72.3603% 0.176218 350.048739;--n:27.9495% 0.036848 260.030984;--b1:20.7682% 0.039824 265.754874;--in:68.4553% 0.148062 237.25135;--inc:0% 0 0;--su:78.452% 0.132529 181.911977;--wa:83.2428% 0.139558 82.95003;--er:71.7858% 0.170255 13.11834}[data-theme=coffee]{color-scheme:dark;--b2:20.1585% 0.021457 329.708637;--b3:18.6412% 0.019842 329.708637;--pc:14.3993% 0.024765 62.756393;--sc:86.893% 0.00597 199.19444;--ac:88.5243% 0.014881 224.389184;--nc:83.3022% 0.003149 326.261446;--inc:15.898% 0.012774 184.558367;--suc:14.9445% 0.014491 131.116276;--wac:17.6301% 0.028162 87.722413;--erc:15.4637% 0.025644 31.871922;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:71.9967% 0.123825 62.756393;--s:34.465% 
0.029849 199.19444;--a:42.6213% 0.074405 224.389184;--n:16.5109% 0.015743 326.261446;--b1:21.6758% 0.023072 329.708637;--bc:72.3547% 0.092794 79.129387;--in:79.4902% 0.063869 184.558367;--su:74.7224% 0.072456 131.116276;--wa:88.1503% 0.140812 87.722413;--er:77.3187% 0.12822 31.871922}:root:has(input.theme-controller[value=coffee]:checked){color-scheme:dark;--b2:20.1585% 0.021457 329.708637;--b3:18.6412% 0.019842 329.708637;--pc:14.3993% 0.024765 62.756393;--sc:86.893% 0.00597 199.19444;--ac:88.5243% 0.014881 224.389184;--nc:83.3022% 0.003149 326.261446;--inc:15.898% 0.012774 184.558367;--suc:14.9445% 0.014491 131.116276;--wac:17.6301% 0.028162 87.722413;--erc:15.4637% 0.025644 31.871922;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:71.9967% 0.123825 62.756393;--s:34.465% 0.029849 199.19444;--a:42.6213% 0.074405 224.389184;--n:16.5109% 0.015743 326.261446;--b1:21.6758% 0.023072 329.708637;--bc:72.3547% 0.092794 79.129387;--in:79.4902% 0.063869 184.558367;--su:74.7224% 0.072456 131.116276;--wa:88.1503% 0.140812 87.722413;--er:77.3187% 0.12822 31.871922}[data-theme=winter]{color-scheme:light;--pc:91.372% 0.051 257.57;--sc:88.5103% 0.03222 282.339433;--ac:11.988% 0.038303 335.171434;--nc:83.9233% 0.012704 257.651965;--inc:17.6255% 0.017178 214.515264;--suc:16.0988% 0.015404 197.823719;--wac:17.8345% 0.009167 71.47031;--erc:14.6185% 0.022037 20.076293;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:56.86% 0.255 257.57;--s:42.5516% 0.161098 282.339433;--a:59.9398% 0.191515 335.171434;--n:19.6166% 0.063518 257.651965;--b1:100% 0 0;--b2:97.4663% 0.011947 259.822565;--b3:93.2686% 0.016223 262.751375;--bc:41.8869% 0.053885 255.824911;--in:88.1275% 0.085888 214.515264;--su:80.4941% 0.077019 197.823719;--wa:89.1725% 0.045833 71.47031;--er:73.0926% 0.110185 20.076293}:root:has(input.theme-controller[value=winter]:checked){color-scheme:light;--pc:91.372% 0.051 257.57;--sc:88.5103% 0.03222 282.339433;--ac:11.988% 0.038303 335.171434;--nc:83.9233% 0.012704 257.651965;--inc:17.6255% 0.017178 214.515264;--suc:16.0988% 0.015404 197.823719;--wac:17.8345% 0.009167 71.47031;--erc:14.6185% 0.022037 20.076293;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:56.86% 0.255 257.57;--s:42.5516% 0.161098 282.339433;--a:59.9398% 0.191515 335.171434;--n:19.6166% 0.063518 257.651965;--b1:100% 0 0;--b2:97.4663% 0.011947 259.822565;--b3:93.2686% 0.016223 262.751375;--bc:41.8869% 0.053885 255.824911;--in:88.1275% 0.085888 214.515264;--su:80.4941% 0.077019 197.823719;--wa:89.1725% 0.045833 71.47031;--er:73.0926% 0.110185 20.076293}[data-theme=dim]{color-scheme:dark;--pc:17.2267% 0.028331 139.549991;--sc:14.6752% 0.033181 35.353059;--ac:14.8459% 0.026728 311.37924;--inc:17.2157% 0.028409 206.182959;--suc:17.2343% 0.028437 166.534048;--wac:17.2327% 0.028447 94.818679;--erc:16.4838% 0.019914 33.756357;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:86.1335% 0.141656 139.549991;--s:73.3759% 0.165904 35.353059;--a:74.2296% 0.133641 311.37924;--n:24.7311% 0.020483 
264.094728;--nc:82.9011% 0.031335 222.959324;--b1:30.8577% 0.023243 264.149498;--b2:28.0368% 0.01983 264.182074;--b3:26.3469% 0.018403 262.177739;--bc:82.9011% 0.031335 222.959324;--in:86.0785% 0.142046 206.182959;--su:86.1717% 0.142187 166.534048;--wa:86.1634% 0.142236 94.818679;--er:82.4189% 0.09957 33.756357}:root:has(input.theme-controller[value=dim]:checked){color-scheme:dark;--pc:17.2267% 0.028331 139.549991;--sc:14.6752% 0.033181 35.353059;--ac:14.8459% 0.026728 311.37924;--inc:17.2157% 0.028409 206.182959;--suc:17.2343% 0.028437 166.534048;--wac:17.2327% 0.028447 94.818679;--erc:16.4838% 0.019914 33.756357;--rounded-box:1rem;--rounded-btn:0.5rem;--rounded-badge:1.9rem;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--tab-radius:0.5rem;--p:86.1335% 0.141656 139.549991;--s:73.3759% 0.165904 35.353059;--a:74.2296% 0.133641 311.37924;--n:24.7311% 0.020483 264.094728;--nc:82.9011% 0.031335 222.959324;--b1:30.8577% 0.023243 264.149498;--b2:28.0368% 0.01983 264.182074;--b3:26.3469% 0.018403 262.177739;--bc:82.9011% 0.031335 222.959324;--in:86.0785% 0.142046 206.182959;--su:86.1717% 0.142187 166.534048;--wa:86.1634% 0.142236 94.818679;--er:82.4189% 0.09957 33.756357}[data-theme=nord]{color-scheme:light;--pc:11.8872% 0.015449 254.027774;--sc:13.9303% 0.011822 248.687186;--ac:15.4929% 0.01245 217.469017;--inc:13.8414% 0.012499 332.664922;--suc:15.3654% 0.01498 131.063061;--wac:17.0972% 0.017847 84.093335;--erc:12.122% 0.024119 15.341883;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--p:59.4359% 0.077246 254.027774;--s:69.6516% 0.059108 248.687186;--a:77.4643% 0.062249 217.469017;--n:45.229% 0.035214 264.1312;--nc:89.9258% 0.016374 262.749256;--b1:95.1276% 0.007445 260.731539;--b2:93.2996% 0.010389 261.788485;--b3:89.9258% 0.016374 262.749256;--bc:32.4374% 0.022945 264.182036;--in:69.2072% 0.062496 332.664922;--su:76.827% 0.074899 131.063061;--wa:85.4862% 0.089234 84.093335;--er:60.61% 0.120594 15.341883;--rounded-box:0.4rem;--rounded-btn:0.2rem;--rounded-badge:0.4rem;--tab-radius:0.2rem}:root:has(input.theme-controller[value=nord]:checked){color-scheme:light;--pc:11.8872% 0.015449 254.027774;--sc:13.9303% 0.011822 248.687186;--ac:15.4929% 0.01245 217.469017;--inc:13.8414% 0.012499 332.664922;--suc:15.3654% 0.01498 131.063061;--wac:17.0972% 0.017847 84.093335;--erc:12.122% 0.024119 15.341883;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--p:59.4359% 0.077246 254.027774;--s:69.6516% 0.059108 248.687186;--a:77.4643% 0.062249 217.469017;--n:45.229% 0.035214 264.1312;--nc:89.9258% 0.016374 262.749256;--b1:95.1276% 0.007445 260.731539;--b2:93.2996% 0.010389 261.788485;--b3:89.9258% 0.016374 262.749256;--bc:32.4374% 0.022945 264.182036;--in:69.2072% 0.062496 332.664922;--su:76.827% 0.074899 131.063061;--wa:85.4862% 0.089234 84.093335;--er:60.61% 0.120594 15.341883;--rounded-box:0.4rem;--rounded-btn:0.2rem;--rounded-badge:0.4rem;--tab-radius:0.2rem}[data-theme=sunset]{color-scheme:dark;--pc:14.9408% 0.031656 39.94703;--sc:14.5075% 0.035531 2.72034;--ac:14.2589% 0.033336 299.844533;--inc:17.1119% 0.017054 206.015183;--suc:17.1122% 0.017172 144.77874;--wac:17.1139% 0.016961 74.427797;--erc:17.1023% 0.015778 16.886379;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--p:74.7039% 0.158278 39.94703;--s:72.5375% 0.177654 2.72034;--a:71.2947% 0.166678 299.844533;--n:26% 0.019 237.69;--nc:70% 
0.019 237.69;--b1:22% 0.019 237.69;--b2:20% 0.019 237.69;--b3:18% 0.019 237.69;--bc:77.3835% 0.043586 245.096534;--in:85.5596% 0.085271 206.015183;--su:85.5609% 0.08586 144.77874;--wa:85.5695% 0.084806 74.427797;--er:85.5116% 0.07889 16.886379;--rounded-box:1.2rem;--rounded-btn:0.8rem;--rounded-badge:0.4rem;--tab-radius:0.7rem}:root:has(input.theme-controller[value=sunset]:checked){color-scheme:dark;--pc:14.9408% 0.031656 39.94703;--sc:14.5075% 0.035531 2.72034;--ac:14.2589% 0.033336 299.844533;--inc:17.1119% 0.017054 206.015183;--suc:17.1122% 0.017172 144.77874;--wac:17.1139% 0.016961 74.427797;--erc:17.1023% 0.015778 16.886379;--animation-btn:0.25s;--animation-input:.2s;--btn-focus-scale:0.95;--border-btn:1px;--tab-border:1px;--p:74.7039% 0.158278 39.94703;--s:72.5375% 0.177654 2.72034;--a:71.2947% 0.166678 299.844533;--n:26% 0.019 237.69;--nc:70% 0.019 237.69;--b1:22% 0.019 237.69;--b2:20% 0.019 237.69;--b3:18% 0.019 237.69;--bc:77.3835% 0.043586 245.096534;--in:85.5596% 0.085271 206.015183;--su:85.5609% 0.08586 144.77874;--wa:85.5695% 0.084806 74.427797;--er:85.5116% 0.07889 16.886379;--rounded-box:1.2rem;--rounded-btn:0.8rem;--rounded-badge:0.4rem;--tab-radius:0.7rem}
diff --git a/examples/server/public/deps_markdown-it.js b/examples/server/public/deps_markdown-it.js
deleted file mode 100644
index 1be0cebe6a440..0000000000000
--- a/examples/server/public/deps_markdown-it.js
+++ /dev/null
@@ -1,8442 +0,0 @@
-/*! markdown-it 13.0.2 https://github.com/markdown-it/markdown-it @license MIT */ -(function(global, factory) { - typeof exports === "object" && typeof module !== "undefined" ? module.exports = factory() : typeof define === "function" && define.amd ? define(factory) : (global = typeof globalThis !== "undefined" ? globalThis : global || self, - global.markdownit = factory()); -})(this, (function() { - "use strict"; - function createCommonjsModule(fn, basedir, module) { - return module = { - path: basedir, - exports: {}, - require: function(path, base) { - return commonjsRequire(path, base === undefined || base === null ? module.path : base); - } - }, fn(module, module.exports), module.exports; - } - function getAugmentedNamespace(n) { - if (n.__esModule) return n; - var a = Object.defineProperty({}, "__esModule", { - value: true - }); - Object.keys(n).forEach((function(k) { - var d = Object.getOwnPropertyDescriptor(n, k); - Object.defineProperty(a, k, d.get ?
d : { - enumerable: true, - get: function() { - return n[k]; - } - }); - })); - return a; - } - function commonjsRequire() { - throw new Error("Dynamic requires are not currently supported by @rollup/plugin-commonjs"); - } - var require$$0 = { - Aacute: "\xc1", - aacute: "\xe1", - Abreve: "\u0102", - abreve: "\u0103", - ac: "\u223e", - acd: "\u223f", - acE: "\u223e\u0333", - Acirc: "\xc2", - acirc: "\xe2", - acute: "\xb4", - Acy: "\u0410", - acy: "\u0430", - AElig: "\xc6", - aelig: "\xe6", - af: "\u2061", - Afr: "\ud835\udd04", - afr: "\ud835\udd1e", - Agrave: "\xc0", - agrave: "\xe0", - alefsym: "\u2135", - aleph: "\u2135", - Alpha: "\u0391", - alpha: "\u03b1", - Amacr: "\u0100", - amacr: "\u0101", - amalg: "\u2a3f", - amp: "&", - AMP: "&", - andand: "\u2a55", - And: "\u2a53", - and: "\u2227", - andd: "\u2a5c", - andslope: "\u2a58", - andv: "\u2a5a", - ang: "\u2220", - ange: "\u29a4", - angle: "\u2220", - angmsdaa: "\u29a8", - angmsdab: "\u29a9", - angmsdac: "\u29aa", - angmsdad: "\u29ab", - angmsdae: "\u29ac", - angmsdaf: "\u29ad", - angmsdag: "\u29ae", - angmsdah: "\u29af", - angmsd: "\u2221", - angrt: "\u221f", - angrtvb: "\u22be", - angrtvbd: "\u299d", - angsph: "\u2222", - angst: "\xc5", - angzarr: "\u237c", - Aogon: "\u0104", - aogon: "\u0105", - Aopf: "\ud835\udd38", - aopf: "\ud835\udd52", - apacir: "\u2a6f", - ap: "\u2248", - apE: "\u2a70", - ape: "\u224a", - apid: "\u224b", - apos: "'", - ApplyFunction: "\u2061", - approx: "\u2248", - approxeq: "\u224a", - Aring: "\xc5", - aring: "\xe5", - Ascr: "\ud835\udc9c", - ascr: "\ud835\udcb6", - Assign: "\u2254", - ast: "*", - asymp: "\u2248", - asympeq: "\u224d", - Atilde: "\xc3", - atilde: "\xe3", - Auml: "\xc4", - auml: "\xe4", - awconint: "\u2233", - awint: "\u2a11", - backcong: "\u224c", - backepsilon: "\u03f6", - backprime: "\u2035", - backsim: "\u223d", - backsimeq: "\u22cd", - Backslash: "\u2216", - Barv: "\u2ae7", - barvee: "\u22bd", - barwed: "\u2305", - Barwed: "\u2306", - barwedge: "\u2305", - bbrk: "\u23b5", - bbrktbrk: "\u23b6", - bcong: "\u224c", - Bcy: "\u0411", - bcy: "\u0431", - bdquo: "\u201e", - becaus: "\u2235", - because: "\u2235", - Because: "\u2235", - bemptyv: "\u29b0", - bepsi: "\u03f6", - bernou: "\u212c", - Bernoullis: "\u212c", - Beta: "\u0392", - beta: "\u03b2", - beth: "\u2136", - between: "\u226c", - Bfr: "\ud835\udd05", - bfr: "\ud835\udd1f", - bigcap: "\u22c2", - bigcirc: "\u25ef", - bigcup: "\u22c3", - bigodot: "\u2a00", - bigoplus: "\u2a01", - bigotimes: "\u2a02", - bigsqcup: "\u2a06", - bigstar: "\u2605", - bigtriangledown: "\u25bd", - bigtriangleup: "\u25b3", - biguplus: "\u2a04", - bigvee: "\u22c1", - bigwedge: "\u22c0", - bkarow: "\u290d", - blacklozenge: "\u29eb", - blacksquare: "\u25aa", - blacktriangle: "\u25b4", - blacktriangledown: "\u25be", - blacktriangleleft: "\u25c2", - blacktriangleright: "\u25b8", - blank: "\u2423", - blk12: "\u2592", - blk14: "\u2591", - blk34: "\u2593", - block: "\u2588", - bne: "=\u20e5", - bnequiv: "\u2261\u20e5", - bNot: "\u2aed", - bnot: "\u2310", - Bopf: "\ud835\udd39", - bopf: "\ud835\udd53", - bot: "\u22a5", - bottom: "\u22a5", - bowtie: "\u22c8", - boxbox: "\u29c9", - boxdl: "\u2510", - boxdL: "\u2555", - boxDl: "\u2556", - boxDL: "\u2557", - boxdr: "\u250c", - boxdR: "\u2552", - boxDr: "\u2553", - boxDR: "\u2554", - boxh: "\u2500", - boxH: "\u2550", - boxhd: "\u252c", - boxHd: "\u2564", - boxhD: "\u2565", - boxHD: "\u2566", - boxhu: "\u2534", - boxHu: "\u2567", - boxhU: "\u2568", - boxHU: "\u2569", - boxminus: "\u229f", - boxplus: "\u229e", - boxtimes: 
"\u22a0", - boxul: "\u2518", - boxuL: "\u255b", - boxUl: "\u255c", - boxUL: "\u255d", - boxur: "\u2514", - boxuR: "\u2558", - boxUr: "\u2559", - boxUR: "\u255a", - boxv: "\u2502", - boxV: "\u2551", - boxvh: "\u253c", - boxvH: "\u256a", - boxVh: "\u256b", - boxVH: "\u256c", - boxvl: "\u2524", - boxvL: "\u2561", - boxVl: "\u2562", - boxVL: "\u2563", - boxvr: "\u251c", - boxvR: "\u255e", - boxVr: "\u255f", - boxVR: "\u2560", - bprime: "\u2035", - breve: "\u02d8", - Breve: "\u02d8", - brvbar: "\xa6", - bscr: "\ud835\udcb7", - Bscr: "\u212c", - bsemi: "\u204f", - bsim: "\u223d", - bsime: "\u22cd", - bsolb: "\u29c5", - bsol: "\\", - bsolhsub: "\u27c8", - bull: "\u2022", - bullet: "\u2022", - bump: "\u224e", - bumpE: "\u2aae", - bumpe: "\u224f", - Bumpeq: "\u224e", - bumpeq: "\u224f", - Cacute: "\u0106", - cacute: "\u0107", - capand: "\u2a44", - capbrcup: "\u2a49", - capcap: "\u2a4b", - cap: "\u2229", - Cap: "\u22d2", - capcup: "\u2a47", - capdot: "\u2a40", - CapitalDifferentialD: "\u2145", - caps: "\u2229\ufe00", - caret: "\u2041", - caron: "\u02c7", - Cayleys: "\u212d", - ccaps: "\u2a4d", - Ccaron: "\u010c", - ccaron: "\u010d", - Ccedil: "\xc7", - ccedil: "\xe7", - Ccirc: "\u0108", - ccirc: "\u0109", - Cconint: "\u2230", - ccups: "\u2a4c", - ccupssm: "\u2a50", - Cdot: "\u010a", - cdot: "\u010b", - cedil: "\xb8", - Cedilla: "\xb8", - cemptyv: "\u29b2", - cent: "\xa2", - centerdot: "\xb7", - CenterDot: "\xb7", - cfr: "\ud835\udd20", - Cfr: "\u212d", - CHcy: "\u0427", - chcy: "\u0447", - check: "\u2713", - checkmark: "\u2713", - Chi: "\u03a7", - chi: "\u03c7", - circ: "\u02c6", - circeq: "\u2257", - circlearrowleft: "\u21ba", - circlearrowright: "\u21bb", - circledast: "\u229b", - circledcirc: "\u229a", - circleddash: "\u229d", - CircleDot: "\u2299", - circledR: "\xae", - circledS: "\u24c8", - CircleMinus: "\u2296", - CirclePlus: "\u2295", - CircleTimes: "\u2297", - cir: "\u25cb", - cirE: "\u29c3", - cire: "\u2257", - cirfnint: "\u2a10", - cirmid: "\u2aef", - cirscir: "\u29c2", - ClockwiseContourIntegral: "\u2232", - CloseCurlyDoubleQuote: "\u201d", - CloseCurlyQuote: "\u2019", - clubs: "\u2663", - clubsuit: "\u2663", - colon: ":", - Colon: "\u2237", - Colone: "\u2a74", - colone: "\u2254", - coloneq: "\u2254", - comma: ",", - commat: "@", - comp: "\u2201", - compfn: "\u2218", - complement: "\u2201", - complexes: "\u2102", - cong: "\u2245", - congdot: "\u2a6d", - Congruent: "\u2261", - conint: "\u222e", - Conint: "\u222f", - ContourIntegral: "\u222e", - copf: "\ud835\udd54", - Copf: "\u2102", - coprod: "\u2210", - Coproduct: "\u2210", - copy: "\xa9", - COPY: "\xa9", - copysr: "\u2117", - CounterClockwiseContourIntegral: "\u2233", - crarr: "\u21b5", - cross: "\u2717", - Cross: "\u2a2f", - Cscr: "\ud835\udc9e", - cscr: "\ud835\udcb8", - csub: "\u2acf", - csube: "\u2ad1", - csup: "\u2ad0", - csupe: "\u2ad2", - ctdot: "\u22ef", - cudarrl: "\u2938", - cudarrr: "\u2935", - cuepr: "\u22de", - cuesc: "\u22df", - cularr: "\u21b6", - cularrp: "\u293d", - cupbrcap: "\u2a48", - cupcap: "\u2a46", - CupCap: "\u224d", - cup: "\u222a", - Cup: "\u22d3", - cupcup: "\u2a4a", - cupdot: "\u228d", - cupor: "\u2a45", - cups: "\u222a\ufe00", - curarr: "\u21b7", - curarrm: "\u293c", - curlyeqprec: "\u22de", - curlyeqsucc: "\u22df", - curlyvee: "\u22ce", - curlywedge: "\u22cf", - curren: "\xa4", - curvearrowleft: "\u21b6", - curvearrowright: "\u21b7", - cuvee: "\u22ce", - cuwed: "\u22cf", - cwconint: "\u2232", - cwint: "\u2231", - cylcty: "\u232d", - dagger: "\u2020", - Dagger: "\u2021", - daleth: "\u2138", - darr: 
"\u2193", - Darr: "\u21a1", - dArr: "\u21d3", - dash: "\u2010", - Dashv: "\u2ae4", - dashv: "\u22a3", - dbkarow: "\u290f", - dblac: "\u02dd", - Dcaron: "\u010e", - dcaron: "\u010f", - Dcy: "\u0414", - dcy: "\u0434", - ddagger: "\u2021", - ddarr: "\u21ca", - DD: "\u2145", - dd: "\u2146", - DDotrahd: "\u2911", - ddotseq: "\u2a77", - deg: "\xb0", - Del: "\u2207", - Delta: "\u0394", - delta: "\u03b4", - demptyv: "\u29b1", - dfisht: "\u297f", - Dfr: "\ud835\udd07", - dfr: "\ud835\udd21", - dHar: "\u2965", - dharl: "\u21c3", - dharr: "\u21c2", - DiacriticalAcute: "\xb4", - DiacriticalDot: "\u02d9", - DiacriticalDoubleAcute: "\u02dd", - DiacriticalGrave: "`", - DiacriticalTilde: "\u02dc", - diam: "\u22c4", - diamond: "\u22c4", - Diamond: "\u22c4", - diamondsuit: "\u2666", - diams: "\u2666", - die: "\xa8", - DifferentialD: "\u2146", - digamma: "\u03dd", - disin: "\u22f2", - div: "\xf7", - divide: "\xf7", - divideontimes: "\u22c7", - divonx: "\u22c7", - DJcy: "\u0402", - djcy: "\u0452", - dlcorn: "\u231e", - dlcrop: "\u230d", - dollar: "$", - Dopf: "\ud835\udd3b", - dopf: "\ud835\udd55", - Dot: "\xa8", - dot: "\u02d9", - DotDot: "\u20dc", - doteq: "\u2250", - doteqdot: "\u2251", - DotEqual: "\u2250", - dotminus: "\u2238", - dotplus: "\u2214", - dotsquare: "\u22a1", - doublebarwedge: "\u2306", - DoubleContourIntegral: "\u222f", - DoubleDot: "\xa8", - DoubleDownArrow: "\u21d3", - DoubleLeftArrow: "\u21d0", - DoubleLeftRightArrow: "\u21d4", - DoubleLeftTee: "\u2ae4", - DoubleLongLeftArrow: "\u27f8", - DoubleLongLeftRightArrow: "\u27fa", - DoubleLongRightArrow: "\u27f9", - DoubleRightArrow: "\u21d2", - DoubleRightTee: "\u22a8", - DoubleUpArrow: "\u21d1", - DoubleUpDownArrow: "\u21d5", - DoubleVerticalBar: "\u2225", - DownArrowBar: "\u2913", - downarrow: "\u2193", - DownArrow: "\u2193", - Downarrow: "\u21d3", - DownArrowUpArrow: "\u21f5", - DownBreve: "\u0311", - downdownarrows: "\u21ca", - downharpoonleft: "\u21c3", - downharpoonright: "\u21c2", - DownLeftRightVector: "\u2950", - DownLeftTeeVector: "\u295e", - DownLeftVectorBar: "\u2956", - DownLeftVector: "\u21bd", - DownRightTeeVector: "\u295f", - DownRightVectorBar: "\u2957", - DownRightVector: "\u21c1", - DownTeeArrow: "\u21a7", - DownTee: "\u22a4", - drbkarow: "\u2910", - drcorn: "\u231f", - drcrop: "\u230c", - Dscr: "\ud835\udc9f", - dscr: "\ud835\udcb9", - DScy: "\u0405", - dscy: "\u0455", - dsol: "\u29f6", - Dstrok: "\u0110", - dstrok: "\u0111", - dtdot: "\u22f1", - dtri: "\u25bf", - dtrif: "\u25be", - duarr: "\u21f5", - duhar: "\u296f", - dwangle: "\u29a6", - DZcy: "\u040f", - dzcy: "\u045f", - dzigrarr: "\u27ff", - Eacute: "\xc9", - eacute: "\xe9", - easter: "\u2a6e", - Ecaron: "\u011a", - ecaron: "\u011b", - Ecirc: "\xca", - ecirc: "\xea", - ecir: "\u2256", - ecolon: "\u2255", - Ecy: "\u042d", - ecy: "\u044d", - eDDot: "\u2a77", - Edot: "\u0116", - edot: "\u0117", - eDot: "\u2251", - ee: "\u2147", - efDot: "\u2252", - Efr: "\ud835\udd08", - efr: "\ud835\udd22", - eg: "\u2a9a", - Egrave: "\xc8", - egrave: "\xe8", - egs: "\u2a96", - egsdot: "\u2a98", - el: "\u2a99", - Element: "\u2208", - elinters: "\u23e7", - ell: "\u2113", - els: "\u2a95", - elsdot: "\u2a97", - Emacr: "\u0112", - emacr: "\u0113", - empty: "\u2205", - emptyset: "\u2205", - EmptySmallSquare: "\u25fb", - emptyv: "\u2205", - EmptyVerySmallSquare: "\u25ab", - emsp13: "\u2004", - emsp14: "\u2005", - emsp: "\u2003", - ENG: "\u014a", - eng: "\u014b", - ensp: "\u2002", - Eogon: "\u0118", - eogon: "\u0119", - Eopf: "\ud835\udd3c", - eopf: "\ud835\udd56", - epar: "\u22d5", - eparsl: 
"\u29e3", - eplus: "\u2a71", - epsi: "\u03b5", - Epsilon: "\u0395", - epsilon: "\u03b5", - epsiv: "\u03f5", - eqcirc: "\u2256", - eqcolon: "\u2255", - eqsim: "\u2242", - eqslantgtr: "\u2a96", - eqslantless: "\u2a95", - Equal: "\u2a75", - equals: "=", - EqualTilde: "\u2242", - equest: "\u225f", - Equilibrium: "\u21cc", - equiv: "\u2261", - equivDD: "\u2a78", - eqvparsl: "\u29e5", - erarr: "\u2971", - erDot: "\u2253", - escr: "\u212f", - Escr: "\u2130", - esdot: "\u2250", - Esim: "\u2a73", - esim: "\u2242", - Eta: "\u0397", - eta: "\u03b7", - ETH: "\xd0", - eth: "\xf0", - Euml: "\xcb", - euml: "\xeb", - euro: "\u20ac", - excl: "!", - exist: "\u2203", - Exists: "\u2203", - expectation: "\u2130", - exponentiale: "\u2147", - ExponentialE: "\u2147", - fallingdotseq: "\u2252", - Fcy: "\u0424", - fcy: "\u0444", - female: "\u2640", - ffilig: "\ufb03", - fflig: "\ufb00", - ffllig: "\ufb04", - Ffr: "\ud835\udd09", - ffr: "\ud835\udd23", - filig: "\ufb01", - FilledSmallSquare: "\u25fc", - FilledVerySmallSquare: "\u25aa", - fjlig: "fj", - flat: "\u266d", - fllig: "\ufb02", - fltns: "\u25b1", - fnof: "\u0192", - Fopf: "\ud835\udd3d", - fopf: "\ud835\udd57", - forall: "\u2200", - ForAll: "\u2200", - fork: "\u22d4", - forkv: "\u2ad9", - Fouriertrf: "\u2131", - fpartint: "\u2a0d", - frac12: "\xbd", - frac13: "\u2153", - frac14: "\xbc", - frac15: "\u2155", - frac16: "\u2159", - frac18: "\u215b", - frac23: "\u2154", - frac25: "\u2156", - frac34: "\xbe", - frac35: "\u2157", - frac38: "\u215c", - frac45: "\u2158", - frac56: "\u215a", - frac58: "\u215d", - frac78: "\u215e", - frasl: "\u2044", - frown: "\u2322", - fscr: "\ud835\udcbb", - Fscr: "\u2131", - gacute: "\u01f5", - Gamma: "\u0393", - gamma: "\u03b3", - Gammad: "\u03dc", - gammad: "\u03dd", - gap: "\u2a86", - Gbreve: "\u011e", - gbreve: "\u011f", - Gcedil: "\u0122", - Gcirc: "\u011c", - gcirc: "\u011d", - Gcy: "\u0413", - gcy: "\u0433", - Gdot: "\u0120", - gdot: "\u0121", - ge: "\u2265", - gE: "\u2267", - gEl: "\u2a8c", - gel: "\u22db", - geq: "\u2265", - geqq: "\u2267", - geqslant: "\u2a7e", - gescc: "\u2aa9", - ges: "\u2a7e", - gesdot: "\u2a80", - gesdoto: "\u2a82", - gesdotol: "\u2a84", - gesl: "\u22db\ufe00", - gesles: "\u2a94", - Gfr: "\ud835\udd0a", - gfr: "\ud835\udd24", - gg: "\u226b", - Gg: "\u22d9", - ggg: "\u22d9", - gimel: "\u2137", - GJcy: "\u0403", - gjcy: "\u0453", - gla: "\u2aa5", - gl: "\u2277", - glE: "\u2a92", - glj: "\u2aa4", - gnap: "\u2a8a", - gnapprox: "\u2a8a", - gne: "\u2a88", - gnE: "\u2269", - gneq: "\u2a88", - gneqq: "\u2269", - gnsim: "\u22e7", - Gopf: "\ud835\udd3e", - gopf: "\ud835\udd58", - grave: "`", - GreaterEqual: "\u2265", - GreaterEqualLess: "\u22db", - GreaterFullEqual: "\u2267", - GreaterGreater: "\u2aa2", - GreaterLess: "\u2277", - GreaterSlantEqual: "\u2a7e", - GreaterTilde: "\u2273", - Gscr: "\ud835\udca2", - gscr: "\u210a", - gsim: "\u2273", - gsime: "\u2a8e", - gsiml: "\u2a90", - gtcc: "\u2aa7", - gtcir: "\u2a7a", - gt: ">", - GT: ">", - Gt: "\u226b", - gtdot: "\u22d7", - gtlPar: "\u2995", - gtquest: "\u2a7c", - gtrapprox: "\u2a86", - gtrarr: "\u2978", - gtrdot: "\u22d7", - gtreqless: "\u22db", - gtreqqless: "\u2a8c", - gtrless: "\u2277", - gtrsim: "\u2273", - gvertneqq: "\u2269\ufe00", - gvnE: "\u2269\ufe00", - Hacek: "\u02c7", - hairsp: "\u200a", - half: "\xbd", - hamilt: "\u210b", - HARDcy: "\u042a", - hardcy: "\u044a", - harrcir: "\u2948", - harr: "\u2194", - hArr: "\u21d4", - harrw: "\u21ad", - Hat: "^", - hbar: "\u210f", - Hcirc: "\u0124", - hcirc: "\u0125", - hearts: "\u2665", - heartsuit: "\u2665", - 
hellip: "\u2026", - hercon: "\u22b9", - hfr: "\ud835\udd25", - Hfr: "\u210c", - HilbertSpace: "\u210b", - hksearow: "\u2925", - hkswarow: "\u2926", - hoarr: "\u21ff", - homtht: "\u223b", - hookleftarrow: "\u21a9", - hookrightarrow: "\u21aa", - hopf: "\ud835\udd59", - Hopf: "\u210d", - horbar: "\u2015", - HorizontalLine: "\u2500", - hscr: "\ud835\udcbd", - Hscr: "\u210b", - hslash: "\u210f", - Hstrok: "\u0126", - hstrok: "\u0127", - HumpDownHump: "\u224e", - HumpEqual: "\u224f", - hybull: "\u2043", - hyphen: "\u2010", - Iacute: "\xcd", - iacute: "\xed", - ic: "\u2063", - Icirc: "\xce", - icirc: "\xee", - Icy: "\u0418", - icy: "\u0438", - Idot: "\u0130", - IEcy: "\u0415", - iecy: "\u0435", - iexcl: "\xa1", - iff: "\u21d4", - ifr: "\ud835\udd26", - Ifr: "\u2111", - Igrave: "\xcc", - igrave: "\xec", - ii: "\u2148", - iiiint: "\u2a0c", - iiint: "\u222d", - iinfin: "\u29dc", - iiota: "\u2129", - IJlig: "\u0132", - ijlig: "\u0133", - Imacr: "\u012a", - imacr: "\u012b", - image: "\u2111", - ImaginaryI: "\u2148", - imagline: "\u2110", - imagpart: "\u2111", - imath: "\u0131", - Im: "\u2111", - imof: "\u22b7", - imped: "\u01b5", - Implies: "\u21d2", - incare: "\u2105", - in: "\u2208", - infin: "\u221e", - infintie: "\u29dd", - inodot: "\u0131", - intcal: "\u22ba", - int: "\u222b", - Int: "\u222c", - integers: "\u2124", - Integral: "\u222b", - intercal: "\u22ba", - Intersection: "\u22c2", - intlarhk: "\u2a17", - intprod: "\u2a3c", - InvisibleComma: "\u2063", - InvisibleTimes: "\u2062", - IOcy: "\u0401", - iocy: "\u0451", - Iogon: "\u012e", - iogon: "\u012f", - Iopf: "\ud835\udd40", - iopf: "\ud835\udd5a", - Iota: "\u0399", - iota: "\u03b9", - iprod: "\u2a3c", - iquest: "\xbf", - iscr: "\ud835\udcbe", - Iscr: "\u2110", - isin: "\u2208", - isindot: "\u22f5", - isinE: "\u22f9", - isins: "\u22f4", - isinsv: "\u22f3", - isinv: "\u2208", - it: "\u2062", - Itilde: "\u0128", - itilde: "\u0129", - Iukcy: "\u0406", - iukcy: "\u0456", - Iuml: "\xcf", - iuml: "\xef", - Jcirc: "\u0134", - jcirc: "\u0135", - Jcy: "\u0419", - jcy: "\u0439", - Jfr: "\ud835\udd0d", - jfr: "\ud835\udd27", - jmath: "\u0237", - Jopf: "\ud835\udd41", - jopf: "\ud835\udd5b", - Jscr: "\ud835\udca5", - jscr: "\ud835\udcbf", - Jsercy: "\u0408", - jsercy: "\u0458", - Jukcy: "\u0404", - jukcy: "\u0454", - Kappa: "\u039a", - kappa: "\u03ba", - kappav: "\u03f0", - Kcedil: "\u0136", - kcedil: "\u0137", - Kcy: "\u041a", - kcy: "\u043a", - Kfr: "\ud835\udd0e", - kfr: "\ud835\udd28", - kgreen: "\u0138", - KHcy: "\u0425", - khcy: "\u0445", - KJcy: "\u040c", - kjcy: "\u045c", - Kopf: "\ud835\udd42", - kopf: "\ud835\udd5c", - Kscr: "\ud835\udca6", - kscr: "\ud835\udcc0", - lAarr: "\u21da", - Lacute: "\u0139", - lacute: "\u013a", - laemptyv: "\u29b4", - lagran: "\u2112", - Lambda: "\u039b", - lambda: "\u03bb", - lang: "\u27e8", - Lang: "\u27ea", - langd: "\u2991", - langle: "\u27e8", - lap: "\u2a85", - Laplacetrf: "\u2112", - laquo: "\xab", - larrb: "\u21e4", - larrbfs: "\u291f", - larr: "\u2190", - Larr: "\u219e", - lArr: "\u21d0", - larrfs: "\u291d", - larrhk: "\u21a9", - larrlp: "\u21ab", - larrpl: "\u2939", - larrsim: "\u2973", - larrtl: "\u21a2", - latail: "\u2919", - lAtail: "\u291b", - lat: "\u2aab", - late: "\u2aad", - lates: "\u2aad\ufe00", - lbarr: "\u290c", - lBarr: "\u290e", - lbbrk: "\u2772", - lbrace: "{", - lbrack: "[", - lbrke: "\u298b", - lbrksld: "\u298f", - lbrkslu: "\u298d", - Lcaron: "\u013d", - lcaron: "\u013e", - Lcedil: "\u013b", - lcedil: "\u013c", - lceil: "\u2308", - lcub: "{", - Lcy: "\u041b", - lcy: "\u043b", - ldca: 
"\u2936", - ldquo: "\u201c", - ldquor: "\u201e", - ldrdhar: "\u2967", - ldrushar: "\u294b", - ldsh: "\u21b2", - le: "\u2264", - lE: "\u2266", - LeftAngleBracket: "\u27e8", - LeftArrowBar: "\u21e4", - leftarrow: "\u2190", - LeftArrow: "\u2190", - Leftarrow: "\u21d0", - LeftArrowRightArrow: "\u21c6", - leftarrowtail: "\u21a2", - LeftCeiling: "\u2308", - LeftDoubleBracket: "\u27e6", - LeftDownTeeVector: "\u2961", - LeftDownVectorBar: "\u2959", - LeftDownVector: "\u21c3", - LeftFloor: "\u230a", - leftharpoondown: "\u21bd", - leftharpoonup: "\u21bc", - leftleftarrows: "\u21c7", - leftrightarrow: "\u2194", - LeftRightArrow: "\u2194", - Leftrightarrow: "\u21d4", - leftrightarrows: "\u21c6", - leftrightharpoons: "\u21cb", - leftrightsquigarrow: "\u21ad", - LeftRightVector: "\u294e", - LeftTeeArrow: "\u21a4", - LeftTee: "\u22a3", - LeftTeeVector: "\u295a", - leftthreetimes: "\u22cb", - LeftTriangleBar: "\u29cf", - LeftTriangle: "\u22b2", - LeftTriangleEqual: "\u22b4", - LeftUpDownVector: "\u2951", - LeftUpTeeVector: "\u2960", - LeftUpVectorBar: "\u2958", - LeftUpVector: "\u21bf", - LeftVectorBar: "\u2952", - LeftVector: "\u21bc", - lEg: "\u2a8b", - leg: "\u22da", - leq: "\u2264", - leqq: "\u2266", - leqslant: "\u2a7d", - lescc: "\u2aa8", - les: "\u2a7d", - lesdot: "\u2a7f", - lesdoto: "\u2a81", - lesdotor: "\u2a83", - lesg: "\u22da\ufe00", - lesges: "\u2a93", - lessapprox: "\u2a85", - lessdot: "\u22d6", - lesseqgtr: "\u22da", - lesseqqgtr: "\u2a8b", - LessEqualGreater: "\u22da", - LessFullEqual: "\u2266", - LessGreater: "\u2276", - lessgtr: "\u2276", - LessLess: "\u2aa1", - lesssim: "\u2272", - LessSlantEqual: "\u2a7d", - LessTilde: "\u2272", - lfisht: "\u297c", - lfloor: "\u230a", - Lfr: "\ud835\udd0f", - lfr: "\ud835\udd29", - lg: "\u2276", - lgE: "\u2a91", - lHar: "\u2962", - lhard: "\u21bd", - lharu: "\u21bc", - lharul: "\u296a", - lhblk: "\u2584", - LJcy: "\u0409", - ljcy: "\u0459", - llarr: "\u21c7", - ll: "\u226a", - Ll: "\u22d8", - llcorner: "\u231e", - Lleftarrow: "\u21da", - llhard: "\u296b", - lltri: "\u25fa", - Lmidot: "\u013f", - lmidot: "\u0140", - lmoustache: "\u23b0", - lmoust: "\u23b0", - lnap: "\u2a89", - lnapprox: "\u2a89", - lne: "\u2a87", - lnE: "\u2268", - lneq: "\u2a87", - lneqq: "\u2268", - lnsim: "\u22e6", - loang: "\u27ec", - loarr: "\u21fd", - lobrk: "\u27e6", - longleftarrow: "\u27f5", - LongLeftArrow: "\u27f5", - Longleftarrow: "\u27f8", - longleftrightarrow: "\u27f7", - LongLeftRightArrow: "\u27f7", - Longleftrightarrow: "\u27fa", - longmapsto: "\u27fc", - longrightarrow: "\u27f6", - LongRightArrow: "\u27f6", - Longrightarrow: "\u27f9", - looparrowleft: "\u21ab", - looparrowright: "\u21ac", - lopar: "\u2985", - Lopf: "\ud835\udd43", - lopf: "\ud835\udd5d", - loplus: "\u2a2d", - lotimes: "\u2a34", - lowast: "\u2217", - lowbar: "_", - LowerLeftArrow: "\u2199", - LowerRightArrow: "\u2198", - loz: "\u25ca", - lozenge: "\u25ca", - lozf: "\u29eb", - lpar: "(", - lparlt: "\u2993", - lrarr: "\u21c6", - lrcorner: "\u231f", - lrhar: "\u21cb", - lrhard: "\u296d", - lrm: "\u200e", - lrtri: "\u22bf", - lsaquo: "\u2039", - lscr: "\ud835\udcc1", - Lscr: "\u2112", - lsh: "\u21b0", - Lsh: "\u21b0", - lsim: "\u2272", - lsime: "\u2a8d", - lsimg: "\u2a8f", - lsqb: "[", - lsquo: "\u2018", - lsquor: "\u201a", - Lstrok: "\u0141", - lstrok: "\u0142", - ltcc: "\u2aa6", - ltcir: "\u2a79", - lt: "<", - LT: "<", - Lt: "\u226a", - ltdot: "\u22d6", - lthree: "\u22cb", - ltimes: "\u22c9", - ltlarr: "\u2976", - ltquest: "\u2a7b", - ltri: "\u25c3", - ltrie: "\u22b4", - ltrif: "\u25c2", - ltrPar: 
"\u2996", - lurdshar: "\u294a", - luruhar: "\u2966", - lvertneqq: "\u2268\ufe00", - lvnE: "\u2268\ufe00", - macr: "\xaf", - male: "\u2642", - malt: "\u2720", - maltese: "\u2720", - Map: "\u2905", - map: "\u21a6", - mapsto: "\u21a6", - mapstodown: "\u21a7", - mapstoleft: "\u21a4", - mapstoup: "\u21a5", - marker: "\u25ae", - mcomma: "\u2a29", - Mcy: "\u041c", - mcy: "\u043c", - mdash: "\u2014", - mDDot: "\u223a", - measuredangle: "\u2221", - MediumSpace: "\u205f", - Mellintrf: "\u2133", - Mfr: "\ud835\udd10", - mfr: "\ud835\udd2a", - mho: "\u2127", - micro: "\xb5", - midast: "*", - midcir: "\u2af0", - mid: "\u2223", - middot: "\xb7", - minusb: "\u229f", - minus: "\u2212", - minusd: "\u2238", - minusdu: "\u2a2a", - MinusPlus: "\u2213", - mlcp: "\u2adb", - mldr: "\u2026", - mnplus: "\u2213", - models: "\u22a7", - Mopf: "\ud835\udd44", - mopf: "\ud835\udd5e", - mp: "\u2213", - mscr: "\ud835\udcc2", - Mscr: "\u2133", - mstpos: "\u223e", - Mu: "\u039c", - mu: "\u03bc", - multimap: "\u22b8", - mumap: "\u22b8", - nabla: "\u2207", - Nacute: "\u0143", - nacute: "\u0144", - nang: "\u2220\u20d2", - nap: "\u2249", - napE: "\u2a70\u0338", - napid: "\u224b\u0338", - napos: "\u0149", - napprox: "\u2249", - natural: "\u266e", - naturals: "\u2115", - natur: "\u266e", - nbsp: "\xa0", - nbump: "\u224e\u0338", - nbumpe: "\u224f\u0338", - ncap: "\u2a43", - Ncaron: "\u0147", - ncaron: "\u0148", - Ncedil: "\u0145", - ncedil: "\u0146", - ncong: "\u2247", - ncongdot: "\u2a6d\u0338", - ncup: "\u2a42", - Ncy: "\u041d", - ncy: "\u043d", - ndash: "\u2013", - nearhk: "\u2924", - nearr: "\u2197", - neArr: "\u21d7", - nearrow: "\u2197", - ne: "\u2260", - nedot: "\u2250\u0338", - NegativeMediumSpace: "\u200b", - NegativeThickSpace: "\u200b", - NegativeThinSpace: "\u200b", - NegativeVeryThinSpace: "\u200b", - nequiv: "\u2262", - nesear: "\u2928", - nesim: "\u2242\u0338", - NestedGreaterGreater: "\u226b", - NestedLessLess: "\u226a", - NewLine: "\n", - nexist: "\u2204", - nexists: "\u2204", - Nfr: "\ud835\udd11", - nfr: "\ud835\udd2b", - ngE: "\u2267\u0338", - nge: "\u2271", - ngeq: "\u2271", - ngeqq: "\u2267\u0338", - ngeqslant: "\u2a7e\u0338", - nges: "\u2a7e\u0338", - nGg: "\u22d9\u0338", - ngsim: "\u2275", - nGt: "\u226b\u20d2", - ngt: "\u226f", - ngtr: "\u226f", - nGtv: "\u226b\u0338", - nharr: "\u21ae", - nhArr: "\u21ce", - nhpar: "\u2af2", - ni: "\u220b", - nis: "\u22fc", - nisd: "\u22fa", - niv: "\u220b", - NJcy: "\u040a", - njcy: "\u045a", - nlarr: "\u219a", - nlArr: "\u21cd", - nldr: "\u2025", - nlE: "\u2266\u0338", - nle: "\u2270", - nleftarrow: "\u219a", - nLeftarrow: "\u21cd", - nleftrightarrow: "\u21ae", - nLeftrightarrow: "\u21ce", - nleq: "\u2270", - nleqq: "\u2266\u0338", - nleqslant: "\u2a7d\u0338", - nles: "\u2a7d\u0338", - nless: "\u226e", - nLl: "\u22d8\u0338", - nlsim: "\u2274", - nLt: "\u226a\u20d2", - nlt: "\u226e", - nltri: "\u22ea", - nltrie: "\u22ec", - nLtv: "\u226a\u0338", - nmid: "\u2224", - NoBreak: "\u2060", - NonBreakingSpace: "\xa0", - nopf: "\ud835\udd5f", - Nopf: "\u2115", - Not: "\u2aec", - not: "\xac", - NotCongruent: "\u2262", - NotCupCap: "\u226d", - NotDoubleVerticalBar: "\u2226", - NotElement: "\u2209", - NotEqual: "\u2260", - NotEqualTilde: "\u2242\u0338", - NotExists: "\u2204", - NotGreater: "\u226f", - NotGreaterEqual: "\u2271", - NotGreaterFullEqual: "\u2267\u0338", - NotGreaterGreater: "\u226b\u0338", - NotGreaterLess: "\u2279", - NotGreaterSlantEqual: "\u2a7e\u0338", - NotGreaterTilde: "\u2275", - NotHumpDownHump: "\u224e\u0338", - NotHumpEqual: "\u224f\u0338", - notin: "\u2209", 
- notindot: "\u22f5\u0338", - notinE: "\u22f9\u0338", - notinva: "\u2209", - notinvb: "\u22f7", - notinvc: "\u22f6", - NotLeftTriangleBar: "\u29cf\u0338", - NotLeftTriangle: "\u22ea", - NotLeftTriangleEqual: "\u22ec", - NotLess: "\u226e", - NotLessEqual: "\u2270", - NotLessGreater: "\u2278", - NotLessLess: "\u226a\u0338", - NotLessSlantEqual: "\u2a7d\u0338", - NotLessTilde: "\u2274", - NotNestedGreaterGreater: "\u2aa2\u0338", - NotNestedLessLess: "\u2aa1\u0338", - notni: "\u220c", - notniva: "\u220c", - notnivb: "\u22fe", - notnivc: "\u22fd", - NotPrecedes: "\u2280", - NotPrecedesEqual: "\u2aaf\u0338", - NotPrecedesSlantEqual: "\u22e0", - NotReverseElement: "\u220c", - NotRightTriangleBar: "\u29d0\u0338", - NotRightTriangle: "\u22eb", - NotRightTriangleEqual: "\u22ed", - NotSquareSubset: "\u228f\u0338", - NotSquareSubsetEqual: "\u22e2", - NotSquareSuperset: "\u2290\u0338", - NotSquareSupersetEqual: "\u22e3", - NotSubset: "\u2282\u20d2", - NotSubsetEqual: "\u2288", - NotSucceeds: "\u2281", - NotSucceedsEqual: "\u2ab0\u0338", - NotSucceedsSlantEqual: "\u22e1", - NotSucceedsTilde: "\u227f\u0338", - NotSuperset: "\u2283\u20d2", - NotSupersetEqual: "\u2289", - NotTilde: "\u2241", - NotTildeEqual: "\u2244", - NotTildeFullEqual: "\u2247", - NotTildeTilde: "\u2249", - NotVerticalBar: "\u2224", - nparallel: "\u2226", - npar: "\u2226", - nparsl: "\u2afd\u20e5", - npart: "\u2202\u0338", - npolint: "\u2a14", - npr: "\u2280", - nprcue: "\u22e0", - nprec: "\u2280", - npreceq: "\u2aaf\u0338", - npre: "\u2aaf\u0338", - nrarrc: "\u2933\u0338", - nrarr: "\u219b", - nrArr: "\u21cf", - nrarrw: "\u219d\u0338", - nrightarrow: "\u219b", - nRightarrow: "\u21cf", - nrtri: "\u22eb", - nrtrie: "\u22ed", - nsc: "\u2281", - nsccue: "\u22e1", - nsce: "\u2ab0\u0338", - Nscr: "\ud835\udca9", - nscr: "\ud835\udcc3", - nshortmid: "\u2224", - nshortparallel: "\u2226", - nsim: "\u2241", - nsime: "\u2244", - nsimeq: "\u2244", - nsmid: "\u2224", - nspar: "\u2226", - nsqsube: "\u22e2", - nsqsupe: "\u22e3", - nsub: "\u2284", - nsubE: "\u2ac5\u0338", - nsube: "\u2288", - nsubset: "\u2282\u20d2", - nsubseteq: "\u2288", - nsubseteqq: "\u2ac5\u0338", - nsucc: "\u2281", - nsucceq: "\u2ab0\u0338", - nsup: "\u2285", - nsupE: "\u2ac6\u0338", - nsupe: "\u2289", - nsupset: "\u2283\u20d2", - nsupseteq: "\u2289", - nsupseteqq: "\u2ac6\u0338", - ntgl: "\u2279", - Ntilde: "\xd1", - ntilde: "\xf1", - ntlg: "\u2278", - ntriangleleft: "\u22ea", - ntrianglelefteq: "\u22ec", - ntriangleright: "\u22eb", - ntrianglerighteq: "\u22ed", - Nu: "\u039d", - nu: "\u03bd", - num: "#", - numero: "\u2116", - numsp: "\u2007", - nvap: "\u224d\u20d2", - nvdash: "\u22ac", - nvDash: "\u22ad", - nVdash: "\u22ae", - nVDash: "\u22af", - nvge: "\u2265\u20d2", - nvgt: ">\u20d2", - nvHarr: "\u2904", - nvinfin: "\u29de", - nvlArr: "\u2902", - nvle: "\u2264\u20d2", - nvlt: "<\u20d2", - nvltrie: "\u22b4\u20d2", - nvrArr: "\u2903", - nvrtrie: "\u22b5\u20d2", - nvsim: "\u223c\u20d2", - nwarhk: "\u2923", - nwarr: "\u2196", - nwArr: "\u21d6", - nwarrow: "\u2196", - nwnear: "\u2927", - Oacute: "\xd3", - oacute: "\xf3", - oast: "\u229b", - Ocirc: "\xd4", - ocirc: "\xf4", - ocir: "\u229a", - Ocy: "\u041e", - ocy: "\u043e", - odash: "\u229d", - Odblac: "\u0150", - odblac: "\u0151", - odiv: "\u2a38", - odot: "\u2299", - odsold: "\u29bc", - OElig: "\u0152", - oelig: "\u0153", - ofcir: "\u29bf", - Ofr: "\ud835\udd12", - ofr: "\ud835\udd2c", - ogon: "\u02db", - Ograve: "\xd2", - ograve: "\xf2", - ogt: "\u29c1", - ohbar: "\u29b5", - ohm: "\u03a9", - oint: "\u222e", - olarr: "\u21ba", - 
olcir: "\u29be", - olcross: "\u29bb", - oline: "\u203e", - olt: "\u29c0", - Omacr: "\u014c", - omacr: "\u014d", - Omega: "\u03a9", - omega: "\u03c9", - Omicron: "\u039f", - omicron: "\u03bf", - omid: "\u29b6", - ominus: "\u2296", - Oopf: "\ud835\udd46", - oopf: "\ud835\udd60", - opar: "\u29b7", - OpenCurlyDoubleQuote: "\u201c", - OpenCurlyQuote: "\u2018", - operp: "\u29b9", - oplus: "\u2295", - orarr: "\u21bb", - Or: "\u2a54", - or: "\u2228", - ord: "\u2a5d", - order: "\u2134", - orderof: "\u2134", - ordf: "\xaa", - ordm: "\xba", - origof: "\u22b6", - oror: "\u2a56", - orslope: "\u2a57", - orv: "\u2a5b", - oS: "\u24c8", - Oscr: "\ud835\udcaa", - oscr: "\u2134", - Oslash: "\xd8", - oslash: "\xf8", - osol: "\u2298", - Otilde: "\xd5", - otilde: "\xf5", - otimesas: "\u2a36", - Otimes: "\u2a37", - otimes: "\u2297", - Ouml: "\xd6", - ouml: "\xf6", - ovbar: "\u233d", - OverBar: "\u203e", - OverBrace: "\u23de", - OverBracket: "\u23b4", - OverParenthesis: "\u23dc", - para: "\xb6", - parallel: "\u2225", - par: "\u2225", - parsim: "\u2af3", - parsl: "\u2afd", - part: "\u2202", - PartialD: "\u2202", - Pcy: "\u041f", - pcy: "\u043f", - percnt: "%", - period: ".", - permil: "\u2030", - perp: "\u22a5", - pertenk: "\u2031", - Pfr: "\ud835\udd13", - pfr: "\ud835\udd2d", - Phi: "\u03a6", - phi: "\u03c6", - phiv: "\u03d5", - phmmat: "\u2133", - phone: "\u260e", - Pi: "\u03a0", - pi: "\u03c0", - pitchfork: "\u22d4", - piv: "\u03d6", - planck: "\u210f", - planckh: "\u210e", - plankv: "\u210f", - plusacir: "\u2a23", - plusb: "\u229e", - pluscir: "\u2a22", - plus: "+", - plusdo: "\u2214", - plusdu: "\u2a25", - pluse: "\u2a72", - PlusMinus: "\xb1", - plusmn: "\xb1", - plussim: "\u2a26", - plustwo: "\u2a27", - pm: "\xb1", - Poincareplane: "\u210c", - pointint: "\u2a15", - popf: "\ud835\udd61", - Popf: "\u2119", - pound: "\xa3", - prap: "\u2ab7", - Pr: "\u2abb", - pr: "\u227a", - prcue: "\u227c", - precapprox: "\u2ab7", - prec: "\u227a", - preccurlyeq: "\u227c", - Precedes: "\u227a", - PrecedesEqual: "\u2aaf", - PrecedesSlantEqual: "\u227c", - PrecedesTilde: "\u227e", - preceq: "\u2aaf", - precnapprox: "\u2ab9", - precneqq: "\u2ab5", - precnsim: "\u22e8", - pre: "\u2aaf", - prE: "\u2ab3", - precsim: "\u227e", - prime: "\u2032", - Prime: "\u2033", - primes: "\u2119", - prnap: "\u2ab9", - prnE: "\u2ab5", - prnsim: "\u22e8", - prod: "\u220f", - Product: "\u220f", - profalar: "\u232e", - profline: "\u2312", - profsurf: "\u2313", - prop: "\u221d", - Proportional: "\u221d", - Proportion: "\u2237", - propto: "\u221d", - prsim: "\u227e", - prurel: "\u22b0", - Pscr: "\ud835\udcab", - pscr: "\ud835\udcc5", - Psi: "\u03a8", - psi: "\u03c8", - puncsp: "\u2008", - Qfr: "\ud835\udd14", - qfr: "\ud835\udd2e", - qint: "\u2a0c", - qopf: "\ud835\udd62", - Qopf: "\u211a", - qprime: "\u2057", - Qscr: "\ud835\udcac", - qscr: "\ud835\udcc6", - quaternions: "\u210d", - quatint: "\u2a16", - quest: "?", - questeq: "\u225f", - quot: '"', - QUOT: '"', - rAarr: "\u21db", - race: "\u223d\u0331", - Racute: "\u0154", - racute: "\u0155", - radic: "\u221a", - raemptyv: "\u29b3", - rang: "\u27e9", - Rang: "\u27eb", - rangd: "\u2992", - range: "\u29a5", - rangle: "\u27e9", - raquo: "\xbb", - rarrap: "\u2975", - rarrb: "\u21e5", - rarrbfs: "\u2920", - rarrc: "\u2933", - rarr: "\u2192", - Rarr: "\u21a0", - rArr: "\u21d2", - rarrfs: "\u291e", - rarrhk: "\u21aa", - rarrlp: "\u21ac", - rarrpl: "\u2945", - rarrsim: "\u2974", - Rarrtl: "\u2916", - rarrtl: "\u21a3", - rarrw: "\u219d", - ratail: "\u291a", - rAtail: "\u291c", - ratio: "\u2236", - rationals: 
"\u211a", - rbarr: "\u290d", - rBarr: "\u290f", - RBarr: "\u2910", - rbbrk: "\u2773", - rbrace: "}", - rbrack: "]", - rbrke: "\u298c", - rbrksld: "\u298e", - rbrkslu: "\u2990", - Rcaron: "\u0158", - rcaron: "\u0159", - Rcedil: "\u0156", - rcedil: "\u0157", - rceil: "\u2309", - rcub: "}", - Rcy: "\u0420", - rcy: "\u0440", - rdca: "\u2937", - rdldhar: "\u2969", - rdquo: "\u201d", - rdquor: "\u201d", - rdsh: "\u21b3", - real: "\u211c", - realine: "\u211b", - realpart: "\u211c", - reals: "\u211d", - Re: "\u211c", - rect: "\u25ad", - reg: "\xae", - REG: "\xae", - ReverseElement: "\u220b", - ReverseEquilibrium: "\u21cb", - ReverseUpEquilibrium: "\u296f", - rfisht: "\u297d", - rfloor: "\u230b", - rfr: "\ud835\udd2f", - Rfr: "\u211c", - rHar: "\u2964", - rhard: "\u21c1", - rharu: "\u21c0", - rharul: "\u296c", - Rho: "\u03a1", - rho: "\u03c1", - rhov: "\u03f1", - RightAngleBracket: "\u27e9", - RightArrowBar: "\u21e5", - rightarrow: "\u2192", - RightArrow: "\u2192", - Rightarrow: "\u21d2", - RightArrowLeftArrow: "\u21c4", - rightarrowtail: "\u21a3", - RightCeiling: "\u2309", - RightDoubleBracket: "\u27e7", - RightDownTeeVector: "\u295d", - RightDownVectorBar: "\u2955", - RightDownVector: "\u21c2", - RightFloor: "\u230b", - rightharpoondown: "\u21c1", - rightharpoonup: "\u21c0", - rightleftarrows: "\u21c4", - rightleftharpoons: "\u21cc", - rightrightarrows: "\u21c9", - rightsquigarrow: "\u219d", - RightTeeArrow: "\u21a6", - RightTee: "\u22a2", - RightTeeVector: "\u295b", - rightthreetimes: "\u22cc", - RightTriangleBar: "\u29d0", - RightTriangle: "\u22b3", - RightTriangleEqual: "\u22b5", - RightUpDownVector: "\u294f", - RightUpTeeVector: "\u295c", - RightUpVectorBar: "\u2954", - RightUpVector: "\u21be", - RightVectorBar: "\u2953", - RightVector: "\u21c0", - ring: "\u02da", - risingdotseq: "\u2253", - rlarr: "\u21c4", - rlhar: "\u21cc", - rlm: "\u200f", - rmoustache: "\u23b1", - rmoust: "\u23b1", - rnmid: "\u2aee", - roang: "\u27ed", - roarr: "\u21fe", - robrk: "\u27e7", - ropar: "\u2986", - ropf: "\ud835\udd63", - Ropf: "\u211d", - roplus: "\u2a2e", - rotimes: "\u2a35", - RoundImplies: "\u2970", - rpar: ")", - rpargt: "\u2994", - rppolint: "\u2a12", - rrarr: "\u21c9", - Rrightarrow: "\u21db", - rsaquo: "\u203a", - rscr: "\ud835\udcc7", - Rscr: "\u211b", - rsh: "\u21b1", - Rsh: "\u21b1", - rsqb: "]", - rsquo: "\u2019", - rsquor: "\u2019", - rthree: "\u22cc", - rtimes: "\u22ca", - rtri: "\u25b9", - rtrie: "\u22b5", - rtrif: "\u25b8", - rtriltri: "\u29ce", - RuleDelayed: "\u29f4", - ruluhar: "\u2968", - rx: "\u211e", - Sacute: "\u015a", - sacute: "\u015b", - sbquo: "\u201a", - scap: "\u2ab8", - Scaron: "\u0160", - scaron: "\u0161", - Sc: "\u2abc", - sc: "\u227b", - sccue: "\u227d", - sce: "\u2ab0", - scE: "\u2ab4", - Scedil: "\u015e", - scedil: "\u015f", - Scirc: "\u015c", - scirc: "\u015d", - scnap: "\u2aba", - scnE: "\u2ab6", - scnsim: "\u22e9", - scpolint: "\u2a13", - scsim: "\u227f", - Scy: "\u0421", - scy: "\u0441", - sdotb: "\u22a1", - sdot: "\u22c5", - sdote: "\u2a66", - searhk: "\u2925", - searr: "\u2198", - seArr: "\u21d8", - searrow: "\u2198", - sect: "\xa7", - semi: ";", - seswar: "\u2929", - setminus: "\u2216", - setmn: "\u2216", - sext: "\u2736", - Sfr: "\ud835\udd16", - sfr: "\ud835\udd30", - sfrown: "\u2322", - sharp: "\u266f", - SHCHcy: "\u0429", - shchcy: "\u0449", - SHcy: "\u0428", - shcy: "\u0448", - ShortDownArrow: "\u2193", - ShortLeftArrow: "\u2190", - shortmid: "\u2223", - shortparallel: "\u2225", - ShortRightArrow: "\u2192", - ShortUpArrow: "\u2191", - shy: "\xad", - Sigma: 
"\u03a3", - sigma: "\u03c3", - sigmaf: "\u03c2", - sigmav: "\u03c2", - sim: "\u223c", - simdot: "\u2a6a", - sime: "\u2243", - simeq: "\u2243", - simg: "\u2a9e", - simgE: "\u2aa0", - siml: "\u2a9d", - simlE: "\u2a9f", - simne: "\u2246", - simplus: "\u2a24", - simrarr: "\u2972", - slarr: "\u2190", - SmallCircle: "\u2218", - smallsetminus: "\u2216", - smashp: "\u2a33", - smeparsl: "\u29e4", - smid: "\u2223", - smile: "\u2323", - smt: "\u2aaa", - smte: "\u2aac", - smtes: "\u2aac\ufe00", - SOFTcy: "\u042c", - softcy: "\u044c", - solbar: "\u233f", - solb: "\u29c4", - sol: "/", - Sopf: "\ud835\udd4a", - sopf: "\ud835\udd64", - spades: "\u2660", - spadesuit: "\u2660", - spar: "\u2225", - sqcap: "\u2293", - sqcaps: "\u2293\ufe00", - sqcup: "\u2294", - sqcups: "\u2294\ufe00", - Sqrt: "\u221a", - sqsub: "\u228f", - sqsube: "\u2291", - sqsubset: "\u228f", - sqsubseteq: "\u2291", - sqsup: "\u2290", - sqsupe: "\u2292", - sqsupset: "\u2290", - sqsupseteq: "\u2292", - square: "\u25a1", - Square: "\u25a1", - SquareIntersection: "\u2293", - SquareSubset: "\u228f", - SquareSubsetEqual: "\u2291", - SquareSuperset: "\u2290", - SquareSupersetEqual: "\u2292", - SquareUnion: "\u2294", - squarf: "\u25aa", - squ: "\u25a1", - squf: "\u25aa", - srarr: "\u2192", - Sscr: "\ud835\udcae", - sscr: "\ud835\udcc8", - ssetmn: "\u2216", - ssmile: "\u2323", - sstarf: "\u22c6", - Star: "\u22c6", - star: "\u2606", - starf: "\u2605", - straightepsilon: "\u03f5", - straightphi: "\u03d5", - strns: "\xaf", - sub: "\u2282", - Sub: "\u22d0", - subdot: "\u2abd", - subE: "\u2ac5", - sube: "\u2286", - subedot: "\u2ac3", - submult: "\u2ac1", - subnE: "\u2acb", - subne: "\u228a", - subplus: "\u2abf", - subrarr: "\u2979", - subset: "\u2282", - Subset: "\u22d0", - subseteq: "\u2286", - subseteqq: "\u2ac5", - SubsetEqual: "\u2286", - subsetneq: "\u228a", - subsetneqq: "\u2acb", - subsim: "\u2ac7", - subsub: "\u2ad5", - subsup: "\u2ad3", - succapprox: "\u2ab8", - succ: "\u227b", - succcurlyeq: "\u227d", - Succeeds: "\u227b", - SucceedsEqual: "\u2ab0", - SucceedsSlantEqual: "\u227d", - SucceedsTilde: "\u227f", - succeq: "\u2ab0", - succnapprox: "\u2aba", - succneqq: "\u2ab6", - succnsim: "\u22e9", - succsim: "\u227f", - SuchThat: "\u220b", - sum: "\u2211", - Sum: "\u2211", - sung: "\u266a", - sup1: "\xb9", - sup2: "\xb2", - sup3: "\xb3", - sup: "\u2283", - Sup: "\u22d1", - supdot: "\u2abe", - supdsub: "\u2ad8", - supE: "\u2ac6", - supe: "\u2287", - supedot: "\u2ac4", - Superset: "\u2283", - SupersetEqual: "\u2287", - suphsol: "\u27c9", - suphsub: "\u2ad7", - suplarr: "\u297b", - supmult: "\u2ac2", - supnE: "\u2acc", - supne: "\u228b", - supplus: "\u2ac0", - supset: "\u2283", - Supset: "\u22d1", - supseteq: "\u2287", - supseteqq: "\u2ac6", - supsetneq: "\u228b", - supsetneqq: "\u2acc", - supsim: "\u2ac8", - supsub: "\u2ad4", - supsup: "\u2ad6", - swarhk: "\u2926", - swarr: "\u2199", - swArr: "\u21d9", - swarrow: "\u2199", - swnwar: "\u292a", - szlig: "\xdf", - Tab: "\t", - target: "\u2316", - Tau: "\u03a4", - tau: "\u03c4", - tbrk: "\u23b4", - Tcaron: "\u0164", - tcaron: "\u0165", - Tcedil: "\u0162", - tcedil: "\u0163", - Tcy: "\u0422", - tcy: "\u0442", - tdot: "\u20db", - telrec: "\u2315", - Tfr: "\ud835\udd17", - tfr: "\ud835\udd31", - there4: "\u2234", - therefore: "\u2234", - Therefore: "\u2234", - Theta: "\u0398", - theta: "\u03b8", - thetasym: "\u03d1", - thetav: "\u03d1", - thickapprox: "\u2248", - thicksim: "\u223c", - ThickSpace: "\u205f\u200a", - ThinSpace: "\u2009", - thinsp: "\u2009", - thkap: "\u2248", - thksim: "\u223c", - THORN: 
"\xde", - thorn: "\xfe", - tilde: "\u02dc", - Tilde: "\u223c", - TildeEqual: "\u2243", - TildeFullEqual: "\u2245", - TildeTilde: "\u2248", - timesbar: "\u2a31", - timesb: "\u22a0", - times: "\xd7", - timesd: "\u2a30", - tint: "\u222d", - toea: "\u2928", - topbot: "\u2336", - topcir: "\u2af1", - top: "\u22a4", - Topf: "\ud835\udd4b", - topf: "\ud835\udd65", - topfork: "\u2ada", - tosa: "\u2929", - tprime: "\u2034", - trade: "\u2122", - TRADE: "\u2122", - triangle: "\u25b5", - triangledown: "\u25bf", - triangleleft: "\u25c3", - trianglelefteq: "\u22b4", - triangleq: "\u225c", - triangleright: "\u25b9", - trianglerighteq: "\u22b5", - tridot: "\u25ec", - trie: "\u225c", - triminus: "\u2a3a", - TripleDot: "\u20db", - triplus: "\u2a39", - trisb: "\u29cd", - tritime: "\u2a3b", - trpezium: "\u23e2", - Tscr: "\ud835\udcaf", - tscr: "\ud835\udcc9", - TScy: "\u0426", - tscy: "\u0446", - TSHcy: "\u040b", - tshcy: "\u045b", - Tstrok: "\u0166", - tstrok: "\u0167", - twixt: "\u226c", - twoheadleftarrow: "\u219e", - twoheadrightarrow: "\u21a0", - Uacute: "\xda", - uacute: "\xfa", - uarr: "\u2191", - Uarr: "\u219f", - uArr: "\u21d1", - Uarrocir: "\u2949", - Ubrcy: "\u040e", - ubrcy: "\u045e", - Ubreve: "\u016c", - ubreve: "\u016d", - Ucirc: "\xdb", - ucirc: "\xfb", - Ucy: "\u0423", - ucy: "\u0443", - udarr: "\u21c5", - Udblac: "\u0170", - udblac: "\u0171", - udhar: "\u296e", - ufisht: "\u297e", - Ufr: "\ud835\udd18", - ufr: "\ud835\udd32", - Ugrave: "\xd9", - ugrave: "\xf9", - uHar: "\u2963", - uharl: "\u21bf", - uharr: "\u21be", - uhblk: "\u2580", - ulcorn: "\u231c", - ulcorner: "\u231c", - ulcrop: "\u230f", - ultri: "\u25f8", - Umacr: "\u016a", - umacr: "\u016b", - uml: "\xa8", - UnderBar: "_", - UnderBrace: "\u23df", - UnderBracket: "\u23b5", - UnderParenthesis: "\u23dd", - Union: "\u22c3", - UnionPlus: "\u228e", - Uogon: "\u0172", - uogon: "\u0173", - Uopf: "\ud835\udd4c", - uopf: "\ud835\udd66", - UpArrowBar: "\u2912", - uparrow: "\u2191", - UpArrow: "\u2191", - Uparrow: "\u21d1", - UpArrowDownArrow: "\u21c5", - updownarrow: "\u2195", - UpDownArrow: "\u2195", - Updownarrow: "\u21d5", - UpEquilibrium: "\u296e", - upharpoonleft: "\u21bf", - upharpoonright: "\u21be", - uplus: "\u228e", - UpperLeftArrow: "\u2196", - UpperRightArrow: "\u2197", - upsi: "\u03c5", - Upsi: "\u03d2", - upsih: "\u03d2", - Upsilon: "\u03a5", - upsilon: "\u03c5", - UpTeeArrow: "\u21a5", - UpTee: "\u22a5", - upuparrows: "\u21c8", - urcorn: "\u231d", - urcorner: "\u231d", - urcrop: "\u230e", - Uring: "\u016e", - uring: "\u016f", - urtri: "\u25f9", - Uscr: "\ud835\udcb0", - uscr: "\ud835\udcca", - utdot: "\u22f0", - Utilde: "\u0168", - utilde: "\u0169", - utri: "\u25b5", - utrif: "\u25b4", - uuarr: "\u21c8", - Uuml: "\xdc", - uuml: "\xfc", - uwangle: "\u29a7", - vangrt: "\u299c", - varepsilon: "\u03f5", - varkappa: "\u03f0", - varnothing: "\u2205", - varphi: "\u03d5", - varpi: "\u03d6", - varpropto: "\u221d", - varr: "\u2195", - vArr: "\u21d5", - varrho: "\u03f1", - varsigma: "\u03c2", - varsubsetneq: "\u228a\ufe00", - varsubsetneqq: "\u2acb\ufe00", - varsupsetneq: "\u228b\ufe00", - varsupsetneqq: "\u2acc\ufe00", - vartheta: "\u03d1", - vartriangleleft: "\u22b2", - vartriangleright: "\u22b3", - vBar: "\u2ae8", - Vbar: "\u2aeb", - vBarv: "\u2ae9", - Vcy: "\u0412", - vcy: "\u0432", - vdash: "\u22a2", - vDash: "\u22a8", - Vdash: "\u22a9", - VDash: "\u22ab", - Vdashl: "\u2ae6", - veebar: "\u22bb", - vee: "\u2228", - Vee: "\u22c1", - veeeq: "\u225a", - vellip: "\u22ee", - verbar: "|", - Verbar: "\u2016", - vert: "|", - Vert: "\u2016", - 
VerticalBar: "\u2223", - VerticalLine: "|", - VerticalSeparator: "\u2758", - VerticalTilde: "\u2240", - VeryThinSpace: "\u200a", - Vfr: "\ud835\udd19", - vfr: "\ud835\udd33", - vltri: "\u22b2", - vnsub: "\u2282\u20d2", - vnsup: "\u2283\u20d2", - Vopf: "\ud835\udd4d", - vopf: "\ud835\udd67", - vprop: "\u221d", - vrtri: "\u22b3", - Vscr: "\ud835\udcb1", - vscr: "\ud835\udccb", - vsubnE: "\u2acb\ufe00", - vsubne: "\u228a\ufe00", - vsupnE: "\u2acc\ufe00", - vsupne: "\u228b\ufe00", - Vvdash: "\u22aa", - vzigzag: "\u299a", - Wcirc: "\u0174", - wcirc: "\u0175", - wedbar: "\u2a5f", - wedge: "\u2227", - Wedge: "\u22c0", - wedgeq: "\u2259", - weierp: "\u2118", - Wfr: "\ud835\udd1a", - wfr: "\ud835\udd34", - Wopf: "\ud835\udd4e", - wopf: "\ud835\udd68", - wp: "\u2118", - wr: "\u2240", - wreath: "\u2240", - Wscr: "\ud835\udcb2", - wscr: "\ud835\udccc", - xcap: "\u22c2", - xcirc: "\u25ef", - xcup: "\u22c3", - xdtri: "\u25bd", - Xfr: "\ud835\udd1b", - xfr: "\ud835\udd35", - xharr: "\u27f7", - xhArr: "\u27fa", - Xi: "\u039e", - xi: "\u03be", - xlarr: "\u27f5", - xlArr: "\u27f8", - xmap: "\u27fc", - xnis: "\u22fb", - xodot: "\u2a00", - Xopf: "\ud835\udd4f", - xopf: "\ud835\udd69", - xoplus: "\u2a01", - xotime: "\u2a02", - xrarr: "\u27f6", - xrArr: "\u27f9", - Xscr: "\ud835\udcb3", - xscr: "\ud835\udccd", - xsqcup: "\u2a06", - xuplus: "\u2a04", - xutri: "\u25b3", - xvee: "\u22c1", - xwedge: "\u22c0", - Yacute: "\xdd", - yacute: "\xfd", - YAcy: "\u042f", - yacy: "\u044f", - Ycirc: "\u0176", - ycirc: "\u0177", - Ycy: "\u042b", - ycy: "\u044b", - yen: "\xa5", - Yfr: "\ud835\udd1c", - yfr: "\ud835\udd36", - YIcy: "\u0407", - yicy: "\u0457", - Yopf: "\ud835\udd50", - yopf: "\ud835\udd6a", - Yscr: "\ud835\udcb4", - yscr: "\ud835\udcce", - YUcy: "\u042e", - yucy: "\u044e", - yuml: "\xff", - Yuml: "\u0178", - Zacute: "\u0179", - zacute: "\u017a", - Zcaron: "\u017d", - zcaron: "\u017e", - Zcy: "\u0417", - zcy: "\u0437", - Zdot: "\u017b", - zdot: "\u017c", - zeetrf: "\u2128", - ZeroWidthSpace: "\u200b", - Zeta: "\u0396", - zeta: "\u03b6", - zfr: "\ud835\udd37", - Zfr: "\u2128", - ZHcy: "\u0416", - zhcy: "\u0436", - zigrarr: "\u21dd", - zopf: "\ud835\udd6b", - Zopf: "\u2124", - Zscr: "\ud835\udcb5", - zscr: "\ud835\udccf", - zwj: "\u200d", - zwnj: "\u200c" - }; - /*eslint quotes:0*/ var entities = require$$0; - var regex$4 = 
/[!-#%-\*,-\/:;\?@\[-\]_\{\}\xA1\xA7\xAB\xB6\xB7\xBB\xBF\u037E\u0387\u055A-\u055F\u0589\u058A\u05BE\u05C0\u05C3\u05C6\u05F3\u05F4\u0609\u060A\u060C\u060D\u061B\u061E\u061F\u066A-\u066D\u06D4\u0700-\u070D\u07F7-\u07F9\u0830-\u083E\u085E\u0964\u0965\u0970\u09FD\u0A76\u0AF0\u0C84\u0DF4\u0E4F\u0E5A\u0E5B\u0F04-\u0F12\u0F14\u0F3A-\u0F3D\u0F85\u0FD0-\u0FD4\u0FD9\u0FDA\u104A-\u104F\u10FB\u1360-\u1368\u1400\u166D\u166E\u169B\u169C\u16EB-\u16ED\u1735\u1736\u17D4-\u17D6\u17D8-\u17DA\u1800-\u180A\u1944\u1945\u1A1E\u1A1F\u1AA0-\u1AA6\u1AA8-\u1AAD\u1B5A-\u1B60\u1BFC-\u1BFF\u1C3B-\u1C3F\u1C7E\u1C7F\u1CC0-\u1CC7\u1CD3\u2010-\u2027\u2030-\u2043\u2045-\u2051\u2053-\u205E\u207D\u207E\u208D\u208E\u2308-\u230B\u2329\u232A\u2768-\u2775\u27C5\u27C6\u27E6-\u27EF\u2983-\u2998\u29D8-\u29DB\u29FC\u29FD\u2CF9-\u2CFC\u2CFE\u2CFF\u2D70\u2E00-\u2E2E\u2E30-\u2E4E\u3001-\u3003\u3008-\u3011\u3014-\u301F\u3030\u303D\u30A0\u30FB\uA4FE\uA4FF\uA60D-\uA60F\uA673\uA67E\uA6F2-\uA6F7\uA874-\uA877\uA8CE\uA8CF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA95F\uA9C1-\uA9CD\uA9DE\uA9DF\uAA5C-\uAA5F\uAADE\uAADF\uAAF0\uAAF1\uABEB\uFD3E\uFD3F\uFE10-\uFE19\uFE30-\uFE52\uFE54-\uFE61\uFE63\uFE68\uFE6A\uFE6B\uFF01-\uFF03\uFF05-\uFF0A\uFF0C-\uFF0F\uFF1A\uFF1B\uFF1F\uFF20\uFF3B-\uFF3D\uFF3F\uFF5B\uFF5D\uFF5F-\uFF65]|\uD800[\uDD00-\uDD02\uDF9F\uDFD0]|\uD801\uDD6F|\uD802[\uDC57\uDD1F\uDD3F\uDE50-\uDE58\uDE7F\uDEF0-\uDEF6\uDF39-\uDF3F\uDF99-\uDF9C]|\uD803[\uDF55-\uDF59]|\uD804[\uDC47-\uDC4D\uDCBB\uDCBC\uDCBE-\uDCC1\uDD40-\uDD43\uDD74\uDD75\uDDC5-\uDDC8\uDDCD\uDDDB\uDDDD-\uDDDF\uDE38-\uDE3D\uDEA9]|\uD805[\uDC4B-\uDC4F\uDC5B\uDC5D\uDCC6\uDDC1-\uDDD7\uDE41-\uDE43\uDE60-\uDE6C\uDF3C-\uDF3E]|\uD806[\uDC3B\uDE3F-\uDE46\uDE9A-\uDE9C\uDE9E-\uDEA2]|\uD807[\uDC41-\uDC45\uDC70\uDC71\uDEF7\uDEF8]|\uD809[\uDC70-\uDC74]|\uD81A[\uDE6E\uDE6F\uDEF5\uDF37-\uDF3B\uDF44]|\uD81B[\uDE97-\uDE9A]|\uD82F\uDC9F|\uD836[\uDE87-\uDE8B]|\uD83A[\uDD5E\uDD5F]/; - var encodeCache = {}; - // Create a lookup array where anything but characters in `chars` string - // and alphanumeric chars is percent-encoded. - - function getEncodeCache(exclude) { - var i, ch, cache = encodeCache[exclude]; - if (cache) { - return cache; - } - cache = encodeCache[exclude] = []; - for (i = 0; i < 128; i++) { - ch = String.fromCharCode(i); - if (/^[0-9a-z]$/i.test(ch)) { - // always allow unencoded alphanumeric characters - cache.push(ch); - } else { - cache.push("%" + ("0" + i.toString(16).toUpperCase()).slice(-2)); - } - } - for (i = 0; i < exclude.length; i++) { - cache[exclude.charCodeAt(i)] = exclude[i]; - } - return cache; - } - // Encode unsafe characters with percent-encoding, skipping already - // encoded sequences. 
- - // - string - string to encode - // - exclude - list of characters to ignore (in addition to a-zA-Z0-9) - // - keepEscaped - don't encode '%' in a correct escape sequence (default: true) - - function encode$2(string, exclude, keepEscaped) { - var i, l, code, nextCode, cache, result = ""; - if (typeof exclude !== "string") { - // encode(string, keepEscaped) - keepEscaped = exclude; - exclude = encode$2.defaultChars; - } - if (typeof keepEscaped === "undefined") { - keepEscaped = true; - } - cache = getEncodeCache(exclude); - for (i = 0, l = string.length; i < l; i++) { - code = string.charCodeAt(i); - if (keepEscaped && code === 37 /* % */ && i + 2 < l) { - if (/^[0-9a-f]{2}$/i.test(string.slice(i + 1, i + 3))) { - result += string.slice(i, i + 3); - i += 2; - continue; - } - } - if (code < 128) { - result += cache[code]; - continue; - } - if (code >= 55296 && code <= 57343) { - if (code >= 55296 && code <= 56319 && i + 1 < l) { - nextCode = string.charCodeAt(i + 1); - if (nextCode >= 56320 && nextCode <= 57343) { - result += encodeURIComponent(string[i] + string[i + 1]); - i++; - continue; - } - } - result += "%EF%BF%BD"; - continue; - } - result += encodeURIComponent(string[i]); - } - return result; - } - encode$2.defaultChars = ";/?:@&=+$,-_.!~*'()#"; - encode$2.componentChars = "-_.!~*'()"; - var encode_1 = encode$2; - /* eslint-disable no-bitwise */ var decodeCache = {}; - function getDecodeCache(exclude) { - var i, ch, cache = decodeCache[exclude]; - if (cache) { - return cache; - } - cache = decodeCache[exclude] = []; - for (i = 0; i < 128; i++) { - ch = String.fromCharCode(i); - cache.push(ch); - } - for (i = 0; i < exclude.length; i++) { - ch = exclude.charCodeAt(i); - cache[ch] = "%" + ("0" + ch.toString(16).toUpperCase()).slice(-2); - } - return cache; - } - // Decode percent-encoded string. 
- - function decode$2(string, exclude) { - var cache; - if (typeof exclude !== "string") { - exclude = decode$2.defaultChars; - } - cache = getDecodeCache(exclude); - return string.replace(/(%[a-f0-9]{2})+/gi, (function(seq) { - var i, l, b1, b2, b3, b4, chr, result = ""; - for (i = 0, l = seq.length; i < l; i += 3) { - b1 = parseInt(seq.slice(i + 1, i + 3), 16); - if (b1 < 128) { - result += cache[b1]; - continue; - } - if ((b1 & 224) === 192 && i + 3 < l) { - // 110xxxxx 10xxxxxx - b2 = parseInt(seq.slice(i + 4, i + 6), 16); - if ((b2 & 192) === 128) { - chr = b1 << 6 & 1984 | b2 & 63; - if (chr < 128) { - result += "\ufffd\ufffd"; - } else { - result += String.fromCharCode(chr); - } - i += 3; - continue; - } - } - if ((b1 & 240) === 224 && i + 6 < l) { - // 1110xxxx 10xxxxxx 10xxxxxx - b2 = parseInt(seq.slice(i + 4, i + 6), 16); - b3 = parseInt(seq.slice(i + 7, i + 9), 16); - if ((b2 & 192) === 128 && (b3 & 192) === 128) { - chr = b1 << 12 & 61440 | b2 << 6 & 4032 | b3 & 63; - if (chr < 2048 || chr >= 55296 && chr <= 57343) { - result += "\ufffd\ufffd\ufffd"; - } else { - result += String.fromCharCode(chr); - } - i += 6; - continue; - } - } - if ((b1 & 248) === 240 && i + 9 < l) { - // 111110xx 10xxxxxx 10xxxxxx 10xxxxxx - b2 = parseInt(seq.slice(i + 4, i + 6), 16); - b3 = parseInt(seq.slice(i + 7, i + 9), 16); - b4 = parseInt(seq.slice(i + 10, i + 12), 16); - if ((b2 & 192) === 128 && (b3 & 192) === 128 && (b4 & 192) === 128) { - chr = b1 << 18 & 1835008 | b2 << 12 & 258048 | b3 << 6 & 4032 | b4 & 63; - if (chr < 65536 || chr > 1114111) { - result += "\ufffd\ufffd\ufffd\ufffd"; - } else { - chr -= 65536; - result += String.fromCharCode(55296 + (chr >> 10), 56320 + (chr & 1023)); - } - i += 9; - continue; - } - } - result += "\ufffd"; - } - return result; - })); - } - decode$2.defaultChars = ";/?:@&=+$,#"; - decode$2.componentChars = ""; - var decode_1 = decode$2; - var format$1 = function format(url) { - var result = ""; - result += url.protocol || ""; - result += url.slashes ? "//" : ""; - result += url.auth ? url.auth + "@" : ""; - if (url.hostname && url.hostname.indexOf(":") !== -1) { - // ipv6 address - result += "[" + url.hostname + "]"; - } else { - result += url.hostname || ""; - } - result += url.port ? ":" + url.port : ""; - result += url.pathname || ""; - result += url.search || ""; - result += url.hash || ""; - return result; - }; - // Copyright Joyent, Inc. and other Node contributors. - - // Changes from joyent/node: - - // 1. No leading slash in paths, - // e.g. in `url.parse('http://foo?bar')` pathname is ``, not `/` - - // 2. Backslashes are not replaced with slashes, - // so `http:\\example.org\` is treated like a relative path - - // 3. Trailing colon is treated like a part of the path, - // i.e. in `http://example.org:foo` pathname is `:foo` - - // 4. Nothing is URL-encoded in the resulting object, - // (in joyent/node some chars in auth and paths are encoded) - - // 5. `url.parse()` does not have `parseQueryString` argument - - // 6. Removed extraneous result properties: `host`, `path`, `query`, etc., - // which can be constructed using other parts of the url. - - function Url() { - this.protocol = null; - this.slashes = null; - this.auth = null; - this.port = null; - this.hostname = null; - this.hash = null; - this.search = null; - this.pathname = null; - } - // Reference: RFC 3986, RFC 1808, RFC 2396 - // define these here so at least they only have to be - // compiled once on the first module load. 
- var protocolPattern = /^([a-z0-9.+-]+:)/i, portPattern = /:[0-9]*$/, - // Special case for a simple path URL - simplePathPattern = /^(\/\/?(?!\/)[^\?\s]*)(\?[^\s]*)?$/, - // RFC 2396: characters reserved for delimiting URLs. - // We actually just auto-escape these. - delims = [ "<", ">", '"', "`", " ", "\r", "\n", "\t" ], - // RFC 2396: characters not allowed for various reasons. - unwise = [ "{", "}", "|", "\\", "^", "`" ].concat(delims), - // Allowed by RFCs, but cause of XSS attacks. Always escape these. - autoEscape = [ "'" ].concat(unwise), - // Characters that are never ever allowed in a hostname. - // Note that any invalid chars are also handled, but these - // are the ones that are *expected* to be seen, so we fast-path - // them. - nonHostChars = [ "%", "/", "?", ";", "#" ].concat(autoEscape), hostEndingChars = [ "/", "?", "#" ], hostnameMaxLen = 255, hostnamePartPattern = /^[+a-z0-9A-Z_-]{0,63}$/, hostnamePartStart = /^([+a-z0-9A-Z_-]{0,63})(.*)$/, - // protocols that can allow "unsafe" and "unwise" chars. - /* eslint-disable no-script-url */ - // protocols that never have a hostname. - hostlessProtocol = { - javascript: true, - "javascript:": true - }, - // protocols that always contain a // bit. - slashedProtocol = { - http: true, - https: true, - ftp: true, - gopher: true, - file: true, - "http:": true, - "https:": true, - "ftp:": true, - "gopher:": true, - "file:": true - }; - /* eslint-enable no-script-url */ function urlParse(url, slashesDenoteHost) { - if (url && url instanceof Url) { - return url; - } - var u = new Url; - u.parse(url, slashesDenoteHost); - return u; - } - Url.prototype.parse = function(url, slashesDenoteHost) { - var i, l, lowerProto, hec, slashes, rest = url; - // trim before proceeding. - // This is to support parse stuff like " http://foo.com \n" - rest = rest.trim(); - if (!slashesDenoteHost && url.split("#").length === 1) { - // Try fast path regexp - var simplePath = simplePathPattern.exec(rest); - if (simplePath) { - this.pathname = simplePath[1]; - if (simplePath[2]) { - this.search = simplePath[2]; - } - return this; - } - } - var proto = protocolPattern.exec(rest); - if (proto) { - proto = proto[0]; - lowerProto = proto.toLowerCase(); - this.protocol = proto; - rest = rest.substr(proto.length); - } - // figure out if it's got a host - // user@server is *always* interpreted as a hostname, and url - // resolution will treat //foo/bar as host=foo,path=bar because that's - // how the browser resolves relative URLs. - if (slashesDenoteHost || proto || rest.match(/^\/\/[^@\/]+@[^@\/]+/)) { - slashes = rest.substr(0, 2) === "//"; - if (slashes && !(proto && hostlessProtocol[proto])) { - rest = rest.substr(2); - this.slashes = true; - } - } - if (!hostlessProtocol[proto] && (slashes || proto && !slashedProtocol[proto])) { - // there's a hostname. - // the first instance of /, ?, ;, or # ends the host. - // If there is an @ in the hostname, then non-host chars *are* allowed - // to the left of the last @ sign, unless some host-ending character - // comes *before* the @-sign. - // URLs are obnoxious. - // ex: - // http://a@b@c/ => user:a@b host:c - // http://a@b?@c => user:a host:c path:/?@c - // v0.12 TODO(isaacs): This is not quite how Chrome does things. - // Review our test case against browsers more comprehensively. 
- // find the first instance of any hostEndingChars - var hostEnd = -1; - for (i = 0; i < hostEndingChars.length; i++) { - hec = rest.indexOf(hostEndingChars[i]); - if (hec !== -1 && (hostEnd === -1 || hec < hostEnd)) { - hostEnd = hec; - } - } - // at this point, either we have an explicit point where the - // auth portion cannot go past, or the last @ char is the decider. - var auth, atSign; - if (hostEnd === -1) { - // atSign can be anywhere. - atSign = rest.lastIndexOf("@"); - } else { - // atSign must be in auth portion. - // http://a@b/c@d => host:b auth:a path:/c@d - atSign = rest.lastIndexOf("@", hostEnd); - } - // Now we have a portion which is definitely the auth. - // Pull that off. - if (atSign !== -1) { - auth = rest.slice(0, atSign); - rest = rest.slice(atSign + 1); - this.auth = auth; - } - // the host is the remaining to the left of the first non-host char - hostEnd = -1; - for (i = 0; i < nonHostChars.length; i++) { - hec = rest.indexOf(nonHostChars[i]); - if (hec !== -1 && (hostEnd === -1 || hec < hostEnd)) { - hostEnd = hec; - } - } - // if we still have not hit it, then the entire thing is a host. - if (hostEnd === -1) { - hostEnd = rest.length; - } - if (rest[hostEnd - 1] === ":") { - hostEnd--; - } - var host = rest.slice(0, hostEnd); - rest = rest.slice(hostEnd); - // pull out port. - this.parseHost(host); - // we've indicated that there is a hostname, - // so even if it's empty, it has to be present. - this.hostname = this.hostname || ""; - // if hostname begins with [ and ends with ] - // assume that it's an IPv6 address. - var ipv6Hostname = this.hostname[0] === "[" && this.hostname[this.hostname.length - 1] === "]"; - // validate a little. - if (!ipv6Hostname) { - var hostparts = this.hostname.split(/\./); - for (i = 0, l = hostparts.length; i < l; i++) { - var part = hostparts[i]; - if (!part) { - continue; - } - if (!part.match(hostnamePartPattern)) { - var newpart = ""; - for (var j = 0, k = part.length; j < k; j++) { - if (part.charCodeAt(j) > 127) { - // we replace non-ASCII char with a temporary placeholder - // we need this to make sure size of hostname is not - // broken by replacing non-ASCII by nothing - newpart += "x"; - } else { - newpart += part[j]; - } - } - // we test again with ASCII char only - if (!newpart.match(hostnamePartPattern)) { - var validParts = hostparts.slice(0, i); - var notHost = hostparts.slice(i + 1); - var bit = part.match(hostnamePartStart); - if (bit) { - validParts.push(bit[1]); - notHost.unshift(bit[2]); - } - if (notHost.length) { - rest = notHost.join(".") + rest; - } - this.hostname = validParts.join("."); - break; - } - } - } - } - if (this.hostname.length > hostnameMaxLen) { - this.hostname = ""; - } - // strip [ and ] from the hostname - // the host field still retains them, though - if (ipv6Hostname) { - this.hostname = this.hostname.substr(1, this.hostname.length - 2); - } - } - // chop off from the tail first. - var hash = rest.indexOf("#"); - if (hash !== -1) { - // got a fragment string. 
- this.hash = rest.substr(hash); - rest = rest.slice(0, hash); - } - var qm = rest.indexOf("?"); - if (qm !== -1) { - this.search = rest.substr(qm); - rest = rest.slice(0, qm); - } - if (rest) { - this.pathname = rest; - } - if (slashedProtocol[lowerProto] && this.hostname && !this.pathname) { - this.pathname = ""; - } - return this; - }; - Url.prototype.parseHost = function(host) { - var port = portPattern.exec(host); - if (port) { - port = port[0]; - if (port !== ":") { - this.port = port.substr(1); - } - host = host.substr(0, host.length - port.length); - } - if (host) { - this.hostname = host; - } - }; - var parse$1 = urlParse; - var encode$1 = encode_1; - var decode$1 = decode_1; - var format = format$1; - var parse = parse$1; - var mdurl = { - encode: encode$1, - decode: decode$1, - format: format, - parse: parse - }; - var regex$3 = /[\0-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; - var regex$2 = /[\0-\x1F\x7F-\x9F]/; - var regex$1 = /[\xAD\u0600-\u0605\u061C\u06DD\u070F\u08E2\u180E\u200B-\u200F\u202A-\u202E\u2060-\u2064\u2066-\u206F\uFEFF\uFFF9-\uFFFB]|\uD804[\uDCBD\uDCCD]|\uD82F[\uDCA0-\uDCA3]|\uD834[\uDD73-\uDD7A]|\uDB40[\uDC01\uDC20-\uDC7F]/; - var regex = /[ \xA0\u1680\u2000-\u200A\u2028\u2029\u202F\u205F\u3000]/; - var Any = regex$3; - var Cc = regex$2; - var Cf = regex$1; - var P = regex$4; - var Z = regex; - var uc_micro = { - Any: Any, - Cc: Cc, - Cf: Cf, - P: P, - Z: Z - }; - var utils = createCommonjsModule((function(module, exports) { - function _class(obj) { - return Object.prototype.toString.call(obj); - } - function isString(obj) { - return _class(obj) === "[object String]"; - } - var _hasOwnProperty = Object.prototype.hasOwnProperty; - function has(object, key) { - return _hasOwnProperty.call(object, key); - } - // Merge objects - - function assign(obj /*from1, from2, from3, ...*/) { - var sources = Array.prototype.slice.call(arguments, 1); - sources.forEach((function(source) { - if (!source) { - return; - } - if (typeof source !== "object") { - throw new TypeError(source + "must be object"); - } - Object.keys(source).forEach((function(key) { - obj[key] = source[key]; - })); - })); - return obj; - } - // Remove element from array and put another array at those position. 
- // Useful for some operations with tokens
- function arrayReplaceAt(src, pos, newElements) {
- return [].concat(src.slice(0, pos), newElements, src.slice(pos + 1));
- }
- ////////////////////////////////////////////////////////////////////////////////
- function isValidEntityCode(c) {
- /*eslint no-bitwise:0*/
- // broken sequence
- if (c >= 55296 && c <= 57343) {
- return false;
- }
- // never used
- if (c >= 64976 && c <= 65007) {
- return false;
- }
- if ((c & 65535) === 65535 || (c & 65535) === 65534) {
- return false;
- }
- // control codes
- if (c >= 0 && c <= 8) {
- return false;
- }
- if (c === 11) {
- return false;
- }
- if (c >= 14 && c <= 31) {
- return false;
- }
- if (c >= 127 && c <= 159) {
- return false;
- }
- // out of range
- if (c > 1114111) {
- return false;
- }
- return true;
- }
- function fromCodePoint(c) {
- /*eslint no-bitwise:0*/
- if (c > 65535) {
- c -= 65536;
- var surrogate1 = 55296 + (c >> 10), surrogate2 = 56320 + (c & 1023);
- return String.fromCharCode(surrogate1, surrogate2);
- }
- return String.fromCharCode(c);
- }
- var UNESCAPE_MD_RE = /\\([!"#$%&'()*+,\-.\/:;<=>?@[\\\]^_`{|}~])/g;
- var ENTITY_RE = /&([a-z#][a-z0-9]{1,31});/gi;
- var UNESCAPE_ALL_RE = new RegExp(UNESCAPE_MD_RE.source + "|" + ENTITY_RE.source, "gi");
- var DIGITAL_ENTITY_TEST_RE = /^#((?:x[a-f0-9]{1,8}|[0-9]{1,8}))$/i;
- function replaceEntityPattern(match, name) {
- var code;
- if (has(entities, name)) {
- return entities[name];
- }
- if (name.charCodeAt(0) === 35 /* # */ && DIGITAL_ENTITY_TEST_RE.test(name)) {
- code = name[1].toLowerCase() === "x" ? parseInt(name.slice(2), 16) : parseInt(name.slice(1), 10);
- if (isValidEntityCode(code)) {
- return fromCodePoint(code);
- }
- }
- return match;
- }
- /*function replaceEntities(str) {
- if (str.indexOf('&') < 0) { return str; }
-
- return str.replace(ENTITY_RE, replaceEntityPattern);
- }*/ function unescapeMd(str) {
- if (str.indexOf("\\") < 0) {
- return str;
- }
- return str.replace(UNESCAPE_MD_RE, "$1");
- }
- function unescapeAll(str) {
- if (str.indexOf("\\") < 0 && str.indexOf("&") < 0) {
- return str;
- }
- return str.replace(UNESCAPE_ALL_RE, (function(match, escaped, entity) {
- if (escaped) {
- return escaped;
- }
- return replaceEntityPattern(match, entity);
- }));
- }
- ////////////////////////////////////////////////////////////////////////////////
- var HTML_ESCAPE_TEST_RE = /[&<>"]/;
- var HTML_ESCAPE_REPLACE_RE = /[&<>"]/g;
- var HTML_REPLACEMENTS = {
- "&": "&amp;",
- "<": "&lt;",
- ">": "&gt;",
- '"': "&quot;"
- };
- function replaceUnsafeChar(ch) {
- return HTML_REPLACEMENTS[ch];
- }
- function escapeHtml(str) {
- if (HTML_ESCAPE_TEST_RE.test(str)) {
- return str.replace(HTML_ESCAPE_REPLACE_RE, replaceUnsafeChar);
- }
- return str;
- }
- ////////////////////////////////////////////////////////////////////////////////
- var REGEXP_ESCAPE_RE = /[.?*+^$[\]\\(){}|-]/g;
- function escapeRE(str) {
- return str.replace(REGEXP_ESCAPE_RE, "\\$&");
- }
- ////////////////////////////////////////////////////////////////////////////////
- function isSpace(code) {
- switch (code) {
- case 9:
- case 32:
- return true;
- }
- return false;
- }
- // Zs (unicode class) || [\t\f\v\r\n]
- function isWhiteSpace(code) {
- if (code >= 8192 && code <= 8202) {
- return true;
- }
- switch (code) {
- case 9:
- // \t
- case 10:
- // \n
- case 11:
- // \v
- case 12:
- // \f
- case 13:
- // \r
- case 32:
- case 160:
- case 5760:
- case 8239:
- case 8287:
- case 12288:
- return true;
- }
- return false;
- }
-
//////////////////////////////////////////////////////////////////////////////// - /*eslint-disable max-len*/ - // Currently without astral characters support. - function isPunctChar(ch) { - return regex$4.test(ch); - } - // Markdown ASCII punctuation characters. - - // !, ", #, $, %, &, ', (, ), *, +, ,, -, ., /, :, ;, <, =, >, ?, @, [, \, ], ^, _, `, {, |, }, or ~ - // http://spec.commonmark.org/0.15/#ascii-punctuation-character - - // Don't confuse with unicode punctuation !!! It lacks some chars in ascii range. - - function isMdAsciiPunct(ch) { - switch (ch) { - case 33 /* ! */ : - case 34 /* " */ : - case 35 /* # */ : - case 36 /* $ */ : - case 37 /* % */ : - case 38 /* & */ : - case 39 /* ' */ : - case 40 /* ( */ : - case 41 /* ) */ : - case 42 /* * */ : - case 43 /* + */ : - case 44 /* , */ : - case 45 /* - */ : - case 46 /* . */ : - case 47 /* / */ : - case 58 /* : */ : - case 59 /* ; */ : - case 60 /* < */ : - case 61 /* = */ : - case 62 /* > */ : - case 63 /* ? */ : - case 64 /* @ */ : - case 91 /* [ */ : - case 92 /* \ */ : - case 93 /* ] */ : - case 94 /* ^ */ : - case 95 /* _ */ : - case 96 /* ` */ : - case 123 /* { */ : - case 124 /* | */ : - case 125 /* } */ : - case 126 /* ~ */ : - return true; - - default: - return false; - } - } - // Hepler to unify [reference labels]. - - function normalizeReference(str) { - // Trim and collapse whitespace - str = str.trim().replace(/\s+/g, " "); - // In node v10 'ẞ'.toLowerCase() === 'Ṿ', which is presumed to be a bug - // fixed in v12 (couldn't find any details). - - // So treat this one as a special case - // (remove this when node v10 is no longer supported). - - if ("\u1e9e".toLowerCase() === "\u1e7e") { - str = str.replace(/\u1e9e/g, "\xdf"); - } - // .toLowerCase().toUpperCase() should get rid of all differences - // between letter variants. - - // Simple .toLowerCase() doesn't normalize 125 code points correctly, - // and .toUpperCase doesn't normalize 6 of them (list of exceptions: - // İ, ϴ, ẞ, Ω, K, Å - those are already uppercased, but have differently - // uppercased versions). - - // Here's an example showing how it happens. Lets take greek letter omega: - // uppercase U+0398 (Θ), U+03f4 (ϴ) and lowercase U+03b8 (θ), U+03d1 (ϑ) - - // Unicode entries: - // 0398;GREEK CAPITAL LETTER THETA;Lu;0;L;;;;;N;;;;03B8; - // 03B8;GREEK SMALL LETTER THETA;Ll;0;L;;;;;N;;;0398;;0398 - // 03D1;GREEK THETA SYMBOL;Ll;0;L; 03B8;;;;N;GREEK SMALL LETTER SCRIPT THETA;;0398;;0398 - // 03F4;GREEK CAPITAL THETA SYMBOL;Lu;0;L; 0398;;;;N;;;;03B8; - - // Case-insensitive comparison should treat all of them as equivalent. - - // But .toLowerCase() doesn't change ϑ (it's already lowercase), - // and .toUpperCase() doesn't change ϴ (already uppercase). - - // Applying first lower then upper case normalizes any character: - // '\u0398\u03f4\u03b8\u03d1'.toLowerCase().toUpperCase() === '\u0398\u0398\u0398\u0398' - - // Note: this is equivalent to unicode case folding; unicode normalization - // is a different step that is not required here. - - // Final result should be uppercased, because it's later stored in an object - // (this avoid a conflict with Object.prototype members, - // most notably, `__proto__`) - - return str.toLowerCase().toUpperCase(); - } - //////////////////////////////////////////////////////////////////////////////// - // Re-export libraries commonly used in both markdown-it and its plugins, - // so plugins won't have to depend on them explicitly, which reduces their - // bundled size (e.g. a browser build). 
- - exports.lib = {}; - exports.lib.mdurl = mdurl; - exports.lib.ucmicro = uc_micro; - exports.assign = assign; - exports.isString = isString; - exports.has = has; - exports.unescapeMd = unescapeMd; - exports.unescapeAll = unescapeAll; - exports.isValidEntityCode = isValidEntityCode; - exports.fromCodePoint = fromCodePoint; - // exports.replaceEntities = replaceEntities; - exports.escapeHtml = escapeHtml; - exports.arrayReplaceAt = arrayReplaceAt; - exports.isSpace = isSpace; - exports.isWhiteSpace = isWhiteSpace; - exports.isMdAsciiPunct = isMdAsciiPunct; - exports.isPunctChar = isPunctChar; - exports.escapeRE = escapeRE; - exports.normalizeReference = normalizeReference; - })); - // Parse link label - var parse_link_label = function parseLinkLabel(state, start, disableNested) { - var level, found, marker, prevPos, labelEnd = -1, max = state.posMax, oldPos = state.pos; - state.pos = start + 1; - level = 1; - while (state.pos < max) { - marker = state.src.charCodeAt(state.pos); - if (marker === 93 /* ] */) { - level--; - if (level === 0) { - found = true; - break; - } - } - prevPos = state.pos; - state.md.inline.skipToken(state); - if (marker === 91 /* [ */) { - if (prevPos === state.pos - 1) { - // increase level if we find text `[`, which is not a part of any token - level++; - } else if (disableNested) { - state.pos = oldPos; - return -1; - } - } - } - if (found) { - labelEnd = state.pos; - } - // restore old state - state.pos = oldPos; - return labelEnd; - }; - var unescapeAll$2 = utils.unescapeAll; - var parse_link_destination = function parseLinkDestination(str, start, max) { - var code, level, pos = start, result = { - ok: false, - pos: 0, - lines: 0, - str: "" - }; - if (str.charCodeAt(pos) === 60 /* < */) { - pos++; - while (pos < max) { - code = str.charCodeAt(pos); - if (code === 10 /* \n */) { - return result; - } - if (code === 60 /* < */) { - return result; - } - if (code === 62 /* > */) { - result.pos = pos + 1; - result.str = unescapeAll$2(str.slice(start + 1, pos)); - result.ok = true; - return result; - } - if (code === 92 /* \ */ && pos + 1 < max) { - pos += 2; - continue; - } - pos++; - } - // no closing '>' - return result; - } - // this should be ... } else { ... 
branch
- level = 0;
- while (pos < max) {
- code = str.charCodeAt(pos);
- if (code === 32) {
- break;
- }
- // ascii control characters
- if (code < 32 || code === 127) {
- break;
- }
- if (code === 92 /* \ */ && pos + 1 < max) {
- if (str.charCodeAt(pos + 1) === 32) {
- break;
- }
- pos += 2;
- continue;
- }
- if (code === 40 /* ( */) {
- level++;
- if (level > 32) {
- return result;
- }
- }
- if (code === 41 /* ) */) {
- if (level === 0) {
- break;
- }
- level--;
- }
- pos++;
- }
- if (start === pos) {
- return result;
- }
- if (level !== 0) {
- return result;
- }
- result.str = unescapeAll$2(str.slice(start, pos));
- result.pos = pos;
- result.ok = true;
- return result;
- };
- var unescapeAll$1 = utils.unescapeAll;
- var parse_link_title = function parseLinkTitle(str, start, max) {
- var code, marker, lines = 0, pos = start, result = {
- ok: false,
- pos: 0,
- lines: 0,
- str: ""
- };
- if (pos >= max) {
- return result;
- }
- marker = str.charCodeAt(pos);
- if (marker !== 34 /* " */ && marker !== 39 /* ' */ && marker !== 40 /* ( */) {
- return result;
- }
- pos++;
- // if opening marker is "(", switch it to closing marker ")"
- if (marker === 40) {
- marker = 41;
- }
- while (pos < max) {
- code = str.charCodeAt(pos);
- if (code === marker) {
- result.pos = pos + 1;
- result.lines = lines;
- result.str = unescapeAll$1(str.slice(start + 1, pos));
- result.ok = true;
- return result;
- } else if (code === 40 /* ( */ && marker === 41 /* ) */) {
- return result;
- } else if (code === 10) {
- lines++;
- } else if (code === 92 /* \ */ && pos + 1 < max) {
- pos++;
- if (str.charCodeAt(pos) === 10) {
- lines++;
- }
- }
- pos++;
- }
- return result;
- };
- var parseLinkLabel = parse_link_label;
- var parseLinkDestination = parse_link_destination;
- var parseLinkTitle = parse_link_title;
- var helpers = {
- parseLinkLabel: parseLinkLabel,
- parseLinkDestination: parseLinkDestination,
- parseLinkTitle: parseLinkTitle
- };
- var assign$1 = utils.assign;
- var unescapeAll = utils.unescapeAll;
- var escapeHtml = utils.escapeHtml;
- ////////////////////////////////////////////////////////////////////////////////
- var default_rules = {};
- default_rules.code_inline = function(tokens, idx, options, env, slf) {
- var token = tokens[idx];
- return "<code" + slf.renderAttrs(token) + ">" + escapeHtml(token.content) + "</code>";
- };
- default_rules.code_block = function(tokens, idx, options, env, slf) {
- var token = tokens[idx];
- return "<pre" + slf.renderAttrs(token) + "><code>" + escapeHtml(tokens[idx].content) + "</code></pre>\n";
- };
- default_rules.fence = function(tokens, idx, options, env, slf) {
- var token = tokens[idx], info = token.info ? unescapeAll(token.info).trim() : "", langName = "", langAttrs = "", highlighted, i, arr, tmpAttrs, tmpToken;
- if (info) {
- arr = info.split(/(\s+)/g);
- langName = arr[0];
- langAttrs = arr.slice(2).join("");
- }
- if (options.highlight) {
- highlighted = options.highlight(token.content, langName, langAttrs) || escapeHtml(token.content);
- } else {
- highlighted = escapeHtml(token.content);
- }
- if (highlighted.indexOf("<pre") === 0) {
- return highlighted + "\n";
- }
- // If language exists, inject class gently, without modifying original token.
- // May be, one day we will add .deepClone() for token and simplify this part, but
- // now we prefer to keep things rough for performance reasons.
- if (info) {
- i = token.attrIndex("class");
- tmpAttrs = token.attrs ? token.attrs.slice() : [];
- if (i < 0) {
- tmpAttrs.push([ "class", options.langPrefix + langName ]);
- } else {
- tmpAttrs[i] = tmpAttrs[i].slice();
- tmpAttrs[i][1] += " " + options.langPrefix + langName;
- }
- // Fake token just to render attributes
- tmpToken = {
- attrs: tmpAttrs
- };
- return "<pre><code" + slf.renderAttrs(tmpToken) + ">" + highlighted + "</code></pre>\n";
- }
- return "<pre><code" + slf.renderAttrs(token) + ">" + highlighted + "</code></pre>\n";
- };
- default_rules.image = function(tokens, idx, options, env, slf) {
- var token = tokens[idx];
- // "alt" attr MUST be set, even if empty. Because it's mandatory and
- // should be placed on proper position for tests.
-
- // Replace content with actual value
- token.attrs[token.attrIndex("alt")][1] = slf.renderInlineAsText(token.children, options, env);
- return slf.renderToken(tokens, idx, options);
- };
- default_rules.hardbreak = function(tokens, idx, options /*, env */) {
- return options.xhtmlOut ? "<br />\n" : "<br>\n";
- };
- default_rules.softbreak = function(tokens, idx, options /*, env */) {
- return options.breaks ? options.xhtmlOut ? "<br />\n" : "<br>\n" : "\n";
- };
- default_rules.text = function(tokens, idx /*, options, env */) {
- return escapeHtml(tokens[idx].content);
- };
- default_rules.html_block = function(tokens, idx /*, options, env */) {
- return tokens[idx].content;
- };
- default_rules.html_inline = function(tokens, idx /*, options, env */) {
- return tokens[idx].content;
- };
- /**
- * new Renderer()
- *
- * Creates new [[Renderer]] instance and fill [[Renderer#rules]] with defaults.
- **/ function Renderer() {
- /**
- * Renderer#rules -> Object
- *
- * Contains render rules for tokens. Can be updated and extended.
- *
- * ##### Example
- *
- * ```javascript
- * var md = require('markdown-it')();
- *
- * md.renderer.rules.strong_open = function () { return '<b>'; };
- * md.renderer.rules.strong_close = function () { return '</b>'; };
- *
- * var result = md.renderInline(...);
- * ```
- *
- * Each rule is called as independent static function with fixed signature:
- *
- * ```javascript
- * function my_token_render(tokens, idx, options, env, renderer) {
- * // ...
- * return renderedHTML;
- * }
- * ```
- *
- * See [source code](https://github.com/markdown-it/markdown-it/blob/master/lib/renderer.js)
- * for more details and examples.
- **/
- this.rules = assign$1({}, default_rules);
- }
- /**
- * Renderer.renderAttrs(token) -> String
- *
- * Render token attributes to string.
- **/ Renderer.prototype.renderAttrs = function renderAttrs(token) {
- var i, l, result;
- if (!token.attrs) {
- return "";
- }
- result = "";
- for (i = 0, l = token.attrs.length; i < l; i++) {
- result += " " + escapeHtml(token.attrs[i][0]) + '="' + escapeHtml(token.attrs[i][1]) + '"';
- }
- return result;
- };
- /**
- * Renderer.renderToken(tokens, idx, options) -> String
- * - tokens (Array): list of tokens
- * - idx (Numbed): token index to render
- * - options (Object): params of parser instance
- *
- * Default token renderer. Can be overriden by custom function
- * in [[Renderer#rules]].
- **/ Renderer.prototype.renderToken = function renderToken(tokens, idx, options) {
- var nextToken, result = "", needLf = false, token = tokens[idx];
- // Tight list paragraphs
- if (token.hidden) {
- return "";
- }
- // Insert a newline between hidden paragraph and subsequent opening
- // block-level tag.
-
- // For example, here we should insert a newline before blockquote:
- // - a
- // >
-
- if (token.block && token.nesting !== -1 && idx && tokens[idx - 1].hidden) {
- result += "\n";
- }
- // Add token name, e.g. `<img`.
- result += (token.nesting === -1 ? "</" : "<") + token.tag;
- // Encode attributes, e.g. `<img src=" alt="`.
- result += this.renderAttrs(token);
- // Add a slash for self-closing tags, e.g. `<img src="foo" /`.
- if (token.nesting === 0 && options.xhtmlOut) {
- result += " /";
- }
- // Check if we need to add a newline after this tag
- if (token.block) {
- needLf = true;
- if (token.nesting === 1) {
- if (idx + 1 < tokens.length) {
- nextToken = tokens[idx + 1];
- if (nextToken.type === "inline" || nextToken.hidden) {
- // Block-level tag containing an inline tag.
- needLf = false;
- } else if (nextToken.nesting === -1 && nextToken.tag === token.tag) {
- // Opening tag + closing tag of the same type. E.g. `<li></li>`.
- needLf = false;
- }
- }
- }
- }
- result += needLf ? ">\n" : ">";
- return result;
- };
- /**
- * Renderer.renderInline(tokens, options, env) -> String
- * - tokens (Array): list on block tokens to render
- * - options (Object): params of parser instance
- * - env (Object): additional data from parsed input (references, for example)
- *
- * The same as [[Renderer.render]], but for single token of `inline` type.
- **/ Renderer.prototype.renderInline = function(tokens, options, env) { - var type, result = "", rules = this.rules; - for (var i = 0, len = tokens.length; i < len; i++) { - type = tokens[i].type; - if (typeof rules[type] !== "undefined") { - result += rules[type](tokens, i, options, env, this); - } else { - result += this.renderToken(tokens, i, options); - } - } - return result; - }; - /** internal - * Renderer.renderInlineAsText(tokens, options, env) -> String - * - tokens (Array): list on block tokens to render - * - options (Object): params of parser instance - * - env (Object): additional data from parsed input (references, for example) - * - * Special kludge for image `alt` attributes to conform CommonMark spec. - * Don't try to use it! Spec requires to show `alt` content with stripped markup, - * instead of simple escaping. - **/ Renderer.prototype.renderInlineAsText = function(tokens, options, env) { - var result = ""; - for (var i = 0, len = tokens.length; i < len; i++) { - if (tokens[i].type === "text") { - result += tokens[i].content; - } else if (tokens[i].type === "image") { - result += this.renderInlineAsText(tokens[i].children, options, env); - } else if (tokens[i].type === "softbreak") { - result += "\n"; - } - } - return result; - }; - /** - * Renderer.render(tokens, options, env) -> String - * - tokens (Array): list on block tokens to render - * - options (Object): params of parser instance - * - env (Object): additional data from parsed input (references, for example) - * - * Takes token stream and generates HTML. Probably, you will never need to call - * this method directly. - **/ Renderer.prototype.render = function(tokens, options, env) { - var i, len, type, result = "", rules = this.rules; - for (i = 0, len = tokens.length; i < len; i++) { - type = tokens[i].type; - if (type === "inline") { - result += this.renderInline(tokens[i].children, options, env); - } else if (typeof rules[type] !== "undefined") { - result += rules[type](tokens, i, options, env, this); - } else { - result += this.renderToken(tokens, i, options, env); - } - } - return result; - }; - var renderer = Renderer; - /** - * class Ruler - * - * Helper class, used by [[MarkdownIt#core]], [[MarkdownIt#block]] and - * [[MarkdownIt#inline]] to manage sequences of functions (rules): - * - * - keep rules in defined order - * - assign the name to each rule - * - enable/disable rules - * - add/replace rules - * - allow assign rules to additional named chains (in the same) - * - cacheing lists of active rules - * - * You will not need use this class directly until write plugins. For simple - * rules control use [[MarkdownIt.disable]], [[MarkdownIt.enable]] and - * [[MarkdownIt.use]]. - **/ - /** - * new Ruler() - **/ function Ruler() { - // List of added rules. Each element is: - // { - // name: XXX, - // enabled: Boolean, - // fn: Function(), - // alt: [ name2, name3 ] - // } - this.__rules__ = []; - // Cached rule chains. - - // First level - chain name, '' for default. - // Second level - diginal anchor for fast filtering by charcodes. 
- - this.__cache__ = null; - } - //////////////////////////////////////////////////////////////////////////////// - // Helper methods, should not be used directly - // Find rule index by name - - Ruler.prototype.__find__ = function(name) { - for (var i = 0; i < this.__rules__.length; i++) { - if (this.__rules__[i].name === name) { - return i; - } - } - return -1; - }; - // Build rules lookup cache - - Ruler.prototype.__compile__ = function() { - var self = this; - var chains = [ "" ]; - // collect unique names - self.__rules__.forEach((function(rule) { - if (!rule.enabled) { - return; - } - rule.alt.forEach((function(altName) { - if (chains.indexOf(altName) < 0) { - chains.push(altName); - } - })); - })); - self.__cache__ = {}; - chains.forEach((function(chain) { - self.__cache__[chain] = []; - self.__rules__.forEach((function(rule) { - if (!rule.enabled) { - return; - } - if (chain && rule.alt.indexOf(chain) < 0) { - return; - } - self.__cache__[chain].push(rule.fn); - })); - })); - }; - /** - * Ruler.at(name, fn [, options]) - * - name (String): rule name to replace. - * - fn (Function): new rule function. - * - options (Object): new rule options (not mandatory). - * - * Replace rule by name with new function & options. Throws error if name not - * found. - * - * ##### Options: - * - * - __alt__ - array with names of "alternate" chains. - * - * ##### Example - * - * Replace existing typographer replacement rule with new one: - * - * ```javascript - * var md = require('markdown-it')(); - * - * md.core.ruler.at('replacements', function replace(state) { - * //... - * }); - * ``` - **/ Ruler.prototype.at = function(name, fn, options) { - var index = this.__find__(name); - var opt = options || {}; - if (index === -1) { - throw new Error("Parser rule not found: " + name); - } - this.__rules__[index].fn = fn; - this.__rules__[index].alt = opt.alt || []; - this.__cache__ = null; - }; - /** - * Ruler.before(beforeName, ruleName, fn [, options]) - * - beforeName (String): new rule will be added before this one. - * - ruleName (String): name of added rule. - * - fn (Function): rule function. - * - options (Object): rule options (not mandatory). - * - * Add new rule to chain before one with given name. See also - * [[Ruler.after]], [[Ruler.push]]. - * - * ##### Options: - * - * - __alt__ - array with names of "alternate" chains. - * - * ##### Example - * - * ```javascript - * var md = require('markdown-it')(); - * - * md.block.ruler.before('paragraph', 'my_rule', function replace(state) { - * //... - * }); - * ``` - **/ Ruler.prototype.before = function(beforeName, ruleName, fn, options) { - var index = this.__find__(beforeName); - var opt = options || {}; - if (index === -1) { - throw new Error("Parser rule not found: " + beforeName); - } - this.__rules__.splice(index, 0, { - name: ruleName, - enabled: true, - fn: fn, - alt: opt.alt || [] - }); - this.__cache__ = null; - }; - /** - * Ruler.after(afterName, ruleName, fn [, options]) - * - afterName (String): new rule will be added after this one. - * - ruleName (String): name of added rule. - * - fn (Function): rule function. - * - options (Object): rule options (not mandatory). - * - * Add new rule to chain after one with given name. See also - * [[Ruler.before]], [[Ruler.push]]. - * - * ##### Options: - * - * - __alt__ - array with names of "alternate" chains. - * - * ##### Example - * - * ```javascript - * var md = require('markdown-it')(); - * - * md.inline.ruler.after('text', 'my_rule', function replace(state) { - * //... 
- * }); - * ``` - **/ Ruler.prototype.after = function(afterName, ruleName, fn, options) { - var index = this.__find__(afterName); - var opt = options || {}; - if (index === -1) { - throw new Error("Parser rule not found: " + afterName); - } - this.__rules__.splice(index + 1, 0, { - name: ruleName, - enabled: true, - fn: fn, - alt: opt.alt || [] - }); - this.__cache__ = null; - }; - /** - * Ruler.push(ruleName, fn [, options]) - * - ruleName (String): name of added rule. - * - fn (Function): rule function. - * - options (Object): rule options (not mandatory). - * - * Push new rule to the end of chain. See also - * [[Ruler.before]], [[Ruler.after]]. - * - * ##### Options: - * - * - __alt__ - array with names of "alternate" chains. - * - * ##### Example - * - * ```javascript - * var md = require('markdown-it')(); - * - * md.core.ruler.push('my_rule', function replace(state) { - * //... - * }); - * ``` - **/ Ruler.prototype.push = function(ruleName, fn, options) { - var opt = options || {}; - this.__rules__.push({ - name: ruleName, - enabled: true, - fn: fn, - alt: opt.alt || [] - }); - this.__cache__ = null; - }; - /** - * Ruler.enable(list [, ignoreInvalid]) -> Array - * - list (String|Array): list of rule names to enable. - * - ignoreInvalid (Boolean): set `true` to ignore errors when rule not found. - * - * Enable rules with given names. If any rule name not found - throw Error. - * Errors can be disabled by second param. - * - * Returns list of found rule names (if no exception happened). - * - * See also [[Ruler.disable]], [[Ruler.enableOnly]]. - **/ Ruler.prototype.enable = function(list, ignoreInvalid) { - if (!Array.isArray(list)) { - list = [ list ]; - } - var result = []; - // Search by name and enable - list.forEach((function(name) { - var idx = this.__find__(name); - if (idx < 0) { - if (ignoreInvalid) { - return; - } - throw new Error("Rules manager: invalid rule name " + name); - } - this.__rules__[idx].enabled = true; - result.push(name); - }), this); - this.__cache__ = null; - return result; - }; - /** - * Ruler.enableOnly(list [, ignoreInvalid]) - * - list (String|Array): list of rule names to enable (whitelist). - * - ignoreInvalid (Boolean): set `true` to ignore errors when rule not found. - * - * Enable rules with given names, and disable everything else. If any rule name - * not found - throw Error. Errors can be disabled by second param. - * - * See also [[Ruler.disable]], [[Ruler.enable]]. - **/ Ruler.prototype.enableOnly = function(list, ignoreInvalid) { - if (!Array.isArray(list)) { - list = [ list ]; - } - this.__rules__.forEach((function(rule) { - rule.enabled = false; - })); - this.enable(list, ignoreInvalid); - }; - /** - * Ruler.disable(list [, ignoreInvalid]) -> Array - * - list (String|Array): list of rule names to disable. - * - ignoreInvalid (Boolean): set `true` to ignore errors when rule not found. - * - * Disable rules with given names. If any rule name not found - throw Error. - * Errors can be disabled by second param. - * - * Returns list of found rule names (if no exception happened). - * - * See also [[Ruler.enable]], [[Ruler.enableOnly]]. 
- **/ Ruler.prototype.disable = function(list, ignoreInvalid) {
- if (!Array.isArray(list)) {
- list = [ list ];
- }
- var result = [];
- // Search by name and disable
- list.forEach((function(name) {
- var idx = this.__find__(name);
- if (idx < 0) {
- if (ignoreInvalid) {
- return;
- }
- throw new Error("Rules manager: invalid rule name " + name);
- }
- this.__rules__[idx].enabled = false;
- result.push(name);
- }), this);
- this.__cache__ = null;
- return result;
- };
- /**
- * Ruler.getRules(chainName) -> Array
- *
- * Return array of active functions (rules) for given chain name. It analyzes
- * rules configuration, compiles caches if not exists and returns result.
- *
- * Default chain name is `''` (empty string). It can't be skipped. That's
- * done intentionally, to keep signature monomorphic for high speed.
- **/ Ruler.prototype.getRules = function(chainName) {
- if (this.__cache__ === null) {
- this.__compile__();
- }
- // Chain can be empty, if rules disabled. But we still have to return Array.
- return this.__cache__[chainName] || [];
- };
- var ruler = Ruler;
- // Normalize input string
- // https://spec.commonmark.org/0.29/#line-ending
- var NEWLINES_RE = /\r\n?|\n/g;
- var NULL_RE = /\0/g;
- var normalize = function normalize(state) {
- var str;
- // Normalize newlines
- str = state.src.replace(NEWLINES_RE, "\n");
- // Replace NULL characters
- str = str.replace(NULL_RE, "\ufffd");
- state.src = str;
- };
- var block = function block(state) {
- var token;
- if (state.inlineMode) {
- token = new state.Token("inline", "", 0);
- token.content = state.src;
- token.map = [ 0, 1 ];
- token.children = [];
- state.tokens.push(token);
- } else {
- state.md.block.parse(state.src, state.md, state.env, state.tokens);
- }
- };
- var inline = function inline(state) {
- var tokens = state.tokens, tok, i, l;
- // Parse inlines
- for (i = 0, l = tokens.length; i < l; i++) {
- tok = tokens[i];
- if (tok.type === "inline") {
- state.md.inline.parse(tok.content, state.md, state.env, tok.children);
- }
- }
- };
- var arrayReplaceAt = utils.arrayReplaceAt;
- function isLinkOpen$1(str) {
- return /^<a[>\s]/i.test(str);
- }
- function isLinkClose$1(str) {
- return /^<\/a\s*>/i.test(str);
- }
- var linkify$1 = function linkify(state) {
- var i, j, l, tokens, token, currentToken, nodes, ln, text, pos, lastPos, level, htmlLinkLevel, url, fullUrl, urlText, blockTokens = state.tokens, links;
- if (!state.md.options.linkify) {
- return;
- }
- for (j = 0, l = blockTokens.length; j < l; j++) {
- if (blockTokens[j].type !== "inline" || !state.md.linkify.pretest(blockTokens[j].content)) {
- continue;
- }
- tokens = blockTokens[j].children;
- htmlLinkLevel = 0;
- // We scan from the end, to keep position when new tags added.
- // Use reversed logic in links start/end match - for (i = tokens.length - 1; i >= 0; i--) { - currentToken = tokens[i]; - // Skip content of markdown links - if (currentToken.type === "link_close") { - i--; - while (tokens[i].level !== currentToken.level && tokens[i].type !== "link_open") { - i--; - } - continue; - } - // Skip content of html tag links - if (currentToken.type === "html_inline") { - if (isLinkOpen$1(currentToken.content) && htmlLinkLevel > 0) { - htmlLinkLevel--; - } - if (isLinkClose$1(currentToken.content)) { - htmlLinkLevel++; - } - } - if (htmlLinkLevel > 0) { - continue; - } - if (currentToken.type === "text" && state.md.linkify.test(currentToken.content)) { - text = currentToken.content; - links = state.md.linkify.match(text); - // Now split string to nodes - nodes = []; - level = currentToken.level; - lastPos = 0; - // forbid escape sequence at the start of the string, - // this avoids http\://example.com/ from being linkified as - // http://example.com/ - if (links.length > 0 && links[0].index === 0 && i > 0 && tokens[i - 1].type === "text_special") { - links = links.slice(1); - } - for (ln = 0; ln < links.length; ln++) { - url = links[ln].url; - fullUrl = state.md.normalizeLink(url); - if (!state.md.validateLink(fullUrl)) { - continue; - } - urlText = links[ln].text; - // Linkifier might send raw hostnames like "example.com", where url - // starts with domain name. So we prepend http:// in those cases, - // and remove it afterwards. - - if (!links[ln].schema) { - urlText = state.md.normalizeLinkText("http://" + urlText).replace(/^http:\/\//, ""); - } else if (links[ln].schema === "mailto:" && !/^mailto:/i.test(urlText)) { - urlText = state.md.normalizeLinkText("mailto:" + urlText).replace(/^mailto:/, ""); - } else { - urlText = state.md.normalizeLinkText(urlText); - } - pos = links[ln].index; - if (pos > lastPos) { - token = new state.Token("text", "", 0); - token.content = text.slice(lastPos, pos); - token.level = level; - nodes.push(token); - } - token = new state.Token("link_open", "a", 1); - token.attrs = [ [ "href", fullUrl ] ]; - token.level = level++; - token.markup = "linkify"; - token.info = "auto"; - nodes.push(token); - token = new state.Token("text", "", 0); - token.content = urlText; - token.level = level; - nodes.push(token); - token = new state.Token("link_close", "a", -1); - token.level = --level; - token.markup = "linkify"; - token.info = "auto"; - nodes.push(token); - lastPos = links[ln].lastIndex; - } - if (lastPos < text.length) { - token = new state.Token("text", "", 0); - token.content = text.slice(lastPos); - token.level = level; - nodes.push(token); - } - // replace current node - blockTokens[j].children = tokens = arrayReplaceAt(tokens, i, nodes); - } - } - } - }; - // Simple typographic replacements - // TODO: - // - fractionals 1/2, 1/4, 3/4 -> ½, ¼, ¾ - // - multiplications 2 x 4 -> 2 × 4 - var RARE_RE = /\+-|\.\.|\?\?\?\?|!!!!|,,|--/; - // Workaround for phantomjs - need regex without /g flag, - // or root check will fail every second time - var SCOPED_ABBR_TEST_RE = /\((c|tm|r)\)/i; - var SCOPED_ABBR_RE = /\((c|tm|r)\)/gi; - var SCOPED_ABBR = { - c: "\xa9", - r: "\xae", - tm: "\u2122" - }; - function replaceFn(match, name) { - return SCOPED_ABBR[name.toLowerCase()]; - } - function replace_scoped(inlineTokens) { - var i, token, inside_autolink = 0; - for (i = inlineTokens.length - 1; i >= 0; i--) { - token = inlineTokens[i]; - if (token.type === "text" && !inside_autolink) { - token.content = token.content.replace(SCOPED_ABBR_RE, 
replaceFn); - } - if (token.type === "link_open" && token.info === "auto") { - inside_autolink--; - } - if (token.type === "link_close" && token.info === "auto") { - inside_autolink++; - } - } - } - function replace_rare(inlineTokens) { - var i, token, inside_autolink = 0; - for (i = inlineTokens.length - 1; i >= 0; i--) { - token = inlineTokens[i]; - if (token.type === "text" && !inside_autolink) { - if (RARE_RE.test(token.content)) { - token.content = token.content.replace(/\+-/g, "\xb1").replace(/\.{2,}/g, "\u2026").replace(/([?!])\u2026/g, "$1..").replace(/([?!]){4,}/g, "$1$1$1").replace(/,{2,}/g, ",").replace(/(^|[^-])---(?=[^-]|$)/gm, "$1\u2014").replace(/(^|\s)--(?=\s|$)/gm, "$1\u2013").replace(/(^|[^-\s])--(?=[^-\s]|$)/gm, "$1\u2013"); - } - } - if (token.type === "link_open" && token.info === "auto") { - inside_autolink--; - } - if (token.type === "link_close" && token.info === "auto") { - inside_autolink++; - } - } - } - var replacements = function replace(state) { - var blkIdx; - if (!state.md.options.typographer) { - return; - } - for (blkIdx = state.tokens.length - 1; blkIdx >= 0; blkIdx--) { - if (state.tokens[blkIdx].type !== "inline") { - continue; - } - if (SCOPED_ABBR_TEST_RE.test(state.tokens[blkIdx].content)) { - replace_scoped(state.tokens[blkIdx].children); - } - if (RARE_RE.test(state.tokens[blkIdx].content)) { - replace_rare(state.tokens[blkIdx].children); - } - } - }; - var isWhiteSpace$1 = utils.isWhiteSpace; - var isPunctChar$1 = utils.isPunctChar; - var isMdAsciiPunct$1 = utils.isMdAsciiPunct; - var QUOTE_TEST_RE = /['"]/; - var QUOTE_RE = /['"]/g; - var APOSTROPHE = "\u2019"; - /* ’ */ function replaceAt(str, index, ch) { - return str.slice(0, index) + ch + str.slice(index + 1); - } - function process_inlines(tokens, state) { - var i, token, text, t, pos, max, thisLevel, item, lastChar, nextChar, isLastPunctChar, isNextPunctChar, isLastWhiteSpace, isNextWhiteSpace, canOpen, canClose, j, isSingle, stack, openQuote, closeQuote; - stack = []; - for (i = 0; i < tokens.length; i++) { - token = tokens[i]; - thisLevel = tokens[i].level; - for (j = stack.length - 1; j >= 0; j--) { - if (stack[j].level <= thisLevel) { - break; - } - } - stack.length = j + 1; - if (token.type !== "text") { - continue; - } - text = token.content; - pos = 0; - max = text.length; - /*eslint no-labels:0,block-scoped-var:0*/ OUTER: while (pos < max) { - QUOTE_RE.lastIndex = pos; - t = QUOTE_RE.exec(text); - if (!t) { - break; - } - canOpen = canClose = true; - pos = t.index + 1; - isSingle = t[0] === "'"; - // Find previous character, - // default to space if it's the beginning of the line - - lastChar = 32; - if (t.index - 1 >= 0) { - lastChar = text.charCodeAt(t.index - 1); - } else { - for (j = i - 1; j >= 0; j--) { - if (tokens[j].type === "softbreak" || tokens[j].type === "hardbreak") break; - // lastChar defaults to 0x20 - if (!tokens[j].content) continue; - // should skip all tokens except 'text', 'html_inline' or 'code_inline' - lastChar = tokens[j].content.charCodeAt(tokens[j].content.length - 1); - break; - } - } - // Find next character, - // default to space if it's the end of the line - - nextChar = 32; - if (pos < max) { - nextChar = text.charCodeAt(pos); - } else { - for (j = i + 1; j < tokens.length; j++) { - if (tokens[j].type === "softbreak" || tokens[j].type === "hardbreak") break; - // nextChar defaults to 0x20 - if (!tokens[j].content) continue; - // should skip all tokens except 'text', 'html_inline' or 'code_inline' - nextChar = tokens[j].content.charCodeAt(0); - break; 
- } - } - isLastPunctChar = isMdAsciiPunct$1(lastChar) || isPunctChar$1(String.fromCharCode(lastChar)); - isNextPunctChar = isMdAsciiPunct$1(nextChar) || isPunctChar$1(String.fromCharCode(nextChar)); - isLastWhiteSpace = isWhiteSpace$1(lastChar); - isNextWhiteSpace = isWhiteSpace$1(nextChar); - if (isNextWhiteSpace) { - canOpen = false; - } else if (isNextPunctChar) { - if (!(isLastWhiteSpace || isLastPunctChar)) { - canOpen = false; - } - } - if (isLastWhiteSpace) { - canClose = false; - } else if (isLastPunctChar) { - if (!(isNextWhiteSpace || isNextPunctChar)) { - canClose = false; - } - } - if (nextChar === 34 /* " */ && t[0] === '"') { - if (lastChar >= 48 /* 0 */ && lastChar <= 57 /* 9 */) { - // special case: 1"" - count first quote as an inch - canClose = canOpen = false; - } - } - if (canOpen && canClose) { - // Replace quotes in the middle of punctuation sequence, but not - // in the middle of the words, i.e.: - // 1. foo " bar " baz - not replaced - // 2. foo-"-bar-"-baz - replaced - // 3. foo"bar"baz - not replaced - canOpen = isLastPunctChar; - canClose = isNextPunctChar; - } - if (!canOpen && !canClose) { - // middle of word - if (isSingle) { - token.content = replaceAt(token.content, t.index, APOSTROPHE); - } - continue; - } - if (canClose) { - // this could be a closing quote, rewind the stack to get a match - for (j = stack.length - 1; j >= 0; j--) { - item = stack[j]; - if (stack[j].level < thisLevel) { - break; - } - if (item.single === isSingle && stack[j].level === thisLevel) { - item = stack[j]; - if (isSingle) { - openQuote = state.md.options.quotes[2]; - closeQuote = state.md.options.quotes[3]; - } else { - openQuote = state.md.options.quotes[0]; - closeQuote = state.md.options.quotes[1]; - } - // replace token.content *before* tokens[item.token].content, - // because, if they are pointing at the same token, replaceAt - // could mess up indices when quote length != 1 - token.content = replaceAt(token.content, t.index, closeQuote); - tokens[item.token].content = replaceAt(tokens[item.token].content, item.pos, openQuote); - pos += closeQuote.length - 1; - if (item.token === i) { - pos += openQuote.length - 1; - } - text = token.content; - max = text.length; - stack.length = j; - continue OUTER; - } - } - } - if (canOpen) { - stack.push({ - token: i, - pos: t.index, - single: isSingle, - level: thisLevel - }); - } else if (canClose && isSingle) { - token.content = replaceAt(token.content, t.index, APOSTROPHE); - } - } - } - } - var smartquotes = function smartquotes(state) { - /*eslint max-depth:0*/ - var blkIdx; - if (!state.md.options.typographer) { - return; - } - for (blkIdx = state.tokens.length - 1; blkIdx >= 0; blkIdx--) { - if (state.tokens[blkIdx].type !== "inline" || !QUOTE_TEST_RE.test(state.tokens[blkIdx].content)) { - continue; - } - process_inlines(state.tokens[blkIdx].children, state); - } - }; - // Join raw text tokens with the rest of the text - var text_join = function text_join(state) { - var j, l, tokens, curr, max, last, blockTokens = state.tokens; - for (j = 0, l = blockTokens.length; j < l; j++) { - if (blockTokens[j].type !== "inline") continue; - tokens = blockTokens[j].children; - max = tokens.length; - for (curr = 0; curr < max; curr++) { - if (tokens[curr].type === "text_special") { - tokens[curr].type = "text"; - } - } - for (curr = last = 0; curr < max; curr++) { - if (tokens[curr].type === "text" && curr + 1 < max && tokens[curr + 1].type === "text") { - // collapse two adjacent text nodes - tokens[curr + 1].content = tokens[curr].content 
+ tokens[curr + 1].content; - } else { - if (curr !== last) { - tokens[last] = tokens[curr]; - } - last++; - } - } - if (curr !== last) { - tokens.length = last; - } - } - }; - // Token class - /** - * class Token - **/ - /** - * new Token(type, tag, nesting) - * - * Create new token and fill passed properties. - **/ function Token(type, tag, nesting) { - /** - * Token#type -> String - * - * Type of the token (string, e.g. "paragraph_open") - **/ - this.type = type; - /** - * Token#tag -> String - * - * html tag name, e.g. "p" - **/ this.tag = tag; - /** - * Token#attrs -> Array - * - * Html attributes. Format: `[ [ name1, value1 ], [ name2, value2 ] ]` - **/ this.attrs = null; - /** - * Token#map -> Array - * - * Source map info. Format: `[ line_begin, line_end ]` - **/ this.map = null; - /** - * Token#nesting -> Number - * - * Level change (number in {-1, 0, 1} set), where: - * - * - `1` means the tag is opening - * - `0` means the tag is self-closing - * - `-1` means the tag is closing - **/ this.nesting = nesting; - /** - * Token#level -> Number - * - * nesting level, the same as `state.level` - **/ this.level = 0; - /** - * Token#children -> Array - * - * An array of child nodes (inline and img tokens) - **/ this.children = null; - /** - * Token#content -> String - * - * In a case of self-closing tag (code, html, fence, etc.), - * it has contents of this tag. - **/ this.content = ""; - /** - * Token#markup -> String - * - * '*' or '_' for emphasis, fence string for fence, etc. - **/ this.markup = ""; - /** - * Token#info -> String - * - * Additional information: - * - * - Info string for "fence" tokens - * - The value "auto" for autolink "link_open" and "link_close" tokens - * - The string value of the item marker for ordered-list "list_item_open" tokens - **/ this.info = ""; - /** - * Token#meta -> Object - * - * A place for plugins to store an arbitrary data - **/ this.meta = null; - /** - * Token#block -> Boolean - * - * True for block-level tokens, false for inline tokens. - * Used in renderer to calculate line breaks - **/ this.block = false; - /** - * Token#hidden -> Boolean - * - * If it's true, ignore this element when rendering. Used for tight lists - * to hide paragraphs. - **/ this.hidden = false; - } - /** - * Token.attrIndex(name) -> Number - * - * Search attribute index by name. - **/ Token.prototype.attrIndex = function attrIndex(name) { - var attrs, i, len; - if (!this.attrs) { - return -1; - } - attrs = this.attrs; - for (i = 0, len = attrs.length; i < len; i++) { - if (attrs[i][0] === name) { - return i; - } - } - return -1; - }; - /** - * Token.attrPush(attrData) - * - * Add `[ name, value ]` attribute to list. Init attrs if necessary - **/ Token.prototype.attrPush = function attrPush(attrData) { - if (this.attrs) { - this.attrs.push(attrData); - } else { - this.attrs = [ attrData ]; - } - }; - /** - * Token.attrSet(name, value) - * - * Set `name` attribute to `value`. Override old value if exists. - **/ Token.prototype.attrSet = function attrSet(name, value) { - var idx = this.attrIndex(name), attrData = [ name, value ]; - if (idx < 0) { - this.attrPush(attrData); - } else { - this.attrs[idx] = attrData; - } - }; - /** - * Token.attrGet(name) - * - * Get the value of attribute `name`, or null if it does not exist. 
- **/ Token.prototype.attrGet = function attrGet(name) { - var idx = this.attrIndex(name), value = null; - if (idx >= 0) { - value = this.attrs[idx][1]; - } - return value; - }; - /** - * Token.attrJoin(name, value) - * - * Join value to existing attribute via space. Or create new attribute if not - * exists. Useful to operate with token classes. - **/ Token.prototype.attrJoin = function attrJoin(name, value) { - var idx = this.attrIndex(name); - if (idx < 0) { - this.attrPush([ name, value ]); - } else { - this.attrs[idx][1] = this.attrs[idx][1] + " " + value; - } - }; - var token = Token; - function StateCore(src, md, env) { - this.src = src; - this.env = env; - this.tokens = []; - this.inlineMode = false; - this.md = md; - // link to parser instance - } - // re-export Token class to use in core rules - StateCore.prototype.Token = token; - var state_core = StateCore; - var _rules$2 = [ [ "normalize", normalize ], [ "block", block ], [ "inline", inline ], [ "linkify", linkify$1 ], [ "replacements", replacements ], [ "smartquotes", smartquotes ], - // `text_join` finds `text_special` tokens (for escape sequences) - // and joins them with the rest of the text - [ "text_join", text_join ] ]; - /** - * new Core() - **/ function Core() { - /** - * Core#ruler -> Ruler - * - * [[Ruler]] instance. Keep configuration of core rules. - **/ - this.ruler = new ruler; - for (var i = 0; i < _rules$2.length; i++) { - this.ruler.push(_rules$2[i][0], _rules$2[i][1]); - } - } - /** - * Core.process(state) - * - * Executes core chain rules. - **/ Core.prototype.process = function(state) { - var i, l, rules; - rules = this.ruler.getRules(""); - for (i = 0, l = rules.length; i < l; i++) { - rules[i](state); - } - }; - Core.prototype.State = state_core; - var parser_core = Core; - var isSpace$a = utils.isSpace; - function getLine(state, line) { - var pos = state.bMarks[line] + state.tShift[line], max = state.eMarks[line]; - return state.src.slice(pos, max); - } - function escapedSplit(str) { - var result = [], pos = 0, max = str.length, ch, isEscaped = false, lastPos = 0, current = ""; - ch = str.charCodeAt(pos); - while (pos < max) { - if (ch === 124 /* | */) { - if (!isEscaped) { - // pipe separating cells, '|' - result.push(current + str.substring(lastPos, pos)); - current = ""; - lastPos = pos + 1; - } else { - // escaped pipe, '\|' - current += str.substring(lastPos, pos - 1); - lastPos = pos; - } - } - isEscaped = ch === 92 /* \ */; - pos++; - ch = str.charCodeAt(pos); - } - result.push(current + str.substring(lastPos)); - return result; - } - var table = function table(state, startLine, endLine, silent) { - var ch, lineText, pos, i, l, nextLine, columns, columnCount, token, aligns, t, tableLines, tbodyLines, oldParentType, terminate, terminatorRules, firstCh, secondCh; - // should have at least two lines - if (startLine + 2 > endLine) { - return false; - } - nextLine = startLine + 1; - if (state.sCount[nextLine] < state.blkIndent) { - return false; - } - // if it's indented more than 3 spaces, it should be a code block - if (state.sCount[nextLine] - state.blkIndent >= 4) { - return false; - } - // first character of the second line should be '|', '-', ':', - // and no other characters are allowed but spaces; - // basically, this is the equivalent of /^[-:|][-:|\s]*$/ regexp - pos = state.bMarks[nextLine] + state.tShift[nextLine]; - if (pos >= state.eMarks[nextLine]) { - return false; - } - firstCh = state.src.charCodeAt(pos++); - if (firstCh !== 124 /* | */ && firstCh !== 45 /* - */ && firstCh !== 58 /* 
: */) { - return false; - } - if (pos >= state.eMarks[nextLine]) { - return false; - } - secondCh = state.src.charCodeAt(pos++); - if (secondCh !== 124 /* | */ && secondCh !== 45 /* - */ && secondCh !== 58 /* : */ && !isSpace$a(secondCh)) { - return false; - } - // if first character is '-', then second character must not be a space - // (due to parsing ambiguity with list) - if (firstCh === 45 /* - */ && isSpace$a(secondCh)) { - return false; - } - while (pos < state.eMarks[nextLine]) { - ch = state.src.charCodeAt(pos); - if (ch !== 124 /* | */ && ch !== 45 /* - */ && ch !== 58 /* : */ && !isSpace$a(ch)) { - return false; - } - pos++; - } - lineText = getLine(state, startLine + 1); - columns = lineText.split("|"); - aligns = []; - for (i = 0; i < columns.length; i++) { - t = columns[i].trim(); - if (!t) { - // allow empty columns before and after table, but not in between columns; - // e.g. allow ` |---| `, disallow ` ---||--- ` - if (i === 0 || i === columns.length - 1) { - continue; - } else { - return false; - } - } - if (!/^:?-+:?$/.test(t)) { - return false; - } - if (t.charCodeAt(t.length - 1) === 58 /* : */) { - aligns.push(t.charCodeAt(0) === 58 /* : */ ? "center" : "right"); - } else if (t.charCodeAt(0) === 58 /* : */) { - aligns.push("left"); - } else { - aligns.push(""); - } - } - lineText = getLine(state, startLine).trim(); - if (lineText.indexOf("|") === -1) { - return false; - } - if (state.sCount[startLine] - state.blkIndent >= 4) { - return false; - } - columns = escapedSplit(lineText); - if (columns.length && columns[0] === "") columns.shift(); - if (columns.length && columns[columns.length - 1] === "") columns.pop(); - // header row will define an amount of columns in the entire table, - // and align row should be exactly the same (the rest of the rows can differ) - columnCount = columns.length; - if (columnCount === 0 || columnCount !== aligns.length) { - return false; - } - if (silent) { - return true; - } - oldParentType = state.parentType; - state.parentType = "table"; - // use 'blockquote' lists for termination because it's - // the most similar to tables - terminatorRules = state.md.block.ruler.getRules("blockquote"); - token = state.push("table_open", "table", 1); - token.map = tableLines = [ startLine, 0 ]; - token = state.push("thead_open", "thead", 1); - token.map = [ startLine, startLine + 1 ]; - token = state.push("tr_open", "tr", 1); - token.map = [ startLine, startLine + 1 ]; - for (i = 0; i < columns.length; i++) { - token = state.push("th_open", "th", 1); - if (aligns[i]) { - token.attrs = [ [ "style", "text-align:" + aligns[i] ] ]; - } - token = state.push("inline", "", 0); - token.content = columns[i].trim(); - token.children = []; - token = state.push("th_close", "th", -1); - } - token = state.push("tr_close", "tr", -1); - token = state.push("thead_close", "thead", -1); - for (nextLine = startLine + 2; nextLine < endLine; nextLine++) { - if (state.sCount[nextLine] < state.blkIndent) { - break; - } - terminate = false; - for (i = 0, l = terminatorRules.length; i < l; i++) { - if (terminatorRules[i](state, nextLine, endLine, true)) { - terminate = true; - break; - } - } - if (terminate) { - break; - } - lineText = getLine(state, nextLine).trim(); - if (!lineText) { - break; - } - if (state.sCount[nextLine] - state.blkIndent >= 4) { - break; - } - columns = escapedSplit(lineText); - if (columns.length && columns[0] === "") columns.shift(); - if (columns.length && columns[columns.length - 1] === "") columns.pop(); - if (nextLine === startLine + 2) { - 
token = state.push("tbody_open", "tbody", 1); - token.map = tbodyLines = [ startLine + 2, 0 ]; - } - token = state.push("tr_open", "tr", 1); - token.map = [ nextLine, nextLine + 1 ]; - for (i = 0; i < columnCount; i++) { - token = state.push("td_open", "td", 1); - if (aligns[i]) { - token.attrs = [ [ "style", "text-align:" + aligns[i] ] ]; - } - token = state.push("inline", "", 0); - token.content = columns[i] ? columns[i].trim() : ""; - token.children = []; - token = state.push("td_close", "td", -1); - } - token = state.push("tr_close", "tr", -1); - } - if (tbodyLines) { - token = state.push("tbody_close", "tbody", -1); - tbodyLines[1] = nextLine; - } - token = state.push("table_close", "table", -1); - tableLines[1] = nextLine; - state.parentType = oldParentType; - state.line = nextLine; - return true; - }; - // Code block (4 spaces padded) - var code = function code(state, startLine, endLine /*, silent*/) { - var nextLine, last, token; - if (state.sCount[startLine] - state.blkIndent < 4) { - return false; - } - last = nextLine = startLine + 1; - while (nextLine < endLine) { - if (state.isEmpty(nextLine)) { - nextLine++; - continue; - } - if (state.sCount[nextLine] - state.blkIndent >= 4) { - nextLine++; - last = nextLine; - continue; - } - break; - } - state.line = last; - token = state.push("code_block", "code", 0); - token.content = state.getLines(startLine, last, 4 + state.blkIndent, false) + "\n"; - token.map = [ startLine, state.line ]; - return true; - }; - // fences (``` lang, ~~~ lang) - var fence = function fence(state, startLine, endLine, silent) { - var marker, len, params, nextLine, mem, token, markup, haveEndMarker = false, pos = state.bMarks[startLine] + state.tShift[startLine], max = state.eMarks[startLine]; - // if it's indented more than 3 spaces, it should be a code block - if (state.sCount[startLine] - state.blkIndent >= 4) { - return false; - } - if (pos + 3 > max) { - return false; - } - marker = state.src.charCodeAt(pos); - if (marker !== 126 /* ~ */ && marker !== 96 /* ` */) { - return false; - } - // scan marker length - mem = pos; - pos = state.skipChars(pos, marker); - len = pos - mem; - if (len < 3) { - return false; - } - markup = state.src.slice(mem, pos); - params = state.src.slice(pos, max); - if (marker === 96 /* ` */) { - if (params.indexOf(String.fromCharCode(marker)) >= 0) { - return false; - } - } - // Since start is found, we can report success here in validation mode - if (silent) { - return true; - } - // search end of block - nextLine = startLine; - for (;;) { - nextLine++; - if (nextLine >= endLine) { - // unclosed block should be autoclosed by end of document. - // also block seems to be autoclosed by end of parent - break; - } - pos = mem = state.bMarks[nextLine] + state.tShift[nextLine]; - max = state.eMarks[nextLine]; - if (pos < max && state.sCount[nextLine] < state.blkIndent) { - // non-empty line with negative indent should stop the list: - // - ``` - // test - break; - } - if (state.src.charCodeAt(pos) !== marker) { - continue; - } - if (state.sCount[nextLine] - state.blkIndent >= 4) { - // closing fence should be indented less than 4 spaces - continue; - } - pos = state.skipChars(pos, marker); - // closing code fence must be at least as long as the opening one - if (pos - mem < len) { - continue; - } - // make sure tail has spaces only - pos = state.skipSpaces(pos); - if (pos < max) { - continue; - } - haveEndMarker = true; - // found! 
- break; - } - // If a fence has heading spaces, they should be removed from its inner block - len = state.sCount[startLine]; - state.line = nextLine + (haveEndMarker ? 1 : 0); - token = state.push("fence", "code", 0); - token.info = params; - token.content = state.getLines(startLine + 1, nextLine, len, true); - token.markup = markup; - token.map = [ startLine, state.line ]; - return true; - }; - var isSpace$9 = utils.isSpace; - var blockquote = function blockquote(state, startLine, endLine, silent) { - var adjustTab, ch, i, initial, l, lastLineEmpty, lines, nextLine, offset, oldBMarks, oldBSCount, oldIndent, oldParentType, oldSCount, oldTShift, spaceAfterMarker, terminate, terminatorRules, token, isOutdented, oldLineMax = state.lineMax, pos = state.bMarks[startLine] + state.tShift[startLine], max = state.eMarks[startLine]; - // if it's indented more than 3 spaces, it should be a code block - if (state.sCount[startLine] - state.blkIndent >= 4) { - return false; - } - // check the block quote marker - if (state.src.charCodeAt(pos) !== 62 /* > */) { - return false; - } - // we know that it's going to be a valid blockquote, - // so no point trying to find the end of it in silent mode - if (silent) { - return true; - } - oldBMarks = []; - oldBSCount = []; - oldSCount = []; - oldTShift = []; - terminatorRules = state.md.block.ruler.getRules("blockquote"); - oldParentType = state.parentType; - state.parentType = "blockquote"; - // Search the end of the block - - // Block ends with either: - // 1. an empty line outside: - // ``` - // > test - - // ``` - // 2. an empty line inside: - // ``` - // > - // test - // ``` - // 3. another tag: - // ``` - // > test - // - - - - // ``` - for (nextLine = startLine; nextLine < endLine; nextLine++) { - // check if it's outdented, i.e. it's inside list item and indented - // less than said list item: - // ``` - // 1. anything - // > current blockquote - // 2. checking this line - // ``` - isOutdented = state.sCount[nextLine] < state.blkIndent; - pos = state.bMarks[nextLine] + state.tShift[nextLine]; - max = state.eMarks[nextLine]; - if (pos >= max) { - // Case 1: line is not inside the blockquote, and this line is empty. - break; - } - if (state.src.charCodeAt(pos++) === 62 /* > */ && !isOutdented) { - // This line is inside the blockquote. - // set offset past spaces and ">" - initial = state.sCount[nextLine] + 1; - // skip one optional space after '>' - if (state.src.charCodeAt(pos) === 32 /* space */) { - // ' > test ' - // ^ -- position start of line here: - pos++; - initial++; - adjustTab = false; - spaceAfterMarker = true; - } else if (state.src.charCodeAt(pos) === 9 /* tab */) { - spaceAfterMarker = true; - if ((state.bsCount[nextLine] + initial) % 4 === 3) { - // ' >\t test ' - // ^ -- position start of line here (tab has width===1) - pos++; - initial++; - adjustTab = false; - } else { - // ' >\t test ' - // ^ -- position start of line here + shift bsCount slightly - // to make extra space appear - adjustTab = true; - } - } else { - spaceAfterMarker = false; - } - offset = initial; - oldBMarks.push(state.bMarks[nextLine]); - state.bMarks[nextLine] = pos; - while (pos < max) { - ch = state.src.charCodeAt(pos); - if (isSpace$9(ch)) { - if (ch === 9) { - offset += 4 - (offset + state.bsCount[nextLine] + (adjustTab ? 1 : 0)) % 4; - } else { - offset++; - } - } else { - break; - } - pos++; - } - lastLineEmpty = pos >= max; - oldBSCount.push(state.bsCount[nextLine]); - state.bsCount[nextLine] = state.sCount[nextLine] + 1 + (spaceAfterMarker ? 
1 : 0); - oldSCount.push(state.sCount[nextLine]); - state.sCount[nextLine] = offset - initial; - oldTShift.push(state.tShift[nextLine]); - state.tShift[nextLine] = pos - state.bMarks[nextLine]; - continue; - } - // Case 2: line is not inside the blockquote, and the last line was empty. - if (lastLineEmpty) { - break; - } - // Case 3: another tag found. - terminate = false; - for (i = 0, l = terminatorRules.length; i < l; i++) { - if (terminatorRules[i](state, nextLine, endLine, true)) { - terminate = true; - break; - } - } - if (terminate) { - // Quirk to enforce "hard termination mode" for paragraphs; - // normally if you call `tokenize(state, startLine, nextLine)`, - // paragraphs will look below nextLine for paragraph continuation, - // but if blockquote is terminated by another tag, they shouldn't - state.lineMax = nextLine; - if (state.blkIndent !== 0) { - // state.blkIndent was non-zero, we now set it to zero, - // so we need to re-calculate all offsets to appear as - // if indent wasn't changed - oldBMarks.push(state.bMarks[nextLine]); - oldBSCount.push(state.bsCount[nextLine]); - oldTShift.push(state.tShift[nextLine]); - oldSCount.push(state.sCount[nextLine]); - state.sCount[nextLine] -= state.blkIndent; - } - break; - } - oldBMarks.push(state.bMarks[nextLine]); - oldBSCount.push(state.bsCount[nextLine]); - oldTShift.push(state.tShift[nextLine]); - oldSCount.push(state.sCount[nextLine]); - // A negative indentation means that this is a paragraph continuation - - state.sCount[nextLine] = -1; - } - oldIndent = state.blkIndent; - state.blkIndent = 0; - token = state.push("blockquote_open", "blockquote", 1); - token.markup = ">"; - token.map = lines = [ startLine, 0 ]; - state.md.block.tokenize(state, startLine, nextLine); - token = state.push("blockquote_close", "blockquote", -1); - token.markup = ">"; - state.lineMax = oldLineMax; - state.parentType = oldParentType; - lines[1] = state.line; - // Restore original tShift; this might not be necessary since the parser - // has already been here, but just to make sure we can do that. - for (i = 0; i < oldTShift.length; i++) { - state.bMarks[i + startLine] = oldBMarks[i]; - state.tShift[i + startLine] = oldTShift[i]; - state.sCount[i + startLine] = oldSCount[i]; - state.bsCount[i + startLine] = oldBSCount[i]; - } - state.blkIndent = oldIndent; - return true; - }; - var isSpace$8 = utils.isSpace; - var hr = function hr(state, startLine, endLine, silent) { - var marker, cnt, ch, token, pos = state.bMarks[startLine] + state.tShift[startLine], max = state.eMarks[startLine]; - // if it's indented more than 3 spaces, it should be a code block - if (state.sCount[startLine] - state.blkIndent >= 4) { - return false; - } - marker = state.src.charCodeAt(pos++); - // Check hr marker - if (marker !== 42 /* * */ && marker !== 45 /* - */ && marker !== 95 /* _ */) { - return false; - } - // markers can be mixed with spaces, but there should be at least 3 of them - cnt = 1; - while (pos < max) { - ch = state.src.charCodeAt(pos++); - if (ch !== marker && !isSpace$8(ch)) { - return false; - } - if (ch === marker) { - cnt++; - } - } - if (cnt < 3) { - return false; - } - if (silent) { - return true; - } - state.line = startLine + 1; - token = state.push("hr", "hr", 0); - token.map = [ startLine, state.line ]; - token.markup = Array(cnt + 1).join(String.fromCharCode(marker)); - return true; - }; - var isSpace$7 = utils.isSpace; - // Search `[-+*][\n ]`, returns next pos after marker on success - // or -1 on fail. 
- function skipBulletListMarker(state, startLine) { - var marker, pos, max, ch; - pos = state.bMarks[startLine] + state.tShift[startLine]; - max = state.eMarks[startLine]; - marker = state.src.charCodeAt(pos++); - // Check bullet - if (marker !== 42 /* * */ && marker !== 45 /* - */ && marker !== 43 /* + */) { - return -1; - } - if (pos < max) { - ch = state.src.charCodeAt(pos); - if (!isSpace$7(ch)) { - // " -test " - is not a list item - return -1; - } - } - return pos; - } - // Search `\d+[.)][\n ]`, returns next pos after marker on success - // or -1 on fail. - function skipOrderedListMarker(state, startLine) { - var ch, start = state.bMarks[startLine] + state.tShift[startLine], pos = start, max = state.eMarks[startLine]; - // List marker should have at least 2 chars (digit + dot) - if (pos + 1 >= max) { - return -1; - } - ch = state.src.charCodeAt(pos++); - if (ch < 48 /* 0 */ || ch > 57 /* 9 */) { - return -1; - } - for (;;) { - // EOL -> fail - if (pos >= max) { - return -1; - } - ch = state.src.charCodeAt(pos++); - if (ch >= 48 /* 0 */ && ch <= 57 /* 9 */) { - // List marker should have no more than 9 digits - // (prevents integer overflow in browsers) - if (pos - start >= 10) { - return -1; - } - continue; - } - // found valid marker - if (ch === 41 /* ) */ || ch === 46 /* . */) { - break; - } - return -1; - } - if (pos < max) { - ch = state.src.charCodeAt(pos); - if (!isSpace$7(ch)) { - // " 1.test " - is not a list item - return -1; - } - } - return pos; - } - function markTightParagraphs(state, idx) { - var i, l, level = state.level + 2; - for (i = idx + 2, l = state.tokens.length - 2; i < l; i++) { - if (state.tokens[i].level === level && state.tokens[i].type === "paragraph_open") { - state.tokens[i + 2].hidden = true; - state.tokens[i].hidden = true; - i += 2; - } - } - } - var list = function list(state, startLine, endLine, silent) { - var ch, contentStart, i, indent, indentAfterMarker, initial, isOrdered, itemLines, l, listLines, listTokIdx, markerCharCode, markerValue, max, offset, oldListIndent, oldParentType, oldSCount, oldTShift, oldTight, pos, posAfterMarker, prevEmptyEnd, start, terminate, terminatorRules, token, nextLine = startLine, isTerminatingParagraph = false, tight = true; - // if it's indented more than 3 spaces, it should be a code block - if (state.sCount[nextLine] - state.blkIndent >= 4) { - return false; - } - // Special case: - // - item 1 - // - item 2 - // - item 3 - // - item 4 - // - this one is a paragraph continuation - if (state.listIndent >= 0 && state.sCount[nextLine] - state.listIndent >= 4 && state.sCount[nextLine] < state.blkIndent) { - return false; - } - // limit conditions when list can interrupt - // a paragraph (validation mode only) - if (silent && state.parentType === "paragraph") { - // Next list item should still terminate previous list item; - // This code can fail if plugins use blkIndent as well as lists, - // but I hope the spec gets fixed long before that happens. - if (state.sCount[nextLine] >= state.blkIndent) { - isTerminatingParagraph = true; - } - } - // Detect list type and position after marker - if ((posAfterMarker = skipOrderedListMarker(state, nextLine)) >= 0) { - isOrdered = true; - start = state.bMarks[nextLine] + state.tShift[nextLine]; - markerValue = Number(state.src.slice(start, posAfterMarker - 1)); - // If we're starting a new ordered list right after - // a paragraph, it should start with 1. 
- if (isTerminatingParagraph && markerValue !== 1) return false; - } else if ((posAfterMarker = skipBulletListMarker(state, nextLine)) >= 0) { - isOrdered = false; - } else { - return false; - } - // If we're starting a new unordered list right after - // a paragraph, first line should not be empty. - if (isTerminatingParagraph) { - if (state.skipSpaces(posAfterMarker) >= state.eMarks[nextLine]) return false; - } - // For validation mode we can terminate immediately - if (silent) { - return true; - } - // We should terminate list on style change. Remember first one to compare. - markerCharCode = state.src.charCodeAt(posAfterMarker - 1); - // Start list - listTokIdx = state.tokens.length; - if (isOrdered) { - token = state.push("ordered_list_open", "ol", 1); - if (markerValue !== 1) { - token.attrs = [ [ "start", markerValue ] ]; - } - } else { - token = state.push("bullet_list_open", "ul", 1); - } - token.map = listLines = [ nextLine, 0 ]; - token.markup = String.fromCharCode(markerCharCode); - - // Iterate list items - - prevEmptyEnd = false; - terminatorRules = state.md.block.ruler.getRules("list"); - oldParentType = state.parentType; - state.parentType = "list"; - while (nextLine < endLine) { - pos = posAfterMarker; - max = state.eMarks[nextLine]; - initial = offset = state.sCount[nextLine] + posAfterMarker - (state.bMarks[nextLine] + state.tShift[nextLine]); - while (pos < max) { - ch = state.src.charCodeAt(pos); - if (ch === 9) { - offset += 4 - (offset + state.bsCount[nextLine]) % 4; - } else if (ch === 32) { - offset++; - } else { - break; - } - pos++; - } - contentStart = pos; - if (contentStart >= max) { - // trimming space in "- \n 3" case, indent is 1 here - indentAfterMarker = 1; - } else { - indentAfterMarker = offset - initial; - } - // If we have more than 4 spaces, the indent is 1 - // (the rest is just indented code block) - if (indentAfterMarker > 4) { - indentAfterMarker = 1; - } - // " - test" - // ^^^^^ - calculating total length of this thing - indent = initial + indentAfterMarker; - // Run subparser & write tokens - token = state.push("list_item_open", "li", 1); - token.markup = String.fromCharCode(markerCharCode); - token.map = itemLines = [ nextLine, 0 ]; - if (isOrdered) { - token.info = state.src.slice(start, posAfterMarker - 1); - } - // change current state, then restore it after parser subcall - oldTight = state.tight; - oldTShift = state.tShift[nextLine]; - oldSCount = state.sCount[nextLine]; - // - example list - // ^ listIndent position will be here - // ^ blkIndent position will be here - - oldListIndent = state.listIndent; - state.listIndent = state.blkIndent; - state.blkIndent = indent; - state.tight = true; - state.tShift[nextLine] = contentStart - state.bMarks[nextLine]; - state.sCount[nextLine] = offset; - if (contentStart >= max && state.isEmpty(nextLine + 1)) { - // workaround for this case - // (list item is empty, list terminates before "foo"): - // ~~~~~~~~ - // - - // foo - // ~~~~~~~~ - state.line = Math.min(state.line + 2, endLine); - } else { - state.md.block.tokenize(state, nextLine, endLine, true); - } - // If any of list item is tight, mark list as tight - if (!state.tight || prevEmptyEnd) { - tight = false; - } - // Item become loose if finish with empty line, - // but we should filter last element, because it means list finish - prevEmptyEnd = state.line - nextLine > 1 && state.isEmpty(state.line - 1); - state.blkIndent = state.listIndent; - state.listIndent = oldListIndent; - state.tShift[nextLine] = oldTShift; - state.sCount[nextLine] 
= oldSCount; - state.tight = oldTight; - token = state.push("list_item_close", "li", -1); - token.markup = String.fromCharCode(markerCharCode); - nextLine = state.line; - itemLines[1] = nextLine; - if (nextLine >= endLine) { - break; - } - - // Try to check if list is terminated or continued. - - if (state.sCount[nextLine] < state.blkIndent) { - break; - } - // if it's indented more than 3 spaces, it should be a code block - if (state.sCount[nextLine] - state.blkIndent >= 4) { - break; - } - // fail if terminating block found - terminate = false; - for (i = 0, l = terminatorRules.length; i < l; i++) { - if (terminatorRules[i](state, nextLine, endLine, true)) { - terminate = true; - break; - } - } - if (terminate) { - break; - } - // fail if list has another type - if (isOrdered) { - posAfterMarker = skipOrderedListMarker(state, nextLine); - if (posAfterMarker < 0) { - break; - } - start = state.bMarks[nextLine] + state.tShift[nextLine]; - } else { - posAfterMarker = skipBulletListMarker(state, nextLine); - if (posAfterMarker < 0) { - break; - } - } - if (markerCharCode !== state.src.charCodeAt(posAfterMarker - 1)) { - break; - } - } - // Finalize list - if (isOrdered) { - token = state.push("ordered_list_close", "ol", -1); - } else { - token = state.push("bullet_list_close", "ul", -1); - } - token.markup = String.fromCharCode(markerCharCode); - listLines[1] = nextLine; - state.line = nextLine; - state.parentType = oldParentType; - // mark paragraphs tight if needed - if (tight) { - markTightParagraphs(state, listTokIdx); - } - return true; - }; - var normalizeReference$2 = utils.normalizeReference; - var isSpace$6 = utils.isSpace; - var reference = function reference(state, startLine, _endLine, silent) { - var ch, destEndPos, destEndLineNo, endLine, href, i, l, label, labelEnd, oldParentType, res, start, str, terminate, terminatorRules, title, lines = 0, pos = state.bMarks[startLine] + state.tShift[startLine], max = state.eMarks[startLine], nextLine = startLine + 1; - // if it's indented more than 3 spaces, it should be a code block - if (state.sCount[startLine] - state.blkIndent >= 4) { - return false; - } - if (state.src.charCodeAt(pos) !== 91 /* [ */) { - return false; - } - // Simple check to quickly interrupt scan on [link](url) at the start of line. - // Can be useful on practice: https://github.com/markdown-it/markdown-it/issues/54 - while (++pos < max) { - if (state.src.charCodeAt(pos) === 93 /* ] */ && state.src.charCodeAt(pos - 1) !== 92 /* \ */) { - if (pos + 1 === max) { - return false; - } - if (state.src.charCodeAt(pos + 1) !== 58 /* : */) { - return false; - } - break; - } - } - endLine = state.lineMax; - // jump line-by-line until empty one or EOF - terminatorRules = state.md.block.ruler.getRules("reference"); - oldParentType = state.parentType; - state.parentType = "reference"; - for (;nextLine < endLine && !state.isEmpty(nextLine); nextLine++) { - // this would be a code block normally, but after paragraph - // it's considered a lazy continuation regardless of what's there - if (state.sCount[nextLine] - state.blkIndent > 3) { - continue; - } - // quirk for blockquotes, this line should already be checked by that rule - if (state.sCount[nextLine] < 0) { - continue; - } - // Some tags can terminate paragraph without empty line. 
- terminate = false; - for (i = 0, l = terminatorRules.length; i < l; i++) { - if (terminatorRules[i](state, nextLine, endLine, true)) { - terminate = true; - break; - } - } - if (terminate) { - break; - } - } - str = state.getLines(startLine, nextLine, state.blkIndent, false).trim(); - max = str.length; - for (pos = 1; pos < max; pos++) { - ch = str.charCodeAt(pos); - if (ch === 91 /* [ */) { - return false; - } else if (ch === 93 /* ] */) { - labelEnd = pos; - break; - } else if (ch === 10 /* \n */) { - lines++; - } else if (ch === 92 /* \ */) { - pos++; - if (pos < max && str.charCodeAt(pos) === 10) { - lines++; - } - } - } - if (labelEnd < 0 || str.charCodeAt(labelEnd + 1) !== 58 /* : */) { - return false; - } - // [label]: destination 'title' - // ^^^ skip optional whitespace here - for (pos = labelEnd + 2; pos < max; pos++) { - ch = str.charCodeAt(pos); - if (ch === 10) { - lines++; - } else if (isSpace$6(ch)) ; else { - break; - } - } - // [label]: destination 'title' - // ^^^^^^^^^^^ parse this - res = state.md.helpers.parseLinkDestination(str, pos, max); - if (!res.ok) { - return false; - } - href = state.md.normalizeLink(res.str); - if (!state.md.validateLink(href)) { - return false; - } - pos = res.pos; - lines += res.lines; - // save cursor state, we could require to rollback later - destEndPos = pos; - destEndLineNo = lines; - // [label]: destination 'title' - // ^^^ skipping those spaces - start = pos; - for (;pos < max; pos++) { - ch = str.charCodeAt(pos); - if (ch === 10) { - lines++; - } else if (isSpace$6(ch)) ; else { - break; - } - } - // [label]: destination 'title' - // ^^^^^^^ parse this - res = state.md.helpers.parseLinkTitle(str, pos, max); - if (pos < max && start !== pos && res.ok) { - title = res.str; - pos = res.pos; - lines += res.lines; - } else { - title = ""; - pos = destEndPos; - lines = destEndLineNo; - } - // skip trailing spaces until the rest of the line - while (pos < max) { - ch = str.charCodeAt(pos); - if (!isSpace$6(ch)) { - break; - } - pos++; - } - if (pos < max && str.charCodeAt(pos) !== 10) { - if (title) { - // garbage at the end of the line after title, - // but it could still be a valid reference if we roll back - title = ""; - pos = destEndPos; - lines = destEndLineNo; - while (pos < max) { - ch = str.charCodeAt(pos); - if (!isSpace$6(ch)) { - break; - } - pos++; - } - } - } - if (pos < max && str.charCodeAt(pos) !== 10) { - // garbage at the end of the line - return false; - } - label = normalizeReference$2(str.slice(1, labelEnd)); - if (!label) { - // CommonMark 0.20 disallows empty labels - return false; - } - // Reference can not terminate anything. This check is for safety only. 
- /*istanbul ignore if*/ if (silent) { - return true; - } - if (typeof state.env.references === "undefined") { - state.env.references = {}; - } - if (typeof state.env.references[label] === "undefined") { - state.env.references[label] = { - title: title, - href: href - }; - } - state.parentType = oldParentType; - state.line = startLine + lines + 1; - return true; - }; - // List of valid html blocks names, accorting to commonmark spec - var html_blocks = [ "address", "article", "aside", "base", "basefont", "blockquote", "body", "caption", "center", "col", "colgroup", "dd", "details", "dialog", "dir", "div", "dl", "dt", "fieldset", "figcaption", "figure", "footer", "form", "frame", "frameset", "h1", "h2", "h3", "h4", "h5", "h6", "head", "header", "hr", "html", "iframe", "legend", "li", "link", "main", "menu", "menuitem", "nav", "noframes", "ol", "optgroup", "option", "p", "param", "section", "source", "summary", "table", "tbody", "td", "tfoot", "th", "thead", "title", "tr", "track", "ul" ]; - // Regexps to match html elements - var attr_name = "[a-zA-Z_:][a-zA-Z0-9:._-]*"; - var unquoted = "[^\"'=<>`\\x00-\\x20]+"; - var single_quoted = "'[^']*'"; - var double_quoted = '"[^"]*"'; - var attr_value = "(?:" + unquoted + "|" + single_quoted + "|" + double_quoted + ")"; - var attribute = "(?:\\s+" + attr_name + "(?:\\s*=\\s*" + attr_value + ")?)"; - var open_tag = "<[A-Za-z][A-Za-z0-9\\-]*" + attribute + "*\\s*\\/?>"; - var close_tag = "<\\/[A-Za-z][A-Za-z0-9\\-]*\\s*>"; - var comment = "\x3c!----\x3e|\x3c!--(?:-?[^>-])(?:-?[^-])*--\x3e"; - var processing = "<[?][\\s\\S]*?[?]>"; - var declaration = "<![A-Z]+\\s+[^>]*>"; - var cdata = "<!\\[CDATA\\[[\\s\\S]*?\\]\\]>"; - var HTML_TAG_RE$1 = new RegExp("^(?:" + open_tag + "|" + close_tag + "|" + comment + "|" + processing + "|" + declaration + "|" + cdata + ")"); - var HTML_OPEN_CLOSE_TAG_RE$1 = new RegExp("^(?:" + open_tag + "|" + close_tag + ")"); - var HTML_TAG_RE_1 = HTML_TAG_RE$1; - var HTML_OPEN_CLOSE_TAG_RE_1 = HTML_OPEN_CLOSE_TAG_RE$1; - var html_re = { - HTML_TAG_RE: HTML_TAG_RE_1, - HTML_OPEN_CLOSE_TAG_RE: HTML_OPEN_CLOSE_TAG_RE_1 - }; - var HTML_OPEN_CLOSE_TAG_RE = html_re.HTML_OPEN_CLOSE_TAG_RE; - // An array of opening and corresponding closing sequences for html tags, - // last argument defines whether it can terminate a paragraph or not - - var HTML_SEQUENCES = [ [ /^<(script|pre|style|textarea)(?=(\s|>|$))/i, /<\/(script|pre|style|textarea)>/i, true ], [ /^<!--/, /-->/, true ], [ /^<\?/, /\?>/, true ], [ /^<![A-Z]/, />/, true ], [ /^<!\[CDATA\[/, /\]\]>/, true ], [ new RegExp("^</?(" + html_blocks.join("|") + ")(?=(\\s|/?>|$))", "i"), /^$/, true ], [ new RegExp(HTML_OPEN_CLOSE_TAG_RE.source + "\\s*$"), /^$/, false ] ]; - var html_block = function html_block(state, startLine, endLine, silent) { - var i, nextLine, token, lineText, pos = state.bMarks[startLine] + state.tShift[startLine], max = state.eMarks[startLine]; - // if it's indented more than 3 spaces, it should be a code block - if (state.sCount[startLine] - state.blkIndent >= 4) { - return false; - } - if (!state.md.options.html) { - return false; - } - if (state.src.charCodeAt(pos) !== 60 /* < */) { - return false; - } - lineText = state.src.slice(pos, max); - for (i = 0; i < HTML_SEQUENCES.length; i++) { - if (HTML_SEQUENCES[i][0].test(lineText)) { - break; - } - } - if (i === HTML_SEQUENCES.length) { - return false; - } - if (silent) { - // true if this sequence can be a terminator, false otherwise - return HTML_SEQUENCES[i][2]; - } - nextLine = startLine + 1; - // If we are here - we detected HTML block. - // Let's roll down till block end.
- if (!HTML_SEQUENCES[i][1].test(lineText)) { - for (;nextLine < endLine; nextLine++) { - if (state.sCount[nextLine] < state.blkIndent) { - break; - } - pos = state.bMarks[nextLine] + state.tShift[nextLine]; - max = state.eMarks[nextLine]; - lineText = state.src.slice(pos, max); - if (HTML_SEQUENCES[i][1].test(lineText)) { - if (lineText.length !== 0) { - nextLine++; - } - break; - } - } - } - state.line = nextLine; - token = state.push("html_block", "", 0); - token.map = [ startLine, nextLine ]; - token.content = state.getLines(startLine, nextLine, state.blkIndent, true); - return true; - }; - var isSpace$5 = utils.isSpace; - var heading = function heading(state, startLine, endLine, silent) { - var ch, level, tmp, token, pos = state.bMarks[startLine] + state.tShift[startLine], max = state.eMarks[startLine]; - // if it's indented more than 3 spaces, it should be a code block - if (state.sCount[startLine] - state.blkIndent >= 4) { - return false; - } - ch = state.src.charCodeAt(pos); - if (ch !== 35 /* # */ || pos >= max) { - return false; - } - // count heading level - level = 1; - ch = state.src.charCodeAt(++pos); - while (ch === 35 /* # */ && pos < max && level <= 6) { - level++; - ch = state.src.charCodeAt(++pos); - } - if (level > 6 || pos < max && !isSpace$5(ch)) { - return false; - } - if (silent) { - return true; - } - // Let's cut tails like ' ### ' from the end of string - max = state.skipSpacesBack(max, pos); - tmp = state.skipCharsBack(max, 35, pos); - // # - if (tmp > pos && isSpace$5(state.src.charCodeAt(tmp - 1))) { - max = tmp; - } - state.line = startLine + 1; - token = state.push("heading_open", "h" + String(level), 1); - token.markup = "########".slice(0, level); - token.map = [ startLine, state.line ]; - token = state.push("inline", "", 0); - token.content = state.src.slice(pos, max).trim(); - token.map = [ startLine, state.line ]; - token.children = []; - token = state.push("heading_close", "h" + String(level), -1); - token.markup = "########".slice(0, level); - return true; - }; - // lheading (---, ===) - var lheading = function lheading(state, startLine, endLine /*, silent*/) { - var content, terminate, i, l, token, pos, max, level, marker, nextLine = startLine + 1, oldParentType, terminatorRules = state.md.block.ruler.getRules("paragraph"); - // if it's indented more than 3 spaces, it should be a code block - if (state.sCount[startLine] - state.blkIndent >= 4) { - return false; - } - oldParentType = state.parentType; - state.parentType = "paragraph"; - // use paragraph to match terminatorRules - // jump line-by-line until empty one or EOF - for (;nextLine < endLine && !state.isEmpty(nextLine); nextLine++) { - // this would be a code block normally, but after paragraph - // it's considered a lazy continuation regardless of what's there - if (state.sCount[nextLine] - state.blkIndent > 3) { - continue; - } - - // Check for underline in setext header - - if (state.sCount[nextLine] >= state.blkIndent) { - pos = state.bMarks[nextLine] + state.tShift[nextLine]; - max = state.eMarks[nextLine]; - if (pos < max) { - marker = state.src.charCodeAt(pos); - if (marker === 45 /* - */ || marker === 61 /* = */) { - pos = state.skipChars(pos, marker); - pos = state.skipSpaces(pos); - if (pos >= max) { - level = marker === 61 /* = */ ? 1 : 2; - break; - } - } - } - } - // quirk for blockquotes, this line should already be checked by that rule - if (state.sCount[nextLine] < 0) { - continue; - } - // Some tags can terminate paragraph without empty line. 
- terminate = false; - for (i = 0, l = terminatorRules.length; i < l; i++) { - if (terminatorRules[i](state, nextLine, endLine, true)) { - terminate = true; - break; - } - } - if (terminate) { - break; - } - } - if (!level) { - // Didn't find valid underline - return false; - } - content = state.getLines(startLine, nextLine, state.blkIndent, false).trim(); - state.line = nextLine + 1; - token = state.push("heading_open", "h" + String(level), 1); - token.markup = String.fromCharCode(marker); - token.map = [ startLine, state.line ]; - token = state.push("inline", "", 0); - token.content = content; - token.map = [ startLine, state.line - 1 ]; - token.children = []; - token = state.push("heading_close", "h" + String(level), -1); - token.markup = String.fromCharCode(marker); - state.parentType = oldParentType; - return true; - }; - // Paragraph - var paragraph = function paragraph(state, startLine, endLine) { - var content, terminate, i, l, token, oldParentType, nextLine = startLine + 1, terminatorRules = state.md.block.ruler.getRules("paragraph"); - oldParentType = state.parentType; - state.parentType = "paragraph"; - // jump line-by-line until empty one or EOF - for (;nextLine < endLine && !state.isEmpty(nextLine); nextLine++) { - // this would be a code block normally, but after paragraph - // it's considered a lazy continuation regardless of what's there - if (state.sCount[nextLine] - state.blkIndent > 3) { - continue; - } - // quirk for blockquotes, this line should already be checked by that rule - if (state.sCount[nextLine] < 0) { - continue; - } - // Some tags can terminate paragraph without empty line. - terminate = false; - for (i = 0, l = terminatorRules.length; i < l; i++) { - if (terminatorRules[i](state, nextLine, endLine, true)) { - terminate = true; - break; - } - } - if (terminate) { - break; - } - } - content = state.getLines(startLine, nextLine, state.blkIndent, false).trim(); - state.line = nextLine; - token = state.push("paragraph_open", "p", 1); - token.map = [ startLine, state.line ]; - token = state.push("inline", "", 0); - token.content = content; - token.map = [ startLine, state.line ]; - token.children = []; - token = state.push("paragraph_close", "p", -1); - state.parentType = oldParentType; - return true; - }; - var isSpace$4 = utils.isSpace; - function StateBlock(src, md, env, tokens) { - var ch, s, start, pos, len, indent, offset, indent_found; - this.src = src; - // link to parser instance - this.md = md; - this.env = env; - - // Internal state vartiables - - this.tokens = tokens; - this.bMarks = []; - // line begin offsets for fast jumps - this.eMarks = []; - // line end offsets for fast jumps - this.tShift = []; - // offsets of the first non-space characters (tabs not expanded) - this.sCount = []; - // indents for each line (tabs expanded) - // An amount of virtual spaces (tabs expanded) between beginning - // of each line (bMarks) and real beginning of that line. - - // It exists only as a hack because blockquotes override bMarks - // losing information in the process. - - // It's used only when expanding tabs, you can think about it as - // an initial tab length, e.g. bsCount=21 applied to string `\t123` - // means first tab should be expanded to 4-21%4 === 3 spaces. 
- - this.bsCount = []; - // block parser variables - this.blkIndent = 0; - // required block content indent (for example, if we are - // inside a list, it would be positioned after list marker) - this.line = 0; - // line index in src - this.lineMax = 0; - // lines count - this.tight = false; - // loose/tight mode for lists - this.ddIndent = -1; - // indent of the current dd block (-1 if there isn't any) - this.listIndent = -1; - // indent of the current list block (-1 if there isn't any) - // can be 'blockquote', 'list', 'root', 'paragraph' or 'reference' - // used in lists to determine if they interrupt a paragraph - this.parentType = "root"; - this.level = 0; - // renderer - this.result = ""; - // Create caches - // Generate markers. - s = this.src; - indent_found = false; - for (start = pos = indent = offset = 0, len = s.length; pos < len; pos++) { - ch = s.charCodeAt(pos); - if (!indent_found) { - if (isSpace$4(ch)) { - indent++; - if (ch === 9) { - offset += 4 - offset % 4; - } else { - offset++; - } - continue; - } else { - indent_found = true; - } - } - if (ch === 10 || pos === len - 1) { - if (ch !== 10) { - pos++; - } - this.bMarks.push(start); - this.eMarks.push(pos); - this.tShift.push(indent); - this.sCount.push(offset); - this.bsCount.push(0); - indent_found = false; - indent = 0; - offset = 0; - start = pos + 1; - } - } - // Push fake entry to simplify cache bounds checks - this.bMarks.push(s.length); - this.eMarks.push(s.length); - this.tShift.push(0); - this.sCount.push(0); - this.bsCount.push(0); - this.lineMax = this.bMarks.length - 1; - // don't count last fake line - } - // Push new token to "stream". - - StateBlock.prototype.push = function(type, tag, nesting) { - var token$1 = new token(type, tag, nesting); - token$1.block = true; - if (nesting < 0) this.level--; - // closing tag - token$1.level = this.level; - if (nesting > 0) this.level++; - // opening tag - this.tokens.push(token$1); - return token$1; - }; - StateBlock.prototype.isEmpty = function isEmpty(line) { - return this.bMarks[line] + this.tShift[line] >= this.eMarks[line]; - }; - StateBlock.prototype.skipEmptyLines = function skipEmptyLines(from) { - for (var max = this.lineMax; from < max; from++) { - if (this.bMarks[from] + this.tShift[from] < this.eMarks[from]) { - break; - } - } - return from; - }; - // Skip spaces from given position. - StateBlock.prototype.skipSpaces = function skipSpaces(pos) { - var ch; - for (var max = this.src.length; pos < max; pos++) { - ch = this.src.charCodeAt(pos); - if (!isSpace$4(ch)) { - break; - } - } - return pos; - }; - // Skip spaces from given position in reverse. - StateBlock.prototype.skipSpacesBack = function skipSpacesBack(pos, min) { - if (pos <= min) { - return pos; - } - while (pos > min) { - if (!isSpace$4(this.src.charCodeAt(--pos))) { - return pos + 1; - } - } - return pos; - }; - // Skip char codes from given position - StateBlock.prototype.skipChars = function skipChars(pos, code) { - for (var max = this.src.length; pos < max; pos++) { - if (this.src.charCodeAt(pos) !== code) { - break; - } - } - return pos; - }; - // Skip char codes reverse from given position - 1 - StateBlock.prototype.skipCharsBack = function skipCharsBack(pos, code, min) { - if (pos <= min) { - return pos; - } - while (pos > min) { - if (code !== this.src.charCodeAt(--pos)) { - return pos + 1; - } - } - return pos; - }; - // cut lines range from source. 
- StateBlock.prototype.getLines = function getLines(begin, end, indent, keepLastLF) { - var i, lineIndent, ch, first, last, queue, lineStart, line = begin; - if (begin >= end) { - return ""; - } - queue = new Array(end - begin); - for (i = 0; line < end; line++, i++) { - lineIndent = 0; - lineStart = first = this.bMarks[line]; - if (line + 1 < end || keepLastLF) { - // No need for bounds check because we have fake entry on tail. - last = this.eMarks[line] + 1; - } else { - last = this.eMarks[line]; - } - while (first < last && lineIndent < indent) { - ch = this.src.charCodeAt(first); - if (isSpace$4(ch)) { - if (ch === 9) { - lineIndent += 4 - (lineIndent + this.bsCount[line]) % 4; - } else { - lineIndent++; - } - } else if (first - lineStart < this.tShift[line]) { - // patched tShift masked characters to look like spaces (blockquotes, list markers) - lineIndent++; - } else { - break; - } - first++; - } - if (lineIndent > indent) { - // partially expanding tabs in code blocks, e.g '\t\tfoobar' - // with indent=2 becomes ' \tfoobar' - queue[i] = new Array(lineIndent - indent + 1).join(" ") + this.src.slice(first, last); - } else { - queue[i] = this.src.slice(first, last); - } - } - return queue.join(""); - }; - // re-export Token class to use in block rules - StateBlock.prototype.Token = token; - var state_block = StateBlock; - var _rules$1 = [ - // First 2 params - rule name & source. Secondary array - list of rules, - // which can be terminated by this one. - [ "table", table, [ "paragraph", "reference" ] ], [ "code", code ], [ "fence", fence, [ "paragraph", "reference", "blockquote", "list" ] ], [ "blockquote", blockquote, [ "paragraph", "reference", "blockquote", "list" ] ], [ "hr", hr, [ "paragraph", "reference", "blockquote", "list" ] ], [ "list", list, [ "paragraph", "reference", "blockquote" ] ], [ "reference", reference ], [ "html_block", html_block, [ "paragraph", "reference", "blockquote" ] ], [ "heading", heading, [ "paragraph", "reference", "blockquote" ] ], [ "lheading", lheading ], [ "paragraph", paragraph ] ]; - /** - * new ParserBlock() - **/ function ParserBlock() { - /** - * ParserBlock#ruler -> Ruler - * - * [[Ruler]] instance. Keep configuration of block rules. - **/ - this.ruler = new ruler; - for (var i = 0; i < _rules$1.length; i++) { - this.ruler.push(_rules$1[i][0], _rules$1[i][1], { - alt: (_rules$1[i][2] || []).slice() - }); - } - } - // Generate tokens for input range - - ParserBlock.prototype.tokenize = function(state, startLine, endLine) { - var ok, i, prevLine, rules = this.ruler.getRules(""), len = rules.length, line = startLine, hasEmptyLines = false, maxNesting = state.md.options.maxNesting; - while (line < endLine) { - state.line = line = state.skipEmptyLines(line); - if (line >= endLine) { - break; - } - // Termination condition for nested calls. - // Nested calls currently used for blockquotes & lists - if (state.sCount[line] < state.blkIndent) { - break; - } - // If nesting level exceeded - skip tail to the end. That's not ordinary - // situation and we should not care about content. - if (state.level >= maxNesting) { - state.line = endLine; - break; - } - // Try all possible rules. 
- // On success, rule should: - - // - update `state.line` - // - update `state.tokens` - // - return true - prevLine = state.line; - for (i = 0; i < len; i++) { - ok = rules[i](state, line, endLine, false); - if (ok) { - if (prevLine >= state.line) { - throw new Error("block rule didn't increment state.line"); - } - break; - } - } - // this can only happen if user disables paragraph rule - if (!ok) throw new Error("none of the block rules matched"); - // set state.tight if we had an empty line before current tag - // i.e. latest empty line should not count - state.tight = !hasEmptyLines; - // paragraph might "eat" one newline after it in nested lists - if (state.isEmpty(state.line - 1)) { - hasEmptyLines = true; - } - line = state.line; - if (line < endLine && state.isEmpty(line)) { - hasEmptyLines = true; - line++; - state.line = line; - } - } - }; - /** - * ParserBlock.parse(str, md, env, outTokens) - * - * Process input string and push block tokens into `outTokens` - **/ ParserBlock.prototype.parse = function(src, md, env, outTokens) { - var state; - if (!src) { - return; - } - state = new this.State(src, md, env, outTokens); - this.tokenize(state, state.line, state.lineMax); - }; - ParserBlock.prototype.State = state_block; - var parser_block = ParserBlock; - // Skip text characters for text token, place those to pending buffer - // Rule to skip pure text - // '{}$%@~+=:' reserved for extentions - // !, ", #, $, %, &, ', (, ), *, +, ,, -, ., /, :, ;, <, =, >, ?, @, [, \, ], ^, _, `, {, |, }, or ~ - // !!!! Don't confuse with "Markdown ASCII Punctuation" chars - // http://spec.commonmark.org/0.15/#ascii-punctuation-character - function isTerminatorChar(ch) { - switch (ch) { - case 10 /* \n */ : - case 33 /* ! */ : - case 35 /* # */ : - case 36 /* $ */ : - case 37 /* % */ : - case 38 /* & */ : - case 42 /* * */ : - case 43 /* + */ : - case 45 /* - */ : - case 58 /* : */ : - case 60 /* < */ : - case 61 /* = */ : - case 62 /* > */ : - case 64 /* @ */ : - case 91 /* [ */ : - case 92 /* \ */ : - case 93 /* ] */ : - case 94 /* ^ */ : - case 95 /* _ */ : - case 96 /* ` */ : - case 123 /* { */ : - case 125 /* } */ : - case 126 /* ~ */ : - return true; - - default: - return false; - } - } - var text = function text(state, silent) { - var pos = state.pos; - while (pos < state.posMax && !isTerminatorChar(state.src.charCodeAt(pos))) { - pos++; - } - if (pos === state.pos) { - return false; - } - if (!silent) { - state.pending += state.src.slice(state.pos, pos); - } - state.pos = pos; - return true; - }; - // Process links like https://example.org/ - // RFC3986: scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." 
) - var SCHEME_RE = /(?:^|[^a-z0-9.+-])([a-z][a-z0-9.+-]*)$/i; - var linkify = function linkify(state, silent) { - var pos, max, match, proto, link, url, fullUrl, token; - if (!state.md.options.linkify) return false; - if (state.linkLevel > 0) return false; - pos = state.pos; - max = state.posMax; - if (pos + 3 > max) return false; - if (state.src.charCodeAt(pos) !== 58 /* : */) return false; - if (state.src.charCodeAt(pos + 1) !== 47 /* / */) return false; - if (state.src.charCodeAt(pos + 2) !== 47 /* / */) return false; - match = state.pending.match(SCHEME_RE); - if (!match) return false; - proto = match[1]; - link = state.md.linkify.matchAtStart(state.src.slice(pos - proto.length)); - if (!link) return false; - url = link.url; - // invalid link, but still detected by linkify somehow; - // need to check to prevent infinite loop below - if (url.length <= proto.length) return false; - // disallow '*' at the end of the link (conflicts with emphasis) - url = url.replace(/\*+$/, ""); - fullUrl = state.md.normalizeLink(url); - if (!state.md.validateLink(fullUrl)) return false; - if (!silent) { - state.pending = state.pending.slice(0, -proto.length); - token = state.push("link_open", "a", 1); - token.attrs = [ [ "href", fullUrl ] ]; - token.markup = "linkify"; - token.info = "auto"; - token = state.push("text", "", 0); - token.content = state.md.normalizeLinkText(url); - token = state.push("link_close", "a", -1); - token.markup = "linkify"; - token.info = "auto"; - } - state.pos += url.length - proto.length; - return true; - }; - var isSpace$3 = utils.isSpace; - var newline = function newline(state, silent) { - var pmax, max, ws, pos = state.pos; - if (state.src.charCodeAt(pos) !== 10 /* \n */) { - return false; - } - pmax = state.pending.length - 1; - max = state.posMax; - // ' \n' -> hardbreak - // Lookup in pending chars is bad practice! Don't copy to other rules! - // Pending string is stored in concat mode, indexed lookups will cause - // convertion to flat mode. - if (!silent) { - if (pmax >= 0 && state.pending.charCodeAt(pmax) === 32) { - if (pmax >= 1 && state.pending.charCodeAt(pmax - 1) === 32) { - // Find whitespaces tail of pending chars. 
- ws = pmax - 1; - while (ws >= 1 && state.pending.charCodeAt(ws - 1) === 32) ws--; - state.pending = state.pending.slice(0, ws); - state.push("hardbreak", "br", 0); - } else { - state.pending = state.pending.slice(0, -1); - state.push("softbreak", "br", 0); - } - } else { - state.push("softbreak", "br", 0); - } - } - pos++; - // skip heading spaces for next line - while (pos < max && isSpace$3(state.src.charCodeAt(pos))) { - pos++; - } - state.pos = pos; - return true; - }; - var isSpace$2 = utils.isSpace; - var ESCAPED = []; - for (var i = 0; i < 256; i++) { - ESCAPED.push(0); - } - "\\!\"#$%&'()*+,./:;<=>?@[]^_`{|}~-".split("").forEach((function(ch) { - ESCAPED[ch.charCodeAt(0)] = 1; - })); - var _escape = function escape(state, silent) { - var ch1, ch2, origStr, escapedStr, token, pos = state.pos, max = state.posMax; - if (state.src.charCodeAt(pos) !== 92 /* \ */) return false; - pos++; - // '\' at the end of the inline block - if (pos >= max) return false; - ch1 = state.src.charCodeAt(pos); - if (ch1 === 10) { - if (!silent) { - state.push("hardbreak", "br", 0); - } - pos++; - // skip leading whitespaces from next line - while (pos < max) { - ch1 = state.src.charCodeAt(pos); - if (!isSpace$2(ch1)) break; - pos++; - } - state.pos = pos; - return true; - } - escapedStr = state.src[pos]; - if (ch1 >= 55296 && ch1 <= 56319 && pos + 1 < max) { - ch2 = state.src.charCodeAt(pos + 1); - if (ch2 >= 56320 && ch2 <= 57343) { - escapedStr += state.src[pos + 1]; - pos++; - } - } - origStr = "\\" + escapedStr; - if (!silent) { - token = state.push("text_special", "", 0); - if (ch1 < 256 && ESCAPED[ch1] !== 0) { - token.content = escapedStr; - } else { - token.content = origStr; - } - token.markup = origStr; - token.info = "escape"; - } - state.pos = pos + 1; - return true; - }; - // Parse backticks - var backticks = function backtick(state, silent) { - var start, max, marker, token, matchStart, matchEnd, openerLength, closerLength, pos = state.pos, ch = state.src.charCodeAt(pos); - if (ch !== 96 /* ` */) { - return false; - } - start = pos; - pos++; - max = state.posMax; - // scan marker length - while (pos < max && state.src.charCodeAt(pos) === 96 /* ` */) { - pos++; - } - marker = state.src.slice(start, pos); - openerLength = marker.length; - if (state.backticksScanned && (state.backticks[openerLength] || 0) <= start) { - if (!silent) state.pending += marker; - state.pos += openerLength; - return true; - } - matchEnd = pos; - // Nothing found in the cache, scan until the end of the line (or until marker is found) - while ((matchStart = state.src.indexOf("`", matchEnd)) !== -1) { - matchEnd = matchStart + 1; - // scan marker length - while (matchEnd < max && state.src.charCodeAt(matchEnd) === 96 /* ` */) { - matchEnd++; - } - closerLength = matchEnd - matchStart; - if (closerLength === openerLength) { - // Found matching closer length. 
- if (!silent) { - token = state.push("code_inline", "code", 0); - token.markup = marker; - token.content = state.src.slice(pos, matchStart).replace(/\n/g, " ").replace(/^ (.+) $/, "$1"); - } - state.pos = matchEnd; - return true; - } - // Some different length found, put it in cache as upper limit of where closer can be found - state.backticks[closerLength] = matchStart; - } - // Scanned through the end, didn't find anything - state.backticksScanned = true; - if (!silent) state.pending += marker; - state.pos += openerLength; - return true; - }; - // ~~strike through~~ - // Insert each marker as a separate text token, and add it to delimiter list - - var tokenize$1 = function strikethrough(state, silent) { - var i, scanned, token, len, ch, start = state.pos, marker = state.src.charCodeAt(start); - if (silent) { - return false; - } - if (marker !== 126 /* ~ */) { - return false; - } - scanned = state.scanDelims(state.pos, true); - len = scanned.length; - ch = String.fromCharCode(marker); - if (len < 2) { - return false; - } - if (len % 2) { - token = state.push("text", "", 0); - token.content = ch; - len--; - } - for (i = 0; i < len; i += 2) { - token = state.push("text", "", 0); - token.content = ch + ch; - state.delimiters.push({ - marker: marker, - length: 0, - // disable "rule of 3" length checks meant for emphasis - token: state.tokens.length - 1, - end: -1, - open: scanned.can_open, - close: scanned.can_close - }); - } - state.pos += scanned.length; - return true; - }; - function postProcess$1(state, delimiters) { - var i, j, startDelim, endDelim, token, loneMarkers = [], max = delimiters.length; - for (i = 0; i < max; i++) { - startDelim = delimiters[i]; - if (startDelim.marker !== 126 /* ~ */) { - continue; - } - if (startDelim.end === -1) { - continue; - } - endDelim = delimiters[startDelim.end]; - token = state.tokens[startDelim.token]; - token.type = "s_open"; - token.tag = "s"; - token.nesting = 1; - token.markup = "~~"; - token.content = ""; - token = state.tokens[endDelim.token]; - token.type = "s_close"; - token.tag = "s"; - token.nesting = -1; - token.markup = "~~"; - token.content = ""; - if (state.tokens[endDelim.token - 1].type === "text" && state.tokens[endDelim.token - 1].content === "~") { - loneMarkers.push(endDelim.token - 1); - } - } - // If a marker sequence has an odd number of characters, it's splitted - // like this: `~~~~~` -> `~` + `~~` + `~~`, leaving one marker at the - // start of the sequence. - - // So, we have to move all those markers after subsequent s_close tags. 
- - while (loneMarkers.length) { - i = loneMarkers.pop(); - j = i + 1; - while (j < state.tokens.length && state.tokens[j].type === "s_close") { - j++; - } - j--; - if (i !== j) { - token = state.tokens[j]; - state.tokens[j] = state.tokens[i]; - state.tokens[i] = token; - } - } - } - // Walk through delimiter list and replace text tokens with tags - - var postProcess_1$1 = function strikethrough(state) { - var curr, tokens_meta = state.tokens_meta, max = state.tokens_meta.length; - postProcess$1(state, state.delimiters); - for (curr = 0; curr < max; curr++) { - if (tokens_meta[curr] && tokens_meta[curr].delimiters) { - postProcess$1(state, tokens_meta[curr].delimiters); - } - } - }; - var strikethrough = { - tokenize: tokenize$1, - postProcess: postProcess_1$1 - }; - // Process *this* and _that_ - // Insert each marker as a separate text token, and add it to delimiter list - - var tokenize = function emphasis(state, silent) { - var i, scanned, token, start = state.pos, marker = state.src.charCodeAt(start); - if (silent) { - return false; - } - if (marker !== 95 /* _ */ && marker !== 42 /* * */) { - return false; - } - scanned = state.scanDelims(state.pos, marker === 42); - for (i = 0; i < scanned.length; i++) { - token = state.push("text", "", 0); - token.content = String.fromCharCode(marker); - state.delimiters.push({ - // Char code of the starting marker (number). - marker: marker, - // Total length of these series of delimiters. - length: scanned.length, - // A position of the token this delimiter corresponds to. - token: state.tokens.length - 1, - // If this delimiter is matched as a valid opener, `end` will be - // equal to its position, otherwise it's `-1`. - end: -1, - // Boolean flags that determine if this delimiter could open or close - // an emphasis. - open: scanned.can_open, - close: scanned.can_close - }); - } - state.pos += scanned.length; - return true; - }; - function postProcess(state, delimiters) { - var i, startDelim, endDelim, token, ch, isStrong, max = delimiters.length; - for (i = max - 1; i >= 0; i--) { - startDelim = delimiters[i]; - if (startDelim.marker !== 95 /* _ */ && startDelim.marker !== 42 /* * */) { - continue; - } - // Process only opening markers - if (startDelim.end === -1) { - continue; - } - endDelim = delimiters[startDelim.end]; - // If the previous delimiter has the same marker and is adjacent to this one, - // merge those into one strong delimiter. - - // `<em><em>whatever</em></em>` -> `<strong>whatever</strong>` - - isStrong = i > 0 && delimiters[i - 1].end === startDelim.end + 1 && - // check that first two markers match and adjacent - delimiters[i - 1].marker === startDelim.marker && delimiters[i - 1].token === startDelim.token - 1 && - // check that last two markers are adjacent (we can safely assume they match) - delimiters[startDelim.end + 1].token === endDelim.token + 1; - ch = String.fromCharCode(startDelim.marker); - token = state.tokens[startDelim.token]; - token.type = isStrong ? "strong_open" : "em_open"; - token.tag = isStrong ? "strong" : "em"; - token.nesting = 1; - token.markup = isStrong ? ch + ch : ch; - token.content = ""; - token = state.tokens[endDelim.token]; - token.type = isStrong ? "strong_close" : "em_close"; - token.tag = isStrong ? "strong" : "em"; - token.nesting = -1; - token.markup = isStrong ?
ch + ch : ch; - token.content = ""; - if (isStrong) { - state.tokens[delimiters[i - 1].token].content = ""; - state.tokens[delimiters[startDelim.end + 1].token].content = ""; - i--; - } - } - } - // Walk through delimiter list and replace text tokens with tags - - var postProcess_1 = function emphasis(state) { - var curr, tokens_meta = state.tokens_meta, max = state.tokens_meta.length; - postProcess(state, state.delimiters); - for (curr = 0; curr < max; curr++) { - if (tokens_meta[curr] && tokens_meta[curr].delimiters) { - postProcess(state, tokens_meta[curr].delimiters); - } - } - }; - var emphasis = { - tokenize: tokenize, - postProcess: postProcess_1 - }; - var normalizeReference$1 = utils.normalizeReference; - var isSpace$1 = utils.isSpace; - var link = function link(state, silent) { - var attrs, code, label, labelEnd, labelStart, pos, res, ref, token, href = "", title = "", oldPos = state.pos, max = state.posMax, start = state.pos, parseReference = true; - if (state.src.charCodeAt(state.pos) !== 91 /* [ */) { - return false; - } - labelStart = state.pos + 1; - labelEnd = state.md.helpers.parseLinkLabel(state, state.pos, true); - // parser failed to find ']', so it's not a valid link - if (labelEnd < 0) { - return false; - } - pos = labelEnd + 1; - if (pos < max && state.src.charCodeAt(pos) === 40 /* ( */) { - // Inline link - // might have found a valid shortcut link, disable reference parsing - parseReference = false; - // [link]( "title" ) - // ^^ skipping these spaces - pos++; - for (;pos < max; pos++) { - code = state.src.charCodeAt(pos); - if (!isSpace$1(code) && code !== 10) { - break; - } - } - if (pos >= max) { - return false; - } - // [link]( "title" ) - // ^^^^^^ parsing link destination - start = pos; - res = state.md.helpers.parseLinkDestination(state.src, pos, state.posMax); - if (res.ok) { - href = state.md.normalizeLink(res.str); - if (state.md.validateLink(href)) { - pos = res.pos; - } else { - href = ""; - } - // [link]( "title" ) - // ^^ skipping these spaces - start = pos; - for (;pos < max; pos++) { - code = state.src.charCodeAt(pos); - if (!isSpace$1(code) && code !== 10) { - break; - } - } - // [link]( "title" ) - // ^^^^^^^ parsing link title - res = state.md.helpers.parseLinkTitle(state.src, pos, state.posMax); - if (pos < max && start !== pos && res.ok) { - title = res.str; - pos = res.pos; - // [link]( "title" ) - // ^^ skipping these spaces - for (;pos < max; pos++) { - code = state.src.charCodeAt(pos); - if (!isSpace$1(code) && code !== 10) { - break; - } - } - } - } - if (pos >= max || state.src.charCodeAt(pos) !== 41 /* ) */) { - // parsing a valid shortcut link failed, fallback to reference - parseReference = true; - } - pos++; - } - if (parseReference) { - // Link reference - if (typeof state.env.references === "undefined") { - return false; - } - if (pos < max && state.src.charCodeAt(pos) === 91 /* [ */) { - start = pos + 1; - pos = state.md.helpers.parseLinkLabel(state, pos); - if (pos >= 0) { - label = state.src.slice(start, pos++); - } else { - pos = labelEnd + 1; - } - } else { - pos = labelEnd + 1; - } - // covers label === '' and label === undefined - // (collapsed reference link and shortcut reference link respectively) - if (!label) { - label = state.src.slice(labelStart, labelEnd); - } - ref = state.env.references[normalizeReference$1(label)]; - if (!ref) { - state.pos = oldPos; - return false; - } - href = ref.href; - title = ref.title; - } - - // We found the end of the link, and know for a fact it's a valid link; - // so all that's left to 
do is to call tokenizer. - - if (!silent) { - state.pos = labelStart; - state.posMax = labelEnd; - token = state.push("link_open", "a", 1); - token.attrs = attrs = [ [ "href", href ] ]; - if (title) { - attrs.push([ "title", title ]); - } - state.linkLevel++; - state.md.inline.tokenize(state); - state.linkLevel--; - token = state.push("link_close", "a", -1); - } - state.pos = pos; - state.posMax = max; - return true; - }; - var normalizeReference = utils.normalizeReference; - var isSpace = utils.isSpace; - var image = function image(state, silent) { - var attrs, code, content, label, labelEnd, labelStart, pos, ref, res, title, token, tokens, start, href = "", oldPos = state.pos, max = state.posMax; - if (state.src.charCodeAt(state.pos) !== 33 /* ! */) { - return false; - } - if (state.src.charCodeAt(state.pos + 1) !== 91 /* [ */) { - return false; - } - labelStart = state.pos + 2; - labelEnd = state.md.helpers.parseLinkLabel(state, state.pos + 1, false); - // parser failed to find ']', so it's not a valid link - if (labelEnd < 0) { - return false; - } - pos = labelEnd + 1; - if (pos < max && state.src.charCodeAt(pos) === 40 /* ( */) { - // Inline link - // [link]( "title" ) - // ^^ skipping these spaces - pos++; - for (;pos < max; pos++) { - code = state.src.charCodeAt(pos); - if (!isSpace(code) && code !== 10) { - break; - } - } - if (pos >= max) { - return false; - } - // [link]( "title" ) - // ^^^^^^ parsing link destination - start = pos; - res = state.md.helpers.parseLinkDestination(state.src, pos, state.posMax); - if (res.ok) { - href = state.md.normalizeLink(res.str); - if (state.md.validateLink(href)) { - pos = res.pos; - } else { - href = ""; - } - } - // [link]( "title" ) - // ^^ skipping these spaces - start = pos; - for (;pos < max; pos++) { - code = state.src.charCodeAt(pos); - if (!isSpace(code) && code !== 10) { - break; - } - } - // [link]( "title" ) - // ^^^^^^^ parsing link title - res = state.md.helpers.parseLinkTitle(state.src, pos, state.posMax); - if (pos < max && start !== pos && res.ok) { - title = res.str; - pos = res.pos; - // [link]( "title" ) - // ^^ skipping these spaces - for (;pos < max; pos++) { - code = state.src.charCodeAt(pos); - if (!isSpace(code) && code !== 10) { - break; - } - } - } else { - title = ""; - } - if (pos >= max || state.src.charCodeAt(pos) !== 41 /* ) */) { - state.pos = oldPos; - return false; - } - pos++; - } else { - // Link reference - if (typeof state.env.references === "undefined") { - return false; - } - if (pos < max && state.src.charCodeAt(pos) === 91 /* [ */) { - start = pos + 1; - pos = state.md.helpers.parseLinkLabel(state, pos); - if (pos >= 0) { - label = state.src.slice(start, pos++); - } else { - pos = labelEnd + 1; - } - } else { - pos = labelEnd + 1; - } - // covers label === '' and label === undefined - // (collapsed reference link and shortcut reference link respectively) - if (!label) { - label = state.src.slice(labelStart, labelEnd); - } - ref = state.env.references[normalizeReference(label)]; - if (!ref) { - state.pos = oldPos; - return false; - } - href = ref.href; - title = ref.title; - } - - // We found the end of the link, and know for a fact it's a valid link; - // so all that's left to do is to call tokenizer. 
- - if (!silent) { - content = state.src.slice(labelStart, labelEnd); - state.md.inline.parse(content, state.md, state.env, tokens = []); - token = state.push("image", "img", 0); - token.attrs = attrs = [ [ "src", href ], [ "alt", "" ] ]; - token.children = tokens; - token.content = content; - if (title) { - attrs.push([ "title", title ]); - } - } - state.pos = pos; - state.posMax = max; - return true; - }; - // Process autolinks '<protocol:...>' - /*eslint max-len:0*/ var EMAIL_RE = /^([a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*)$/; - var AUTOLINK_RE = /^([a-zA-Z][a-zA-Z0-9+.\-]{1,31}):([^<>\x00-\x20]*)$/; - var autolink = function autolink(state, silent) { - var url, fullUrl, token, ch, start, max, pos = state.pos; - if (state.src.charCodeAt(pos) !== 60 /* < */) { - return false; - } - start = state.pos; - max = state.posMax; - for (;;) { - if (++pos >= max) return false; - ch = state.src.charCodeAt(pos); - if (ch === 60 /* < */) return false; - if (ch === 62 /* > */) break; - } - url = state.src.slice(start + 1, pos); - if (AUTOLINK_RE.test(url)) { - fullUrl = state.md.normalizeLink(url); - if (!state.md.validateLink(fullUrl)) { - return false; - } - if (!silent) { - token = state.push("link_open", "a", 1); - token.attrs = [ [ "href", fullUrl ] ]; - token.markup = "autolink"; - token.info = "auto"; - token = state.push("text", "", 0); - token.content = state.md.normalizeLinkText(url); - token = state.push("link_close", "a", -1); - token.markup = "autolink"; - token.info = "auto"; - } - state.pos += url.length + 2; - return true; - } - if (EMAIL_RE.test(url)) { - fullUrl = state.md.normalizeLink("mailto:" + url); - if (!state.md.validateLink(fullUrl)) { - return false; - } - if (!silent) { - token = state.push("link_open", "a", 1); - token.attrs = [ [ "href", fullUrl ] ]; - token.markup = "autolink"; - token.info = "auto"; - token = state.push("text", "", 0); - token.content = state.md.normalizeLinkText(url); - token = state.push("link_close", "a", -1); - token.markup = "autolink"; - token.info = "auto"; - } - state.pos += url.length + 2; - return true; - } - return false; - }; - var HTML_TAG_RE = html_re.HTML_TAG_RE; - function isLinkOpen(str) { - return /^<a[>\s]/i.test(str); - } - function isLinkClose(str) { - return /^<\/a\s*>/i.test(str); - } - function isLetter(ch) { - /*eslint no-bitwise:0*/ - var lc = ch | 32; - // to lower case - return lc >= 97 /* a */ && lc <= 122 /* z */; - } - var html_inline = function html_inline(state, silent) { - var ch, match, max, token, pos = state.pos; - if (!state.md.options.html) { - return false; - } - // Check start - max = state.posMax; - if (state.src.charCodeAt(pos) !== 60 /* < */ || pos + 2 >= max) { - return false; - } - // Quick fail on second char - ch = state.src.charCodeAt(pos + 1); - if (ch !== 33 /* ! */ && ch !== 63 /* ?
*/ && ch !== 47 /* / */ && !isLetter(ch)) { - return false; - } - match = state.src.slice(pos).match(HTML_TAG_RE); - if (!match) { - return false; - } - if (!silent) { - token = state.push("html_inline", "", 0); - token.content = match[0]; - if (isLinkOpen(token.content)) state.linkLevel++; - if (isLinkClose(token.content)) state.linkLevel--; - } - state.pos += match[0].length; - return true; - }; - var has = utils.has; - var isValidEntityCode = utils.isValidEntityCode; - var fromCodePoint = utils.fromCodePoint; - var DIGITAL_RE = /^&#((?:x[a-f0-9]{1,6}|[0-9]{1,7}));/i; - var NAMED_RE = /^&([a-z][a-z0-9]{1,31});/i; - var entity = function entity(state, silent) { - var ch, code, match, token, pos = state.pos, max = state.posMax; - if (state.src.charCodeAt(pos) !== 38 /* & */) return false; - if (pos + 1 >= max) return false; - ch = state.src.charCodeAt(pos + 1); - if (ch === 35 /* # */) { - match = state.src.slice(pos).match(DIGITAL_RE); - if (match) { - if (!silent) { - code = match[1][0].toLowerCase() === "x" ? parseInt(match[1].slice(1), 16) : parseInt(match[1], 10); - token = state.push("text_special", "", 0); - token.content = isValidEntityCode(code) ? fromCodePoint(code) : fromCodePoint(65533); - token.markup = match[0]; - token.info = "entity"; - } - state.pos += match[0].length; - return true; - } - } else { - match = state.src.slice(pos).match(NAMED_RE); - if (match) { - if (has(entities, match[1])) { - if (!silent) { - token = state.push("text_special", "", 0); - token.content = entities[match[1]]; - token.markup = match[0]; - token.info = "entity"; - } - state.pos += match[0].length; - return true; - } - } - } - return false; - }; - // For each opening emphasis-like marker find a matching closing one - function processDelimiters(delimiters) { - var closerIdx, openerIdx, closer, opener, minOpenerIdx, newMinOpenerIdx, isOddMatch, lastJump, openersBottom = {}, max = delimiters.length; - if (!max) return; - // headerIdx is the first delimiter of the current (where closer is) delimiter run - var headerIdx = 0; - var lastTokenIdx = -2; - // needs any value lower than -1 - var jumps = []; - for (closerIdx = 0; closerIdx < max; closerIdx++) { - closer = delimiters[closerIdx]; - jumps.push(0); - // markers belong to same delimiter run if: - // - they have adjacent tokens - // - AND markers are the same - - if (delimiters[headerIdx].marker !== closer.marker || lastTokenIdx !== closer.token - 1) { - headerIdx = closerIdx; - } - lastTokenIdx = closer.token; - // Length is only used for emphasis-specific "rule of 3", - // if it's not defined (in strikethrough or 3rd party plugins), - // we can default it to 0 to disable those checks. - - closer.length = closer.length || 0; - if (!closer.close) continue; - // Previously calculated lower bounds (previous fails) - // for each marker, each delimiter length modulo 3, - // and for whether this closer can be an opener; - // https://github.com/commonmark/cmark/commit/34250e12ccebdc6372b8b49c44fab57c72443460 - if (!openersBottom.hasOwnProperty(closer.marker)) { - openersBottom[closer.marker] = [ -1, -1, -1, -1, -1, -1 ]; - } - minOpenerIdx = openersBottom[closer.marker][(closer.open ? 
3 : 0) + closer.length % 3]; - openerIdx = headerIdx - jumps[headerIdx] - 1; - newMinOpenerIdx = openerIdx; - for (;openerIdx > minOpenerIdx; openerIdx -= jumps[openerIdx] + 1) { - opener = delimiters[openerIdx]; - if (opener.marker !== closer.marker) continue; - if (opener.open && opener.end < 0) { - isOddMatch = false; - // from spec: - - // If one of the delimiters can both open and close emphasis, then the - // sum of the lengths of the delimiter runs containing the opening and - // closing delimiters must not be a multiple of 3 unless both lengths - // are multiples of 3. - - if (opener.close || closer.open) { - if ((opener.length + closer.length) % 3 === 0) { - if (opener.length % 3 !== 0 || closer.length % 3 !== 0) { - isOddMatch = true; - } - } - } - if (!isOddMatch) { - // If previous delimiter cannot be an opener, we can safely skip - // the entire sequence in future checks. This is required to make - // sure algorithm has linear complexity (see *_*_*_*_*_... case). - lastJump = openerIdx > 0 && !delimiters[openerIdx - 1].open ? jumps[openerIdx - 1] + 1 : 0; - jumps[closerIdx] = closerIdx - openerIdx + lastJump; - jumps[openerIdx] = lastJump; - closer.open = false; - opener.end = closerIdx; - opener.close = false; - newMinOpenerIdx = -1; - // treat next token as start of run, - // it optimizes skips in **<...>**a**<...>** pathological case - lastTokenIdx = -2; - break; - } - } - } - if (newMinOpenerIdx !== -1) { - // If match for this delimiter run failed, we want to set lower bound for - // future lookups. This is required to make sure algorithm has linear - // complexity. - // See details here: - // https://github.com/commonmark/cmark/issues/178#issuecomment-270417442 - openersBottom[closer.marker][(closer.open ? 3 : 0) + (closer.length || 0) % 3] = newMinOpenerIdx; - } - } - } - var balance_pairs = function link_pairs(state) { - var curr, tokens_meta = state.tokens_meta, max = state.tokens_meta.length; - processDelimiters(state.delimiters); - for (curr = 0; curr < max; curr++) { - if (tokens_meta[curr] && tokens_meta[curr].delimiters) { - processDelimiters(tokens_meta[curr].delimiters); - } - } - }; - // Clean up tokens after emphasis and strikethrough postprocessing: - var fragments_join = function fragments_join(state) { - var curr, last, level = 0, tokens = state.tokens, max = state.tokens.length; - for (curr = last = 0; curr < max; curr++) { - // re-calculate levels after emphasis/strikethrough turns some text nodes - // into opening/closing tags - if (tokens[curr].nesting < 0) level--; - // closing tag - tokens[curr].level = level; - if (tokens[curr].nesting > 0) level++; - // opening tag - if (tokens[curr].type === "text" && curr + 1 < max && tokens[curr + 1].type === "text") { - // collapse two adjacent text nodes - tokens[curr + 1].content = tokens[curr].content + tokens[curr + 1].content; - } else { - if (curr !== last) { - tokens[last] = tokens[curr]; - } - last++; - } - } - if (curr !== last) { - tokens.length = last; - } - }; - var isWhiteSpace = utils.isWhiteSpace; - var isPunctChar = utils.isPunctChar; - var isMdAsciiPunct = utils.isMdAsciiPunct; - function StateInline(src, md, env, outTokens) { - this.src = src; - this.env = env; - this.md = md; - this.tokens = outTokens; - this.tokens_meta = Array(outTokens.length); - this.pos = 0; - this.posMax = this.src.length; - this.level = 0; - this.pending = ""; - this.pendingLevel = 0; - // Stores { start: end } pairs. Useful for backtrack - // optimization of pairs parse (emphasis, strikes). 
- this.cache = {}; - // List of emphasis-like delimiters for current tag - this.delimiters = []; - // Stack of delimiter lists for upper level tags - this._prev_delimiters = []; - // backtick length => last seen position - this.backticks = {}; - this.backticksScanned = false; - // Counter used to disable inline linkify-it execution - // inside and markdown links - this.linkLevel = 0; - } - // Flush pending text - - StateInline.prototype.pushPending = function() { - var token$1 = new token("text", "", 0); - token$1.content = this.pending; - token$1.level = this.pendingLevel; - this.tokens.push(token$1); - this.pending = ""; - return token$1; - }; - // Push new token to "stream". - // If pending text exists - flush it as text token - - StateInline.prototype.push = function(type, tag, nesting) { - if (this.pending) { - this.pushPending(); - } - var token$1 = new token(type, tag, nesting); - var token_meta = null; - if (nesting < 0) { - // closing tag - this.level--; - this.delimiters = this._prev_delimiters.pop(); - } - token$1.level = this.level; - if (nesting > 0) { - // opening tag - this.level++; - this._prev_delimiters.push(this.delimiters); - this.delimiters = []; - token_meta = { - delimiters: this.delimiters - }; - } - this.pendingLevel = this.level; - this.tokens.push(token$1); - this.tokens_meta.push(token_meta); - return token$1; - }; - // Scan a sequence of emphasis-like markers, and determine whether - // it can start an emphasis sequence or end an emphasis sequence. - - // - start - position to scan from (it should point at a valid marker); - // - canSplitWord - determine if these markers can be found inside a word - - StateInline.prototype.scanDelims = function(start, canSplitWord) { - var pos = start, lastChar, nextChar, count, can_open, can_close, isLastWhiteSpace, isLastPunctChar, isNextWhiteSpace, isNextPunctChar, left_flanking = true, right_flanking = true, max = this.posMax, marker = this.src.charCodeAt(start); - // treat beginning of the line as a whitespace - lastChar = start > 0 ? this.src.charCodeAt(start - 1) : 32; - while (pos < max && this.src.charCodeAt(pos) === marker) { - pos++; - } - count = pos - start; - // treat end of the line as a whitespace - nextChar = pos < max ? 
this.src.charCodeAt(pos) : 32; - isLastPunctChar = isMdAsciiPunct(lastChar) || isPunctChar(String.fromCharCode(lastChar)); - isNextPunctChar = isMdAsciiPunct(nextChar) || isPunctChar(String.fromCharCode(nextChar)); - isLastWhiteSpace = isWhiteSpace(lastChar); - isNextWhiteSpace = isWhiteSpace(nextChar); - if (isNextWhiteSpace) { - left_flanking = false; - } else if (isNextPunctChar) { - if (!(isLastWhiteSpace || isLastPunctChar)) { - left_flanking = false; - } - } - if (isLastWhiteSpace) { - right_flanking = false; - } else if (isLastPunctChar) { - if (!(isNextWhiteSpace || isNextPunctChar)) { - right_flanking = false; - } - } - if (!canSplitWord) { - can_open = left_flanking && (!right_flanking || isLastPunctChar); - can_close = right_flanking && (!left_flanking || isNextPunctChar); - } else { - can_open = left_flanking; - can_close = right_flanking; - } - return { - can_open: can_open, - can_close: can_close, - length: count - }; - }; - // re-export Token class to use in block rules - StateInline.prototype.Token = token; - var state_inline = StateInline; - //////////////////////////////////////////////////////////////////////////////// - // Parser rules - var _rules = [ [ "text", text ], [ "linkify", linkify ], [ "newline", newline ], [ "escape", _escape ], [ "backticks", backticks ], [ "strikethrough", strikethrough.tokenize ], [ "emphasis", emphasis.tokenize ], [ "link", link ], [ "image", image ], [ "autolink", autolink ], [ "html_inline", html_inline ], [ "entity", entity ] ]; - // `rule2` ruleset was created specifically for emphasis/strikethrough - // post-processing and may be changed in the future. - - // Don't use this for anything except pairs (plugins working with `balance_pairs`). - - var _rules2 = [ [ "balance_pairs", balance_pairs ], [ "strikethrough", strikethrough.postProcess ], [ "emphasis", emphasis.postProcess ], - // rules for pairs separate '**' into its own text tokens, which may be left unused, - // rule below merges unused segments back with the rest of the text - [ "fragments_join", fragments_join ] ]; - /** - * new ParserInline() - **/ function ParserInline() { - var i; - /** - * ParserInline#ruler -> Ruler - * - * [[Ruler]] instance. Keep configuration of inline rules. - **/ this.ruler = new ruler; - for (i = 0; i < _rules.length; i++) { - this.ruler.push(_rules[i][0], _rules[i][1]); - } - /** - * ParserInline#ruler2 -> Ruler - * - * [[Ruler]] instance. Second ruler used for post-processing - * (e.g. in emphasis-like rules). - **/ this.ruler2 = new ruler; - for (i = 0; i < _rules2.length; i++) { - this.ruler2.push(_rules2[i][0], _rules2[i][1]); - } - } - // Skip single token by running all rules in validation mode; - // returns `true` if any rule reported success - - ParserInline.prototype.skipToken = function(state) { - var ok, i, pos = state.pos, rules = this.ruler.getRules(""), len = rules.length, maxNesting = state.md.options.maxNesting, cache = state.cache; - if (typeof cache[pos] !== "undefined") { - state.pos = cache[pos]; - return; - } - if (state.level < maxNesting) { - for (i = 0; i < len; i++) { - // Increment state.level and decrement it later to limit recursion. - // It's harmless to do here, because no tokens are created. But ideally, - // we'd need a separate private state variable for this purpose. 
- state.level++; - ok = rules[i](state, true); - state.level--; - if (ok) { - if (pos >= state.pos) { - throw new Error("inline rule didn't increment state.pos"); - } - break; - } - } - } else { - // Too much nesting, just skip until the end of the paragraph. - // NOTE: this will cause links to behave incorrectly in the following case, - // when an amount of `[` is exactly equal to `maxNesting + 1`: - // [[[[[[[[[[[[[[[[[[[[[foo]() - // TODO: remove this workaround when CM standard will allow nested links - // (we can replace it by preventing links from being parsed in - // validation mode) - state.pos = state.posMax; - } - if (!ok) { - state.pos++; - } - cache[pos] = state.pos; - }; - // Generate tokens for input range - - ParserInline.prototype.tokenize = function(state) { - var ok, i, prevPos, rules = this.ruler.getRules(""), len = rules.length, end = state.posMax, maxNesting = state.md.options.maxNesting; - while (state.pos < end) { - // Try all possible rules. - // On success, rule should: - // - update `state.pos` - // - update `state.tokens` - // - return true - prevPos = state.pos; - if (state.level < maxNesting) { - for (i = 0; i < len; i++) { - ok = rules[i](state, false); - if (ok) { - if (prevPos >= state.pos) { - throw new Error("inline rule didn't increment state.pos"); - } - break; - } - } - } - if (ok) { - if (state.pos >= end) { - break; - } - continue; - } - state.pending += state.src[state.pos++]; - } - if (state.pending) { - state.pushPending(); - } - }; - /** - * ParserInline.parse(str, md, env, outTokens) - * - * Process input string and push inline tokens into `outTokens` - **/ ParserInline.prototype.parse = function(str, md, env, outTokens) { - var i, rules, len; - var state = new this.State(str, md, env, outTokens); - this.tokenize(state); - rules = this.ruler2.getRules(""); - len = rules.length; - for (i = 0; i < len; i++) { - rules[i](state); - } - }; - ParserInline.prototype.State = state_inline; - var parser_inline = ParserInline; - var re = function(opts) { - var re = {}; - opts = opts || {}; - // Use direct extract instead of `regenerate` to reduse browserified size - re.src_Any = regex$3.source; - re.src_Cc = regex$2.source; - re.src_Z = regex.source; - re.src_P = regex$4.source; - // \p{\Z\P\Cc\CF} (white spaces + control + format + punctuation) - re.src_ZPCc = [ re.src_Z, re.src_P, re.src_Cc ].join("|"); - // \p{\Z\Cc} (white spaces + control) - re.src_ZCc = [ re.src_Z, re.src_Cc ].join("|"); - // Experimental. List of chars, completely prohibited in links - // because can separate it from other part of text - var text_separators = "[><\uff5c]"; - // All possible word characters (everything without punctuation, spaces & controls) - // Defined via punctuation & spaces to save space - // Should be something like \p{\L\N\S\M} (\w but without `_`) - re.src_pseudo_letter = "(?:(?!" + text_separators + "|" + re.src_ZPCc + ")" + re.src_Any + ")"; - // The same as abothe but without [0-9] - // var src_pseudo_letter_non_d = '(?:(?![0-9]|' + src_ZPCc + ')' + src_Any + ')'; - //////////////////////////////////////////////////////////////////////////////// - re.src_ip4 = "(?:(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)"; - // Prohibit any of "@/[]()" in user/pass to avoid wrong domain fetch. - re.src_auth = "(?:(?:(?!" 
+ re.src_ZCc + "|[@/\\[\\]()]).)+@)?"; - re.src_port = "(?::(?:6(?:[0-4]\\d{3}|5(?:[0-4]\\d{2}|5(?:[0-2]\\d|3[0-5])))|[1-5]?\\d{1,4}))?"; - re.src_host_terminator = "(?=$|" + text_separators + "|" + re.src_ZPCc + ")" + "(?!" + (opts["---"] ? "-(?!--)|" : "-|") + "_|:\\d|\\.-|\\.(?!$|" + re.src_ZPCc + "))"; - re.src_path = "(?:" + "[/?#]" + "(?:" + "(?!" + re.src_ZCc + "|" + text_separators + "|[()[\\]{}.,\"'?!\\-;]).|" + "\\[(?:(?!" + re.src_ZCc + "|\\]).)*\\]|" + "\\((?:(?!" + re.src_ZCc + "|[)]).)*\\)|" + "\\{(?:(?!" + re.src_ZCc + "|[}]).)*\\}|" + '\\"(?:(?!' + re.src_ZCc + '|["]).)+\\"|' + "\\'(?:(?!" + re.src_ZCc + "|[']).)+\\'|" + "\\'(?=" + re.src_pseudo_letter + "|[-])|" + // allow `I'm_king` if no pair found - "\\.{2,}[a-zA-Z0-9%/&]|" + // google has many dots in "google search" links (#66, #81). - // github has ... in commit range links, - // Restrict to - // - english - // - percent-encoded - // - parts of file path - // - params separator - // until more examples found. - "\\.(?!" + re.src_ZCc + "|[.]|$)|" + (opts["---"] ? "\\-(?!--(?:[^-]|$))(?:-*)|" : "\\-+|") + ",(?!" + re.src_ZCc + "|$)|" + // allow `,,,` in paths - ";(?!" + re.src_ZCc + "|$)|" + // allow `;` if not followed by space-like char - "\\!+(?!" + re.src_ZCc + "|[!]|$)|" + // allow `!!!` in paths, but not at the end - "\\?(?!" + re.src_ZCc + "|[?]|$)" + ")+" + "|\\/" + ")?"; - // Allow anything in markdown spec, forbid quote (") at the first position - // because emails enclosed in quotes are far more common - re.src_email_name = '[\\-;:&=\\+\\$,\\.a-zA-Z0-9_][\\-;:&=\\+\\$,\\"\\.a-zA-Z0-9_]*'; - re.src_xn = "xn--[a-z0-9\\-]{1,59}"; - // More to read about domain names - // http://serverfault.com/questions/638260/ - re.src_domain_root = - // Allow letters & digits (http://test1) - "(?:" + re.src_xn + "|" + re.src_pseudo_letter + "{1,63}" + ")"; - re.src_domain = "(?:" + re.src_xn + "|" + "(?:" + re.src_pseudo_letter + ")" + "|" + "(?:" + re.src_pseudo_letter + "(?:-|" + re.src_pseudo_letter + "){0,61}" + re.src_pseudo_letter + ")" + ")"; - re.src_host = "(?:" + - // Don't need IP check, because digits are already allowed in normal domain names - // src_ip4 + - // '|' + - "(?:(?:(?:" + re.src_domain + ")\\.)*" + re.src_domain /*_root*/ + ")" + ")"; - re.tpl_host_fuzzy = "(?:" + re.src_ip4 + "|" + "(?:(?:(?:" + re.src_domain + ")\\.)+(?:%TLDS%))" + ")"; - re.tpl_host_no_ip_fuzzy = "(?:(?:(?:" + re.src_domain + ")\\.)+(?:%TLDS%))"; - re.src_host_strict = re.src_host + re.src_host_terminator; - re.tpl_host_fuzzy_strict = re.tpl_host_fuzzy + re.src_host_terminator; - re.src_host_port_strict = re.src_host + re.src_port + re.src_host_terminator; - re.tpl_host_port_fuzzy_strict = re.tpl_host_fuzzy + re.src_port + re.src_host_terminator; - re.tpl_host_port_no_ip_fuzzy_strict = re.tpl_host_no_ip_fuzzy + re.src_port + re.src_host_terminator; - //////////////////////////////////////////////////////////////////////////////// - // Main rules - // Rude test fuzzy links by host, for quick deny - re.tpl_host_fuzzy_test = "localhost|www\\.|\\.\\d{1,3}\\.|(?:\\.(?:%TLDS%)(?:" + re.src_ZPCc + "|>|$))"; - re.tpl_email_fuzzy = "(^|" + text_separators + '|"|\\(|' + re.src_ZCc + ")" + "(" + re.src_email_name + "@" + re.tpl_host_fuzzy_strict + ")"; - re.tpl_link_fuzzy = - // Fuzzy link can't be prepended with .:/\- and non punctuation. 
- // but can start with > (markdown blockquote) - "(^|(?![.:/\\-_@])(?:[$+<=>^`|\uff5c]|" + re.src_ZPCc + "))" + "((?![$+<=>^`|\uff5c])" + re.tpl_host_port_fuzzy_strict + re.src_path + ")"; - re.tpl_link_no_ip_fuzzy = - // Fuzzy link can't be prepended with .:/\- and non punctuation. - // but can start with > (markdown blockquote) - "(^|(?![.:/\\-_@])(?:[$+<=>^`|\uff5c]|" + re.src_ZPCc + "))" + "((?![$+<=>^`|\uff5c])" + re.tpl_host_port_no_ip_fuzzy_strict + re.src_path + ")"; - return re; - }; - //////////////////////////////////////////////////////////////////////////////// - // Helpers - // Merge objects - - function assign(obj /*from1, from2, from3, ...*/) { - var sources = Array.prototype.slice.call(arguments, 1); - sources.forEach((function(source) { - if (!source) { - return; - } - Object.keys(source).forEach((function(key) { - obj[key] = source[key]; - })); - })); - return obj; - } - function _class(obj) { - return Object.prototype.toString.call(obj); - } - function isString(obj) { - return _class(obj) === "[object String]"; - } - function isObject(obj) { - return _class(obj) === "[object Object]"; - } - function isRegExp(obj) { - return _class(obj) === "[object RegExp]"; - } - function isFunction(obj) { - return _class(obj) === "[object Function]"; - } - function escapeRE(str) { - return str.replace(/[.?*+^$[\]\\(){}|-]/g, "\\$&"); - } - //////////////////////////////////////////////////////////////////////////////// - var defaultOptions = { - fuzzyLink: true, - fuzzyEmail: true, - fuzzyIP: false - }; - function isOptionsObj(obj) { - return Object.keys(obj || {}).reduce((function(acc, k) { - return acc || defaultOptions.hasOwnProperty(k); - }), false); - } - var defaultSchemas = { - "http:": { - validate: function(text, pos, self) { - var tail = text.slice(pos); - if (!self.re.http) { - // compile lazily, because "host"-containing variables can change on tlds update. - self.re.http = new RegExp("^\\/\\/" + self.re.src_auth + self.re.src_host_port_strict + self.re.src_path, "i"); - } - if (self.re.http.test(tail)) { - return tail.match(self.re.http)[0].length; - } - return 0; - } - }, - "https:": "http:", - "ftp:": "http:", - "//": { - validate: function(text, pos, self) { - var tail = text.slice(pos); - if (!self.re.no_http) { - // compile lazily, because "host"-containing variables can change on tlds update. 
- self.re.no_http = new RegExp("^" + self.re.src_auth + - // Don't allow single-level domains, because of false positives like '//test' - // with code comments - "(?:localhost|(?:(?:" + self.re.src_domain + ")\\.)+" + self.re.src_domain_root + ")" + self.re.src_port + self.re.src_host_terminator + self.re.src_path, "i"); - } - if (self.re.no_http.test(tail)) { - // should not be `://` & `///`, that protects from errors in protocol name - if (pos >= 3 && text[pos - 3] === ":") { - return 0; - } - if (pos >= 3 && text[pos - 3] === "/") { - return 0; - } - return tail.match(self.re.no_http)[0].length; - } - return 0; - } - }, - "mailto:": { - validate: function(text, pos, self) { - var tail = text.slice(pos); - if (!self.re.mailto) { - self.re.mailto = new RegExp("^" + self.re.src_email_name + "@" + self.re.src_host_strict, "i"); - } - if (self.re.mailto.test(tail)) { - return tail.match(self.re.mailto)[0].length; - } - return 0; - } - } - }; - /*eslint-disable max-len*/ - // RE pattern for 2-character tlds (autogenerated by ./support/tlds_2char_gen.js) - var tlds_2ch_src_re = "a[cdefgilmnoqrstuwxz]|b[abdefghijmnorstvwyz]|c[acdfghiklmnoruvwxyz]|d[ejkmoz]|e[cegrstu]|f[ijkmor]|g[abdefghilmnpqrstuwy]|h[kmnrtu]|i[delmnoqrst]|j[emop]|k[eghimnprwyz]|l[abcikrstuvy]|m[acdeghklmnopqrstuvwxyz]|n[acefgilopruz]|om|p[aefghklmnrstwy]|qa|r[eosuw]|s[abcdeghijklmnortuvxyz]|t[cdfghjklmnortvwz]|u[agksyz]|v[aceginu]|w[fs]|y[et]|z[amw]"; - // DON'T try to make PRs with changes. Extend TLDs with LinkifyIt.tlds() instead - var tlds_default = "biz|com|edu|gov|net|org|pro|web|xxx|aero|asia|coop|info|museum|name|shop|\u0440\u0444".split("|"); - /*eslint-enable max-len*/ - //////////////////////////////////////////////////////////////////////////////// - function resetScanCache(self) { - self.__index__ = -1; - self.__text_cache__ = ""; - } - function createValidator(re) { - return function(text, pos) { - var tail = text.slice(pos); - if (re.test(tail)) { - return tail.match(re)[0].length; - } - return 0; - }; - } - function createNormalizer() { - return function(match, self) { - self.normalize(match); - }; - } - // Schemas compiler. Build regexps. - - function compile(self) { - // Load & clone RE patterns. 
- var re$1 = self.re = re(self.__opts__); - // Define dynamic patterns - var tlds = self.__tlds__.slice(); - self.onCompile(); - if (!self.__tlds_replaced__) { - tlds.push(tlds_2ch_src_re); - } - tlds.push(re$1.src_xn); - re$1.src_tlds = tlds.join("|"); - function untpl(tpl) { - return tpl.replace("%TLDS%", re$1.src_tlds); - } - re$1.email_fuzzy = RegExp(untpl(re$1.tpl_email_fuzzy), "i"); - re$1.link_fuzzy = RegExp(untpl(re$1.tpl_link_fuzzy), "i"); - re$1.link_no_ip_fuzzy = RegExp(untpl(re$1.tpl_link_no_ip_fuzzy), "i"); - re$1.host_fuzzy_test = RegExp(untpl(re$1.tpl_host_fuzzy_test), "i"); - - // Compile each schema - - var aliases = []; - self.__compiled__ = {}; - // Reset compiled data - function schemaError(name, val) { - throw new Error('(LinkifyIt) Invalid schema "' + name + '": ' + val); - } - Object.keys(self.__schemas__).forEach((function(name) { - var val = self.__schemas__[name]; - // skip disabled methods - if (val === null) { - return; - } - var compiled = { - validate: null, - link: null - }; - self.__compiled__[name] = compiled; - if (isObject(val)) { - if (isRegExp(val.validate)) { - compiled.validate = createValidator(val.validate); - } else if (isFunction(val.validate)) { - compiled.validate = val.validate; - } else { - schemaError(name, val); - } - if (isFunction(val.normalize)) { - compiled.normalize = val.normalize; - } else if (!val.normalize) { - compiled.normalize = createNormalizer(); - } else { - schemaError(name, val); - } - return; - } - if (isString(val)) { - aliases.push(name); - return; - } - schemaError(name, val); - })); - - // Compile postponed aliases - - aliases.forEach((function(alias) { - if (!self.__compiled__[self.__schemas__[alias]]) { - // Silently fail on missed schemas to avoid errons on disable. - // schemaError(alias, self.__schemas__[alias]); - return; - } - self.__compiled__[alias].validate = self.__compiled__[self.__schemas__[alias]].validate; - self.__compiled__[alias].normalize = self.__compiled__[self.__schemas__[alias]].normalize; - })); - - // Fake record for guessed links - - self.__compiled__[""] = { - validate: null, - normalize: createNormalizer() - }; - - // Build schema condition - - var slist = Object.keys(self.__compiled__).filter((function(name) { - // Filter disabled & fake schemas - return name.length > 0 && self.__compiled__[name]; - })).map(escapeRE).join("|"); - // (?!_) cause 1.5x slowdown - self.re.schema_test = RegExp("(^|(?!_)(?:[><\uff5c]|" + re$1.src_ZPCc + "))(" + slist + ")", "i"); - self.re.schema_search = RegExp("(^|(?!_)(?:[><\uff5c]|" + re$1.src_ZPCc + "))(" + slist + ")", "ig"); - self.re.schema_at_start = RegExp("^" + self.re.schema_search.source, "i"); - self.re.pretest = RegExp("(" + self.re.schema_test.source + ")|(" + self.re.host_fuzzy_test.source + ")|@", "i"); - - // Cleanup - - resetScanCache(self); - } - /** - * class Match - * - * Match result. Single element of array, returned by [[LinkifyIt#match]] - **/ function Match(self, shift) { - var start = self.__index__, end = self.__last_index__, text = self.__text_cache__.slice(start, end); - /** - * Match#schema -> String - * - * Prefix (protocol) for matched string. - **/ this.schema = self.__schema__.toLowerCase(); - /** - * Match#index -> Number - * - * First position of matched string. - **/ this.index = start + shift; - /** - * Match#lastIndex -> Number - * - * Next position after matched string. - **/ this.lastIndex = end + shift; - /** - * Match#raw -> String - * - * Matched string. 
- **/ this.raw = text; - /** - * Match#text -> String - * - * Notmalized text of matched string. - **/ this.text = text; - /** - * Match#url -> String - * - * Normalized url of matched string. - **/ this.url = text; - } - function createMatch(self, shift) { - var match = new Match(self, shift); - self.__compiled__[match.schema].normalize(match, self); - return match; - } - /** - * class LinkifyIt - **/ - /** - * new LinkifyIt(schemas, options) - * - schemas (Object): Optional. Additional schemas to validate (prefix/validator) - * - options (Object): { fuzzyLink|fuzzyEmail|fuzzyIP: true|false } - * - * Creates new linkifier instance with optional additional schemas. - * Can be called without `new` keyword for convenience. - * - * By default understands: - * - * - `http(s)://...` , `ftp://...`, `mailto:...` & `//...` links - * - "fuzzy" links and emails (example.com, foo@bar.com). - * - * `schemas` is an object, where each key/value describes protocol/rule: - * - * - __key__ - link prefix (usually, protocol name with `:` at the end, `skype:` - * for example). `linkify-it` makes shure that prefix is not preceeded with - * alphanumeric char and symbols. Only whitespaces and punctuation allowed. - * - __value__ - rule to check tail after link prefix - * - _String_ - just alias to existing rule - * - _Object_ - * - _validate_ - validator function (should return matched length on success), - * or `RegExp`. - * - _normalize_ - optional function to normalize text & url of matched result - * (for example, for @twitter mentions). - * - * `options`: - * - * - __fuzzyLink__ - recognige URL-s without `http(s):` prefix. Default `true`. - * - __fuzzyIP__ - allow IPs in fuzzy links above. Can conflict with some texts - * like version numbers. Default `false`. - * - __fuzzyEmail__ - recognize emails without `mailto:` prefix. - * - **/ function LinkifyIt(schemas, options) { - if (!(this instanceof LinkifyIt)) { - return new LinkifyIt(schemas, options); - } - if (!options) { - if (isOptionsObj(schemas)) { - options = schemas; - schemas = {}; - } - } - this.__opts__ = assign({}, defaultOptions, options); - // Cache last tested result. Used to skip repeating steps on next `match` call. - this.__index__ = -1; - this.__last_index__ = -1; - // Next scan position - this.__schema__ = ""; - this.__text_cache__ = ""; - this.__schemas__ = assign({}, defaultSchemas, schemas); - this.__compiled__ = {}; - this.__tlds__ = tlds_default; - this.__tlds_replaced__ = false; - this.re = {}; - compile(this); - } - /** chainable - * LinkifyIt#add(schema, definition) - * - schema (String): rule name (fixed pattern prefix) - * - definition (String|RegExp|Object): schema definition - * - * Add new rule definition. See constructor description for details. - **/ LinkifyIt.prototype.add = function add(schema, definition) { - this.__schemas__[schema] = definition; - compile(this); - return this; - }; - /** chainable - * LinkifyIt#set(options) - * - options (Object): { fuzzyLink|fuzzyEmail|fuzzyIP: true|false } - * - * Set recognition options for links without schema. - **/ LinkifyIt.prototype.set = function set(options) { - this.__opts__ = assign(this.__opts__, options); - return this; - }; - /** - * LinkifyIt#test(text) -> Boolean - * - * Searches linkifiable pattern and returns `true` on success or `false` on fail. 
- **/ LinkifyIt.prototype.test = function test(text) { - // Reset scan cache - this.__text_cache__ = text; - this.__index__ = -1; - if (!text.length) { - return false; - } - var m, ml, me, len, shift, next, re, tld_pos, at_pos; - // try to scan for link with schema - that's the most simple rule - if (this.re.schema_test.test(text)) { - re = this.re.schema_search; - re.lastIndex = 0; - while ((m = re.exec(text)) !== null) { - len = this.testSchemaAt(text, m[2], re.lastIndex); - if (len) { - this.__schema__ = m[2]; - this.__index__ = m.index + m[1].length; - this.__last_index__ = m.index + m[0].length + len; - break; - } - } - } - if (this.__opts__.fuzzyLink && this.__compiled__["http:"]) { - // guess schemaless links - tld_pos = text.search(this.re.host_fuzzy_test); - if (tld_pos >= 0) { - // if tld is located after found link - no need to check fuzzy pattern - if (this.__index__ < 0 || tld_pos < this.__index__) { - if ((ml = text.match(this.__opts__.fuzzyIP ? this.re.link_fuzzy : this.re.link_no_ip_fuzzy)) !== null) { - shift = ml.index + ml[1].length; - if (this.__index__ < 0 || shift < this.__index__) { - this.__schema__ = ""; - this.__index__ = shift; - this.__last_index__ = ml.index + ml[0].length; - } - } - } - } - } - if (this.__opts__.fuzzyEmail && this.__compiled__["mailto:"]) { - // guess schemaless emails - at_pos = text.indexOf("@"); - if (at_pos >= 0) { - // We can't skip this check, because this cases are possible: - // 192.168.1.1@gmail.com, my.in@example.com - if ((me = text.match(this.re.email_fuzzy)) !== null) { - shift = me.index + me[1].length; - next = me.index + me[0].length; - if (this.__index__ < 0 || shift < this.__index__ || shift === this.__index__ && next > this.__last_index__) { - this.__schema__ = "mailto:"; - this.__index__ = shift; - this.__last_index__ = next; - } - } - } - } - return this.__index__ >= 0; - }; - /** - * LinkifyIt#pretest(text) -> Boolean - * - * Very quick check, that can give false positives. Returns true if link MAY BE - * can exists. Can be used for speed optimization, when you need to check that - * link NOT exists. - **/ LinkifyIt.prototype.pretest = function pretest(text) { - return this.re.pretest.test(text); - }; - /** - * LinkifyIt#testSchemaAt(text, name, position) -> Number - * - text (String): text to scan - * - name (String): rule (schema) name - * - position (Number): text offset to check from - * - * Similar to [[LinkifyIt#test]] but checks only specific protocol tail exactly - * at given position. Returns length of found pattern (0 on fail). - **/ LinkifyIt.prototype.testSchemaAt = function testSchemaAt(text, schema, pos) { - // If not supported schema check requested - terminate - if (!this.__compiled__[schema.toLowerCase()]) { - return 0; - } - return this.__compiled__[schema.toLowerCase()].validate(text, pos, this); - }; - /** - * LinkifyIt#match(text) -> Array|null - * - * Returns array of found link descriptions or `null` on fail. We strongly - * recommend to use [[LinkifyIt#test]] first, for best speed. - * - * ##### Result match description - * - * - __schema__ - link schema, can be empty for fuzzy links, or `//` for - * protocol-neutral links. 
- * - __index__ - offset of matched text - * - __lastIndex__ - index of next char after mathch end - * - __raw__ - matched text - * - __text__ - normalized text - * - __url__ - link, generated from matched text - **/ LinkifyIt.prototype.match = function match(text) { - var shift = 0, result = []; - // Try to take previous element from cache, if .test() called before - if (this.__index__ >= 0 && this.__text_cache__ === text) { - result.push(createMatch(this, shift)); - shift = this.__last_index__; - } - // Cut head if cache was used - var tail = shift ? text.slice(shift) : text; - // Scan string until end reached - while (this.test(tail)) { - result.push(createMatch(this, shift)); - tail = tail.slice(this.__last_index__); - shift += this.__last_index__; - } - if (result.length) { - return result; - } - return null; - }; - /** - * LinkifyIt#matchAtStart(text) -> Match|null - * - * Returns fully-formed (not fuzzy) link if it starts at the beginning - * of the string, and null otherwise. - **/ LinkifyIt.prototype.matchAtStart = function matchAtStart(text) { - // Reset scan cache - this.__text_cache__ = text; - this.__index__ = -1; - if (!text.length) return null; - var m = this.re.schema_at_start.exec(text); - if (!m) return null; - var len = this.testSchemaAt(text, m[2], m[0].length); - if (!len) return null; - this.__schema__ = m[2]; - this.__index__ = m.index + m[1].length; - this.__last_index__ = m.index + m[0].length + len; - return createMatch(this, 0); - }; - /** chainable - * LinkifyIt#tlds(list [, keepOld]) -> this - * - list (Array): list of tlds - * - keepOld (Boolean): merge with current list if `true` (`false` by default) - * - * Load (or merge) new tlds list. Those are user for fuzzy links (without prefix) - * to avoid false positives. By default this algorythm used: - * - * - hostname with any 2-letter root zones are ok. - * - biz|com|edu|gov|net|org|pro|web|xxx|aero|asia|coop|info|museum|name|shop|рф - * are ok. - * - encoded (`xn--...`) root zones are ok. - * - * If list is replaced, then exact match for 2-chars root zones will be checked. - **/ LinkifyIt.prototype.tlds = function tlds(list, keepOld) { - list = Array.isArray(list) ? list : [ list ]; - if (!keepOld) { - this.__tlds__ = list.slice(); - this.__tlds_replaced__ = true; - compile(this); - return this; - } - this.__tlds__ = this.__tlds__.concat(list).sort().filter((function(el, idx, arr) { - return el !== arr[idx - 1]; - })).reverse(); - compile(this); - return this; - }; - /** - * LinkifyIt#normalize(match) - * - * Default normalizer (if schema does not define it's own). - **/ LinkifyIt.prototype.normalize = function normalize(match) { - // Do minimal possible changes by default. Need to collect feedback prior - // to move forward https://github.com/markdown-it/linkify-it/issues/1 - if (!match.schema) { - match.url = "http://" + match.url; - } - if (match.schema === "mailto:" && !/^mailto:/i.test(match.url)) { - match.url = "mailto:" + match.url; - } - }; - /** - * LinkifyIt#onCompile() - * - * Override to modify basic RegExp-s. - **/ LinkifyIt.prototype.onCompile = function onCompile() {}; - var linkifyIt = LinkifyIt; - /*! https://mths.be/punycode v1.4.1 by @mathias */ - /** Highest positive signed 32-bit float value */ var maxInt = 2147483647; - // aka. 
0x7FFFFFFF or 2^31-1 - /** Bootstring parameters */ var base = 36; - var tMin = 1; - var tMax = 26; - var skew = 38; - var damp = 700; - var initialBias = 72; - var initialN = 128; - // 0x80 - var delimiter = "-"; - // '\x2D' - /** Regular expressions */ var regexPunycode = /^xn--/; - var regexNonASCII = /[^\x20-\x7E]/; - // unprintable ASCII chars + non-ASCII chars - var regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g; - // RFC 3490 separators - /** Error messages */ var errors = { - overflow: "Overflow: input needs wider integers to process", - "not-basic": "Illegal input >= 0x80 (not a basic code point)", - "invalid-input": "Invalid input" - }; - /** Convenience shortcuts */ var baseMinusTMin = base - tMin; - var floor = Math.floor; - var stringFromCharCode = String.fromCharCode; - /*--------------------------------------------------------------------------*/ - /** - * A generic error utility function. - * @private - * @param {String} type The error type. - * @returns {Error} Throws a `RangeError` with the applicable error message. - */ function error(type) { - throw new RangeError(errors[type]); - } - /** - * A generic `Array#map` utility function. - * @private - * @param {Array} array The array to iterate over. - * @param {Function} callback The function that gets called for every array - * item. - * @returns {Array} A new array of values returned by the callback function. - */ function map(array, fn) { - var length = array.length; - var result = []; - while (length--) { - result[length] = fn(array[length]); - } - return result; - } - /** - * A simple `Array#map`-like wrapper to work with domain name strings or email - * addresses. - * @private - * @param {String} domain The domain name or email address. - * @param {Function} callback The function that gets called for every - * character. - * @returns {Array} A new string of characters returned by the callback - * function. - */ function mapDomain(string, fn) { - var parts = string.split("@"); - var result = ""; - if (parts.length > 1) { - // In email addresses, only the domain name should be punycoded. Leave - // the local part (i.e. everything up to `@`) intact. - result = parts[0] + "@"; - string = parts[1]; - } - // Avoid `split(regex)` for IE8 compatibility. See #17. - string = string.replace(regexSeparators, "."); - var labels = string.split("."); - var encoded = map(labels, fn).join("."); - return result + encoded; - } - /** - * Creates an array containing the numeric code points of each Unicode - * character in the string. While JavaScript uses UCS-2 internally, - * this function will convert a pair of surrogate halves (each of which - * UCS-2 exposes as separate characters) into a single code point, - * matching UTF-16. - * @see `punycode.ucs2.encode` - * @see - * @memberOf punycode.ucs2 - * @name decode - * @param {String} string The Unicode input string (UCS-2). - * @returns {Array} The new array of code points. 
- */ function ucs2decode(string) { - var output = [], counter = 0, length = string.length, value, extra; - while (counter < length) { - value = string.charCodeAt(counter++); - if (value >= 55296 && value <= 56319 && counter < length) { - // high surrogate, and there is a next character - extra = string.charCodeAt(counter++); - if ((extra & 64512) == 56320) { - // low surrogate - output.push(((value & 1023) << 10) + (extra & 1023) + 65536); - } else { - // unmatched surrogate; only append this code unit, in case the next - // code unit is the high surrogate of a surrogate pair - output.push(value); - counter--; - } - } else { - output.push(value); - } - } - return output; - } - /** - * Creates a string based on an array of numeric code points. - * @see `punycode.ucs2.decode` - * @memberOf punycode.ucs2 - * @name encode - * @param {Array} codePoints The array of numeric code points. - * @returns {String} The new Unicode string (UCS-2). - */ function ucs2encode(array) { - return map(array, (function(value) { - var output = ""; - if (value > 65535) { - value -= 65536; - output += stringFromCharCode(value >>> 10 & 1023 | 55296); - value = 56320 | value & 1023; - } - output += stringFromCharCode(value); - return output; - })).join(""); - } - /** - * Converts a basic code point into a digit/integer. - * @see `digitToBasic()` - * @private - * @param {Number} codePoint The basic numeric code point value. - * @returns {Number} The numeric value of a basic code point (for use in - * representing integers) in the range `0` to `base - 1`, or `base` if - * the code point does not represent a value. - */ function basicToDigit(codePoint) { - if (codePoint - 48 < 10) { - return codePoint - 22; - } - if (codePoint - 65 < 26) { - return codePoint - 65; - } - if (codePoint - 97 < 26) { - return codePoint - 97; - } - return base; - } - /** - * Converts a digit/integer into a basic code point. - * @see `basicToDigit()` - * @private - * @param {Number} digit The numeric value of a basic code point. - * @returns {Number} The basic code point whose value (when used for - * representing integers) is `digit`, which needs to be in the range - * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is - * used; else, the lowercase form is used. The behavior is undefined - * if `flag` is non-zero and `digit` has no uppercase form. - */ function digitToBasic(digit, flag) { - // 0..25 map to ASCII a..z or A..Z - // 26..35 map to ASCII 0..9 - return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); - } - /** - * Bias adaptation function as per section 3.4 of RFC 3492. - * https://tools.ietf.org/html/rfc3492#section-3.4 - * @private - */ function adapt(delta, numPoints, firstTime) { - var k = 0; - delta = firstTime ? floor(delta / damp) : delta >> 1; - delta += floor(delta / numPoints); - for (;delta > baseMinusTMin * tMax >> 1; k += base) { - delta = floor(delta / baseMinusTMin); - } - return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); - } - /** - * Converts a Punycode string of ASCII-only symbols to a string of Unicode - * symbols. - * @memberOf punycode - * @param {String} input The Punycode string of ASCII-only symbols. - * @returns {String} The resulting string of Unicode symbols. 
- */ function decode(input) { - // Don't use UCS-2 - var output = [], inputLength = input.length, out, i = 0, n = initialN, bias = initialBias, basic, j, index, oldi, w, k, digit, t, - /** Cached calculation results */ - baseMinusT; - // Handle the basic code points: let `basic` be the number of input code - // points before the last delimiter, or `0` if there is none, then copy - // the first basic code points to the output. - basic = input.lastIndexOf(delimiter); - if (basic < 0) { - basic = 0; - } - for (j = 0; j < basic; ++j) { - // if it's not a basic code point - if (input.charCodeAt(j) >= 128) { - error("not-basic"); - } - output.push(input.charCodeAt(j)); - } - // Main decoding loop: start just after the last delimiter if any basic code - // points were copied; start at the beginning otherwise. - for (index = basic > 0 ? basic + 1 : 0; index < inputLength; ) { - // `index` is the index of the next character to be consumed. - // Decode a generalized variable-length integer into `delta`, - // which gets added to `i`. The overflow checking is easier - // if we increase `i` as we go, then subtract off its starting - // value at the end to obtain `delta`. - for (oldi = i, w = 1, k = base; ;k += base) { - if (index >= inputLength) { - error("invalid-input"); - } - digit = basicToDigit(input.charCodeAt(index++)); - if (digit >= base || digit > floor((maxInt - i) / w)) { - error("overflow"); - } - i += digit * w; - t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias; - if (digit < t) { - break; - } - baseMinusT = base - t; - if (w > floor(maxInt / baseMinusT)) { - error("overflow"); - } - w *= baseMinusT; - } - out = output.length + 1; - bias = adapt(i - oldi, out, oldi == 0); - // `i` was supposed to wrap around from `out` to `0`, - // incrementing `n` each time, so we'll fix that now: - if (floor(i / out) > maxInt - n) { - error("overflow"); - } - n += floor(i / out); - i %= out; - // Insert `n` at position `i` of the output - output.splice(i++, 0, n); - } - return ucs2encode(output); - } - /** - * Converts a string of Unicode symbols (e.g. a domain name label) to a - * Punycode string of ASCII-only symbols. - * @memberOf punycode - * @param {String} input The string of Unicode symbols. - * @returns {String} The resulting Punycode string of ASCII-only symbols. - */ function encode(input) { - var n, delta, handledCPCount, basicLength, bias, j, m, q, k, t, currentValue, output = [], - /** `inputLength` will hold the number of code points in `input`. */ - inputLength, - /** Cached calculation results */ - handledCPCountPlusOne, baseMinusT, qMinusT; - // Convert the input in UCS-2 to Unicode - input = ucs2decode(input); - // Cache the length - inputLength = input.length; - // Initialize the state - n = initialN; - delta = 0; - bias = initialBias; - // Handle the basic code points - for (j = 0; j < inputLength; ++j) { - currentValue = input[j]; - if (currentValue < 128) { - output.push(stringFromCharCode(currentValue)); - } - } - handledCPCount = basicLength = output.length; - // `handledCPCount` is the number of code points that have been handled; - // `basicLength` is the number of basic code points. - // Finish the basic string - if it is not empty - with a delimiter - if (basicLength) { - output.push(delimiter); - } - // Main encoding loop: - while (handledCPCount < inputLength) { - // All non-basic code points < n have been handled already. 
Find the next - // larger one: - for (m = maxInt, j = 0; j < inputLength; ++j) { - currentValue = input[j]; - if (currentValue >= n && currentValue < m) { - m = currentValue; - } - } - // Increase `delta` enough to advance the decoder's state to , - // but guard against overflow - handledCPCountPlusOne = handledCPCount + 1; - if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { - error("overflow"); - } - delta += (m - n) * handledCPCountPlusOne; - n = m; - for (j = 0; j < inputLength; ++j) { - currentValue = input[j]; - if (currentValue < n && ++delta > maxInt) { - error("overflow"); - } - if (currentValue == n) { - // Represent delta as a generalized variable-length integer - for (q = delta, k = base; ;k += base) { - t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias; - if (q < t) { - break; - } - qMinusT = q - t; - baseMinusT = base - t; - output.push(stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0))); - q = floor(qMinusT / baseMinusT); - } - output.push(stringFromCharCode(digitToBasic(q, 0))); - bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); - delta = 0; - ++handledCPCount; - } - } - ++delta; - ++n; - } - return output.join(""); - } - /** - * Converts a Punycode string representing a domain name or an email address - * to Unicode. Only the Punycoded parts of the input will be converted, i.e. - * it doesn't matter if you call it on a string that has already been - * converted to Unicode. - * @memberOf punycode - * @param {String} input The Punycoded domain name or email address to - * convert to Unicode. - * @returns {String} The Unicode representation of the given Punycode - * string. - */ function toUnicode(input) { - return mapDomain(input, (function(string) { - return regexPunycode.test(string) ? decode(string.slice(4).toLowerCase()) : string; - })); - } - /** - * Converts a Unicode string representing a domain name or an email address to - * Punycode. Only the non-ASCII parts of the domain name will be converted, - * i.e. it doesn't matter if you call it with a domain that's already in - * ASCII. - * @memberOf punycode - * @param {String} input The domain name or email address to convert, as a - * Unicode string. - * @returns {String} The Punycode representation of the given domain name or - * email address. - */ function toASCII(input) { - return mapDomain(input, (function(string) { - return regexNonASCII.test(string) ? "xn--" + encode(string) : string; - })); - } - var version = "1.4.1"; - /** - * An object of methods to convert from JavaScript's internal character - * representation (UCS-2) to Unicode code points, and back. - * @see - * @memberOf punycode - * @type Object - */ var ucs2 = { - decode: ucs2decode, - encode: ucs2encode - }; - var punycode$1 = { - version: version, - ucs2: ucs2, - toASCII: toASCII, - toUnicode: toUnicode, - encode: encode, - decode: decode - }; - var punycode$2 = Object.freeze({ - __proto__: null, - decode: decode, - encode: encode, - toUnicode: toUnicode, - toASCII: toASCII, - version: version, - ucs2: ucs2, - default: punycode$1 - }); - // markdown-it default options - var _default = { - options: { - html: false, - // Enable HTML tags in source - xhtmlOut: false, - // Use '/' to close single tags (
<br />) - breaks: false, - // Convert '\n' in paragraphs into <br>
- langPrefix: "language-", - // CSS language prefix for fenced blocks - linkify: false, - // autoconvert URL-like texts to links - // Enable some language-neutral replacements + quotes beautification - typographer: false, - // Double + single quotes replacement pairs, when typographer enabled, - // and smartquotes on. Could be either a String or an Array. - // For example, you can use '«»„“' for Russian, '„“‚‘' for German, - // and ['«\xA0', '\xA0»', '‹\xA0', '\xA0›'] for French (including nbsp). - quotes: "\u201c\u201d\u2018\u2019", - /* “”‘’ */ - // Highlighter function. Should return escaped HTML, - // or '' if the source string is not changed and should be escaped externaly. - // If result starts with ) - breaks: false, - // Convert '\n' in paragraphs into
- langPrefix: "language-", - // CSS language prefix for fenced blocks - linkify: false, - // autoconvert URL-like texts to links - // Enable some language-neutral replacements + quotes beautification - typographer: false, - // Double + single quotes replacement pairs, when typographer enabled, - // and smartquotes on. Could be either a String or an Array. - // For example, you can use '«»„“' for Russian, '„“‚‘' for German, - // and ['«\xA0', '\xA0»', '‹\xA0', '\xA0›'] for French (including nbsp). - quotes: "\u201c\u201d\u2018\u2019", - /* “”‘’ */ - // Highlighter function. Should return escaped HTML, - // or '' if the source string is not changed and should be escaped externaly. - // If result starts with ) - breaks: false, - // Convert '\n' in paragraphs into
- langPrefix: "language-", - // CSS language prefix for fenced blocks - linkify: false, - // autoconvert URL-like texts to links - // Enable some language-neutral replacements + quotes beautification - typographer: false, - // Double + single quotes replacement pairs, when typographer enabled, - // and smartquotes on. Could be either a String or an Array. - // For example, you can use '«»„“' for Russian, '„“‚‘' for German, - // and ['«\xA0', '\xA0»', '‹\xA0', '\xA0›'] for French (including nbsp). - quotes: "\u201c\u201d\u2018\u2019", - /* “”‘’ */ - // Highlighter function. Should return escaped HTML, - // or '' if the source string is not changed and should be escaped externaly. - // If result starts with = 0) { - try { - parsed.hostname = punycode.toASCII(parsed.hostname); - } catch (er) {} - } - } - return mdurl.encode(mdurl.format(parsed)); - } - function normalizeLinkText(url) { - var parsed = mdurl.parse(url, true); - if (parsed.hostname) { - // Encode hostnames in urls like: - // `http://host/`, `https://host/`, `mailto:user@host`, `//host/` - // We don't encode unknown schemas, because it's likely that we encode - // something we shouldn't (e.g. `skype:name` treated as `skype:host`) - if (!parsed.protocol || RECODE_HOSTNAME_FOR.indexOf(parsed.protocol) >= 0) { - try { - parsed.hostname = punycode.toUnicode(parsed.hostname); - } catch (er) {} - } - } - // add '%' to exclude list because of https://github.com/markdown-it/markdown-it/issues/720 - return mdurl.decode(mdurl.format(parsed), mdurl.decode.defaultChars + "%"); - } - /** - * class MarkdownIt - * - * Main parser/renderer class. - * - * ##### Usage - * - * ```javascript - * // node.js, "classic" way: - * var MarkdownIt = require('markdown-it'), - * md = new MarkdownIt(); - * var result = md.render('# markdown-it rulezz!'); - * - * // node.js, the same, but with sugar: - * var md = require('markdown-it')(); - * var result = md.render('# markdown-it rulezz!'); - * - * // browser without AMD, added to "window" on script load - * // Note, there are no dash. - * var md = window.markdownit(); - * var result = md.render('# markdown-it rulezz!'); - * ``` - * - * Single line rendering, without paragraph wrap: - * - * ```javascript - * var md = require('markdown-it')(); - * var result = md.renderInline('__markdown-it__ rulezz!'); - * ``` - **/ - /** - * new MarkdownIt([presetName, options]) - * - presetName (String): optional, `commonmark` / `zero` - * - options (Object) - * - * Creates parser instanse with given config. Can be called without `new`. - * - * ##### presetName - * - * MarkdownIt provides named presets as a convenience to quickly - * enable/disable active syntax rules and options for common use cases. - * - * - ["commonmark"](https://github.com/markdown-it/markdown-it/blob/master/lib/presets/commonmark.js) - - * configures parser to strict [CommonMark](http://commonmark.org/) mode. - * - [default](https://github.com/markdown-it/markdown-it/blob/master/lib/presets/default.js) - - * similar to GFM, used when no preset name given. Enables all available rules, - * but still without html, typographer & autolinker. - * - ["zero"](https://github.com/markdown-it/markdown-it/blob/master/lib/presets/zero.js) - - * all rules disabled. Useful to quickly setup your config via `.enable()`. - * For example, when you need only `bold` and `italic` markup and nothing else. - * - * ##### options: - * - * - __html__ - `false`. Set `true` to enable HTML tags in source. Be careful! - * That's not safe! 
You may need external sanitizer to protect output from XSS. - * It's better to extend features via plugins, instead of enabling HTML. - * - __xhtmlOut__ - `false`. Set `true` to add '/' when closing single tags - * (`
<br />`). This is needed only for full CommonMark compatibility. In real - * world you will need HTML output. - * - __breaks__ - `false`. Set `true` to convert `\n` in paragraphs into `<br>
`. - * - __langPrefix__ - `language-`. CSS language class prefix for fenced blocks. - * Can be useful for external highlighters. - * - __linkify__ - `false`. Set `true` to autoconvert URL-like text to links. - * - __typographer__ - `false`. Set `true` to enable [some language-neutral - * replacement](https://github.com/markdown-it/markdown-it/blob/master/lib/rules_core/replacements.js) + - * quotes beautification (smartquotes). - * - __quotes__ - `“”‘’`, String or Array. Double + single quotes replacement - * pairs, when typographer enabled and smartquotes on. For example, you can - * use `'«»„“'` for Russian, `'„“‚‘'` for German, and - * `['«\xA0', '\xA0»', '‹\xA0', '\xA0›']` for French (including nbsp). - * - __highlight__ - `null`. Highlighter function for fenced code blocks. - * Highlighter `function (str, lang)` should return escaped HTML. It can also - * return empty string if the source was not changed and should be escaped - * externaly. If result starts with `): - * - * ```javascript - * var hljs = require('highlight.js') // https://highlightjs.org/ - * - * // Actual default values - * var md = require('markdown-it')({ - * highlight: function (str, lang) { - * if (lang && hljs.getLanguage(lang)) { - * try { - * return '
<pre><code class="hljs">' +
-	 *                hljs.highlight(str, { language: lang, ignoreIllegals: true }).value +
- *                '</code></pre>'; - * } catch (__) {} - * } - * - * return '<pre><code class="hljs">' + md.utils.escapeHtml(str) + '</code></pre>
'; - * } - * }); - * ``` - * - **/ function MarkdownIt(presetName, options) { - if (!(this instanceof MarkdownIt)) { - return new MarkdownIt(presetName, options); - } - if (!options) { - if (!utils.isString(presetName)) { - options = presetName || {}; - presetName = "default"; - } - } - /** - * MarkdownIt#inline -> ParserInline - * - * Instance of [[ParserInline]]. You may need it to add new rules when - * writing plugins. For simple rules control use [[MarkdownIt.disable]] and - * [[MarkdownIt.enable]]. - **/ this.inline = new parser_inline; - /** - * MarkdownIt#block -> ParserBlock - * - * Instance of [[ParserBlock]]. You may need it to add new rules when - * writing plugins. For simple rules control use [[MarkdownIt.disable]] and - * [[MarkdownIt.enable]]. - **/ this.block = new parser_block; - /** - * MarkdownIt#core -> Core - * - * Instance of [[Core]] chain executor. You may need it to add new rules when - * writing plugins. For simple rules control use [[MarkdownIt.disable]] and - * [[MarkdownIt.enable]]. - **/ this.core = new parser_core; - /** - * MarkdownIt#renderer -> Renderer - * - * Instance of [[Renderer]]. Use it to modify output look. Or to add rendering - * rules for new token types, generated by plugins. - * - * ##### Example - * - * ```javascript - * var md = require('markdown-it')(); - * - * function myToken(tokens, idx, options, env, self) { - * //... - * return result; - * }; - * - * md.renderer.rules['my_token'] = myToken - * ``` - * - * See [[Renderer]] docs and [source code](https://github.com/markdown-it/markdown-it/blob/master/lib/renderer.js). - **/ this.renderer = new renderer; - /** - * MarkdownIt#linkify -> LinkifyIt - * - * [linkify-it](https://github.com/markdown-it/linkify-it) instance. - * Used by [linkify](https://github.com/markdown-it/markdown-it/blob/master/lib/rules_core/linkify.js) - * rule. - **/ this.linkify = new linkifyIt; - /** - * MarkdownIt#validateLink(url) -> Boolean - * - * Link validation function. CommonMark allows too much in links. By default - * we disable `javascript:`, `vbscript:`, `file:` schemas, and almost all `data:...` schemas - * except some embedded image types. - * - * You can change this behaviour: - * - * ```javascript - * var md = require('markdown-it')(); - * // enable everything - * md.validateLink = function () { return true; } - * ``` - **/ this.validateLink = validateLink; - /** - * MarkdownIt#normalizeLink(url) -> String - * - * Function used to encode link url to a machine-readable format, - * which includes url-encoding, punycode, etc. - **/ this.normalizeLink = normalizeLink; - /** - * MarkdownIt#normalizeLinkText(url) -> String - * - * Function used to decode link url to a human-readable format` - **/ this.normalizeLinkText = normalizeLinkText; - // Expose utils & helpers for easy acces from plugins - /** - * MarkdownIt#utils -> utils - * - * Assorted utility functions, useful to write plugins. See details - * [here](https://github.com/markdown-it/markdown-it/blob/master/lib/common/utils.js). - **/ this.utils = utils; - /** - * MarkdownIt#helpers -> helpers - * - * Link components parser functions, useful to write plugins. See details - * [here](https://github.com/markdown-it/markdown-it/blob/master/lib/helpers). - **/ this.helpers = utils.assign({}, helpers); - this.options = {}; - this.configure(presetName); - if (options) { - this.set(options); - } - } - /** chainable - * MarkdownIt.set(options) - * - * Set parser options (in the same format as in constructor). 
Probably, you - * will never need it, but you can change options after constructor call. - * - * ##### Example - * - * ```javascript - * var md = require('markdown-it')() - * .set({ html: true, breaks: true }) - * .set({ typographer, true }); - * ``` - * - * __Note:__ To achieve the best possible performance, don't modify a - * `markdown-it` instance options on the fly. If you need multiple configurations - * it's best to create multiple instances and initialize each with separate - * config. - **/ MarkdownIt.prototype.set = function(options) { - utils.assign(this.options, options); - return this; - }; - /** chainable, internal - * MarkdownIt.configure(presets) - * - * Batch load of all options and compenent settings. This is internal method, - * and you probably will not need it. But if you will - see available presets - * and data structure [here](https://github.com/markdown-it/markdown-it/tree/master/lib/presets) - * - * We strongly recommend to use presets instead of direct config loads. That - * will give better compatibility with next versions. - **/ MarkdownIt.prototype.configure = function(presets) { - var self = this, presetName; - if (utils.isString(presets)) { - presetName = presets; - presets = config[presetName]; - if (!presets) { - throw new Error('Wrong `markdown-it` preset "' + presetName + '", check name'); - } - } - if (!presets) { - throw new Error("Wrong `markdown-it` preset, can't be empty"); - } - if (presets.options) { - self.set(presets.options); - } - if (presets.components) { - Object.keys(presets.components).forEach((function(name) { - if (presets.components[name].rules) { - self[name].ruler.enableOnly(presets.components[name].rules); - } - if (presets.components[name].rules2) { - self[name].ruler2.enableOnly(presets.components[name].rules2); - } - })); - } - return this; - }; - /** chainable - * MarkdownIt.enable(list, ignoreInvalid) - * - list (String|Array): rule name or list of rule names to enable - * - ignoreInvalid (Boolean): set `true` to ignore errors when rule not found. - * - * Enable list or rules. It will automatically find appropriate components, - * containing rules with given names. If rule not found, and `ignoreInvalid` - * not set - throws exception. - * - * ##### Example - * - * ```javascript - * var md = require('markdown-it')() - * .enable(['sub', 'sup']) - * .disable('smartquotes'); - * ``` - **/ MarkdownIt.prototype.enable = function(list, ignoreInvalid) { - var result = []; - if (!Array.isArray(list)) { - list = [ list ]; - } - [ "core", "block", "inline" ].forEach((function(chain) { - result = result.concat(this[chain].ruler.enable(list, true)); - }), this); - result = result.concat(this.inline.ruler2.enable(list, true)); - var missed = list.filter((function(name) { - return result.indexOf(name) < 0; - })); - if (missed.length && !ignoreInvalid) { - throw new Error("MarkdownIt. Failed to enable unknown rule(s): " + missed); - } - return this; - }; - /** chainable - * MarkdownIt.disable(list, ignoreInvalid) - * - list (String|Array): rule name or list of rule names to disable. - * - ignoreInvalid (Boolean): set `true` to ignore errors when rule not found. - * - * The same as [[MarkdownIt.enable]], but turn specified rules off. 
- **/ MarkdownIt.prototype.disable = function(list, ignoreInvalid) { - var result = []; - if (!Array.isArray(list)) { - list = [ list ]; - } - [ "core", "block", "inline" ].forEach((function(chain) { - result = result.concat(this[chain].ruler.disable(list, true)); - }), this); - result = result.concat(this.inline.ruler2.disable(list, true)); - var missed = list.filter((function(name) { - return result.indexOf(name) < 0; - })); - if (missed.length && !ignoreInvalid) { - throw new Error("MarkdownIt. Failed to disable unknown rule(s): " + missed); - } - return this; - }; - /** chainable - * MarkdownIt.use(plugin, params) - * - * Load specified plugin with given params into current parser instance. - * It's just a sugar to call `plugin(md, params)` with curring. - * - * ##### Example - * - * ```javascript - * var iterator = require('markdown-it-for-inline'); - * var md = require('markdown-it')() - * .use(iterator, 'foo_replace', 'text', function (tokens, idx) { - * tokens[idx].content = tokens[idx].content.replace(/foo/g, 'bar'); - * }); - * ``` - **/ MarkdownIt.prototype.use = function(plugin /*, params, ... */) { - var args = [ this ].concat(Array.prototype.slice.call(arguments, 1)); - plugin.apply(plugin, args); - return this; - }; - /** internal - * MarkdownIt.parse(src, env) -> Array - * - src (String): source string - * - env (Object): environment sandbox - * - * Parse input string and return list of block tokens (special token type - * "inline" will contain list of inline tokens). You should not call this - * method directly, until you write custom renderer (for example, to produce - * AST). - * - * `env` is used to pass data between "distributed" rules and return additional - * metadata like reference info, needed for the renderer. It also can be used to - * inject data in specific cases. Usually, you will be ok to pass `{}`, - * and then pass updated object to renderer. - **/ MarkdownIt.prototype.parse = function(src, env) { - if (typeof src !== "string") { - throw new Error("Input data should be a String"); - } - var state = new this.core.State(src, this, env); - this.core.process(state); - return state.tokens; - }; - /** - * MarkdownIt.render(src [, env]) -> String - * - src (String): source string - * - env (Object): environment sandbox - * - * Render markdown string into html. It does all magic for you :). - * - * `env` can be used to inject additional metadata (`{}` by default). - * But you will not need it with high probability. See also comment - * in [[MarkdownIt.parse]]. - **/ MarkdownIt.prototype.render = function(src, env) { - env = env || {}; - return this.renderer.render(this.parse(src, env), this.options, env); - }; - /** internal - * MarkdownIt.parseInline(src, env) -> Array - * - src (String): source string - * - env (Object): environment sandbox - * - * The same as [[MarkdownIt.parse]] but skip all block rules. It returns the - * block tokens list with the single `inline` element, containing parsed inline - * tokens in `children` property. Also updates `env` object. - **/ MarkdownIt.prototype.parseInline = function(src, env) { - var state = new this.core.State(src, this, env); - state.inlineMode = true; - this.core.process(state); - return state.tokens; - }; - /** - * MarkdownIt.renderInline(src [, env]) -> String - * - src (String): source string - * - env (Object): environment sandbox - * - * Similar to [[MarkdownIt.render]] but for single paragraph content. Result - * will NOT be wrapped into `
<p>
` tags. - **/ MarkdownIt.prototype.renderInline = function(src, env) { - env = env || {}; - return this.renderer.render(this.parseInline(src, env), this.options, env); - }; - var lib = MarkdownIt; - var markdownIt = lib; - return markdownIt; -})); - diff --git a/examples/server/public/deps_tailwindcss.js b/examples/server/public/deps_tailwindcss.js deleted file mode 100644 index 6736cb8ca7d16..0000000000000 --- a/examples/server/public/deps_tailwindcss.js +++ /dev/null @@ -1,82 +0,0 @@ -(()=>{var Iv=Object.create;var Ui=Object.defineProperty;var Dv=Object.getOwnPropertyDescriptor;var qv=Object.getOwnPropertyNames;var $v=Object.getPrototypeOf,Lv=Object.prototype.hasOwnProperty;var cf=r=>Ui(r,"__esModule",{value:!0});var pf=r=>{if(typeof require!="undefined")return require(r);throw new Error('Dynamic require of "'+r+'" is not supported')};var R=(r,e)=>()=>(r&&(e=r(r=0)),e);var x=(r,e)=>()=>(e||r((e={exports:{}}).exports,e),e.exports),Ge=(r,e)=>{cf(r);for(var t in e)Ui(r,t,{get:e[t],enumerable:!0})},Mv=(r,e,t)=>{if(e&&typeof e=="object"||typeof e=="function")for(let i of qv(e))!Lv.call(r,i)&&i!=="default"&&Ui(r,i,{get:()=>e[i],enumerable:!(t=Dv(e,i))||t.enumerable});return r},pe=r=>Mv(cf(Ui(r!=null?Iv($v(r)):{},"default",r&&r.__esModule&&"default"in r?{get:()=>r.default,enumerable:!0}:{value:r,enumerable:!0})),r);var m,u=R(()=>{m={platform:"",env:{},versions:{node:"14.17.6"}}});var Nv,be,ft=R(()=>{u();Nv=0,be={readFileSync:r=>self[r]||"",statSync:()=>({mtimeMs:Nv++}),promises:{readFile:r=>Promise.resolve(self[r]||"")}}});var Ns=x((sP,hf)=>{u();"use strict";var df=class{constructor(e={}){if(!(e.maxSize&&e.maxSize>0))throw new TypeError("`maxSize` must be a number greater than 0");if(typeof e.maxAge=="number"&&e.maxAge===0)throw new TypeError("`maxAge` must be a number greater than 0");this.maxSize=e.maxSize,this.maxAge=e.maxAge||1/0,this.onEviction=e.onEviction,this.cache=new Map,this.oldCache=new Map,this._size=0}_emitEvictions(e){if(typeof this.onEviction=="function")for(let[t,i]of e)this.onEviction(t,i.value)}_deleteIfExpired(e,t){return typeof t.expiry=="number"&&t.expiry<=Date.now()?(typeof this.onEviction=="function"&&this.onEviction(e,t.value),this.delete(e)):!1}_getOrDeleteIfExpired(e,t){if(this._deleteIfExpired(e,t)===!1)return t.value}_getItemValue(e,t){return t.expiry?this._getOrDeleteIfExpired(e,t):t.value}_peek(e,t){let i=t.get(e);return this._getItemValue(e,i)}_set(e,t){this.cache.set(e,t),this._size++,this._size>=this.maxSize&&(this._size=0,this._emitEvictions(this.oldCache),this.oldCache=this.cache,this.cache=new Map)}_moveToRecent(e,t){this.oldCache.delete(e),this._set(e,t)}*_entriesAscending(){for(let e of this.oldCache){let[t,i]=e;this.cache.has(t)||this._deleteIfExpired(t,i)===!1&&(yield e)}for(let e of this.cache){let[t,i]=e;this._deleteIfExpired(t,i)===!1&&(yield e)}}get(e){if(this.cache.has(e)){let t=this.cache.get(e);return this._getItemValue(e,t)}if(this.oldCache.has(e)){let t=this.oldCache.get(e);if(this._deleteIfExpired(e,t)===!1)return this._moveToRecent(e,t),t.value}}set(e,t,{maxAge:i=this.maxAge===1/0?void 0:Date.now()+this.maxAge}={}){this.cache.has(e)?this.cache.set(e,{value:t,maxAge:i}):this._set(e,{value:t,expiry:i})}has(e){return this.cache.has(e)?!this._deleteIfExpired(e,this.cache.get(e)):this.oldCache.has(e)?!this._deleteIfExpired(e,this.oldCache.get(e)):!1}peek(e){if(this.cache.has(e))return this._peek(e,this.cache);if(this.oldCache.has(e))return this._peek(e,this.oldCache)}delete(e){let t=this.cache.delete(e);return 
t&&this._size--,this.oldCache.delete(e)||t}clear(){this.cache.clear(),this.oldCache.clear(),this._size=0}resize(e){if(!(e&&e>0))throw new TypeError("`maxSize` must be a number greater than 0");let t=[...this._entriesAscending()],i=t.length-e;i<0?(this.cache=new Map(t),this.oldCache=new Map,this._size=t.length):(i>0&&this._emitEvictions(t.slice(0,i)),this.oldCache=new Map(t.slice(i)),this.cache=new Map,this._size=0),this.maxSize=e}*keys(){for(let[e]of this)yield e}*values(){for(let[,e]of this)yield e}*[Symbol.iterator](){for(let e of this.cache){let[t,i]=e;this._deleteIfExpired(t,i)===!1&&(yield[t,i.value])}for(let e of this.oldCache){let[t,i]=e;this.cache.has(t)||this._deleteIfExpired(t,i)===!1&&(yield[t,i.value])}}*entriesDescending(){let e=[...this.cache];for(let t=e.length-1;t>=0;--t){let i=e[t],[n,a]=i;this._deleteIfExpired(n,a)===!1&&(yield[n,a.value])}e=[...this.oldCache];for(let t=e.length-1;t>=0;--t){let i=e[t],[n,a]=i;this.cache.has(n)||this._deleteIfExpired(n,a)===!1&&(yield[n,a.value])}}*entriesAscending(){for(let[e,t]of this._entriesAscending())yield[e,t.value]}get size(){if(!this._size)return this.oldCache.size;let e=0;for(let t of this.oldCache.keys())this.cache.has(t)||e++;return Math.min(this._size+e,this.maxSize)}};hf.exports=df});var mf,gf=R(()=>{u();mf=r=>r&&r._hash});function Vi(r){return mf(r,{ignoreUnknown:!0})}var yf=R(()=>{u();gf()});function xt(r){if(r=`${r}`,r==="0")return"0";if(/^[+-]?(\d+|\d*\.\d+)(e[+-]?\d+)?(%|\w+)?$/.test(r))return r.replace(/^[+-]?/,t=>t==="-"?"":"-");let e=["var","calc","min","max","clamp"];for(let t of e)if(r.includes(`${t}(`))return`calc(${r} * -1)`}var Hi=R(()=>{u()});var bf,wf=R(()=>{u();bf=["preflight","container","accessibility","pointerEvents","visibility","position","inset","isolation","zIndex","order","gridColumn","gridColumnStart","gridColumnEnd","gridRow","gridRowStart","gridRowEnd","float","clear","margin","boxSizing","lineClamp","display","aspectRatio","size","height","maxHeight","minHeight","width","minWidth","maxWidth","flex","flexShrink","flexGrow","flexBasis","tableLayout","captionSide","borderCollapse","borderSpacing","transformOrigin","translate","rotate","skew","scale","transform","animation","cursor","touchAction","userSelect","resize","scrollSnapType","scrollSnapAlign","scrollSnapStop","scrollMargin","scrollPadding","listStylePosition","listStyleType","listStyleImage","appearance","columns","breakBefore","breakInside","breakAfter","gridAutoColumns","gridAutoFlow","gridAutoRows","gridTemplateColumns","gridTemplateRows","flexDirection","flexWrap","placeContent","placeItems","alignContent","alignItems","justifyContent","justifyItems","gap","space","divideWidth","divideStyle","divideColor","divideOpacity","placeSelf","alignSelf","justifySelf","overflow","overscrollBehavior","scrollBehavior","textOverflow","hyphens","whitespace","textWrap","wordBreak","borderRadius","borderWidth","borderStyle","borderColor","borderOpacity","backgroundColor","backgroundOpacity","backgroundImage","gradientColorStops","boxDecorationBreak","backgroundSize","backgroundAttachment","backgroundClip","backgroundPosition","backgroundRepeat","backgroundOrigin","fill","stroke","strokeWidth","objectFit","objectPosition","padding","textAlign","textIndent","verticalAlign","fontFamily","fontSize","fontWeight","textTransform","fontStyle","fontVariantNumeric","lineHeight","letterSpacing","textColor","textOpacity","textDecoration","textDecorationColor","textDecorationStyle","textDecorationThickness","textUnderlineOffset","fontSmoothing","placeholderColor","pla
ceholderOpacity","caretColor","accentColor","opacity","backgroundBlendMode","mixBlendMode","boxShadow","boxShadowColor","outlineStyle","outlineWidth","outlineOffset","outlineColor","ringWidth","ringColor","ringOpacity","ringOffsetWidth","ringOffsetColor","blur","brightness","contrast","dropShadow","grayscale","hueRotate","invert","saturate","sepia","filter","backdropBlur","backdropBrightness","backdropContrast","backdropGrayscale","backdropHueRotate","backdropInvert","backdropOpacity","backdropSaturate","backdropSepia","backdropFilter","transitionProperty","transitionDelay","transitionDuration","transitionTimingFunction","willChange","contain","content","forcedColorAdjust"]});function vf(r,e){return r===void 0?e:Array.isArray(r)?r:[...new Set(e.filter(i=>r!==!1&&r[i]!==!1).concat(Object.keys(r).filter(i=>r[i]!==!1)))]}var xf=R(()=>{u()});var kf={};Ge(kf,{default:()=>Qe});var Qe,Wi=R(()=>{u();Qe=new Proxy({},{get:()=>String})});function Bs(r,e,t){typeof m!="undefined"&&m.env.JEST_WORKER_ID||t&&Sf.has(t)||(t&&Sf.add(t),console.warn(""),e.forEach(i=>console.warn(r,"-",i)))}function Fs(r){return Qe.dim(r)}var Sf,G,Be=R(()=>{u();Wi();Sf=new Set;G={info(r,e){Bs(Qe.bold(Qe.cyan("info")),...Array.isArray(r)?[r]:[e,r])},warn(r,e){["content-problems"].includes(r)||Bs(Qe.bold(Qe.yellow("warn")),...Array.isArray(r)?[r]:[e,r])},risk(r,e){Bs(Qe.bold(Qe.magenta("risk")),...Array.isArray(r)?[r]:[e,r])}}});var Af={};Ge(Af,{default:()=>js});function qr({version:r,from:e,to:t}){G.warn(`${e}-color-renamed`,[`As of Tailwind CSS ${r}, \`${e}\` has been renamed to \`${t}\`.`,"Update your configuration file to silence this warning."])}var js,zs=R(()=>{u();Be();js={inherit:"inherit",current:"currentColor",transparent:"transparent",black:"#000",white:"#fff",slate:{50:"#f8fafc",100:"#f1f5f9",200:"#e2e8f0",300:"#cbd5e1",400:"#94a3b8",500:"#64748b",600:"#475569",700:"#334155",800:"#1e293b",900:"#0f172a",950:"#020617"},gray:{50:"#f9fafb",100:"#f3f4f6",200:"#e5e7eb",300:"#d1d5db",400:"#9ca3af",500:"#6b7280",600:"#4b5563",700:"#374151",800:"#1f2937",900:"#111827",950:"#030712"},zinc:{50:"#fafafa",100:"#f4f4f5",200:"#e4e4e7",300:"#d4d4d8",400:"#a1a1aa",500:"#71717a",600:"#52525b",700:"#3f3f46",800:"#27272a",900:"#18181b",950:"#09090b"},neutral:{50:"#fafafa",100:"#f5f5f5",200:"#e5e5e5",300:"#d4d4d4",400:"#a3a3a3",500:"#737373",600:"#525252",700:"#404040",800:"#262626",900:"#171717",950:"#0a0a0a"},stone:{50:"#fafaf9",100:"#f5f5f4",200:"#e7e5e4",300:"#d6d3d1",400:"#a8a29e",500:"#78716c",600:"#57534e",700:"#44403c",800:"#292524",900:"#1c1917",950:"#0c0a09"},red:{50:"#fef2f2",100:"#fee2e2",200:"#fecaca",300:"#fca5a5",400:"#f87171",500:"#ef4444",600:"#dc2626",700:"#b91c1c",800:"#991b1b",900:"#7f1d1d",950:"#450a0a"},orange:{50:"#fff7ed",100:"#ffedd5",200:"#fed7aa",300:"#fdba74",400:"#fb923c",500:"#f97316",600:"#ea580c",700:"#c2410c",800:"#9a3412",900:"#7c2d12",950:"#431407"},amber:{50:"#fffbeb",100:"#fef3c7",200:"#fde68a",300:"#fcd34d",400:"#fbbf24",500:"#f59e0b",600:"#d97706",700:"#b45309",800:"#92400e",900:"#78350f",950:"#451a03"},yellow:{50:"#fefce8",100:"#fef9c3",200:"#fef08a",300:"#fde047",400:"#facc15",500:"#eab308",600:"#ca8a04",700:"#a16207",800:"#854d0e",900:"#713f12",950:"#422006"},lime:{50:"#f7fee7",100:"#ecfccb",200:"#d9f99d",300:"#bef264",400:"#a3e635",500:"#84cc16",600:"#65a30d",700:"#4d7c0f",800:"#3f6212",900:"#365314",950:"#1a2e05"},green:{50:"#f0fdf4",100:"#dcfce7",200:"#bbf7d0",300:"#86efac",400:"#4ade80",500:"#22c55e",600:"#16a34a",700:"#15803d",800:"#166534",900:"#14532d",950:"#052e16"},emerald:{50:"#ecfdf5",1
00:"#d1fae5",200:"#a7f3d0",300:"#6ee7b7",400:"#34d399",500:"#10b981",600:"#059669",700:"#047857",800:"#065f46",900:"#064e3b",950:"#022c22"},teal:{50:"#f0fdfa",100:"#ccfbf1",200:"#99f6e4",300:"#5eead4",400:"#2dd4bf",500:"#14b8a6",600:"#0d9488",700:"#0f766e",800:"#115e59",900:"#134e4a",950:"#042f2e"},cyan:{50:"#ecfeff",100:"#cffafe",200:"#a5f3fc",300:"#67e8f9",400:"#22d3ee",500:"#06b6d4",600:"#0891b2",700:"#0e7490",800:"#155e75",900:"#164e63",950:"#083344"},sky:{50:"#f0f9ff",100:"#e0f2fe",200:"#bae6fd",300:"#7dd3fc",400:"#38bdf8",500:"#0ea5e9",600:"#0284c7",700:"#0369a1",800:"#075985",900:"#0c4a6e",950:"#082f49"},blue:{50:"#eff6ff",100:"#dbeafe",200:"#bfdbfe",300:"#93c5fd",400:"#60a5fa",500:"#3b82f6",600:"#2563eb",700:"#1d4ed8",800:"#1e40af",900:"#1e3a8a",950:"#172554"},indigo:{50:"#eef2ff",100:"#e0e7ff",200:"#c7d2fe",300:"#a5b4fc",400:"#818cf8",500:"#6366f1",600:"#4f46e5",700:"#4338ca",800:"#3730a3",900:"#312e81",950:"#1e1b4b"},violet:{50:"#f5f3ff",100:"#ede9fe",200:"#ddd6fe",300:"#c4b5fd",400:"#a78bfa",500:"#8b5cf6",600:"#7c3aed",700:"#6d28d9",800:"#5b21b6",900:"#4c1d95",950:"#2e1065"},purple:{50:"#faf5ff",100:"#f3e8ff",200:"#e9d5ff",300:"#d8b4fe",400:"#c084fc",500:"#a855f7",600:"#9333ea",700:"#7e22ce",800:"#6b21a8",900:"#581c87",950:"#3b0764"},fuchsia:{50:"#fdf4ff",100:"#fae8ff",200:"#f5d0fe",300:"#f0abfc",400:"#e879f9",500:"#d946ef",600:"#c026d3",700:"#a21caf",800:"#86198f",900:"#701a75",950:"#4a044e"},pink:{50:"#fdf2f8",100:"#fce7f3",200:"#fbcfe8",300:"#f9a8d4",400:"#f472b6",500:"#ec4899",600:"#db2777",700:"#be185d",800:"#9d174d",900:"#831843",950:"#500724"},rose:{50:"#fff1f2",100:"#ffe4e6",200:"#fecdd3",300:"#fda4af",400:"#fb7185",500:"#f43f5e",600:"#e11d48",700:"#be123c",800:"#9f1239",900:"#881337",950:"#4c0519"},get lightBlue(){return qr({version:"v2.2",from:"lightBlue",to:"sky"}),this.sky},get warmGray(){return qr({version:"v3.0",from:"warmGray",to:"stone"}),this.stone},get trueGray(){return qr({version:"v3.0",from:"trueGray",to:"neutral"}),this.neutral},get coolGray(){return qr({version:"v3.0",from:"coolGray",to:"gray"}),this.gray},get blueGray(){return qr({version:"v3.0",from:"blueGray",to:"slate"}),this.slate}}});function Us(r,...e){for(let t of e){for(let i in t)r?.hasOwnProperty?.(i)||(r[i]=t[i]);for(let i of Object.getOwnPropertySymbols(t))r?.hasOwnProperty?.(i)||(r[i]=t[i])}return r}var Cf=R(()=>{u()});function kt(r){if(Array.isArray(r))return r;let e=r.split("[").length-1,t=r.split("]").length-1;if(e!==t)throw new Error(`Path is invalid. 
Has unbalanced brackets: ${r}`);return r.split(/\.(?![^\[]*\])|[\[\]]/g).filter(Boolean)}var Gi=R(()=>{u()});function we(r,e){return Qi.future.includes(e)?r.future==="all"||(r?.future?.[e]??_f[e]??!1):Qi.experimental.includes(e)?r.experimental==="all"||(r?.experimental?.[e]??_f[e]??!1):!1}function Ef(r){return r.experimental==="all"?Qi.experimental:Object.keys(r?.experimental??{}).filter(e=>Qi.experimental.includes(e)&&r.experimental[e])}function Of(r){if(m.env.JEST_WORKER_ID===void 0&&Ef(r).length>0){let e=Ef(r).map(t=>Qe.yellow(t)).join(", ");G.warn("experimental-flags-enabled",[`You have enabled experimental features: ${e}`,"Experimental features in Tailwind CSS are not covered by semver, may introduce breaking changes, and can change at any time."])}}var _f,Qi,ct=R(()=>{u();Wi();Be();_f={optimizeUniversalDefaults:!1,generalizedModifiers:!0,disableColorOpacityUtilitiesByDefault:!1,relativeContentPathsByDefault:!1},Qi={future:["hoverOnlyWhenSupported","respectDefaultRingColorOpacity","disableColorOpacityUtilitiesByDefault","relativeContentPathsByDefault"],experimental:["optimizeUniversalDefaults","generalizedModifiers"]}});function Tf(r){(()=>{if(r.purge||!r.content||!Array.isArray(r.content)&&!(typeof r.content=="object"&&r.content!==null))return!1;if(Array.isArray(r.content))return r.content.every(t=>typeof t=="string"?!0:!(typeof t?.raw!="string"||t?.extension&&typeof t?.extension!="string"));if(typeof r.content=="object"&&r.content!==null){if(Object.keys(r.content).some(t=>!["files","relative","extract","transform"].includes(t)))return!1;if(Array.isArray(r.content.files)){if(!r.content.files.every(t=>typeof t=="string"?!0:!(typeof t?.raw!="string"||t?.extension&&typeof t?.extension!="string")))return!1;if(typeof r.content.extract=="object"){for(let t of Object.values(r.content.extract))if(typeof t!="function")return!1}else if(!(r.content.extract===void 0||typeof r.content.extract=="function"))return!1;if(typeof r.content.transform=="object"){for(let t of Object.values(r.content.transform))if(typeof t!="function")return!1}else if(!(r.content.transform===void 0||typeof r.content.transform=="function"))return!1;if(typeof r.content.relative!="boolean"&&typeof r.content.relative!="undefined")return!1}return!0}return!1})()||G.warn("purge-deprecation",["The `purge`/`content` options have changed in Tailwind CSS v3.0.","Update your configuration file to eliminate this warning.","https://tailwindcss.com/docs/upgrade-guide#configure-content-sources"]),r.safelist=(()=>{let{content:t,purge:i,safelist:n}=r;return Array.isArray(n)?n:Array.isArray(t?.safelist)?t.safelist:Array.isArray(i?.safelist)?i.safelist:Array.isArray(i?.options?.safelist)?i.options.safelist:[]})(),r.blocklist=(()=>{let{blocklist:t}=r;if(Array.isArray(t)){if(t.every(i=>typeof i=="string"))return t;G.warn("blocklist-invalid",["The `blocklist` option must be an array of strings.","https://tailwindcss.com/docs/content-configuration#discarding-classes"])}return[]})(),typeof r.prefix=="function"?(G.warn("prefix-function",["As of Tailwind CSS v3.0, `prefix` cannot be a function.","Update `prefix` in your configuration to be a string to eliminate this warning.","https://tailwindcss.com/docs/upgrade-guide#prefix-cannot-be-a-function"]),r.prefix=""):r.prefix=r.prefix??"",r.content={relative:(()=>{let{content:t}=r;return t?.relative?t.relative:we(r,"relativeContentPathsByDefault")})(),files:(()=>{let{content:t,purge:i}=r;return 
Array.isArray(i)?i:Array.isArray(i?.content)?i.content:Array.isArray(t)?t:Array.isArray(t?.content)?t.content:Array.isArray(t?.files)?t.files:[]})(),extract:(()=>{let t=(()=>r.purge?.extract?r.purge.extract:r.content?.extract?r.content.extract:r.purge?.extract?.DEFAULT?r.purge.extract.DEFAULT:r.content?.extract?.DEFAULT?r.content.extract.DEFAULT:r.purge?.options?.extractors?r.purge.options.extractors:r.content?.options?.extractors?r.content.options.extractors:{})(),i={},n=(()=>{if(r.purge?.options?.defaultExtractor)return r.purge.options.defaultExtractor;if(r.content?.options?.defaultExtractor)return r.content.options.defaultExtractor})();if(n!==void 0&&(i.DEFAULT=n),typeof t=="function")i.DEFAULT=t;else if(Array.isArray(t))for(let{extensions:a,extractor:s}of t??[])for(let o of a)i[o]=s;else typeof t=="object"&&t!==null&&Object.assign(i,t);return i})(),transform:(()=>{let t=(()=>r.purge?.transform?r.purge.transform:r.content?.transform?r.content.transform:r.purge?.transform?.DEFAULT?r.purge.transform.DEFAULT:r.content?.transform?.DEFAULT?r.content.transform.DEFAULT:{})(),i={};return typeof t=="function"?i.DEFAULT=t:typeof t=="object"&&t!==null&&Object.assign(i,t),i})()};for(let t of r.content.files)if(typeof t=="string"&&/{([^,]*?)}/g.test(t)){G.warn("invalid-glob-braces",[`The glob pattern ${Fs(t)} in your Tailwind CSS configuration is invalid.`,`Update it to ${Fs(t.replace(/{([^,]*?)}/g,"$1"))} to silence this warning.`]);break}return r}var Rf=R(()=>{u();ct();Be()});function ke(r){if(Object.prototype.toString.call(r)!=="[object Object]")return!1;let e=Object.getPrototypeOf(r);return e===null||Object.getPrototypeOf(e)===null}var Kt=R(()=>{u()});function St(r){return Array.isArray(r)?r.map(e=>St(e)):typeof r=="object"&&r!==null?Object.fromEntries(Object.entries(r).map(([e,t])=>[e,St(t)])):r}var Yi=R(()=>{u()});function jt(r){return r.replace(/\\,/g,"\\2c ")}var Ki=R(()=>{u()});var 
Vs,Pf=R(()=>{u();Vs={aliceblue:[240,248,255],antiquewhite:[250,235,215],aqua:[0,255,255],aquamarine:[127,255,212],azure:[240,255,255],beige:[245,245,220],bisque:[255,228,196],black:[0,0,0],blanchedalmond:[255,235,205],blue:[0,0,255],blueviolet:[138,43,226],brown:[165,42,42],burlywood:[222,184,135],cadetblue:[95,158,160],chartreuse:[127,255,0],chocolate:[210,105,30],coral:[255,127,80],cornflowerblue:[100,149,237],cornsilk:[255,248,220],crimson:[220,20,60],cyan:[0,255,255],darkblue:[0,0,139],darkcyan:[0,139,139],darkgoldenrod:[184,134,11],darkgray:[169,169,169],darkgreen:[0,100,0],darkgrey:[169,169,169],darkkhaki:[189,183,107],darkmagenta:[139,0,139],darkolivegreen:[85,107,47],darkorange:[255,140,0],darkorchid:[153,50,204],darkred:[139,0,0],darksalmon:[233,150,122],darkseagreen:[143,188,143],darkslateblue:[72,61,139],darkslategray:[47,79,79],darkslategrey:[47,79,79],darkturquoise:[0,206,209],darkviolet:[148,0,211],deeppink:[255,20,147],deepskyblue:[0,191,255],dimgray:[105,105,105],dimgrey:[105,105,105],dodgerblue:[30,144,255],firebrick:[178,34,34],floralwhite:[255,250,240],forestgreen:[34,139,34],fuchsia:[255,0,255],gainsboro:[220,220,220],ghostwhite:[248,248,255],gold:[255,215,0],goldenrod:[218,165,32],gray:[128,128,128],green:[0,128,0],greenyellow:[173,255,47],grey:[128,128,128],honeydew:[240,255,240],hotpink:[255,105,180],indianred:[205,92,92],indigo:[75,0,130],ivory:[255,255,240],khaki:[240,230,140],lavender:[230,230,250],lavenderblush:[255,240,245],lawngreen:[124,252,0],lemonchiffon:[255,250,205],lightblue:[173,216,230],lightcoral:[240,128,128],lightcyan:[224,255,255],lightgoldenrodyellow:[250,250,210],lightgray:[211,211,211],lightgreen:[144,238,144],lightgrey:[211,211,211],lightpink:[255,182,193],lightsalmon:[255,160,122],lightseagreen:[32,178,170],lightskyblue:[135,206,250],lightslategray:[119,136,153],lightslategrey:[119,136,153],lightsteelblue:[176,196,222],lightyellow:[255,255,224],lime:[0,255,0],limegreen:[50,205,50],linen:[250,240,230],magenta:[255,0,255],maroon:[128,0,0],mediumaquamarine:[102,205,170],mediumblue:[0,0,205],mediumorchid:[186,85,211],mediumpurple:[147,112,219],mediumseagreen:[60,179,113],mediumslateblue:[123,104,238],mediumspringgreen:[0,250,154],mediumturquoise:[72,209,204],mediumvioletred:[199,21,133],midnightblue:[25,25,112],mintcream:[245,255,250],mistyrose:[255,228,225],moccasin:[255,228,181],navajowhite:[255,222,173],navy:[0,0,128],oldlace:[253,245,230],olive:[128,128,0],olivedrab:[107,142,35],orange:[255,165,0],orangered:[255,69,0],orchid:[218,112,214],palegoldenrod:[238,232,170],palegreen:[152,251,152],paleturquoise:[175,238,238],palevioletred:[219,112,147],papayawhip:[255,239,213],peachpuff:[255,218,185],peru:[205,133,63],pink:[255,192,203],plum:[221,160,221],powderblue:[176,224,230],purple:[128,0,128],rebeccapurple:[102,51,153],red:[255,0,0],rosybrown:[188,143,143],royalblue:[65,105,225],saddlebrown:[139,69,19],salmon:[250,128,114],sandybrown:[244,164,96],seagreen:[46,139,87],seashell:[255,245,238],sienna:[160,82,45],silver:[192,192,192],skyblue:[135,206,235],slateblue:[106,90,205],slategray:[112,128,144],slategrey:[112,128,144],snow:[255,250,250],springgreen:[0,255,127],steelblue:[70,130,180],tan:[210,180,140],teal:[0,128,128],thistle:[216,191,216],tomato:[255,99,71],turquoise:[64,224,208],violet:[238,130,238],wheat:[245,222,179],white:[255,255,255],whitesmoke:[245,245,245],yellow:[255,255,0],yellowgreen:[154,205,50]}});function $r(r,{loose:e=!1}={}){if(typeof r!="string")return 
null;if(r=r.trim(),r==="transparent")return{mode:"rgb",color:["0","0","0"],alpha:"0"};if(r in Vs)return{mode:"rgb",color:Vs[r].map(a=>a.toString())};let t=r.replace(Fv,(a,s,o,l,c)=>["#",s,s,o,o,l,l,c?c+c:""].join("")).match(Bv);if(t!==null)return{mode:"rgb",color:[parseInt(t[1],16),parseInt(t[2],16),parseInt(t[3],16)].map(a=>a.toString()),alpha:t[4]?(parseInt(t[4],16)/255).toString():void 0};let i=r.match(jv)??r.match(zv);if(i===null)return null;let n=[i[2],i[3],i[4]].filter(Boolean).map(a=>a.toString());return n.length===2&&n[0].startsWith("var(")?{mode:i[1],color:[n[0]],alpha:n[1]}:!e&&n.length!==3||n.length<3&&!n.some(a=>/^var\(.*?\)$/.test(a))?null:{mode:i[1],color:n,alpha:i[5]?.toString?.()}}function Hs({mode:r,color:e,alpha:t}){let i=t!==void 0;return r==="rgba"||r==="hsla"?`${r}(${e.join(", ")}${i?`, ${t}`:""})`:`${r}(${e.join(" ")}${i?` / ${t}`:""})`}var Bv,Fv,At,Xi,If,Ct,jv,zv,Ws=R(()=>{u();Pf();Bv=/^#([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})?$/i,Fv=/^#([a-f\d])([a-f\d])([a-f\d])([a-f\d])?$/i,At=/(?:\d+|\d*\.\d+)%?/,Xi=/(?:\s*,\s*|\s+)/,If=/\s*[,/]\s*/,Ct=/var\(--(?:[^ )]*?)(?:,(?:[^ )]*?|var\(--[^ )]*?\)))?\)/,jv=new RegExp(`^(rgba?)\\(\\s*(${At.source}|${Ct.source})(?:${Xi.source}(${At.source}|${Ct.source}))?(?:${Xi.source}(${At.source}|${Ct.source}))?(?:${If.source}(${At.source}|${Ct.source}))?\\s*\\)$`),zv=new RegExp(`^(hsla?)\\(\\s*((?:${At.source})(?:deg|rad|grad|turn)?|${Ct.source})(?:${Xi.source}(${At.source}|${Ct.source}))?(?:${Xi.source}(${At.source}|${Ct.source}))?(?:${If.source}(${At.source}|${Ct.source}))?\\s*\\)$`)});function Ze(r,e,t){if(typeof r=="function")return r({opacityValue:e});let i=$r(r,{loose:!0});return i===null?t:Hs({...i,alpha:e})}function Ae({color:r,property:e,variable:t}){let i=[].concat(e);if(typeof r=="function")return{[t]:"1",...Object.fromEntries(i.map(a=>[a,r({opacityVariable:t,opacityValue:`var(${t})`})]))};let n=$r(r);return n===null?Object.fromEntries(i.map(a=>[a,r])):n.alpha!==void 0?Object.fromEntries(i.map(a=>[a,r])):{[t]:"1",...Object.fromEntries(i.map(a=>[a,Hs({...n,alpha:`var(${t})`})]))}}var Lr=R(()=>{u();Ws()});function ve(r,e){let t=[],i=[],n=0,a=!1;for(let s=0;s{u()});function Ji(r){return ve(r,",").map(t=>{let i=t.trim(),n={raw:i},a=i.split(Vv),s=new Set;for(let o of a)Df.lastIndex=0,!s.has("KEYWORD")&&Uv.has(o)?(n.keyword=o,s.add("KEYWORD")):Df.test(o)?s.has("X")?s.has("Y")?s.has("BLUR")?s.has("SPREAD")||(n.spread=o,s.add("SPREAD")):(n.blur=o,s.add("BLUR")):(n.y=o,s.add("Y")):(n.x=o,s.add("X")):n.color?(n.unknown||(n.unknown=[]),n.unknown.push(o)):n.color=o;return n.valid=n.x!==void 0&&n.y!==void 0,n})}function qf(r){return r.map(e=>e.valid?[e.keyword,e.x,e.y,e.blur,e.spread,e.color].filter(Boolean).join(" "):e.raw).join(", ")}var Uv,Vv,Df,Gs=R(()=>{u();zt();Uv=new Set(["inset","inherit","initial","revert","unset"]),Vv=/\ +(?![^(]*\))/g,Df=/^-?(\d+|\.\d+)(.*?)$/g});function Qs(r){return Hv.some(e=>new RegExp(`^${e}\\(.*\\)`).test(r))}function K(r,e=null,t=!0){let i=e&&Wv.has(e.property);return r.startsWith("--")&&!i?`var(${r})`:r.includes("url(")?r.split(/(url\(.*?\))/g).filter(Boolean).map(n=>/^url\(.*?\)$/.test(n)?n:K(n,e,!1)).join(""):(r=r.replace(/([^\\])_+/g,(n,a)=>a+" ".repeat(n.length-1)).replace(/^_/g," ").replace(/\\_/g,"_"),t&&(r=r.trim()),r=Gv(r),r)}function Ye(r){return r.includes("=")&&(r=r.replace(/(=.*)/g,(e,t)=>{if(t[1]==="'"||t[1]==='"')return t;if(t.length>2){let i=t[t.length-1];if(t[t.length-2]===" "&&(i==="i"||i==="I"||i==="s"||i==="S"))return`="${t.slice(1,-2)}" 
${t[t.length-1]}`}return`="${t.slice(1)}"`})),r}function Gv(r){let e=["theme"],t=["min-content","max-content","fit-content","safe-area-inset-top","safe-area-inset-right","safe-area-inset-bottom","safe-area-inset-left","titlebar-area-x","titlebar-area-y","titlebar-area-width","titlebar-area-height","keyboard-inset-top","keyboard-inset-right","keyboard-inset-bottom","keyboard-inset-left","keyboard-inset-width","keyboard-inset-height","radial-gradient","linear-gradient","conic-gradient","repeating-radial-gradient","repeating-linear-gradient","repeating-conic-gradient","anchor-size"];return r.replace(/(calc|min|max|clamp)\(.+\)/g,i=>{let n="";function a(){let s=n.trimEnd();return s[s.length-1]}for(let s=0;si[s+p]===d)},l=function(f){let d=1/0;for(let h of f){let b=i.indexOf(h,s);b!==-1&&bo(f))){let f=t.find(d=>o(d));n+=f,s+=f.length-1}else e.some(f=>o(f))?n+=l([")"]):o("[")?n+=l(["]"]):["+","-","*","/"].includes(c)&&!["(","+","-","*","/",","].includes(a())?n+=` ${c} `:n+=c}return n.replace(/\s+/g," ")})}function Ys(r){return r.startsWith("url(")}function Ks(r){return!isNaN(Number(r))||Qs(r)}function Mr(r){return r.endsWith("%")&&Ks(r.slice(0,-1))||Qs(r)}function Nr(r){return r==="0"||new RegExp(`^[+-]?[0-9]*.?[0-9]+(?:[eE][+-]?[0-9]+)?${Yv}$`).test(r)||Qs(r)}function $f(r){return Kv.has(r)}function Lf(r){let e=Ji(K(r));for(let t of e)if(!t.valid)return!1;return!0}function Mf(r){let e=0;return ve(r,"_").every(i=>(i=K(i),i.startsWith("var(")?!0:$r(i,{loose:!0})!==null?(e++,!0):!1))?e>0:!1}function Nf(r){let e=0;return ve(r,",").every(i=>(i=K(i),i.startsWith("var(")?!0:Ys(i)||Jv(i)||["element(","image(","cross-fade(","image-set("].some(n=>i.startsWith(n))?(e++,!0):!1))?e>0:!1}function Jv(r){r=K(r);for(let e of Xv)if(r.startsWith(`${e}(`))return!0;return!1}function Bf(r){let e=0;return ve(r,"_").every(i=>(i=K(i),i.startsWith("var(")?!0:Zv.has(i)||Nr(i)||Mr(i)?(e++,!0):!1))?e>0:!1}function Ff(r){let e=0;return ve(r,",").every(i=>(i=K(i),i.startsWith("var(")?!0:i.includes(" ")&&!/(['"])([^"']+)\1/g.test(i)||/^\d/g.test(i)?!1:(e++,!0)))?e>0:!1}function jf(r){return ex.has(r)}function zf(r){return tx.has(r)}function Uf(r){return rx.has(r)}var Hv,Wv,Qv,Yv,Kv,Xv,Zv,ex,tx,rx,Br=R(()=>{u();Ws();Gs();zt();Hv=["min","max","clamp","calc"];Wv=new Set(["scroll-timeline-name","timeline-scope","view-timeline-name","font-palette","anchor-name","anchor-scope","position-anchor","position-try-options","scroll-timeline","animation-timeline","view-timeline","position-try"]);Qv=["cm","mm","Q","in","pc","pt","px","em","ex","ch","rem","lh","rlh","vw","vh","vmin","vmax","vb","vi","svw","svh","lvw","lvh","dvw","dvh","cqw","cqh","cqi","cqb","cqmin","cqmax"],Yv=`(?:${Qv.join("|")})`;Kv=new Set(["thin","medium","thick"]);Xv=new Set(["conic-gradient","linear-gradient","radial-gradient","repeating-conic-gradient","repeating-linear-gradient","repeating-radial-gradient"]);Zv=new Set(["center","top","right","bottom","left"]);ex=new Set(["serif","sans-serif","monospace","cursive","fantasy","system-ui","ui-serif","ui-sans-serif","ui-monospace","ui-rounded","math","emoji","fangsong"]);tx=new Set(["xx-small","x-small","small","medium","large","x-large","xx-large","xxx-large"]);rx=new Set(["larger","smaller"])});function Vf(r){let e=["cover","contain"];return ve(r,",").every(t=>{let i=ve(t,"_").filter(Boolean);return i.length===1&&e.includes(i[0])?!0:i.length!==1&&i.length!==2?!1:i.every(n=>Nr(n)||Mr(n)||n==="auto")})}var Hf=R(()=>{u();Br();zt()});function 
Wf(r,e){r.walkClasses(t=>{t.value=e(t.value),t.raws&&t.raws.value&&(t.raws.value=jt(t.raws.value))})}function Gf(r,e){if(!_t(r))return;let t=r.slice(1,-1);if(!!e(t))return K(t)}function ix(r,e={},t){let i=e[r];if(i!==void 0)return xt(i);if(_t(r)){let n=Gf(r,t);return n===void 0?void 0:xt(n)}}function Zi(r,e={},{validate:t=()=>!0}={}){let i=e.values?.[r];return i!==void 0?i:e.supportsNegativeValues&&r.startsWith("-")?ix(r.slice(1),e.values,t):Gf(r,t)}function _t(r){return r.startsWith("[")&&r.endsWith("]")}function Qf(r){let e=r.lastIndexOf("/"),t=r.lastIndexOf("[",e),i=r.indexOf("]",e);return r[e-1]==="]"||r[e+1]==="["||t!==-1&&i!==-1&&t")){let e=r;return({opacityValue:t=1})=>e.replace(//g,t)}return r}function Yf(r){return K(r.slice(1,-1))}function nx(r,e={},{tailwindConfig:t={}}={}){if(e.values?.[r]!==void 0)return Xt(e.values?.[r]);let[i,n]=Qf(r);if(n!==void 0){let a=e.values?.[i]??(_t(i)?i.slice(1,-1):void 0);return a===void 0?void 0:(a=Xt(a),_t(n)?Ze(a,Yf(n)):t.theme?.opacity?.[n]===void 0?void 0:Ze(a,t.theme.opacity[n]))}return Zi(r,e,{validate:Mf})}function sx(r,e={}){return e.values?.[r]}function qe(r){return(e,t)=>Zi(e,t,{validate:r})}function ax(r,e){let t=r.indexOf(e);return t===-1?[void 0,r]:[r.slice(0,t),r.slice(t+1)]}function Js(r,e,t,i){if(t.values&&e in t.values)for(let{type:a}of r??[]){let s=Xs[a](e,t,{tailwindConfig:i});if(s!==void 0)return[s,a,null]}if(_t(e)){let a=e.slice(1,-1),[s,o]=ax(a,":");if(!/^[\w-_]+$/g.test(s))o=a;else if(s!==void 0&&!Kf.includes(s))return[];if(o.length>0&&Kf.includes(s))return[Zi(`[${o}]`,t),s,null]}let n=Zs(r,e,t,i);for(let a of n)return a;return[]}function*Zs(r,e,t,i){let n=we(i,"generalizedModifiers"),[a,s]=Qf(e);if(n&&t.modifiers!=null&&(t.modifiers==="any"||typeof t.modifiers=="object"&&(s&&_t(s)||s in t.modifiers))||(a=e,s=void 0),s!==void 0&&a===""&&(a="DEFAULT"),s!==void 0&&typeof t.modifiers=="object"){let l=t.modifiers?.[s]??null;l!==null?s=l:_t(s)&&(s=Yf(s))}for(let{type:l}of r??[]){let c=Xs[l](a,t,{tailwindConfig:i});c!==void 0&&(yield[c,l,s??null])}}var Xs,Kf,Fr=R(()=>{u();Ki();Lr();Br();Hi();Hf();ct();Xs={any:Zi,color:nx,url:qe(Ys),image:qe(Nf),length:qe(Nr),percentage:qe(Mr),position:qe(Bf),lookup:sx,"generic-name":qe(jf),"family-name":qe(Ff),number:qe(Ks),"line-width":qe($f),"absolute-size":qe(zf),"relative-size":qe(Uf),shadow:qe(Lf),size:qe(Vf)},Kf=Object.keys(Xs)});function X(r){return typeof r=="function"?r({}):r}var ea=R(()=>{u()});function Jt(r){return typeof r=="function"}function jr(r,...e){let t=e.pop();for(let i of e)for(let n in i){let a=t(r[n],i[n]);a===void 0?ke(r[n])&&ke(i[n])?r[n]=jr({},r[n],i[n],t):r[n]=i[n]:r[n]=a}return r}function ox(r,...e){return Jt(r)?r(...e):r}function lx(r){return r.reduce((e,{extend:t})=>jr(e,t,(i,n)=>i===void 0?[n]:Array.isArray(i)?[n,...i]:[n,i]),{})}function ux(r){return{...r.reduce((e,t)=>Us(e,t),{}),extend:lx(r)}}function Xf(r,e){if(Array.isArray(r)&&ke(r[0]))return r.concat(e);if(Array.isArray(e)&&ke(e[0])&&ke(r))return[r,...e];if(Array.isArray(e))return e}function fx({extend:r,...e}){return jr(e,r,(t,i)=>!Jt(t)&&!i.some(Jt)?jr({},t,...i,Xf):(n,a)=>jr({},...[t,...i].map(s=>ox(s,n,a)),Xf))}function*cx(r){let e=kt(r);if(e.length===0||(yield e,Array.isArray(r)))return;let t=/^(.*?)\s*\/\s*([^/]+)$/,i=r.match(t);if(i!==null){let[,n,a]=i,s=kt(n);s.alpha=a,yield s}}function px(r){let e=(t,i)=>{for(let n of cx(t)){let a=0,s=r;for(;s!=null&&a(t[i]=Jt(r[i])?r[i](e,ta):r[i],t),{})}function Jf(r){let e=[];return r.forEach(t=>{e=[...e,t];let 
i=t?.plugins??[];i.length!==0&&i.forEach(n=>{n.__isOptionsFunction&&(n=n()),e=[...e,...Jf([n?.config??{}])]})}),e}function dx(r){return[...r].reduceRight((t,i)=>Jt(i)?i({corePlugins:t}):vf(i,t),bf)}function hx(r){return[...r].reduceRight((t,i)=>[...t,...i],[])}function ra(r){let e=[...Jf(r),{prefix:"",important:!1,separator:":"}];return Tf(Us({theme:px(fx(ux(e.map(t=>t?.theme??{})))),corePlugins:dx(e.map(t=>t.corePlugins)),plugins:hx(r.map(t=>t?.plugins??[]))},...e))}var ta,Zf=R(()=>{u();Hi();wf();xf();zs();Cf();Gi();Rf();Kt();Yi();Fr();Lr();ea();ta={colors:js,negative(r){return Object.keys(r).filter(e=>r[e]!=="0").reduce((e,t)=>{let i=xt(r[t]);return i!==void 0&&(e[`-${t}`]=i),e},{})},breakpoints(r){return Object.keys(r).filter(e=>typeof r[e]=="string").reduce((e,t)=>({...e,[`screen-${t}`]:r[t]}),{})}}});var en=x((l3,ec)=>{u();ec.exports={content:[],presets:[],darkMode:"media",theme:{accentColor:({theme:r})=>({...r("colors"),auto:"auto"}),animation:{none:"none",spin:"spin 1s linear infinite",ping:"ping 1s cubic-bezier(0, 0, 0.2, 1) infinite",pulse:"pulse 2s cubic-bezier(0.4, 0, 0.6, 1) infinite",bounce:"bounce 1s infinite"},aria:{busy:'busy="true"',checked:'checked="true"',disabled:'disabled="true"',expanded:'expanded="true"',hidden:'hidden="true"',pressed:'pressed="true"',readonly:'readonly="true"',required:'required="true"',selected:'selected="true"'},aspectRatio:{auto:"auto",square:"1 / 1",video:"16 / 9"},backdropBlur:({theme:r})=>r("blur"),backdropBrightness:({theme:r})=>r("brightness"),backdropContrast:({theme:r})=>r("contrast"),backdropGrayscale:({theme:r})=>r("grayscale"),backdropHueRotate:({theme:r})=>r("hueRotate"),backdropInvert:({theme:r})=>r("invert"),backdropOpacity:({theme:r})=>r("opacity"),backdropSaturate:({theme:r})=>r("saturate"),backdropSepia:({theme:r})=>r("sepia"),backgroundColor:({theme:r})=>r("colors"),backgroundImage:{none:"none","gradient-to-t":"linear-gradient(to top, var(--tw-gradient-stops))","gradient-to-tr":"linear-gradient(to top right, var(--tw-gradient-stops))","gradient-to-r":"linear-gradient(to right, var(--tw-gradient-stops))","gradient-to-br":"linear-gradient(to bottom right, var(--tw-gradient-stops))","gradient-to-b":"linear-gradient(to bottom, var(--tw-gradient-stops))","gradient-to-bl":"linear-gradient(to bottom left, var(--tw-gradient-stops))","gradient-to-l":"linear-gradient(to left, var(--tw-gradient-stops))","gradient-to-tl":"linear-gradient(to top left, var(--tw-gradient-stops))"},backgroundOpacity:({theme:r})=>r("opacity"),backgroundPosition:{bottom:"bottom",center:"center",left:"left","left-bottom":"left bottom","left-top":"left top",right:"right","right-bottom":"right bottom","right-top":"right top",top:"top"},backgroundSize:{auto:"auto",cover:"cover",contain:"contain"},blur:{0:"0",none:"",sm:"4px",DEFAULT:"8px",md:"12px",lg:"16px",xl:"24px","2xl":"40px","3xl":"64px"},borderColor:({theme:r})=>({...r("colors"),DEFAULT:r("colors.gray.200","currentColor")}),borderOpacity:({theme:r})=>r("opacity"),borderRadius:{none:"0px",sm:"0.125rem",DEFAULT:"0.25rem",md:"0.375rem",lg:"0.5rem",xl:"0.75rem","2xl":"1rem","3xl":"1.5rem",full:"9999px"},borderSpacing:({theme:r})=>({...r("spacing")}),borderWidth:{DEFAULT:"1px",0:"0px",2:"2px",4:"4px",8:"8px"},boxShadow:{sm:"0 1px 2px 0 rgb(0 0 0 / 0.05)",DEFAULT:"0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1)",md:"0 4px 6px -1px rgb(0 0 0 / 0.1), 0 2px 4px -2px rgb(0 0 0 / 0.1)",lg:"0 10px 15px -3px rgb(0 0 0 / 0.1), 0 4px 6px -4px rgb(0 0 0 / 0.1)",xl:"0 20px 25px -5px rgb(0 0 0 / 0.1), 0 8px 10px 
-6px rgb(0 0 0 / 0.1)","2xl":"0 25px 50px -12px rgb(0 0 0 / 0.25)",inner:"inset 0 2px 4px 0 rgb(0 0 0 / 0.05)",none:"none"},boxShadowColor:({theme:r})=>r("colors"),brightness:{0:"0",50:".5",75:".75",90:".9",95:".95",100:"1",105:"1.05",110:"1.1",125:"1.25",150:"1.5",200:"2"},caretColor:({theme:r})=>r("colors"),colors:({colors:r})=>({inherit:r.inherit,current:r.current,transparent:r.transparent,black:r.black,white:r.white,slate:r.slate,gray:r.gray,zinc:r.zinc,neutral:r.neutral,stone:r.stone,red:r.red,orange:r.orange,amber:r.amber,yellow:r.yellow,lime:r.lime,green:r.green,emerald:r.emerald,teal:r.teal,cyan:r.cyan,sky:r.sky,blue:r.blue,indigo:r.indigo,violet:r.violet,purple:r.purple,fuchsia:r.fuchsia,pink:r.pink,rose:r.rose}),columns:{auto:"auto",1:"1",2:"2",3:"3",4:"4",5:"5",6:"6",7:"7",8:"8",9:"9",10:"10",11:"11",12:"12","3xs":"16rem","2xs":"18rem",xs:"20rem",sm:"24rem",md:"28rem",lg:"32rem",xl:"36rem","2xl":"42rem","3xl":"48rem","4xl":"56rem","5xl":"64rem","6xl":"72rem","7xl":"80rem"},container:{},content:{none:"none"},contrast:{0:"0",50:".5",75:".75",100:"1",125:"1.25",150:"1.5",200:"2"},cursor:{auto:"auto",default:"default",pointer:"pointer",wait:"wait",text:"text",move:"move",help:"help","not-allowed":"not-allowed",none:"none","context-menu":"context-menu",progress:"progress",cell:"cell",crosshair:"crosshair","vertical-text":"vertical-text",alias:"alias",copy:"copy","no-drop":"no-drop",grab:"grab",grabbing:"grabbing","all-scroll":"all-scroll","col-resize":"col-resize","row-resize":"row-resize","n-resize":"n-resize","e-resize":"e-resize","s-resize":"s-resize","w-resize":"w-resize","ne-resize":"ne-resize","nw-resize":"nw-resize","se-resize":"se-resize","sw-resize":"sw-resize","ew-resize":"ew-resize","ns-resize":"ns-resize","nesw-resize":"nesw-resize","nwse-resize":"nwse-resize","zoom-in":"zoom-in","zoom-out":"zoom-out"},divideColor:({theme:r})=>r("borderColor"),divideOpacity:({theme:r})=>r("borderOpacity"),divideWidth:({theme:r})=>r("borderWidth"),dropShadow:{sm:"0 1px 1px rgb(0 0 0 / 0.05)",DEFAULT:["0 1px 2px rgb(0 0 0 / 0.1)","0 1px 1px rgb(0 0 0 / 0.06)"],md:["0 4px 3px rgb(0 0 0 / 0.07)","0 2px 2px rgb(0 0 0 / 0.06)"],lg:["0 10px 8px rgb(0 0 0 / 0.04)","0 4px 3px rgb(0 0 0 / 0.1)"],xl:["0 20px 13px rgb(0 0 0 / 0.03)","0 8px 5px rgb(0 0 0 / 0.08)"],"2xl":"0 25px 25px rgb(0 0 0 / 0.15)",none:"0 0 #0000"},fill:({theme:r})=>({none:"none",...r("colors")}),flex:{1:"1 1 0%",auto:"1 1 auto",initial:"0 1 auto",none:"none"},flexBasis:({theme:r})=>({auto:"auto",...r("spacing"),"1/2":"50%","1/3":"33.333333%","2/3":"66.666667%","1/4":"25%","2/4":"50%","3/4":"75%","1/5":"20%","2/5":"40%","3/5":"60%","4/5":"80%","1/6":"16.666667%","2/6":"33.333333%","3/6":"50%","4/6":"66.666667%","5/6":"83.333333%","1/12":"8.333333%","2/12":"16.666667%","3/12":"25%","4/12":"33.333333%","5/12":"41.666667%","6/12":"50%","7/12":"58.333333%","8/12":"66.666667%","9/12":"75%","10/12":"83.333333%","11/12":"91.666667%",full:"100%"}),flexGrow:{0:"0",DEFAULT:"1"},flexShrink:{0:"0",DEFAULT:"1"},fontFamily:{sans:["ui-sans-serif","system-ui","sans-serif",'"Apple Color Emoji"','"Segoe UI Emoji"','"Segoe UI Symbol"','"Noto Color Emoji"'],serif:["ui-serif","Georgia","Cambria",'"Times New Roman"',"Times","serif"],mono:["ui-monospace","SFMono-Regular","Menlo","Monaco","Consolas",'"Liberation Mono"','"Courier 
New"',"monospace"]},fontSize:{xs:["0.75rem",{lineHeight:"1rem"}],sm:["0.875rem",{lineHeight:"1.25rem"}],base:["1rem",{lineHeight:"1.5rem"}],lg:["1.125rem",{lineHeight:"1.75rem"}],xl:["1.25rem",{lineHeight:"1.75rem"}],"2xl":["1.5rem",{lineHeight:"2rem"}],"3xl":["1.875rem",{lineHeight:"2.25rem"}],"4xl":["2.25rem",{lineHeight:"2.5rem"}],"5xl":["3rem",{lineHeight:"1"}],"6xl":["3.75rem",{lineHeight:"1"}],"7xl":["4.5rem",{lineHeight:"1"}],"8xl":["6rem",{lineHeight:"1"}],"9xl":["8rem",{lineHeight:"1"}]},fontWeight:{thin:"100",extralight:"200",light:"300",normal:"400",medium:"500",semibold:"600",bold:"700",extrabold:"800",black:"900"},gap:({theme:r})=>r("spacing"),gradientColorStops:({theme:r})=>r("colors"),gradientColorStopPositions:{"0%":"0%","5%":"5%","10%":"10%","15%":"15%","20%":"20%","25%":"25%","30%":"30%","35%":"35%","40%":"40%","45%":"45%","50%":"50%","55%":"55%","60%":"60%","65%":"65%","70%":"70%","75%":"75%","80%":"80%","85%":"85%","90%":"90%","95%":"95%","100%":"100%"},grayscale:{0:"0",DEFAULT:"100%"},gridAutoColumns:{auto:"auto",min:"min-content",max:"max-content",fr:"minmax(0, 1fr)"},gridAutoRows:{auto:"auto",min:"min-content",max:"max-content",fr:"minmax(0, 1fr)"},gridColumn:{auto:"auto","span-1":"span 1 / span 1","span-2":"span 2 / span 2","span-3":"span 3 / span 3","span-4":"span 4 / span 4","span-5":"span 5 / span 5","span-6":"span 6 / span 6","span-7":"span 7 / span 7","span-8":"span 8 / span 8","span-9":"span 9 / span 9","span-10":"span 10 / span 10","span-11":"span 11 / span 11","span-12":"span 12 / span 12","span-full":"1 / -1"},gridColumnEnd:{auto:"auto",1:"1",2:"2",3:"3",4:"4",5:"5",6:"6",7:"7",8:"8",9:"9",10:"10",11:"11",12:"12",13:"13"},gridColumnStart:{auto:"auto",1:"1",2:"2",3:"3",4:"4",5:"5",6:"6",7:"7",8:"8",9:"9",10:"10",11:"11",12:"12",13:"13"},gridRow:{auto:"auto","span-1":"span 1 / span 1","span-2":"span 2 / span 2","span-3":"span 3 / span 3","span-4":"span 4 / span 4","span-5":"span 5 / span 5","span-6":"span 6 / span 6","span-7":"span 7 / span 7","span-8":"span 8 / span 8","span-9":"span 9 / span 9","span-10":"span 10 / span 10","span-11":"span 11 / span 11","span-12":"span 12 / span 12","span-full":"1 / -1"},gridRowEnd:{auto:"auto",1:"1",2:"2",3:"3",4:"4",5:"5",6:"6",7:"7",8:"8",9:"9",10:"10",11:"11",12:"12",13:"13"},gridRowStart:{auto:"auto",1:"1",2:"2",3:"3",4:"4",5:"5",6:"6",7:"7",8:"8",9:"9",10:"10",11:"11",12:"12",13:"13"},gridTemplateColumns:{none:"none",subgrid:"subgrid",1:"repeat(1, minmax(0, 1fr))",2:"repeat(2, minmax(0, 1fr))",3:"repeat(3, minmax(0, 1fr))",4:"repeat(4, minmax(0, 1fr))",5:"repeat(5, minmax(0, 1fr))",6:"repeat(6, minmax(0, 1fr))",7:"repeat(7, minmax(0, 1fr))",8:"repeat(8, minmax(0, 1fr))",9:"repeat(9, minmax(0, 1fr))",10:"repeat(10, minmax(0, 1fr))",11:"repeat(11, minmax(0, 1fr))",12:"repeat(12, minmax(0, 1fr))"},gridTemplateRows:{none:"none",subgrid:"subgrid",1:"repeat(1, minmax(0, 1fr))",2:"repeat(2, minmax(0, 1fr))",3:"repeat(3, minmax(0, 1fr))",4:"repeat(4, minmax(0, 1fr))",5:"repeat(5, minmax(0, 1fr))",6:"repeat(6, minmax(0, 1fr))",7:"repeat(7, minmax(0, 1fr))",8:"repeat(8, minmax(0, 1fr))",9:"repeat(9, minmax(0, 1fr))",10:"repeat(10, minmax(0, 1fr))",11:"repeat(11, minmax(0, 1fr))",12:"repeat(12, minmax(0, 
1fr))"},height:({theme:r})=>({auto:"auto",...r("spacing"),"1/2":"50%","1/3":"33.333333%","2/3":"66.666667%","1/4":"25%","2/4":"50%","3/4":"75%","1/5":"20%","2/5":"40%","3/5":"60%","4/5":"80%","1/6":"16.666667%","2/6":"33.333333%","3/6":"50%","4/6":"66.666667%","5/6":"83.333333%",full:"100%",screen:"100vh",svh:"100svh",lvh:"100lvh",dvh:"100dvh",min:"min-content",max:"max-content",fit:"fit-content"}),hueRotate:{0:"0deg",15:"15deg",30:"30deg",60:"60deg",90:"90deg",180:"180deg"},inset:({theme:r})=>({auto:"auto",...r("spacing"),"1/2":"50%","1/3":"33.333333%","2/3":"66.666667%","1/4":"25%","2/4":"50%","3/4":"75%",full:"100%"}),invert:{0:"0",DEFAULT:"100%"},keyframes:{spin:{to:{transform:"rotate(360deg)"}},ping:{"75%, 100%":{transform:"scale(2)",opacity:"0"}},pulse:{"50%":{opacity:".5"}},bounce:{"0%, 100%":{transform:"translateY(-25%)",animationTimingFunction:"cubic-bezier(0.8,0,1,1)"},"50%":{transform:"none",animationTimingFunction:"cubic-bezier(0,0,0.2,1)"}}},letterSpacing:{tighter:"-0.05em",tight:"-0.025em",normal:"0em",wide:"0.025em",wider:"0.05em",widest:"0.1em"},lineHeight:{none:"1",tight:"1.25",snug:"1.375",normal:"1.5",relaxed:"1.625",loose:"2",3:".75rem",4:"1rem",5:"1.25rem",6:"1.5rem",7:"1.75rem",8:"2rem",9:"2.25rem",10:"2.5rem"},listStyleType:{none:"none",disc:"disc",decimal:"decimal"},listStyleImage:{none:"none"},margin:({theme:r})=>({auto:"auto",...r("spacing")}),lineClamp:{1:"1",2:"2",3:"3",4:"4",5:"5",6:"6"},maxHeight:({theme:r})=>({...r("spacing"),none:"none",full:"100%",screen:"100vh",svh:"100svh",lvh:"100lvh",dvh:"100dvh",min:"min-content",max:"max-content",fit:"fit-content"}),maxWidth:({theme:r,breakpoints:e})=>({...r("spacing"),none:"none",xs:"20rem",sm:"24rem",md:"28rem",lg:"32rem",xl:"36rem","2xl":"42rem","3xl":"48rem","4xl":"56rem","5xl":"64rem","6xl":"72rem","7xl":"80rem",full:"100%",min:"min-content",max:"max-content",fit:"fit-content",prose:"65ch",...e(r("screens"))}),minHeight:({theme:r})=>({...r("spacing"),full:"100%",screen:"100vh",svh:"100svh",lvh:"100lvh",dvh:"100dvh",min:"min-content",max:"max-content",fit:"fit-content"}),minWidth:({theme:r})=>({...r("spacing"),full:"100%",min:"min-content",max:"max-content",fit:"fit-content"}),objectPosition:{bottom:"bottom",center:"center",left:"left","left-bottom":"left bottom","left-top":"left top",right:"right","right-bottom":"right bottom","right-top":"right 
top",top:"top"},opacity:{0:"0",5:"0.05",10:"0.1",15:"0.15",20:"0.2",25:"0.25",30:"0.3",35:"0.35",40:"0.4",45:"0.45",50:"0.5",55:"0.55",60:"0.6",65:"0.65",70:"0.7",75:"0.75",80:"0.8",85:"0.85",90:"0.9",95:"0.95",100:"1"},order:{first:"-9999",last:"9999",none:"0",1:"1",2:"2",3:"3",4:"4",5:"5",6:"6",7:"7",8:"8",9:"9",10:"10",11:"11",12:"12"},outlineColor:({theme:r})=>r("colors"),outlineOffset:{0:"0px",1:"1px",2:"2px",4:"4px",8:"8px"},outlineWidth:{0:"0px",1:"1px",2:"2px",4:"4px",8:"8px"},padding:({theme:r})=>r("spacing"),placeholderColor:({theme:r})=>r("colors"),placeholderOpacity:({theme:r})=>r("opacity"),ringColor:({theme:r})=>({DEFAULT:r("colors.blue.500","#3b82f6"),...r("colors")}),ringOffsetColor:({theme:r})=>r("colors"),ringOffsetWidth:{0:"0px",1:"1px",2:"2px",4:"4px",8:"8px"},ringOpacity:({theme:r})=>({DEFAULT:"0.5",...r("opacity")}),ringWidth:{DEFAULT:"3px",0:"0px",1:"1px",2:"2px",4:"4px",8:"8px"},rotate:{0:"0deg",1:"1deg",2:"2deg",3:"3deg",6:"6deg",12:"12deg",45:"45deg",90:"90deg",180:"180deg"},saturate:{0:"0",50:".5",100:"1",150:"1.5",200:"2"},scale:{0:"0",50:".5",75:".75",90:".9",95:".95",100:"1",105:"1.05",110:"1.1",125:"1.25",150:"1.5"},screens:{sm:"640px",md:"768px",lg:"1024px",xl:"1280px","2xl":"1536px"},scrollMargin:({theme:r})=>({...r("spacing")}),scrollPadding:({theme:r})=>r("spacing"),sepia:{0:"0",DEFAULT:"100%"},skew:{0:"0deg",1:"1deg",2:"2deg",3:"3deg",6:"6deg",12:"12deg"},space:({theme:r})=>({...r("spacing")}),spacing:{px:"1px",0:"0px",.5:"0.125rem",1:"0.25rem",1.5:"0.375rem",2:"0.5rem",2.5:"0.625rem",3:"0.75rem",3.5:"0.875rem",4:"1rem",5:"1.25rem",6:"1.5rem",7:"1.75rem",8:"2rem",9:"2.25rem",10:"2.5rem",11:"2.75rem",12:"3rem",14:"3.5rem",16:"4rem",20:"5rem",24:"6rem",28:"7rem",32:"8rem",36:"9rem",40:"10rem",44:"11rem",48:"12rem",52:"13rem",56:"14rem",60:"15rem",64:"16rem",72:"18rem",80:"20rem",96:"24rem"},stroke:({theme:r})=>({none:"none",...r("colors")}),strokeWidth:{0:"0",1:"1",2:"2"},supports:{},data:{},textColor:({theme:r})=>r("colors"),textDecorationColor:({theme:r})=>r("colors"),textDecorationThickness:{auto:"auto","from-font":"from-font",0:"0px",1:"1px",2:"2px",4:"4px",8:"8px"},textIndent:({theme:r})=>({...r("spacing")}),textOpacity:({theme:r})=>r("opacity"),textUnderlineOffset:{auto:"auto",0:"0px",1:"1px",2:"2px",4:"4px",8:"8px"},transformOrigin:{center:"center",top:"top","top-right":"top right",right:"right","bottom-right":"bottom right",bottom:"bottom","bottom-left":"bottom left",left:"left","top-left":"top left"},transitionDelay:{0:"0s",75:"75ms",100:"100ms",150:"150ms",200:"200ms",300:"300ms",500:"500ms",700:"700ms",1e3:"1000ms"},transitionDuration:{DEFAULT:"150ms",0:"0s",75:"75ms",100:"100ms",150:"150ms",200:"200ms",300:"300ms",500:"500ms",700:"700ms",1e3:"1000ms"},transitionProperty:{none:"none",all:"all",DEFAULT:"color, background-color, border-color, text-decoration-color, fill, stroke, opacity, box-shadow, transform, filter, backdrop-filter",colors:"color, background-color, border-color, text-decoration-color, fill, stroke",opacity:"opacity",shadow:"box-shadow",transform:"transform"},transitionTimingFunction:{DEFAULT:"cubic-bezier(0.4, 0, 0.2, 1)",linear:"linear",in:"cubic-bezier(0.4, 0, 1, 1)",out:"cubic-bezier(0, 0, 0.2, 1)","in-out":"cubic-bezier(0.4, 0, 0.2, 
1)"},translate:({theme:r})=>({...r("spacing"),"1/2":"50%","1/3":"33.333333%","2/3":"66.666667%","1/4":"25%","2/4":"50%","3/4":"75%",full:"100%"}),size:({theme:r})=>({auto:"auto",...r("spacing"),"1/2":"50%","1/3":"33.333333%","2/3":"66.666667%","1/4":"25%","2/4":"50%","3/4":"75%","1/5":"20%","2/5":"40%","3/5":"60%","4/5":"80%","1/6":"16.666667%","2/6":"33.333333%","3/6":"50%","4/6":"66.666667%","5/6":"83.333333%","1/12":"8.333333%","2/12":"16.666667%","3/12":"25%","4/12":"33.333333%","5/12":"41.666667%","6/12":"50%","7/12":"58.333333%","8/12":"66.666667%","9/12":"75%","10/12":"83.333333%","11/12":"91.666667%",full:"100%",min:"min-content",max:"max-content",fit:"fit-content"}),width:({theme:r})=>({auto:"auto",...r("spacing"),"1/2":"50%","1/3":"33.333333%","2/3":"66.666667%","1/4":"25%","2/4":"50%","3/4":"75%","1/5":"20%","2/5":"40%","3/5":"60%","4/5":"80%","1/6":"16.666667%","2/6":"33.333333%","3/6":"50%","4/6":"66.666667%","5/6":"83.333333%","1/12":"8.333333%","2/12":"16.666667%","3/12":"25%","4/12":"33.333333%","5/12":"41.666667%","6/12":"50%","7/12":"58.333333%","8/12":"66.666667%","9/12":"75%","10/12":"83.333333%","11/12":"91.666667%",full:"100%",screen:"100vw",svw:"100svw",lvw:"100lvw",dvw:"100dvw",min:"min-content",max:"max-content",fit:"fit-content"}),willChange:{auto:"auto",scroll:"scroll-position",contents:"contents",transform:"transform"},zIndex:{auto:"auto",0:"0",10:"10",20:"20",30:"30",40:"40",50:"50"}},plugins:[]}});function tn(r){let e=(r?.presets??[tc.default]).slice().reverse().flatMap(n=>tn(n instanceof Function?n():n)),t={respectDefaultRingColorOpacity:{theme:{ringColor:({theme:n})=>({DEFAULT:"#3b82f67f",...n("colors")})}},disableColorOpacityUtilitiesByDefault:{corePlugins:{backgroundOpacity:!1,borderOpacity:!1,divideOpacity:!1,placeholderOpacity:!1,ringOpacity:!1,textOpacity:!1}}},i=Object.keys(t).filter(n=>we(r,n)).map(n=>t[n]);return[r,...i,...e]}var tc,rc=R(()=>{u();tc=pe(en());ct()});var ic={};Ge(ic,{default:()=>zr});function zr(...r){let[,...e]=tn(r[0]);return ra([...r,...e])}var ia=R(()=>{u();Zf();rc()});var Ur={};Ge(Ur,{default:()=>me});var me,et=R(()=>{u();me={resolve:r=>r,extname:r=>"."+r.split(".").pop()}});function rn(r){return typeof r=="object"&&r!==null}function gx(r){return Object.keys(r).length===0}function nc(r){return typeof r=="string"||r instanceof String}function na(r){return rn(r)&&r.config===void 0&&!gx(r)?null:rn(r)&&r.config!==void 0&&nc(r.config)?me.resolve(r.config):rn(r)&&r.config!==void 0&&rn(r.config)?null:nc(r)?me.resolve(r):yx()}function yx(){for(let r of mx)try{let e=me.resolve(r);return be.accessSync(e),e}catch(e){}return null}var mx,sc=R(()=>{u();ft();et();mx=["./tailwind.config.js","./tailwind.config.cjs","./tailwind.config.mjs","./tailwind.config.ts","./tailwind.config.cts","./tailwind.config.mts"]});var ac={};Ge(ac,{default:()=>sa});var sa,aa=R(()=>{u();sa={parse:r=>({href:r})}});var oa=x(()=>{u()});var nn=x((b3,uc)=>{u();"use strict";var oc=(Wi(),kf),lc=oa(),Zt=class extends Error{constructor(e,t,i,n,a,s){super(e);this.name="CssSyntaxError",this.reason=e,a&&(this.file=a),n&&(this.source=n),s&&(this.plugin=s),typeof t!="undefined"&&typeof i!="undefined"&&(typeof t=="number"?(this.line=t,this.column=i):(this.line=t.line,this.column=t.column,this.endLine=i.line,this.endColumn=i.column)),this.setMessage(),Error.captureStackTrace&&Error.captureStackTrace(this,Zt)}setMessage(){this.message=this.plugin?this.plugin+": ":"",this.message+=this.file?this.file:"",typeof 
this.line!="undefined"&&(this.message+=":"+this.line+":"+this.column),this.message+=": "+this.reason}showSourceCode(e){if(!this.source)return"";let t=this.source;e==null&&(e=oc.isColorSupported),lc&&e&&(t=lc(t));let i=t.split(/\r?\n/),n=Math.max(this.line-3,0),a=Math.min(this.line+2,i.length),s=String(a).length,o,l;if(e){let{bold:c,red:f,gray:d}=oc.createColors(!0);o=p=>c(f(p)),l=p=>d(p)}else o=l=c=>c;return i.slice(n,a).map((c,f)=>{let d=n+1+f,p=" "+(" "+d).slice(-s)+" | ";if(d===this.line){let h=l(p.replace(/\d/g," "))+c.slice(0,this.column-1).replace(/[^\t]/g," ");return o(">")+l(p)+c+` - `+h+o("^")}return" "+l(p)+c}).join(` -`)}toString(){let e=this.showSourceCode();return e&&(e=` - -`+e+` -`),this.name+": "+this.message+e}};uc.exports=Zt;Zt.default=Zt});var sn=x((w3,la)=>{u();"use strict";la.exports.isClean=Symbol("isClean");la.exports.my=Symbol("my")});var ua=x((v3,cc)=>{u();"use strict";var fc={colon:": ",indent:" ",beforeDecl:` -`,beforeRule:` -`,beforeOpen:" ",beforeClose:` -`,beforeComment:` -`,after:` -`,emptyBody:"",commentLeft:" ",commentRight:" ",semicolon:!1};function bx(r){return r[0].toUpperCase()+r.slice(1)}var an=class{constructor(e){this.builder=e}stringify(e,t){if(!this[e.type])throw new Error("Unknown AST node type "+e.type+". Maybe you need to change PostCSS stringifier.");this[e.type](e,t)}document(e){this.body(e)}root(e){this.body(e),e.raws.after&&this.builder(e.raws.after)}comment(e){let t=this.raw(e,"left","commentLeft"),i=this.raw(e,"right","commentRight");this.builder("/*"+t+e.text+i+"*/",e)}decl(e,t){let i=this.raw(e,"between","colon"),n=e.prop+i+this.rawValue(e,"value");e.important&&(n+=e.raws.important||" !important"),t&&(n+=";"),this.builder(n,e)}rule(e){this.block(e,this.rawValue(e,"selector")),e.raws.ownSemicolon&&this.builder(e.raws.ownSemicolon,e,"end")}atrule(e,t){let i="@"+e.name,n=e.params?this.rawValue(e,"params"):"";if(typeof e.raws.afterName!="undefined"?i+=e.raws.afterName:n&&(i+=" "),e.nodes)this.block(e,i+n);else{let a=(e.raws.between||"")+(t?";":"");this.builder(i+n+a,e)}}body(e){let t=e.nodes.length-1;for(;t>0&&e.nodes[t].type==="comment";)t-=1;let i=this.raw(e,"semicolon");for(let n=0;n{if(n=l.raws[t],typeof n!="undefined")return!1})}return typeof n=="undefined"&&(n=fc[i]),s.rawCache[i]=n,n}rawSemicolon(e){let t;return e.walk(i=>{if(i.nodes&&i.nodes.length&&i.last.type==="decl"&&(t=i.raws.semicolon,typeof t!="undefined"))return!1}),t}rawEmptyBody(e){let t;return e.walk(i=>{if(i.nodes&&i.nodes.length===0&&(t=i.raws.after,typeof t!="undefined"))return!1}),t}rawIndent(e){if(e.raws.indent)return e.raws.indent;let t;return e.walk(i=>{let n=i.parent;if(n&&n!==e&&n.parent&&n.parent===e&&typeof i.raws.before!="undefined"){let a=i.raws.before.split(` -`);return t=a[a.length-1],t=t.replace(/\S/g,""),!1}}),t}rawBeforeComment(e,t){let i;return e.walkComments(n=>{if(typeof n.raws.before!="undefined")return i=n.raws.before,i.includes(` -`)&&(i=i.replace(/[^\n]+$/,"")),!1}),typeof i=="undefined"?i=this.raw(t,null,"beforeDecl"):i&&(i=i.replace(/\S/g,"")),i}rawBeforeDecl(e,t){let i;return e.walkDecls(n=>{if(typeof n.raws.before!="undefined")return i=n.raws.before,i.includes(` -`)&&(i=i.replace(/[^\n]+$/,"")),!1}),typeof i=="undefined"?i=this.raw(t,null,"beforeRule"):i&&(i=i.replace(/\S/g,"")),i}rawBeforeRule(e){let t;return e.walk(i=>{if(i.nodes&&(i.parent!==e||e.first!==i)&&typeof i.raws.before!="undefined")return t=i.raws.before,t.includes(` -`)&&(t=t.replace(/[^\n]+$/,"")),!1}),t&&(t=t.replace(/\S/g,"")),t}rawBeforeClose(e){let t;return 
e.walk(i=>{if(i.nodes&&i.nodes.length>0&&typeof i.raws.after!="undefined")return t=i.raws.after,t.includes(` -`)&&(t=t.replace(/[^\n]+$/,"")),!1}),t&&(t=t.replace(/\S/g,"")),t}rawBeforeOpen(e){let t;return e.walk(i=>{if(i.type!=="decl"&&(t=i.raws.between,typeof t!="undefined"))return!1}),t}rawColon(e){let t;return e.walkDecls(i=>{if(typeof i.raws.between!="undefined")return t=i.raws.between.replace(/[^\s:]/g,""),!1}),t}beforeAfter(e,t){let i;e.type==="decl"?i=this.raw(e,null,"beforeDecl"):e.type==="comment"?i=this.raw(e,null,"beforeComment"):t==="before"?i=this.raw(e,null,"beforeRule"):i=this.raw(e,null,"beforeClose");let n=e.parent,a=0;for(;n&&n.type!=="root";)a+=1,n=n.parent;if(i.includes(` -`)){let s=this.raw(e,null,"indent");if(s.length)for(let o=0;o{u();"use strict";var wx=ua();function fa(r,e){new wx(e).stringify(r)}pc.exports=fa;fa.default=fa});var Hr=x((k3,dc)=>{u();"use strict";var{isClean:on,my:vx}=sn(),xx=nn(),kx=ua(),Sx=Vr();function ca(r,e){let t=new r.constructor;for(let i in r){if(!Object.prototype.hasOwnProperty.call(r,i)||i==="proxyCache")continue;let n=r[i],a=typeof n;i==="parent"&&a==="object"?e&&(t[i]=e):i==="source"?t[i]=n:Array.isArray(n)?t[i]=n.map(s=>ca(s,t)):(a==="object"&&n!==null&&(n=ca(n)),t[i]=n)}return t}var ln=class{constructor(e={}){this.raws={},this[on]=!1,this[vx]=!0;for(let t in e)if(t==="nodes"){this.nodes=[];for(let i of e[t])typeof i.clone=="function"?this.append(i.clone()):this.append(i)}else this[t]=e[t]}error(e,t={}){if(this.source){let{start:i,end:n}=this.rangeBy(t);return this.source.input.error(e,{line:i.line,column:i.column},{line:n.line,column:n.column},t)}return new xx(e)}warn(e,t,i){let n={node:this};for(let a in i)n[a]=i[a];return e.warn(t,n)}remove(){return this.parent&&this.parent.removeChild(this),this.parent=void 0,this}toString(e=Sx){e.stringify&&(e=e.stringify);let t="";return e(this,i=>{t+=i}),t}assign(e={}){for(let t in e)this[t]=e[t];return this}clone(e={}){let t=ca(this);for(let i in e)t[i]=e[i];return t}cloneBefore(e={}){let t=this.clone(e);return this.parent.insertBefore(this,t),t}cloneAfter(e={}){let t=this.clone(e);return this.parent.insertAfter(this,t),t}replaceWith(...e){if(this.parent){let t=this,i=!1;for(let n of e)n===this?i=!0:i?(this.parent.insertAfter(t,n),t=n):this.parent.insertBefore(t,n);i||this.remove()}return this}next(){if(!this.parent)return;let e=this.parent.index(this);return this.parent.nodes[e+1]}prev(){if(!this.parent)return;let e=this.parent.index(this);return this.parent.nodes[e-1]}before(e){return this.parent.insertBefore(this,e),this}after(e){return this.parent.insertAfter(this,e),this}root(){let e=this;for(;e.parent&&e.parent.type!=="document";)e=e.parent;return e}raw(e,t){return new kx().raw(this,e,t)}cleanRaws(e){delete this.raws.before,delete this.raws.after,e||delete this.raws.between}toJSON(e,t){let i={},n=t==null;t=t||new Map;let a=0;for(let s in this){if(!Object.prototype.hasOwnProperty.call(this,s)||s==="parent"||s==="proxyCache")continue;let o=this[s];if(Array.isArray(o))i[s]=o.map(l=>typeof l=="object"&&l.toJSON?l.toJSON(null,t):l);else if(typeof o=="object"&&o.toJSON)i[s]=o.toJSON(null,t);else if(s==="source"){let l=t.get(o.input);l==null&&(l=a,t.set(o.input,a),a++),i[s]={inputId:l,start:o.start,end:o.end}}else i[s]=o}return n&&(i.inputs=[...t.keys()].map(s=>s.toJSON())),i}positionInside(e){let t=this.toString(),i=this.source.start.column,n=this.source.start.line;for(let a=0;ae.root().toProxy():e[t]}}}toProxy(){return this.proxyCache||(this.proxyCache=new 
Proxy(this,this.getProxyProcessor())),this.proxyCache}addToError(e){if(e.postcssNode=this,e.stack&&this.source&&/\n\s{4}at /.test(e.stack)){let t=this.source;e.stack=e.stack.replace(/\n\s{4}at /,`$&${t.input.from}:${t.start.line}:${t.start.column}$&`)}return e}markDirty(){if(this[on]){this[on]=!1;let e=this;for(;e=e.parent;)e[on]=!1}}get proxyOf(){return this}};dc.exports=ln;ln.default=ln});var Wr=x((S3,hc)=>{u();"use strict";var Ax=Hr(),un=class extends Ax{constructor(e){e&&typeof e.value!="undefined"&&typeof e.value!="string"&&(e={...e,value:String(e.value)});super(e);this.type="decl"}get variable(){return this.prop.startsWith("--")||this.prop[0]==="$"}};hc.exports=un;un.default=un});var pa=x((A3,mc)=>{u();mc.exports=function(r,e){return{generate:()=>{let t="";return r(e,i=>{t+=i}),[t]}}}});var Gr=x((C3,gc)=>{u();"use strict";var Cx=Hr(),fn=class extends Cx{constructor(e){super(e);this.type="comment"}};gc.exports=fn;fn.default=fn});var Et=x((_3,Cc)=>{u();"use strict";var{isClean:yc,my:bc}=sn(),wc=Wr(),vc=Gr(),_x=Hr(),xc,da,ha,kc;function Sc(r){return r.map(e=>(e.nodes&&(e.nodes=Sc(e.nodes)),delete e.source,e))}function Ac(r){if(r[yc]=!1,r.proxyOf.nodes)for(let e of r.proxyOf.nodes)Ac(e)}var Fe=class extends _x{push(e){return e.parent=this,this.proxyOf.nodes.push(e),this}each(e){if(!this.proxyOf.nodes)return;let t=this.getIterator(),i,n;for(;this.indexes[t]{let n;try{n=e(t,i)}catch(a){throw t.addToError(a)}return n!==!1&&t.walk&&(n=t.walk(e)),n})}walkDecls(e,t){return t?e instanceof RegExp?this.walk((i,n)=>{if(i.type==="decl"&&e.test(i.prop))return t(i,n)}):this.walk((i,n)=>{if(i.type==="decl"&&i.prop===e)return t(i,n)}):(t=e,this.walk((i,n)=>{if(i.type==="decl")return t(i,n)}))}walkRules(e,t){return t?e instanceof RegExp?this.walk((i,n)=>{if(i.type==="rule"&&e.test(i.selector))return t(i,n)}):this.walk((i,n)=>{if(i.type==="rule"&&i.selector===e)return t(i,n)}):(t=e,this.walk((i,n)=>{if(i.type==="rule")return t(i,n)}))}walkAtRules(e,t){return t?e instanceof RegExp?this.walk((i,n)=>{if(i.type==="atrule"&&e.test(i.name))return t(i,n)}):this.walk((i,n)=>{if(i.type==="atrule"&&i.name===e)return t(i,n)}):(t=e,this.walk((i,n)=>{if(i.type==="atrule")return t(i,n)}))}walkComments(e){return this.walk((t,i)=>{if(t.type==="comment")return e(t,i)})}append(...e){for(let t of e){let i=this.normalize(t,this.last);for(let n of i)this.proxyOf.nodes.push(n)}return this.markDirty(),this}prepend(...e){e=e.reverse();for(let t of e){let i=this.normalize(t,this.first,"prepend").reverse();for(let n of i)this.proxyOf.nodes.unshift(n);for(let n in this.indexes)this.indexes[n]=this.indexes[n]+i.length}return this.markDirty(),this}cleanRaws(e){if(super.cleanRaws(e),this.nodes)for(let t of this.nodes)t.cleanRaws(e)}insertBefore(e,t){let i=this.index(e),n=i===0?"prepend":!1,a=this.normalize(t,this.proxyOf.nodes[i],n).reverse();i=this.index(e);for(let o of a)this.proxyOf.nodes.splice(i,0,o);let s;for(let o in this.indexes)s=this.indexes[o],i<=s&&(this.indexes[o]=s+a.length);return this.markDirty(),this}insertAfter(e,t){let i=this.index(e),n=this.normalize(t,this.proxyOf.nodes[i]).reverse();i=this.index(e);for(let s of n)this.proxyOf.nodes.splice(i+1,0,s);let a;for(let s in this.indexes)a=this.indexes[s],i=e&&(this.indexes[i]=t-1);return this.markDirty(),this}removeAll(){for(let e of this.proxyOf.nodes)e.parent=void 0;return this.proxyOf.nodes=[],this.markDirty(),this}replaceValues(e,t,i){return 
i||(i=t,t={}),this.walkDecls(n=>{t.props&&!t.props.includes(n.prop)||t.fast&&!n.value.includes(t.fast)||(n.value=n.value.replace(e,i))}),this.markDirty(),this}every(e){return this.nodes.every(e)}some(e){return this.nodes.some(e)}index(e){return typeof e=="number"?e:(e.proxyOf&&(e=e.proxyOf),this.proxyOf.nodes.indexOf(e))}get first(){if(!!this.proxyOf.nodes)return this.proxyOf.nodes[0]}get last(){if(!!this.proxyOf.nodes)return this.proxyOf.nodes[this.proxyOf.nodes.length-1]}normalize(e,t){if(typeof e=="string")e=Sc(xc(e).nodes);else if(Array.isArray(e)){e=e.slice(0);for(let n of e)n.parent&&n.parent.removeChild(n,"ignore")}else if(e.type==="root"&&this.type!=="document"){e=e.nodes.slice(0);for(let n of e)n.parent&&n.parent.removeChild(n,"ignore")}else if(e.type)e=[e];else if(e.prop){if(typeof e.value=="undefined")throw new Error("Value field is missed in node creation");typeof e.value!="string"&&(e.value=String(e.value)),e=[new wc(e)]}else if(e.selector)e=[new da(e)];else if(e.name)e=[new ha(e)];else if(e.text)e=[new vc(e)];else throw new Error("Unknown node type in node creation");return e.map(n=>(n[bc]||Fe.rebuild(n),n=n.proxyOf,n.parent&&n.parent.removeChild(n),n[yc]&&Ac(n),typeof n.raws.before=="undefined"&&t&&typeof t.raws.before!="undefined"&&(n.raws.before=t.raws.before.replace(/\S/g,"")),n.parent=this.proxyOf,n))}getProxyProcessor(){return{set(e,t,i){return e[t]===i||(e[t]=i,(t==="name"||t==="params"||t==="selector")&&e.markDirty()),!0},get(e,t){return t==="proxyOf"?e:e[t]?t==="each"||typeof t=="string"&&t.startsWith("walk")?(...i)=>e[t](...i.map(n=>typeof n=="function"?(a,s)=>n(a.toProxy(),s):n)):t==="every"||t==="some"?i=>e[t]((n,...a)=>i(n.toProxy(),...a)):t==="root"?()=>e.root().toProxy():t==="nodes"?e.nodes.map(i=>i.toProxy()):t==="first"||t==="last"?e[t].toProxy():e[t]:e[t]}}}getIterator(){this.lastEach||(this.lastEach=0),this.indexes||(this.indexes={}),this.lastEach+=1;let e=this.lastEach;return this.indexes[e]=0,e}};Fe.registerParse=r=>{xc=r};Fe.registerRule=r=>{da=r};Fe.registerAtRule=r=>{ha=r};Fe.registerRoot=r=>{kc=r};Cc.exports=Fe;Fe.default=Fe;Fe.rebuild=r=>{r.type==="atrule"?Object.setPrototypeOf(r,ha.prototype):r.type==="rule"?Object.setPrototypeOf(r,da.prototype):r.type==="decl"?Object.setPrototypeOf(r,wc.prototype):r.type==="comment"?Object.setPrototypeOf(r,vc.prototype):r.type==="root"&&Object.setPrototypeOf(r,kc.prototype),r[bc]=!0,r.nodes&&r.nodes.forEach(e=>{Fe.rebuild(e)})}});var cn=x((E3,Oc)=>{u();"use strict";var Ex=Et(),_c,Ec,er=class extends Ex{constructor(e){super({type:"document",...e});this.nodes||(this.nodes=[])}toResult(e={}){return new _c(new Ec,this,e).stringify()}};er.registerLazyResult=r=>{_c=r};er.registerProcessor=r=>{Ec=r};Oc.exports=er;er.default=er});var ma=x((O3,Rc)=>{u();"use strict";var Tc={};Rc.exports=function(e){Tc[e]||(Tc[e]=!0,typeof console!="undefined"&&console.warn&&console.warn(e))}});var ga=x((T3,Pc)=>{u();"use strict";var pn=class{constructor(e,t={}){if(this.type="warning",this.text=e,t.node&&t.node.source){let i=t.node.rangeBy(t);this.line=i.start.line,this.column=i.start.column,this.endLine=i.end.line,this.endColumn=i.end.column}for(let i in t)this[i]=t[i]}toString(){return this.node?this.node.error(this.text,{plugin:this.plugin,index:this.index,word:this.word}).message:this.plugin?this.plugin+": "+this.text:this.text}};Pc.exports=pn;pn.default=pn});var hn=x((R3,Ic)=>{u();"use strict";var Ox=ga(),dn=class{constructor(e,t,i){this.processor=e,this.messages=[],this.root=t,this.opts=i,this.css=void 0,this.map=void 
0}toString(){return this.css}warn(e,t={}){t.plugin||this.lastPlugin&&this.lastPlugin.postcssPlugin&&(t.plugin=this.lastPlugin.postcssPlugin);let i=new Ox(e,t);return this.messages.push(i),i}warnings(){return this.messages.filter(e=>e.type==="warning")}get content(){return this.css}};Ic.exports=dn;dn.default=dn});var Mc=x((P3,Lc)=>{u();"use strict";var ya="'".charCodeAt(0),Dc='"'.charCodeAt(0),mn="\\".charCodeAt(0),qc="/".charCodeAt(0),gn=` -`.charCodeAt(0),Qr=" ".charCodeAt(0),yn="\f".charCodeAt(0),bn=" ".charCodeAt(0),wn="\r".charCodeAt(0),Tx="[".charCodeAt(0),Rx="]".charCodeAt(0),Px="(".charCodeAt(0),Ix=")".charCodeAt(0),Dx="{".charCodeAt(0),qx="}".charCodeAt(0),$x=";".charCodeAt(0),Lx="*".charCodeAt(0),Mx=":".charCodeAt(0),Nx="@".charCodeAt(0),vn=/[\t\n\f\r "#'()/;[\\\]{}]/g,xn=/[\t\n\f\r !"#'():;@[\\\]{}]|\/(?=\*)/g,Bx=/.[\n"'(/\\]/,$c=/[\da-f]/i;Lc.exports=function(e,t={}){let i=e.css.valueOf(),n=t.ignoreErrors,a,s,o,l,c,f,d,p,h,b,v=i.length,y=0,w=[],k=[];function S(){return y}function E(T){throw e.error("Unclosed "+T,y)}function O(){return k.length===0&&y>=v}function B(T){if(k.length)return k.pop();if(y>=v)return;let F=T?T.ignoreUnclosed:!1;switch(a=i.charCodeAt(y),a){case gn:case Qr:case bn:case wn:case yn:{s=y;do s+=1,a=i.charCodeAt(s);while(a===Qr||a===gn||a===bn||a===wn||a===yn);b=["space",i.slice(y,s)],y=s-1;break}case Tx:case Rx:case Dx:case qx:case Mx:case $x:case Ix:{let Y=String.fromCharCode(a);b=[Y,Y,y];break}case Px:{if(p=w.length?w.pop()[1]:"",h=i.charCodeAt(y+1),p==="url"&&h!==ya&&h!==Dc&&h!==Qr&&h!==gn&&h!==bn&&h!==yn&&h!==wn){s=y;do{if(f=!1,s=i.indexOf(")",s+1),s===-1)if(n||F){s=y;break}else E("bracket");for(d=s;i.charCodeAt(d-1)===mn;)d-=1,f=!f}while(f);b=["brackets",i.slice(y,s+1),y,s],y=s}else s=i.indexOf(")",y+1),l=i.slice(y,s+1),s===-1||Bx.test(l)?b=["(","(",y]:(b=["brackets",l,y,s],y=s);break}case ya:case Dc:{o=a===ya?"'":'"',s=y;do{if(f=!1,s=i.indexOf(o,s+1),s===-1)if(n||F){s=y+1;break}else E("string");for(d=s;i.charCodeAt(d-1)===mn;)d-=1,f=!f}while(f);b=["string",i.slice(y,s+1),y,s],y=s;break}case Nx:{vn.lastIndex=y+1,vn.test(i),vn.lastIndex===0?s=i.length-1:s=vn.lastIndex-2,b=["at-word",i.slice(y,s+1),y,s],y=s;break}case mn:{for(s=y,c=!0;i.charCodeAt(s+1)===mn;)s+=1,c=!c;if(a=i.charCodeAt(s+1),c&&a!==qc&&a!==Qr&&a!==gn&&a!==bn&&a!==wn&&a!==yn&&(s+=1,$c.test(i.charAt(s)))){for(;$c.test(i.charAt(s+1));)s+=1;i.charCodeAt(s+1)===Qr&&(s+=1)}b=["word",i.slice(y,s+1),y,s],y=s;break}default:{a===qc&&i.charCodeAt(y+1)===Lx?(s=i.indexOf("*/",y+2)+1,s===0&&(n||F?s=i.length:E("comment")),b=["comment",i.slice(y,s+1),y,s],y=s):(xn.lastIndex=y+1,xn.test(i),xn.lastIndex===0?s=i.length-1:s=xn.lastIndex-2,b=["word",i.slice(y,s+1),y,s],w.push(b),y=s);break}}return y++,b}function N(T){k.push(T)}return{back:N,nextToken:B,endOfFile:O,position:S}}});var kn=x((I3,Bc)=>{u();"use strict";var Nc=Et(),Yr=class extends Nc{constructor(e){super(e);this.type="atrule"}append(...e){return this.proxyOf.nodes||(this.nodes=[]),super.append(...e)}prepend(...e){return this.proxyOf.nodes||(this.nodes=[]),super.prepend(...e)}};Bc.exports=Yr;Yr.default=Yr;Nc.registerAtRule(Yr)});var tr=x((D3,Uc)=>{u();"use strict";var Fc=Et(),jc,zc,Ut=class extends Fc{constructor(e){super(e);this.type="root",this.nodes||(this.nodes=[])}removeChild(e,t){let i=this.index(e);return!t&&i===0&&this.nodes.length>1&&(this.nodes[1].raws.before=this.nodes[i].raws.before),super.removeChild(e)}normalize(e,t,i){let n=super.normalize(e);if(t){if(i==="prepend")this.nodes.length>1?t.raws.before=this.nodes[1].raws.before:delete 
t.raws.before;else if(this.first!==t)for(let a of n)a.raws.before=t.raws.before}return n}toResult(e={}){return new jc(new zc,this,e).stringify()}};Ut.registerLazyResult=r=>{jc=r};Ut.registerProcessor=r=>{zc=r};Uc.exports=Ut;Ut.default=Ut;Fc.registerRoot(Ut)});var ba=x((q3,Vc)=>{u();"use strict";var Kr={split(r,e,t){let i=[],n="",a=!1,s=0,o=!1,l="",c=!1;for(let f of r)c?c=!1:f==="\\"?c=!0:o?f===l&&(o=!1):f==='"'||f==="'"?(o=!0,l=f):f==="("?s+=1:f===")"?s>0&&(s-=1):s===0&&e.includes(f)&&(a=!0),a?(n!==""&&i.push(n.trim()),n="",a=!1):n+=f;return(t||n!=="")&&i.push(n.trim()),i},space(r){let e=[" ",` -`," "];return Kr.split(r,e)},comma(r){return Kr.split(r,[","],!0)}};Vc.exports=Kr;Kr.default=Kr});var Sn=x(($3,Wc)=>{u();"use strict";var Hc=Et(),Fx=ba(),Xr=class extends Hc{constructor(e){super(e);this.type="rule",this.nodes||(this.nodes=[])}get selectors(){return Fx.comma(this.selector)}set selectors(e){let t=this.selector?this.selector.match(/,\s*/):null,i=t?t[0]:","+this.raw("between","beforeOpen");this.selector=e.join(i)}};Wc.exports=Xr;Xr.default=Xr;Hc.registerRule(Xr)});var Xc=x((L3,Kc)=>{u();"use strict";var jx=Wr(),zx=Mc(),Ux=Gr(),Vx=kn(),Hx=tr(),Gc=Sn(),Qc={empty:!0,space:!0};function Wx(r){for(let e=r.length-1;e>=0;e--){let t=r[e],i=t[3]||t[2];if(i)return i}}var Yc=class{constructor(e){this.input=e,this.root=new Hx,this.current=this.root,this.spaces="",this.semicolon=!1,this.customProperty=!1,this.createTokenizer(),this.root.source={input:e,start:{offset:0,line:1,column:1}}}createTokenizer(){this.tokenizer=zx(this.input)}parse(){let e;for(;!this.tokenizer.endOfFile();)switch(e=this.tokenizer.nextToken(),e[0]){case"space":this.spaces+=e[1];break;case";":this.freeSemicolon(e);break;case"}":this.end(e);break;case"comment":this.comment(e);break;case"at-word":this.atrule(e);break;case"{":this.emptyRule(e);break;default:this.other(e);break}this.endFile()}comment(e){let t=new Ux;this.init(t,e[2]),t.source.end=this.getPosition(e[3]||e[2]);let i=e[1].slice(2,-2);if(/^\s*$/.test(i))t.text="",t.raws.left=i,t.raws.right="";else{let n=i.match(/^(\s*)([^]*\S)(\s*)$/);t.text=n[2],t.raws.left=n[1],t.raws.right=n[3]}}emptyRule(e){let t=new Gc;this.init(t,e[2]),t.selector="",t.raws.between="",this.current=t}other(e){let t=!1,i=null,n=!1,a=null,s=[],o=e[1].startsWith("--"),l=[],c=e;for(;c;){if(i=c[0],l.push(c),i==="("||i==="[")a||(a=c),s.push(i==="("?")":"]");else if(o&&n&&i==="{")a||(a=c),s.push("}");else if(s.length===0)if(i===";")if(n){this.decl(l,o);return}else break;else if(i==="{"){this.rule(l);return}else if(i==="}"){this.tokenizer.back(l.pop()),t=!0;break}else i===":"&&(n=!0);else i===s[s.length-1]&&(s.pop(),s.length===0&&(a=null));c=this.tokenizer.nextToken()}if(this.tokenizer.endOfFile()&&(t=!0),s.length>0&&this.unclosedBracket(a),t&&n){if(!o)for(;l.length&&(c=l[l.length-1][0],!(c!=="space"&&c!=="comment"));)this.tokenizer.back(l.pop());this.decl(l,o)}else this.unknownWord(l)}rule(e){e.pop();let t=new Gc;this.init(t,e[0][2]),t.raws.between=this.spacesAndCommentsFromEnd(e),this.raw(t,"selector",e),this.current=t}decl(e,t){let i=new jx;this.init(i,e[0][2]);let n=e[e.length-1];for(n[0]===";"&&(this.semicolon=!0,e.pop()),i.source.end=this.getPosition(n[3]||n[2]||Wx(e));e[0][0]!=="word";)e.length===1&&this.unknownWord(e),i.raws.before+=e.shift()[1];for(i.source.start=this.getPosition(e[0][2]),i.prop="";e.length;){let c=e[0][0];if(c===":"||c==="space"||c==="comment")break;i.prop+=e.shift()[1]}i.raws.between="";let a;for(;e.length;)if(a=e.shift(),a[0]===":"){i.raws.between+=a[1];break}else 
a[0]==="word"&&/\w/.test(a[1])&&this.unknownWord([a]),i.raws.between+=a[1];(i.prop[0]==="_"||i.prop[0]==="*")&&(i.raws.before+=i.prop[0],i.prop=i.prop.slice(1));let s=[],o;for(;e.length&&(o=e[0][0],!(o!=="space"&&o!=="comment"));)s.push(e.shift());this.precheckMissedSemicolon(e);for(let c=e.length-1;c>=0;c--){if(a=e[c],a[1].toLowerCase()==="!important"){i.important=!0;let f=this.stringFrom(e,c);f=this.spacesFromEnd(e)+f,f!==" !important"&&(i.raws.important=f);break}else if(a[1].toLowerCase()==="important"){let f=e.slice(0),d="";for(let p=c;p>0;p--){let h=f[p][0];if(d.trim().indexOf("!")===0&&h!=="space")break;d=f.pop()[1]+d}d.trim().indexOf("!")===0&&(i.important=!0,i.raws.important=d,e=f)}if(a[0]!=="space"&&a[0]!=="comment")break}e.some(c=>c[0]!=="space"&&c[0]!=="comment")&&(i.raws.between+=s.map(c=>c[1]).join(""),s=[]),this.raw(i,"value",s.concat(e),t),i.value.includes(":")&&!t&&this.checkMissedSemicolon(e)}atrule(e){let t=new Vx;t.name=e[1].slice(1),t.name===""&&this.unnamedAtrule(t,e),this.init(t,e[2]);let i,n,a,s=!1,o=!1,l=[],c=[];for(;!this.tokenizer.endOfFile();){if(e=this.tokenizer.nextToken(),i=e[0],i==="("||i==="["?c.push(i==="("?")":"]"):i==="{"&&c.length>0?c.push("}"):i===c[c.length-1]&&c.pop(),c.length===0)if(i===";"){t.source.end=this.getPosition(e[2]),this.semicolon=!0;break}else if(i==="{"){o=!0;break}else if(i==="}"){if(l.length>0){for(a=l.length-1,n=l[a];n&&n[0]==="space";)n=l[--a];n&&(t.source.end=this.getPosition(n[3]||n[2]))}this.end(e);break}else l.push(e);else l.push(e);if(this.tokenizer.endOfFile()){s=!0;break}}t.raws.between=this.spacesAndCommentsFromEnd(l),l.length?(t.raws.afterName=this.spacesAndCommentsFromStart(l),this.raw(t,"params",l),s&&(e=l[l.length-1],t.source.end=this.getPosition(e[3]||e[2]),this.spaces=t.raws.between,t.raws.between="")):(t.raws.afterName="",t.params=""),o&&(t.nodes=[],this.current=t)}end(e){this.current.nodes&&this.current.nodes.length&&(this.current.raws.semicolon=this.semicolon),this.semicolon=!1,this.current.raws.after=(this.current.raws.after||"")+this.spaces,this.spaces="",this.current.parent?(this.current.source.end=this.getPosition(e[2]),this.current=this.current.parent):this.unexpectedClose(e)}endFile(){this.current.parent&&this.unclosedBlock(),this.current.nodes&&this.current.nodes.length&&(this.current.raws.semicolon=this.semicolon),this.current.raws.after=(this.current.raws.after||"")+this.spaces}freeSemicolon(e){if(this.spaces+=e[1],this.current.nodes){let t=this.current.nodes[this.current.nodes.length-1];t&&t.type==="rule"&&!t.raws.ownSemicolon&&(t.raws.ownSemicolon=this.spaces,this.spaces="")}}getPosition(e){let t=this.input.fromOffset(e);return{offset:e,line:t.line,column:t.col}}init(e,t){this.current.push(e),e.source={start:this.getPosition(t),input:this.input},e.raws.before=this.spaces,this.spaces="",e.type!=="comment"&&(this.semicolon=!1)}raw(e,t,i,n){let a,s,o=i.length,l="",c=!0,f,d;for(let p=0;ph+b[1],"");e.raws[t]={value:l,raw:p}}e[t]=l}spacesAndCommentsFromEnd(e){let t,i="";for(;e.length&&(t=e[e.length-1][0],!(t!=="space"&&t!=="comment"));)i=e.pop()[1]+i;return i}spacesAndCommentsFromStart(e){let t,i="";for(;e.length&&(t=e[0][0],!(t!=="space"&&t!=="comment"));)i+=e.shift()[1];return i}spacesFromEnd(e){let t,i="";for(;e.length&&(t=e[e.length-1][0],t==="space");)i=e.pop()[1]+i;return i}stringFrom(e,t){let i="";for(let n=t;n=0&&(n=e[a],!(n[0]!=="space"&&(i+=1,i===2)));a--);throw this.input.error("Missed semicolon",n[0]==="word"?n[3]+1:n[2])}};Kc.exports=Yc});var Jc=x(()=>{u()});var ep=x((B3,Zc)=>{u();var 
Gx="useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict",Qx=(r,e=21)=>(t=e)=>{let i="",n=t;for(;n--;)i+=r[Math.random()*r.length|0];return i},Yx=(r=21)=>{let e="",t=r;for(;t--;)e+=Gx[Math.random()*64|0];return e};Zc.exports={nanoid:Yx,customAlphabet:Qx}});var wa=x((F3,tp)=>{u();tp.exports={}});var Cn=x((j3,sp)=>{u();"use strict";var{SourceMapConsumer:Kx,SourceMapGenerator:Xx}=Jc(),{fileURLToPath:rp,pathToFileURL:An}=(aa(),ac),{resolve:va,isAbsolute:xa}=(et(),Ur),{nanoid:Jx}=ep(),ka=oa(),ip=nn(),Zx=wa(),Sa=Symbol("fromOffsetCache"),e1=Boolean(Kx&&Xx),np=Boolean(va&&xa),Jr=class{constructor(e,t={}){if(e===null||typeof e=="undefined"||typeof e=="object"&&!e.toString)throw new Error(`PostCSS received ${e} instead of CSS string`);if(this.css=e.toString(),this.css[0]==="\uFEFF"||this.css[0]==="\uFFFE"?(this.hasBOM=!0,this.css=this.css.slice(1)):this.hasBOM=!1,t.from&&(!np||/^\w+:\/\//.test(t.from)||xa(t.from)?this.file=t.from:this.file=va(t.from)),np&&e1){let i=new Zx(this.css,t);if(i.text){this.map=i;let n=i.consumer().file;!this.file&&n&&(this.file=this.mapResolve(n))}}this.file||(this.id=""),this.map&&(this.map.file=this.from)}fromOffset(e){let t,i;if(this[Sa])i=this[Sa];else{let a=this.css.split(` -`);i=new Array(a.length);let s=0;for(let o=0,l=a.length;o=t)n=i.length-1;else{let a=i.length-2,s;for(;n>1),e=i[s+1])n=s+1;else{n=s;break}}return{line:n+1,col:e-i[n]+1}}error(e,t,i,n={}){let a,s,o;if(t&&typeof t=="object"){let c=t,f=i;if(typeof c.offset=="number"){let d=this.fromOffset(c.offset);t=d.line,i=d.col}else t=c.line,i=c.column;if(typeof f.offset=="number"){let d=this.fromOffset(f.offset);s=d.line,o=d.col}else s=f.line,o=f.column}else if(!i){let c=this.fromOffset(t);t=c.line,i=c.col}let l=this.origin(t,i,s,o);return l?a=new ip(e,l.endLine===void 0?l.line:{line:l.line,column:l.column},l.endLine===void 0?l.column:{line:l.endLine,column:l.endColumn},l.source,l.file,n.plugin):a=new ip(e,s===void 0?t:{line:t,column:i},s===void 0?i:{line:s,column:o},this.css,this.file,n.plugin),a.input={line:t,column:i,endLine:s,endColumn:o,source:this.css},this.file&&(An&&(a.input.url=An(this.file).toString()),a.input.file=this.file),a}origin(e,t,i,n){if(!this.map)return!1;let a=this.map.consumer(),s=a.originalPositionFor({line:e,column:t});if(!s.source)return!1;let o;typeof i=="number"&&(o=a.originalPositionFor({line:i,column:n}));let l;xa(s.source)?l=An(s.source):l=new URL(s.source,this.map.consumer().sourceRoot||An(this.map.mapFile));let c={url:l.toString(),line:s.line,column:s.column,endLine:o&&o.line,endColumn:o&&o.column};if(l.protocol==="file:")if(rp)c.file=rp(l);else throw new Error("file: protocol is not available in this PostCSS build");let f=a.sourceContentFor(s.source);return f&&(c.source=f),c}mapResolve(e){return/^\w+:\/\//.test(e)?e:va(this.map.consumer().sourceRoot||this.map.root||".",e)}get from(){return this.file||this.id}toJSON(){let e={};for(let t of["hasBOM","css","file","id"])this[t]!=null&&(e[t]=this[t]);return this.map&&(e.map={...this.map},e.map.consumerCache&&(e.map.consumerCache=void 0)),e}};sp.exports=Jr;Jr.default=Jr;ka&&ka.registerInput&&ka.registerInput(Jr)});var En=x((z3,ap)=>{u();"use strict";var t1=Et(),r1=Xc(),i1=Cn();function _n(r,e){let t=new i1(r,e),i=new r1(t);try{i.parse()}catch(n){throw n}return i.root}ap.exports=_n;_n.default=_n;t1.registerParse(_n)});var _a=x((V3,fp)=>{u();"use 
strict";var{isClean:tt,my:n1}=sn(),s1=pa(),a1=Vr(),o1=Et(),l1=cn(),U3=ma(),op=hn(),u1=En(),f1=tr(),c1={document:"Document",root:"Root",atrule:"AtRule",rule:"Rule",decl:"Declaration",comment:"Comment"},p1={postcssPlugin:!0,prepare:!0,Once:!0,Document:!0,Root:!0,Declaration:!0,Rule:!0,AtRule:!0,Comment:!0,DeclarationExit:!0,RuleExit:!0,AtRuleExit:!0,CommentExit:!0,RootExit:!0,DocumentExit:!0,OnceExit:!0},d1={postcssPlugin:!0,prepare:!0,Once:!0},rr=0;function Zr(r){return typeof r=="object"&&typeof r.then=="function"}function lp(r){let e=!1,t=c1[r.type];return r.type==="decl"?e=r.prop.toLowerCase():r.type==="atrule"&&(e=r.name.toLowerCase()),e&&r.append?[t,t+"-"+e,rr,t+"Exit",t+"Exit-"+e]:e?[t,t+"-"+e,t+"Exit",t+"Exit-"+e]:r.append?[t,rr,t+"Exit"]:[t,t+"Exit"]}function up(r){let e;return r.type==="document"?e=["Document",rr,"DocumentExit"]:r.type==="root"?e=["Root",rr,"RootExit"]:e=lp(r),{node:r,events:e,eventIndex:0,visitors:[],visitorIndex:0,iterator:0}}function Aa(r){return r[tt]=!1,r.nodes&&r.nodes.forEach(e=>Aa(e)),r}var Ca={},pt=class{constructor(e,t,i){this.stringified=!1,this.processed=!1;let n;if(typeof t=="object"&&t!==null&&(t.type==="root"||t.type==="document"))n=Aa(t);else if(t instanceof pt||t instanceof op)n=Aa(t.root),t.map&&(typeof i.map=="undefined"&&(i.map={}),i.map.inline||(i.map.inline=!1),i.map.prev=t.map);else{let a=u1;i.syntax&&(a=i.syntax.parse),i.parser&&(a=i.parser),a.parse&&(a=a.parse);try{n=a(t,i)}catch(s){this.processed=!0,this.error=s}n&&!n[n1]&&o1.rebuild(n)}this.result=new op(e,n,i),this.helpers={...Ca,result:this.result,postcss:Ca},this.plugins=this.processor.plugins.map(a=>typeof a=="object"&&a.prepare?{...a,...a.prepare(this.result)}:a)}get[Symbol.toStringTag](){return"LazyResult"}get processor(){return this.result.processor}get opts(){return this.result.opts}get css(){return this.stringify().css}get content(){return this.stringify().content}get map(){return this.stringify().map}get root(){return this.sync().root}get messages(){return this.sync().messages}warnings(){return this.sync().warnings()}toString(){return this.css}then(e,t){return this.async().then(e,t)}catch(e){return this.async().catch(e)}finally(e){return this.async().then(e,e)}async(){return this.error?Promise.reject(this.error):this.processed?Promise.resolve(this.result):(this.processing||(this.processing=this.runAsync()),this.processing)}sync(){if(this.error)throw this.error;if(this.processed)return this.result;if(this.processed=!0,this.processing)throw this.getAsyncError();for(let e of this.plugins){let t=this.runOnRoot(e);if(Zr(t))throw this.getAsyncError()}if(this.prepareVisitors(),this.hasListener){let e=this.result.root;for(;!e[tt];)e[tt]=!0,this.walkSync(e);if(this.listeners.OnceExit)if(e.type==="document")for(let t of e.nodes)this.visitSync(this.listeners.OnceExit,t);else this.visitSync(this.listeners.OnceExit,e)}return this.result}stringify(){if(this.error)throw this.error;if(this.stringified)return this.result;this.stringified=!0,this.sync();let e=this.result.opts,t=a1;e.syntax&&(t=e.syntax.stringify),e.stringifier&&(t=e.stringifier),t.stringify&&(t=t.stringify);let n=new s1(t,this.result.root,this.result.opts).generate();return this.result.css=n[0],this.result.map=n[1],this.result}walkSync(e){e[tt]=!0;let t=lp(e);for(let i of t)if(i===rr)e.nodes&&e.each(n=>{n[tt]||this.walkSync(n)});else{let n=this.listeners[i];if(n&&this.visitSync(n,e.toProxy()))return}}visitSync(e,t){for(let[i,n]of e){this.result.lastPlugin=i;let a;try{a=n(t,this.helpers)}catch(s){throw 
this.handleError(s,t.proxyOf)}if(t.type!=="root"&&t.type!=="document"&&!t.parent)return!0;if(Zr(a))throw this.getAsyncError()}}runOnRoot(e){this.result.lastPlugin=e;try{if(typeof e=="object"&&e.Once){if(this.result.root.type==="document"){let t=this.result.root.nodes.map(i=>e.Once(i,this.helpers));return Zr(t[0])?Promise.all(t):t}return e.Once(this.result.root,this.helpers)}else if(typeof e=="function")return e(this.result.root,this.result)}catch(t){throw this.handleError(t)}}getAsyncError(){throw new Error("Use process(css).then(cb) to work with async plugins")}handleError(e,t){let i=this.result.lastPlugin;try{t&&t.addToError(e),this.error=e,e.name==="CssSyntaxError"&&!e.plugin?(e.plugin=i.postcssPlugin,e.setMessage()):i.postcssVersion}catch(n){console&&console.error&&console.error(n)}return e}async runAsync(){this.plugin=0;for(let e=0;e0;){let i=this.visitTick(t);if(Zr(i))try{await i}catch(n){let a=t[t.length-1].node;throw this.handleError(n,a)}}}if(this.listeners.OnceExit)for(let[t,i]of this.listeners.OnceExit){this.result.lastPlugin=t;try{if(e.type==="document"){let n=e.nodes.map(a=>i(a,this.helpers));await Promise.all(n)}else await i(e,this.helpers)}catch(n){throw this.handleError(n)}}}return this.processed=!0,this.stringify()}prepareVisitors(){this.listeners={};let e=(t,i,n)=>{this.listeners[i]||(this.listeners[i]=[]),this.listeners[i].push([t,n])};for(let t of this.plugins)if(typeof t=="object")for(let i in t){if(!p1[i]&&/^[A-Z]/.test(i))throw new Error(`Unknown event ${i} in ${t.postcssPlugin}. Try to update PostCSS (${this.processor.version} now).`);if(!d1[i])if(typeof t[i]=="object")for(let n in t[i])n==="*"?e(t,i,t[i][n]):e(t,i+"-"+n.toLowerCase(),t[i][n]);else typeof t[i]=="function"&&e(t,i,t[i])}this.hasListener=Object.keys(this.listeners).length>0}visitTick(e){let t=e[e.length-1],{node:i,visitors:n}=t;if(i.type!=="root"&&i.type!=="document"&&!i.parent){e.pop();return}if(n.length>0&&t.visitorIndex{Ca=r};fp.exports=pt;pt.default=pt;f1.registerLazyResult(pt);l1.registerLazyResult(pt)});var pp=x((W3,cp)=>{u();"use strict";var h1=pa(),m1=Vr(),H3=ma(),g1=En(),y1=hn(),On=class{constructor(e,t,i){t=t.toString(),this.stringified=!1,this._processor=e,this._css=t,this._opts=i,this._map=void 0;let n,a=m1;this.result=new y1(this._processor,n,this._opts),this.result.css=t;let s=this;Object.defineProperty(this.result,"root",{get(){return s.root}});let o=new h1(a,n,this._opts,t);if(o.isMap()){let[l,c]=o.generate();l&&(this.result.css=l),c&&(this.result.map=c)}}get[Symbol.toStringTag](){return"NoWorkResult"}get processor(){return this.result.processor}get opts(){return this.result.opts}get css(){return this.result.css}get content(){return this.result.css}get map(){return this.result.map}get root(){if(this._root)return this._root;let e,t=g1;try{e=t(this._css,this._opts)}catch(i){this.error=i}if(this.error)throw this.error;return this._root=e,e}get messages(){return[]}warnings(){return[]}toString(){return this._css}then(e,t){return this.async().then(e,t)}catch(e){return this.async().catch(e)}finally(e){return this.async().then(e,e)}async(){return this.error?Promise.reject(this.error):Promise.resolve(this.result)}sync(){if(this.error)throw this.error;return this.result}};cp.exports=On;On.default=On});var hp=x((G3,dp)=>{u();"use strict";var b1=pp(),w1=_a(),v1=cn(),x1=tr(),ir=class{constructor(e=[]){this.version="8.4.24",this.plugins=this.normalize(e)}use(e){return this.plugins=this.plugins.concat(this.normalize([e])),this}process(e,t={}){return this.plugins.length===0&&typeof 
t.parser=="undefined"&&typeof t.stringifier=="undefined"&&typeof t.syntax=="undefined"?new b1(this,e,t):new w1(this,e,t)}normalize(e){let t=[];for(let i of e)if(i.postcss===!0?i=i():i.postcss&&(i=i.postcss),typeof i=="object"&&Array.isArray(i.plugins))t=t.concat(i.plugins);else if(typeof i=="object"&&i.postcssPlugin)t.push(i);else if(typeof i=="function")t.push(i);else if(!(typeof i=="object"&&(i.parse||i.stringify)))throw new Error(i+" is not a PostCSS plugin");return t}};dp.exports=ir;ir.default=ir;x1.registerProcessor(ir);v1.registerProcessor(ir)});var gp=x((Q3,mp)=>{u();"use strict";var k1=Wr(),S1=wa(),A1=Gr(),C1=kn(),_1=Cn(),E1=tr(),O1=Sn();function ei(r,e){if(Array.isArray(r))return r.map(n=>ei(n));let{inputs:t,...i}=r;if(t){e=[];for(let n of t){let a={...n,__proto__:_1.prototype};a.map&&(a.map={...a.map,__proto__:S1.prototype}),e.push(a)}}if(i.nodes&&(i.nodes=r.nodes.map(n=>ei(n,e))),i.source){let{inputId:n,...a}=i.source;i.source=a,n!=null&&(i.source.input=e[n])}if(i.type==="root")return new E1(i);if(i.type==="decl")return new k1(i);if(i.type==="rule")return new O1(i);if(i.type==="comment")return new A1(i);if(i.type==="atrule")return new C1(i);throw new Error("Unknown node type: "+r.type)}mp.exports=ei;ei.default=ei});var $e=x((Y3,Sp)=>{u();"use strict";var T1=nn(),yp=Wr(),R1=_a(),P1=Et(),Ea=hp(),I1=Vr(),D1=gp(),bp=cn(),q1=ga(),wp=Gr(),vp=kn(),$1=hn(),L1=Cn(),M1=En(),N1=ba(),xp=Sn(),kp=tr(),B1=Hr();function Z(...r){return r.length===1&&Array.isArray(r[0])&&(r=r[0]),new Ea(r)}Z.plugin=function(e,t){let i=!1;function n(...s){console&&console.warn&&!i&&(i=!0,console.warn(e+`: postcss.plugin was deprecated. Migration guide: -https://evilmartians.com/chronicles/postcss-8-plugin-migration`),m.env.LANG&&m.env.LANG.startsWith("cn")&&console.warn(e+`: \u91CC\u9762 postcss.plugin \u88AB\u5F03\u7528. 
\u8FC1\u79FB\u6307\u5357: -https://www.w3ctech.com/topic/2226`));let o=t(...s);return o.postcssPlugin=e,o.postcssVersion=new Ea().version,o}let a;return Object.defineProperty(n,"postcss",{get(){return a||(a=n()),a}}),n.process=function(s,o,l){return Z([n(l)]).process(s,o)},n};Z.stringify=I1;Z.parse=M1;Z.fromJSON=D1;Z.list=N1;Z.comment=r=>new wp(r);Z.atRule=r=>new vp(r);Z.decl=r=>new yp(r);Z.rule=r=>new xp(r);Z.root=r=>new kp(r);Z.document=r=>new bp(r);Z.CssSyntaxError=T1;Z.Declaration=yp;Z.Container=P1;Z.Processor=Ea;Z.Document=bp;Z.Comment=wp;Z.Warning=q1;Z.AtRule=vp;Z.Result=$1;Z.Input=L1;Z.Rule=xp;Z.Root=kp;Z.Node=B1;R1.registerPostcss(Z);Sp.exports=Z;Z.default=Z});var re,ee,K3,X3,J3,Z3,eI,tI,rI,iI,nI,sI,aI,oI,lI,uI,fI,cI,pI,dI,hI,mI,gI,yI,bI,wI,Ot=R(()=>{u();re=pe($e()),ee=re.default,K3=re.default.stringify,X3=re.default.fromJSON,J3=re.default.plugin,Z3=re.default.parse,eI=re.default.list,tI=re.default.document,rI=re.default.comment,iI=re.default.atRule,nI=re.default.rule,sI=re.default.decl,aI=re.default.root,oI=re.default.CssSyntaxError,lI=re.default.Declaration,uI=re.default.Container,fI=re.default.Processor,cI=re.default.Document,pI=re.default.Comment,dI=re.default.Warning,hI=re.default.AtRule,mI=re.default.Result,gI=re.default.Input,yI=re.default.Rule,bI=re.default.Root,wI=re.default.Node});var Oa=x((xI,Ap)=>{u();Ap.exports=function(r,e,t,i,n){for(e=e.split?e.split("."):e,i=0;i{u();"use strict";Tn.__esModule=!0;Tn.default=z1;function F1(r){for(var e=r.toLowerCase(),t="",i=!1,n=0;n<6&&e[n]!==void 0;n++){var a=e.charCodeAt(n),s=a>=97&&a<=102||a>=48&&a<=57;if(i=a===32,!s)break;t+=e[n]}if(t.length!==0){var o=parseInt(t,16),l=o>=55296&&o<=57343;return l||o===0||o>1114111?["\uFFFD",t.length+(i?1:0)]:[String.fromCodePoint(o),t.length+(i?1:0)]}}var j1=/\\/;function z1(r){var e=j1.test(r);if(!e)return r;for(var t="",i=0;i{u();"use strict";Pn.__esModule=!0;Pn.default=U1;function U1(r){for(var e=arguments.length,t=new Array(e>1?e-1:0),i=1;i0;){var n=t.shift();if(!r[n])return;r=r[n]}return r}_p.exports=Pn.default});var Tp=x((In,Op)=>{u();"use strict";In.__esModule=!0;In.default=V1;function V1(r){for(var e=arguments.length,t=new Array(e>1?e-1:0),i=1;i0;){var n=t.shift();r[n]||(r[n]={}),r=r[n]}}Op.exports=In.default});var Pp=x((Dn,Rp)=>{u();"use strict";Dn.__esModule=!0;Dn.default=H1;function H1(r){for(var e="",t=r.indexOf("/*"),i=0;t>=0;){e=e+r.slice(i,t);var n=r.indexOf("*/",t+2);if(n<0)return e;i=n+2,t=r.indexOf("/*",i)}return e=e+r.slice(i),e}Rp.exports=Dn.default});var ti=x(rt=>{u();"use strict";rt.__esModule=!0;rt.unesc=rt.stripComments=rt.getProp=rt.ensureObject=void 0;var W1=qn(Rn());rt.unesc=W1.default;var G1=qn(Ep());rt.getProp=G1.default;var Q1=qn(Tp());rt.ensureObject=Q1.default;var Y1=qn(Pp());rt.stripComments=Y1.default;function qn(r){return r&&r.__esModule?r:{default:r}}});var dt=x((ri,qp)=>{u();"use strict";ri.__esModule=!0;ri.default=void 0;var Ip=ti();function Dp(r,e){for(var t=0;ti||this.source.end.linen||this.source.end.line===i&&this.source.end.column{u();"use strict";ie.__esModule=!0;ie.UNIVERSAL=ie.TAG=ie.STRING=ie.SELECTOR=ie.ROOT=ie.PSEUDO=ie.NESTING=ie.ID=ie.COMMENT=ie.COMBINATOR=ie.CLASS=ie.ATTRIBUTE=void 0;var Z1="tag";ie.TAG=Z1;var ek="string";ie.STRING=ek;var tk="selector";ie.SELECTOR=tk;var rk="root";ie.ROOT=rk;var ik="pseudo";ie.PSEUDO=ik;var nk="nesting";ie.NESTING=nk;var sk="id";ie.ID=sk;var ak="comment";ie.COMMENT=ak;var ok="combinator";ie.COMBINATOR=ok;var lk="class";ie.CLASS=lk;var uk="attribute";ie.ATTRIBUTE=uk;var fk="universal";ie.UNIVERSAL=fk});var 
$n=x((ii,Np)=>{u();"use strict";ii.__esModule=!0;ii.default=void 0;var ck=dk(dt()),ht=pk(Se());function $p(r){if(typeof WeakMap!="function")return null;var e=new WeakMap,t=new WeakMap;return($p=function(n){return n?t:e})(r)}function pk(r,e){if(!e&&r&&r.__esModule)return r;if(r===null||typeof r!="object"&&typeof r!="function")return{default:r};var t=$p(e);if(t&&t.has(r))return t.get(r);var i={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var a in r)if(a!=="default"&&Object.prototype.hasOwnProperty.call(r,a)){var s=n?Object.getOwnPropertyDescriptor(r,a):null;s&&(s.get||s.set)?Object.defineProperty(i,a,s):i[a]=r[a]}return i.default=r,t&&t.set(r,i),i}function dk(r){return r&&r.__esModule?r:{default:r}}function hk(r,e){var t=typeof Symbol!="undefined"&&r[Symbol.iterator]||r["@@iterator"];if(t)return(t=t.call(r)).next.bind(t);if(Array.isArray(r)||(t=mk(r))||e&&r&&typeof r.length=="number"){t&&(r=t);var i=0;return function(){return i>=r.length?{done:!0}:{done:!1,value:r[i++]}}}throw new TypeError(`Invalid attempt to iterate non-iterable instance. -In order to be iterable, non-array objects must have a [Symbol.iterator]() method.`)}function mk(r,e){if(!!r){if(typeof r=="string")return Lp(r,e);var t=Object.prototype.toString.call(r).slice(8,-1);if(t==="Object"&&r.constructor&&(t=r.constructor.name),t==="Map"||t==="Set")return Array.from(r);if(t==="Arguments"||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(t))return Lp(r,e)}}function Lp(r,e){(e==null||e>r.length)&&(e=r.length);for(var t=0,i=new Array(e);t=n&&(this.indexes[s]=a-1);return this},t.removeAll=function(){for(var n=hk(this.nodes),a;!(a=n()).done;){var s=a.value;s.parent=void 0}return this.nodes=[],this},t.empty=function(){return this.removeAll()},t.insertAfter=function(n,a){a.parent=this;var s=this.index(n);this.nodes.splice(s+1,0,a),a.parent=this;var o;for(var l in this.indexes)o=this.indexes[l],s<=o&&(this.indexes[l]=o+1);return this},t.insertBefore=function(n,a){a.parent=this;var s=this.index(n);this.nodes.splice(s,0,a),a.parent=this;var o;for(var l in this.indexes)o=this.indexes[l],o<=s&&(this.indexes[l]=o+1);return this},t._findChildAtPosition=function(n,a){var s=void 0;return this.each(function(o){if(o.atPosition){var l=o.atPosition(n,a);if(l)return s=l,!1}else if(o.isAtPosition(n,a))return s=o,!1}),s},t.atPosition=function(n,a){if(this.isAtPosition(n,a))return this._findChildAtPosition(n,a)||this},t._inferEndPosition=function(){this.last&&this.last.source&&this.last.source.end&&(this.source=this.source||{},this.source.end=this.source.end||{},Object.assign(this.source.end,this.last.source.end))},t.each=function(n){this.lastEach||(this.lastEach=0),this.indexes||(this.indexes={}),this.lastEach++;var a=this.lastEach;if(this.indexes[a]=0,!!this.length){for(var s,o;this.indexes[a]{u();"use strict";ni.__esModule=!0;ni.default=void 0;var wk=xk($n()),vk=Se();function xk(r){return r&&r.__esModule?r:{default:r}}function Bp(r,e){for(var t=0;t{u();"use strict";si.__esModule=!0;si.default=void 0;var Ck=Ek($n()),_k=Se();function Ek(r){return r&&r.__esModule?r:{default:r}}function Ok(r,e){r.prototype=Object.create(e.prototype),r.prototype.constructor=r,Ia(r,e)}function Ia(r,e){return Ia=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(i,n){return i.__proto__=n,i},Ia(r,e)}var Tk=function(r){Ok(e,r);function e(t){var i;return i=r.call(this,t)||this,i.type=_k.SELECTOR,i}return e}(Ck.default);si.default=Tk;jp.exports=si.default});var Ln=x((AI,zp)=>{u();"use strict";var 
Rk={},Pk=Rk.hasOwnProperty,Ik=function(e,t){if(!e)return t;var i={};for(var n in t)i[n]=Pk.call(e,n)?e[n]:t[n];return i},Dk=/[ -,\.\/:-@\[-\^`\{-~]/,qk=/[ -,\.\/:-@\[\]\^`\{-~]/,$k=/(^|\\+)?(\\[A-F0-9]{1,6})\x20(?![a-fA-F0-9\x20])/g,qa=function r(e,t){t=Ik(t,r.options),t.quotes!="single"&&t.quotes!="double"&&(t.quotes="single");for(var i=t.quotes=="double"?'"':"'",n=t.isIdentifier,a=e.charAt(0),s="",o=0,l=e.length;o126){if(f>=55296&&f<=56319&&o{u();"use strict";ai.__esModule=!0;ai.default=void 0;var Lk=Up(Ln()),Mk=ti(),Nk=Up(dt()),Bk=Se();function Up(r){return r&&r.__esModule?r:{default:r}}function Vp(r,e){for(var t=0;t{u();"use strict";oi.__esModule=!0;oi.default=void 0;var Uk=Hk(dt()),Vk=Se();function Hk(r){return r&&r.__esModule?r:{default:r}}function Wk(r,e){r.prototype=Object.create(e.prototype),r.prototype.constructor=r,Ma(r,e)}function Ma(r,e){return Ma=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(i,n){return i.__proto__=n,i},Ma(r,e)}var Gk=function(r){Wk(e,r);function e(t){var i;return i=r.call(this,t)||this,i.type=Vk.COMMENT,i}return e}(Uk.default);oi.default=Gk;Wp.exports=oi.default});var Fa=x((li,Gp)=>{u();"use strict";li.__esModule=!0;li.default=void 0;var Qk=Kk(dt()),Yk=Se();function Kk(r){return r&&r.__esModule?r:{default:r}}function Xk(r,e){r.prototype=Object.create(e.prototype),r.prototype.constructor=r,Ba(r,e)}function Ba(r,e){return Ba=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(i,n){return i.__proto__=n,i},Ba(r,e)}var Jk=function(r){Xk(e,r);function e(i){var n;return n=r.call(this,i)||this,n.type=Yk.ID,n}var t=e.prototype;return t.valueToString=function(){return"#"+r.prototype.valueToString.call(this)},e}(Qk.default);li.default=Jk;Gp.exports=li.default});var Mn=x((ui,Kp)=>{u();"use strict";ui.__esModule=!0;ui.default=void 0;var Zk=Qp(Ln()),eS=ti(),tS=Qp(dt());function Qp(r){return r&&r.__esModule?r:{default:r}}function Yp(r,e){for(var t=0;t{u();"use strict";fi.__esModule=!0;fi.default=void 0;var sS=oS(Mn()),aS=Se();function oS(r){return r&&r.__esModule?r:{default:r}}function lS(r,e){r.prototype=Object.create(e.prototype),r.prototype.constructor=r,za(r,e)}function za(r,e){return za=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(i,n){return i.__proto__=n,i},za(r,e)}var uS=function(r){lS(e,r);function e(t){var i;return i=r.call(this,t)||this,i.type=aS.TAG,i}return e}(sS.default);fi.default=uS;Xp.exports=fi.default});var Ha=x((ci,Jp)=>{u();"use strict";ci.__esModule=!0;ci.default=void 0;var fS=pS(dt()),cS=Se();function pS(r){return r&&r.__esModule?r:{default:r}}function dS(r,e){r.prototype=Object.create(e.prototype),r.prototype.constructor=r,Va(r,e)}function Va(r,e){return Va=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(i,n){return i.__proto__=n,i},Va(r,e)}var hS=function(r){dS(e,r);function e(t){var i;return i=r.call(this,t)||this,i.type=cS.STRING,i}return e}(fS.default);ci.default=hS;Jp.exports=ci.default});var Ga=x((pi,Zp)=>{u();"use strict";pi.__esModule=!0;pi.default=void 0;var mS=yS($n()),gS=Se();function yS(r){return r&&r.__esModule?r:{default:r}}function bS(r,e){r.prototype=Object.create(e.prototype),r.prototype.constructor=r,Wa(r,e)}function Wa(r,e){return Wa=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(i,n){return i.__proto__=n,i},Wa(r,e)}var wS=function(r){bS(e,r);function e(i){var n;return n=r.call(this,i)||this,n.type=gS.PSEUDO,n}var t=e.prototype;return t.toString=function(){var 
n=this.length?"("+this.map(String).join(",")+")":"";return[this.rawSpaceBefore,this.stringifyProperty("value"),n,this.rawSpaceAfter].join("")},e}(mS.default);pi.default=wS;Zp.exports=pi.default});var Nn={};Ge(Nn,{deprecate:()=>vS});function vS(r){return r}var Bn=R(()=>{u()});var td=x((CI,ed)=>{u();ed.exports=(Bn(),Nn).deprecate});var Za=x(mi=>{u();"use strict";mi.__esModule=!0;mi.default=void 0;mi.unescapeValue=Xa;var di=Ya(Ln()),xS=Ya(Rn()),kS=Ya(Mn()),SS=Se(),Qa;function Ya(r){return r&&r.__esModule?r:{default:r}}function rd(r,e){for(var t=0;t0&&!n.quoted&&o.before.length===0&&!(n.spaces.value&&n.spaces.value.after)&&(o.before=" "),id(s,o)}))),a.push("]"),a.push(this.rawSpaceAfter),a.join("")},AS(e,[{key:"quoted",get:function(){var n=this.quoteMark;return n==="'"||n==='"'},set:function(n){OS()}},{key:"quoteMark",get:function(){return this._quoteMark},set:function(n){if(!this._constructed){this._quoteMark=n;return}this._quoteMark!==n&&(this._quoteMark=n,this._syncRawValue())}},{key:"qualifiedAttribute",get:function(){return this.qualifiedName(this.raws.attribute||this.attribute)}},{key:"insensitiveFlag",get:function(){return this.insensitive?"i":""}},{key:"value",get:function(){return this._value},set:function(n){if(this._constructed){var a=Xa(n),s=a.deprecatedUsage,o=a.unescaped,l=a.quoteMark;if(s&&ES(),o===this._value&&l===this._quoteMark)return;this._value=o,this._quoteMark=l,this._syncRawValue()}else this._value=n}},{key:"insensitive",get:function(){return this._insensitive},set:function(n){n||(this._insensitive=!1,this.raws&&(this.raws.insensitiveFlag==="I"||this.raws.insensitiveFlag==="i")&&(this.raws.insensitiveFlag=void 0)),this._insensitive=n}},{key:"attribute",get:function(){return this._attribute},set:function(n){this._handleEscapes("attribute",n),this._attribute=n}}]),e}(kS.default);mi.default=Fn;Fn.NO_QUOTE=null;Fn.SINGLE_QUOTE="'";Fn.DOUBLE_QUOTE='"';var Ja=(Qa={"'":{quotes:"single",wrap:!0},'"':{quotes:"double",wrap:!0}},Qa[null]={isIdentifier:!0},Qa);function id(r,e){return""+e.before+r+e.after}});var to=x((gi,nd)=>{u();"use strict";gi.__esModule=!0;gi.default=void 0;var PS=DS(Mn()),IS=Se();function DS(r){return r&&r.__esModule?r:{default:r}}function qS(r,e){r.prototype=Object.create(e.prototype),r.prototype.constructor=r,eo(r,e)}function eo(r,e){return eo=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(i,n){return i.__proto__=n,i},eo(r,e)}var $S=function(r){qS(e,r);function e(t){var i;return i=r.call(this,t)||this,i.type=IS.UNIVERSAL,i.value="*",i}return e}(PS.default);gi.default=$S;nd.exports=gi.default});var io=x((yi,sd)=>{u();"use strict";yi.__esModule=!0;yi.default=void 0;var LS=NS(dt()),MS=Se();function NS(r){return r&&r.__esModule?r:{default:r}}function BS(r,e){r.prototype=Object.create(e.prototype),r.prototype.constructor=r,ro(r,e)}function ro(r,e){return ro=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(i,n){return i.__proto__=n,i},ro(r,e)}var FS=function(r){BS(e,r);function e(t){var i;return i=r.call(this,t)||this,i.type=MS.COMBINATOR,i}return e}(LS.default);yi.default=FS;sd.exports=yi.default});var so=x((bi,ad)=>{u();"use strict";bi.__esModule=!0;bi.default=void 0;var jS=US(dt()),zS=Se();function US(r){return r&&r.__esModule?r:{default:r}}function VS(r,e){r.prototype=Object.create(e.prototype),r.prototype.constructor=r,no(r,e)}function no(r,e){return no=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(i,n){return i.__proto__=n,i},no(r,e)}var HS=function(r){VS(e,r);function e(t){var i;return 
i=r.call(this,t)||this,i.type=zS.NESTING,i.value="&",i}return e}(jS.default);bi.default=HS;ad.exports=bi.default});var ld=x((jn,od)=>{u();"use strict";jn.__esModule=!0;jn.default=WS;function WS(r){return r.sort(function(e,t){return e-t})}od.exports=jn.default});var ao=x(M=>{u();"use strict";M.__esModule=!0;M.word=M.tilde=M.tab=M.str=M.space=M.slash=M.singleQuote=M.semicolon=M.plus=M.pipe=M.openSquare=M.openParenthesis=M.newline=M.greaterThan=M.feed=M.equals=M.doubleQuote=M.dollar=M.cr=M.comment=M.comma=M.combinator=M.colon=M.closeSquare=M.closeParenthesis=M.caret=M.bang=M.backslash=M.at=M.asterisk=M.ampersand=void 0;var GS=38;M.ampersand=GS;var QS=42;M.asterisk=QS;var YS=64;M.at=YS;var KS=44;M.comma=KS;var XS=58;M.colon=XS;var JS=59;M.semicolon=JS;var ZS=40;M.openParenthesis=ZS;var eA=41;M.closeParenthesis=eA;var tA=91;M.openSquare=tA;var rA=93;M.closeSquare=rA;var iA=36;M.dollar=iA;var nA=126;M.tilde=nA;var sA=94;M.caret=sA;var aA=43;M.plus=aA;var oA=61;M.equals=oA;var lA=124;M.pipe=lA;var uA=62;M.greaterThan=uA;var fA=32;M.space=fA;var ud=39;M.singleQuote=ud;var cA=34;M.doubleQuote=cA;var pA=47;M.slash=pA;var dA=33;M.bang=dA;var hA=92;M.backslash=hA;var mA=13;M.cr=mA;var gA=12;M.feed=gA;var yA=10;M.newline=yA;var bA=9;M.tab=bA;var wA=ud;M.str=wA;var vA=-1;M.comment=vA;var xA=-2;M.word=xA;var kA=-3;M.combinator=kA});var pd=x(wi=>{u();"use strict";wi.__esModule=!0;wi.FIELDS=void 0;wi.default=TA;var D=SA(ao()),nr,te;function fd(r){if(typeof WeakMap!="function")return null;var e=new WeakMap,t=new WeakMap;return(fd=function(n){return n?t:e})(r)}function SA(r,e){if(!e&&r&&r.__esModule)return r;if(r===null||typeof r!="object"&&typeof r!="function")return{default:r};var t=fd(e);if(t&&t.has(r))return t.get(r);var i={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var a in r)if(a!=="default"&&Object.prototype.hasOwnProperty.call(r,a)){var s=n?Object.getOwnPropertyDescriptor(r,a):null;s&&(s.get||s.set)?Object.defineProperty(i,a,s):i[a]=r[a]}return i.default=r,t&&t.set(r,i),i}var AA=(nr={},nr[D.tab]=!0,nr[D.newline]=!0,nr[D.cr]=!0,nr[D.feed]=!0,nr),CA=(te={},te[D.space]=!0,te[D.tab]=!0,te[D.newline]=!0,te[D.cr]=!0,te[D.feed]=!0,te[D.ampersand]=!0,te[D.asterisk]=!0,te[D.bang]=!0,te[D.comma]=!0,te[D.colon]=!0,te[D.semicolon]=!0,te[D.openParenthesis]=!0,te[D.closeParenthesis]=!0,te[D.openSquare]=!0,te[D.closeSquare]=!0,te[D.singleQuote]=!0,te[D.doubleQuote]=!0,te[D.plus]=!0,te[D.pipe]=!0,te[D.tilde]=!0,te[D.greaterThan]=!0,te[D.equals]=!0,te[D.dollar]=!0,te[D.caret]=!0,te[D.slash]=!0,te),oo={},cd="0123456789abcdefABCDEF";for(zn=0;zn0?(k=s+v,S=w-y[v].length):(k=s,S=a),O=D.comment,s=k,p=k,d=w-S):c===D.slash?(w=o,O=c,p=s,d=o-a,l=w+1):(w=_A(t,o),O=D.word,p=s,d=w-a),l=w+1;break}e.push([O,s,o-a,p,d,o,l]),S&&(a=S,S=null),o=l}return e}});var vd=x((vi,wd)=>{u();"use strict";vi.__esModule=!0;vi.default=void 0;var RA=je(Pa()),lo=je(Da()),PA=je(La()),dd=je(Na()),IA=je(Fa()),DA=je(Ua()),uo=je(Ha()),qA=je(Ga()),hd=Un(Za()),$A=je(to()),fo=je(io()),LA=je(so()),MA=je(ld()),P=Un(pd()),$=Un(ao()),NA=Un(Se()),le=ti(),Vt,co;function md(r){if(typeof WeakMap!="function")return null;var e=new WeakMap,t=new WeakMap;return(md=function(n){return n?t:e})(r)}function Un(r,e){if(!e&&r&&r.__esModule)return r;if(r===null||typeof r!="object"&&typeof r!="function")return{default:r};var t=md(e);if(t&&t.has(r))return t.get(r);var i={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var a in r)if(a!=="default"&&Object.prototype.hasOwnProperty.call(r,a)){var 
s=n?Object.getOwnPropertyDescriptor(r,a):null;s&&(s.get||s.set)?Object.defineProperty(i,a,s):i[a]=r[a]}return i.default=r,t&&t.set(r,i),i}function je(r){return r&&r.__esModule?r:{default:r}}function gd(r,e){for(var t=0;t0){var s=this.current.last;if(s){var o=this.convertWhitespaceNodesToSpace(a),l=o.space,c=o.rawSpace;c!==void 0&&(s.rawSpaceAfter+=c),s.spaces.after+=l}else a.forEach(function(O){return i.newNode(O)})}return}var f=this.currToken,d=void 0;n>this.position&&(d=this.parseWhitespaceEquivalentTokens(n));var p;if(this.isNamedCombinator()?p=this.namedCombinator():this.currToken[P.FIELDS.TYPE]===$.combinator?(p=new fo.default({value:this.content(),source:sr(this.currToken),sourceIndex:this.currToken[P.FIELDS.START_POS]}),this.position++):po[this.currToken[P.FIELDS.TYPE]]||d||this.unexpected(),p){if(d){var h=this.convertWhitespaceNodesToSpace(d),b=h.space,v=h.rawSpace;p.spaces.before=b,p.rawSpaceBefore=v}}else{var y=this.convertWhitespaceNodesToSpace(d,!0),w=y.space,k=y.rawSpace;k||(k=w);var S={},E={spaces:{}};w.endsWith(" ")&&k.endsWith(" ")?(S.before=w.slice(0,w.length-1),E.spaces.before=k.slice(0,k.length-1)):w.startsWith(" ")&&k.startsWith(" ")?(S.after=w.slice(1),E.spaces.after=k.slice(1)):E.value=k,p=new fo.default({value:" ",source:ho(f,this.tokens[this.position-1]),sourceIndex:f[P.FIELDS.START_POS],spaces:S,raws:E})}return this.currToken&&this.currToken[P.FIELDS.TYPE]===$.space&&(p.spaces.after=this.optionalSpace(this.content()),this.position++),this.newNode(p)},e.comma=function(){if(this.position===this.tokens.length-1){this.root.trailingComma=!0,this.position++;return}this.current._inferEndPosition();var i=new lo.default({source:{start:yd(this.tokens[this.position+1])}});this.current.parent.append(i),this.current=i,this.position++},e.comment=function(){var i=this.currToken;this.newNode(new dd.default({value:this.content(),source:sr(i),sourceIndex:i[P.FIELDS.START_POS]})),this.position++},e.error=function(i,n){throw this.root.error(i,n)},e.missingBackslash=function(){return this.error("Expected a backslash preceding the semicolon.",{index:this.currToken[P.FIELDS.START_POS]})},e.missingParenthesis=function(){return this.expected("opening parenthesis",this.currToken[P.FIELDS.START_POS])},e.missingSquareBracket=function(){return this.expected("opening square bracket",this.currToken[P.FIELDS.START_POS])},e.unexpected=function(){return this.error("Unexpected '"+this.content()+"'. 
Escaping special characters with \\ may help.",this.currToken[P.FIELDS.START_POS])},e.unexpectedPipe=function(){return this.error("Unexpected '|'.",this.currToken[P.FIELDS.START_POS])},e.namespace=function(){var i=this.prevToken&&this.content(this.prevToken)||!0;if(this.nextToken[P.FIELDS.TYPE]===$.word)return this.position++,this.word(i);if(this.nextToken[P.FIELDS.TYPE]===$.asterisk)return this.position++,this.universal(i);this.unexpectedPipe()},e.nesting=function(){if(this.nextToken){var i=this.content(this.nextToken);if(i==="|"){this.position++;return}}var n=this.currToken;this.newNode(new LA.default({value:this.content(),source:sr(n),sourceIndex:n[P.FIELDS.START_POS]})),this.position++},e.parentheses=function(){var i=this.current.last,n=1;if(this.position++,i&&i.type===NA.PSEUDO){var a=new lo.default({source:{start:yd(this.tokens[this.position-1])}}),s=this.current;for(i.append(a),this.current=a;this.position1&&i.nextToken&&i.nextToken[P.FIELDS.TYPE]===$.openParenthesis&&i.error("Misplaced parenthesis.",{index:i.nextToken[P.FIELDS.START_POS]})});else return this.expected(["pseudo-class","pseudo-element"],this.currToken[P.FIELDS.START_POS])},e.space=function(){var i=this.content();this.position===0||this.prevToken[P.FIELDS.TYPE]===$.comma||this.prevToken[P.FIELDS.TYPE]===$.openParenthesis||this.current.nodes.every(function(n){return n.type==="comment"})?(this.spaces=this.optionalSpace(i),this.position++):this.position===this.tokens.length-1||this.nextToken[P.FIELDS.TYPE]===$.comma||this.nextToken[P.FIELDS.TYPE]===$.closeParenthesis?(this.current.last.spaces.after=this.optionalSpace(i),this.position++):this.combinator()},e.string=function(){var i=this.currToken;this.newNode(new uo.default({value:this.content(),source:sr(i),sourceIndex:i[P.FIELDS.START_POS]})),this.position++},e.universal=function(i){var n=this.nextToken;if(n&&this.content(n)==="|")return this.position++,this.namespace();var a=this.currToken;this.newNode(new $A.default({value:this.content(),source:sr(a),sourceIndex:a[P.FIELDS.START_POS]}),i),this.position++},e.splitWord=function(i,n){for(var a=this,s=this.nextToken,o=this.content();s&&~[$.dollar,$.caret,$.equals,$.word].indexOf(s[P.FIELDS.TYPE]);){this.position++;var l=this.content();if(o+=l,l.lastIndexOf("\\")===l.length-1){var c=this.nextToken;c&&c[P.FIELDS.TYPE]===$.space&&(o+=this.requiredSpace(this.content(c)),this.position++)}s=this.nextToken}var f=mo(o,".").filter(function(b){var v=o[b-1]==="\\",y=/^\d+\.\d+%$/.test(o);return!v&&!y}),d=mo(o,"#").filter(function(b){return o[b-1]!=="\\"}),p=mo(o,"#{");p.length&&(d=d.filter(function(b){return!~p.indexOf(b)}));var h=(0,MA.default)(jA([0].concat(f,d)));h.forEach(function(b,v){var y=h[v+1]||o.length,w=o.slice(b,y);if(v===0&&n)return n.call(a,w,h.length);var k,S=a.currToken,E=S[P.FIELDS.START_POS]+h[v],O=Ht(S[1],S[2]+b,S[3],S[2]+(y-1));if(~f.indexOf(b)){var B={value:w.slice(1),source:O,sourceIndex:E};k=new PA.default(ar(B,"value"))}else if(~d.indexOf(b)){var N={value:w.slice(1),source:O,sourceIndex:E};k=new IA.default(ar(N,"value"))}else{var T={value:w,source:O,sourceIndex:E};ar(T,"value"),k=new DA.default(T)}a.newNode(k,i),i=null}),this.position++},e.word=function(i){var n=this.nextToken;return n&&this.content(n)==="|"?(this.position++,this.namespace()):this.splitWord(i)},e.loop=function(){for(;this.position{u();"use strict";xi.__esModule=!0;xi.default=void 0;var UA=VA(vd());function VA(r){return r&&r.__esModule?r:{default:r}}var HA=function(){function r(t,i){this.func=t||function(){},this.funcRes=null,this.options=i}var 
e=r.prototype;return e._shouldUpdateSelector=function(i,n){n===void 0&&(n={});var a=Object.assign({},this.options,n);return a.updateSelector===!1?!1:typeof i!="string"},e._isLossy=function(i){i===void 0&&(i={});var n=Object.assign({},this.options,i);return n.lossless===!1},e._root=function(i,n){n===void 0&&(n={});var a=new UA.default(i,this._parseOptions(n));return a.root},e._parseOptions=function(i){return{lossy:this._isLossy(i)}},e._run=function(i,n){var a=this;return n===void 0&&(n={}),new Promise(function(s,o){try{var l=a._root(i,n);Promise.resolve(a.func(l)).then(function(c){var f=void 0;return a._shouldUpdateSelector(i,n)&&(f=l.toString(),i.selector=f),{transform:c,root:l,string:f}}).then(s,o)}catch(c){o(c);return}})},e._runSync=function(i,n){n===void 0&&(n={});var a=this._root(i,n),s=this.func(a);if(s&&typeof s.then=="function")throw new Error("Selector processor returned a promise to a synchronous call.");var o=void 0;return n.updateSelector&&typeof i!="string"&&(o=a.toString(),i.selector=o),{transform:s,root:a,string:o}},e.ast=function(i,n){return this._run(i,n).then(function(a){return a.root})},e.astSync=function(i,n){return this._runSync(i,n).root},e.transform=function(i,n){return this._run(i,n).then(function(a){return a.transform})},e.transformSync=function(i,n){return this._runSync(i,n).transform},e.process=function(i,n){return this._run(i,n).then(function(a){return a.string||a.root.toString()})},e.processSync=function(i,n){var a=this._runSync(i,n);return a.string||a.root.toString()},r}();xi.default=HA;xd.exports=xi.default});var Sd=x(ne=>{u();"use strict";ne.__esModule=!0;ne.universal=ne.tag=ne.string=ne.selector=ne.root=ne.pseudo=ne.nesting=ne.id=ne.comment=ne.combinator=ne.className=ne.attribute=void 0;var WA=ze(Za()),GA=ze(La()),QA=ze(io()),YA=ze(Na()),KA=ze(Fa()),XA=ze(so()),JA=ze(Ga()),ZA=ze(Pa()),eC=ze(Da()),tC=ze(Ha()),rC=ze(Ua()),iC=ze(to());function ze(r){return r&&r.__esModule?r:{default:r}}var nC=function(e){return new WA.default(e)};ne.attribute=nC;var sC=function(e){return new GA.default(e)};ne.className=sC;var aC=function(e){return new QA.default(e)};ne.combinator=aC;var oC=function(e){return new YA.default(e)};ne.comment=oC;var lC=function(e){return new KA.default(e)};ne.id=lC;var uC=function(e){return new XA.default(e)};ne.nesting=uC;var fC=function(e){return new JA.default(e)};ne.pseudo=fC;var cC=function(e){return new ZA.default(e)};ne.root=cC;var pC=function(e){return new eC.default(e)};ne.selector=pC;var dC=function(e){return new tC.default(e)};ne.string=dC;var hC=function(e){return new rC.default(e)};ne.tag=hC;var mC=function(e){return new iC.default(e)};ne.universal=mC});var Ed=x(J=>{u();"use strict";J.__esModule=!0;J.isComment=J.isCombinator=J.isClassName=J.isAttribute=void 0;J.isContainer=EC;J.isIdentifier=void 0;J.isNamespace=OC;J.isNesting=void 0;J.isNode=go;J.isPseudo=void 0;J.isPseudoClass=_C;J.isPseudoElement=_d;J.isUniversal=J.isTag=J.isString=J.isSelector=J.isRoot=void 0;var ue=Se(),Oe,gC=(Oe={},Oe[ue.ATTRIBUTE]=!0,Oe[ue.CLASS]=!0,Oe[ue.COMBINATOR]=!0,Oe[ue.COMMENT]=!0,Oe[ue.ID]=!0,Oe[ue.NESTING]=!0,Oe[ue.PSEUDO]=!0,Oe[ue.ROOT]=!0,Oe[ue.SELECTOR]=!0,Oe[ue.STRING]=!0,Oe[ue.TAG]=!0,Oe[ue.UNIVERSAL]=!0,Oe);function go(r){return typeof r=="object"&&gC[r.type]}function Ue(r,e){return go(e)&&e.type===r}var Ad=Ue.bind(null,ue.ATTRIBUTE);J.isAttribute=Ad;var yC=Ue.bind(null,ue.CLASS);J.isClassName=yC;var bC=Ue.bind(null,ue.COMBINATOR);J.isCombinator=bC;var wC=Ue.bind(null,ue.COMMENT);J.isComment=wC;var vC=Ue.bind(null,ue.ID);J.isIdentifier=vC;var 
xC=Ue.bind(null,ue.NESTING);J.isNesting=xC;var yo=Ue.bind(null,ue.PSEUDO);J.isPseudo=yo;var kC=Ue.bind(null,ue.ROOT);J.isRoot=kC;var SC=Ue.bind(null,ue.SELECTOR);J.isSelector=SC;var AC=Ue.bind(null,ue.STRING);J.isString=AC;var Cd=Ue.bind(null,ue.TAG);J.isTag=Cd;var CC=Ue.bind(null,ue.UNIVERSAL);J.isUniversal=CC;function _d(r){return yo(r)&&r.value&&(r.value.startsWith("::")||r.value.toLowerCase()===":before"||r.value.toLowerCase()===":after"||r.value.toLowerCase()===":first-letter"||r.value.toLowerCase()===":first-line")}function _C(r){return yo(r)&&!_d(r)}function EC(r){return!!(go(r)&&r.walk)}function OC(r){return Ad(r)||Cd(r)}});var Od=x(Ke=>{u();"use strict";Ke.__esModule=!0;var bo=Se();Object.keys(bo).forEach(function(r){r==="default"||r==="__esModule"||r in Ke&&Ke[r]===bo[r]||(Ke[r]=bo[r])});var wo=Sd();Object.keys(wo).forEach(function(r){r==="default"||r==="__esModule"||r in Ke&&Ke[r]===wo[r]||(Ke[r]=wo[r])});var vo=Ed();Object.keys(vo).forEach(function(r){r==="default"||r==="__esModule"||r in Ke&&Ke[r]===vo[r]||(Ke[r]=vo[r])})});var it=x((ki,Rd)=>{u();"use strict";ki.__esModule=!0;ki.default=void 0;var TC=IC(kd()),RC=PC(Od());function Td(r){if(typeof WeakMap!="function")return null;var e=new WeakMap,t=new WeakMap;return(Td=function(n){return n?t:e})(r)}function PC(r,e){if(!e&&r&&r.__esModule)return r;if(r===null||typeof r!="object"&&typeof r!="function")return{default:r};var t=Td(e);if(t&&t.has(r))return t.get(r);var i={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var a in r)if(a!=="default"&&Object.prototype.hasOwnProperty.call(r,a)){var s=n?Object.getOwnPropertyDescriptor(r,a):null;s&&(s.get||s.set)?Object.defineProperty(i,a,s):i[a]=r[a]}return i.default=r,t&&t.set(r,i),i}function IC(r){return r&&r.__esModule?r:{default:r}}var xo=function(e){return new TC.default(e)};Object.assign(xo,RC);delete xo.__esModule;var DC=xo;ki.default=DC;Rd.exports=ki.default});function mt(r){return["fontSize","outline"].includes(r)?e=>(typeof e=="function"&&(e=e({})),Array.isArray(e)&&(e=e[0]),e):r==="fontFamily"?e=>{typeof e=="function"&&(e=e({}));let t=Array.isArray(e)&&ke(e[1])?e[0]:e;return Array.isArray(t)?t.join(", "):t}:["boxShadow","transitionProperty","transitionDuration","transitionDelay","transitionTimingFunction","backgroundImage","backgroundSize","backgroundColor","cursor","animation"].includes(r)?e=>(typeof e=="function"&&(e=e({})),Array.isArray(e)&&(e=e.join(", ")),e):["gridTemplateColumns","gridTemplateRows","objectPosition"].includes(r)?e=>(typeof e=="function"&&(e=e({})),typeof e=="string"&&(e=ee.list.comma(e).join(" ")),e):(e,t={})=>(typeof e=="function"&&(e=e(t)),e)}var Si=R(()=>{u();Ot();Kt()});var Md=x(($I,_o)=>{u();var{Rule:Pd,AtRule:qC}=$e(),Id=it();function ko(r,e){let t;try{Id(i=>{t=i}).processSync(r)}catch(i){throw r.includes(":")?e?e.error("Missed semicolon"):i:e?e.error(i.message):i}return t.at(0)}function Dd(r,e){let t=!1;return r.each(i=>{if(i.type==="nesting"){let n=e.clone({});i.value!=="&"?i.replaceWith(ko(i.value.replace("&",n.toString()))):i.replaceWith(n),t=!0}else"nodes"in i&&i.nodes&&Dd(i,e)&&(t=!0)}),t}function qd(r,e){let t=[];return r.selectors.forEach(i=>{let n=ko(i,r);e.selectors.forEach(a=>{if(!a)return;let s=ko(a,e);Dd(s,n)||(s.prepend(Id.combinator({value:" "})),s.prepend(n.clone({}))),t.push(s.toString())})}),t}function Vn(r,e){let t=r.prev();for(e.after(r);t&&t.type==="comment";){let i=t.prev();e.after(t),t=i}return r}function $C(r){return function e(t,i,n,a=n){let 
s=[];if(i.each(o=>{o.type==="rule"&&n?a&&(o.selectors=qd(t,o)):o.type==="atrule"&&o.nodes?r[o.name]?e(t,o,a):i[Ao]!==!1&&s.push(o):s.push(o)}),n&&s.length){let o=t.clone({nodes:[]});for(let l of s)o.append(l);i.prepend(o)}}}function So(r,e,t){let i=new Pd({selector:r,nodes:[]});return i.append(e),t.after(i),i}function $d(r,e){let t={};for(let i of r)t[i]=!0;if(e)for(let i of e)t[i.replace(/^@/,"")]=!0;return t}function LC(r){r=r.trim();let e=r.match(/^\((.*)\)$/);if(!e)return{type:"basic",selector:r};let t=e[1].match(/^(with(?:out)?):(.+)$/);if(t){let i=t[1]==="with",n=Object.fromEntries(t[2].trim().split(/\s+/).map(s=>[s,!0]));if(i&&n.all)return{type:"noop"};let a=s=>!!n[s];return n.all?a=()=>!0:i&&(a=s=>s==="all"?!1:!n[s]),{type:"withrules",escapes:a}}return{type:"unknown"}}function MC(r){let e=[],t=r.parent;for(;t&&t instanceof qC;)e.push(t),t=t.parent;return e}function NC(r){let e=r[Ld];if(!e)r.after(r.nodes);else{let t=r.nodes,i,n=-1,a,s,o,l=MC(r);if(l.forEach((c,f)=>{if(e(c.name))i=c,n=f,s=o;else{let d=o;o=c.clone({nodes:[]}),d&&o.append(d),a=a||o}}),i?s?(a.append(t),i.after(s)):i.after(t):r.after(t),r.next()&&i){let c;l.slice(0,n+1).forEach((f,d,p)=>{let h=c;c=f.clone({nodes:[]}),h&&c.append(h);let b=[],y=(p[d-1]||r).next();for(;y;)b.push(y),y=y.next();c.append(b)}),c&&(s||t[t.length-1]).after(c)}}r.remove()}var Ao=Symbol("rootRuleMergeSel"),Ld=Symbol("rootRuleEscapes");function BC(r){let{params:e}=r,{type:t,selector:i,escapes:n}=LC(e);if(t==="unknown")throw r.error(`Unknown @${r.name} parameter ${JSON.stringify(e)}`);if(t==="basic"&&i){let a=new Pd({selector:i,nodes:r.nodes});r.removeAll(),r.append(a)}r[Ld]=n,r[Ao]=n?!n("all"):t==="noop"}var Co=Symbol("hasRootRule");_o.exports=(r={})=>{let e=$d(["media","supports","layer","container"],r.bubble),t=$C(e),i=$d(["document","font-face","keyframes","-webkit-keyframes","-moz-keyframes"],r.unwrap),n=(r.rootRuleName||"at-root").replace(/^@/,""),a=r.preserveEmpty;return{postcssPlugin:"postcss-nested",Once(s){s.walkAtRules(n,o=>{BC(o),s[Co]=!0})},Rule(s){let o=!1,l=s,c=!1,f=[];s.each(d=>{d.type==="rule"?(f.length&&(l=So(s.selector,f,l),f=[]),c=!0,o=!0,d.selectors=qd(s,d),l=Vn(d,l)):d.type==="atrule"?(f.length&&(l=So(s.selector,f,l),f=[]),d.name===n?(o=!0,t(s,d,!0,d[Ao]),l=Vn(d,l)):e[d.name]?(c=!0,o=!0,t(s,d,!0),l=Vn(d,l)):i[d.name]?(c=!0,o=!0,t(s,d,!1),l=Vn(d,l)):c&&f.push(d)):d.type==="decl"&&c&&f.push(d)}),f.length&&(l=So(s.selector,f,l)),o&&a!==!0&&(s.raws.semicolon=!0,s.nodes.length===0&&s.remove())},RootExit(s){s[Co]&&(s.walkAtRules(n,NC),s[Co]=!1)}}};_o.exports.postcss=!0});var jd=x((LI,Fd)=>{u();"use strict";var Nd=/-(\w|$)/g,Bd=(r,e)=>e.toUpperCase(),FC=r=>(r=r.toLowerCase(),r==="float"?"cssFloat":r.startsWith("-ms-")?r.substr(1).replace(Nd,Bd):r.replace(Nd,Bd));Fd.exports=FC});var To=x((MI,zd)=>{u();var jC=jd(),zC={boxFlex:!0,boxFlexGroup:!0,columnCount:!0,flex:!0,flexGrow:!0,flexPositive:!0,flexShrink:!0,flexNegative:!0,fontWeight:!0,lineClamp:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,tabSize:!0,widows:!0,zIndex:!0,zoom:!0,fillOpacity:!0,strokeDashoffset:!0,strokeOpacity:!0,strokeWidth:!0};function Eo(r){return typeof r.nodes=="undefined"?!0:Oo(r)}function Oo(r){let e,t={};return r.each(i=>{if(i.type==="atrule")e="@"+i.name,i.params&&(e+=" "+i.params),typeof t[e]=="undefined"?t[e]=Eo(i):Array.isArray(t[e])?t[e].push(Eo(i)):t[e]=[t[e],Eo(i)];else if(i.type==="rule"){let n=Oo(i);if(t[i.selector])for(let a in n)t[i.selector][a]=n[a];else t[i.selector]=n}else 
if(i.type==="decl"){i.prop[0]==="-"&&i.prop[1]==="-"||i.parent&&i.parent.selector===":export"?e=i.prop:e=jC(i.prop);let n=i.value;!isNaN(i.value)&&zC[e]&&(n=parseFloat(i.value)),i.important&&(n+=" !important"),typeof t[e]=="undefined"?t[e]=n:Array.isArray(t[e])?t[e].push(n):t[e]=[t[e],n]}}),t}zd.exports=Oo});var Hn=x((NI,Wd)=>{u();var Ai=$e(),Ud=/\s*!important\s*$/i,UC={"box-flex":!0,"box-flex-group":!0,"column-count":!0,flex:!0,"flex-grow":!0,"flex-positive":!0,"flex-shrink":!0,"flex-negative":!0,"font-weight":!0,"line-clamp":!0,"line-height":!0,opacity:!0,order:!0,orphans:!0,"tab-size":!0,widows:!0,"z-index":!0,zoom:!0,"fill-opacity":!0,"stroke-dashoffset":!0,"stroke-opacity":!0,"stroke-width":!0};function VC(r){return r.replace(/([A-Z])/g,"-$1").replace(/^ms-/,"-ms-").toLowerCase()}function Vd(r,e,t){t===!1||t===null||(e.startsWith("--")||(e=VC(e)),typeof t=="number"&&(t===0||UC[e]?t=t.toString():t+="px"),e==="css-float"&&(e="float"),Ud.test(t)?(t=t.replace(Ud,""),r.push(Ai.decl({prop:e,value:t,important:!0}))):r.push(Ai.decl({prop:e,value:t})))}function Hd(r,e,t){let i=Ai.atRule({name:e[1],params:e[3]||""});typeof t=="object"&&(i.nodes=[],Ro(t,i)),r.push(i)}function Ro(r,e){let t,i,n;for(t in r)if(i=r[t],!(i===null||typeof i=="undefined"))if(t[0]==="@"){let a=t.match(/@(\S+)(\s+([\W\w]*)\s*)?/);if(Array.isArray(i))for(let s of i)Hd(e,a,s);else Hd(e,a,i)}else if(Array.isArray(i))for(let a of i)Vd(e,t,a);else typeof i=="object"?(n=Ai.rule({selector:t}),Ro(i,n),e.push(n)):Vd(e,t,i)}Wd.exports=function(r){let e=Ai.root();return Ro(r,e),e}});var Po=x((BI,Gd)=>{u();var HC=To();Gd.exports=function(e){return console&&console.warn&&e.warnings().forEach(t=>{let i=t.plugin||"PostCSS";console.warn(i+": "+t.text)}),HC(e.root)}});var Yd=x((FI,Qd)=>{u();var WC=$e(),GC=Po(),QC=Hn();Qd.exports=function(e){let t=WC(e);return async i=>{let n=await t.process(i,{parser:QC,from:void 0});return GC(n)}}});var Xd=x((jI,Kd)=>{u();var YC=$e(),KC=Po(),XC=Hn();Kd.exports=function(r){let e=YC(r);return t=>{let i=e.process(t,{parser:XC,from:void 0});return KC(i)}}});var Zd=x((zI,Jd)=>{u();var JC=To(),ZC=Hn(),e_=Yd(),t_=Xd();Jd.exports={objectify:JC,parse:ZC,async:e_,sync:t_}});var or,eh,UI,VI,HI,WI,th=R(()=>{u();or=pe(Zd()),eh=or.default,UI=or.default.objectify,VI=or.default.parse,HI=or.default.async,WI=or.default.sync});function lr(r){return Array.isArray(r)?r.flatMap(e=>ee([(0,rh.default)({bubble:["screen"]})]).process(e,{parser:eh}).root.nodes):lr([r])}var rh,Io=R(()=>{u();Ot();rh=pe(Md());th()});function ur(r,e,t=!1){if(r==="")return e;let i=typeof e=="string"?(0,ih.default)().astSync(e):e;return i.walkClasses(n=>{let a=n.value,s=t&&a.startsWith("-");n.value=s?`-${r}${a.slice(1)}`:`${r}${a}`}),typeof e=="string"?i.toString():i}var ih,Wn=R(()=>{u();ih=pe(it())});function Te(r){let e=nh.default.className();return e.value=r,jt(e?.raws?.value??e.value)}var nh,fr=R(()=>{u();nh=pe(it());Ki()});function Do(r){return jt(`.${Te(r)}`)}function Gn(r,e){return Do(Ci(r,e))}function Ci(r,e){return e==="DEFAULT"?r:e==="-"||e==="-DEFAULT"?`-${r}`:e.startsWith("-")?`-${r}${e}`:e.startsWith("/")?`${r}${e}`:`${r}-${e}`}var qo=R(()=>{u();fr();Ki()});function L(r,e=[[r,[r]]],{filterDefault:t=!1,...i}={}){let n=mt(r);return function({matchUtilities:a,theme:s}){for(let o of e){let 
l=Array.isArray(o[0])?o:[o];a(l.reduce((c,[f,d])=>Object.assign(c,{[f]:p=>d.reduce((h,b)=>Array.isArray(b)?Object.assign(h,{[b[0]]:b[1]}):Object.assign(h,{[b]:n(p)}),{})}),{}),{...i,values:t?Object.fromEntries(Object.entries(s(r)??{}).filter(([c])=>c!=="DEFAULT")):s(r)})}}}var sh=R(()=>{u();Si()});function Tt(r){return r=Array.isArray(r)?r:[r],r.map(e=>{let t=e.values.map(i=>i.raw!==void 0?i.raw:[i.min&&`(min-width: ${i.min})`,i.max&&`(max-width: ${i.max})`].filter(Boolean).join(" and "));return e.not?`not all and ${t}`:t}).join(", ")}var Qn=R(()=>{u()});function $o(r){return r.split(l_).map(t=>{let i=t.trim(),n={value:i},a=i.split(u_),s=new Set;for(let o of a)!s.has("DIRECTIONS")&&r_.has(o)?(n.direction=o,s.add("DIRECTIONS")):!s.has("PLAY_STATES")&&i_.has(o)?(n.playState=o,s.add("PLAY_STATES")):!s.has("FILL_MODES")&&n_.has(o)?(n.fillMode=o,s.add("FILL_MODES")):!s.has("ITERATION_COUNTS")&&(s_.has(o)||f_.test(o))?(n.iterationCount=o,s.add("ITERATION_COUNTS")):!s.has("TIMING_FUNCTION")&&a_.has(o)||!s.has("TIMING_FUNCTION")&&o_.some(l=>o.startsWith(`${l}(`))?(n.timingFunction=o,s.add("TIMING_FUNCTION")):!s.has("DURATION")&&ah.test(o)?(n.duration=o,s.add("DURATION")):!s.has("DELAY")&&ah.test(o)?(n.delay=o,s.add("DELAY")):s.has("NAME")?(n.unknown||(n.unknown=[]),n.unknown.push(o)):(n.name=o,s.add("NAME"));return n})}var r_,i_,n_,s_,a_,o_,l_,u_,ah,f_,oh=R(()=>{u();r_=new Set(["normal","reverse","alternate","alternate-reverse"]),i_=new Set(["running","paused"]),n_=new Set(["none","forwards","backwards","both"]),s_=new Set(["infinite"]),a_=new Set(["linear","ease","ease-in","ease-out","ease-in-out","step-start","step-end"]),o_=["cubic-bezier","steps"],l_=/\,(?![^(]*\))/g,u_=/\ +(?![^(]*\))/g,ah=/^(-?[\d.]+m?s)$/,f_=/^(\d+)$/});var lh,xe,uh=R(()=>{u();lh=r=>Object.assign({},...Object.entries(r??{}).flatMap(([e,t])=>typeof t=="object"?Object.entries(lh(t)).map(([i,n])=>({[e+(i==="DEFAULT"?"":`-${i}`)]:n})):[{[`${e}`]:t}])),xe=lh});var ch,fh=R(()=>{ch="3.4.14"});function Rt(r,e=!0){return Array.isArray(r)?r.map(t=>{if(e&&Array.isArray(t))throw new Error("The tuple syntax is not supported for `screens`.");if(typeof t=="string")return{name:t.toString(),not:!1,values:[{min:t,max:void 0}]};let[i,n]=t;return i=i.toString(),typeof n=="string"?{name:i,not:!1,values:[{min:n,max:void 0}]}:Array.isArray(n)?{name:i,not:!1,values:n.map(a=>dh(a))}:{name:i,not:!1,values:[dh(n)]}}):Rt(Object.entries(r??{}),!1)}function Yn(r){return r.values.length!==1?{result:!1,reason:"multiple-values"}:r.values[0].raw!==void 0?{result:!1,reason:"raw-values"}:r.values[0].min!==void 0&&r.values[0].max!==void 0?{result:!1,reason:"min-and-max"}:{result:!0,reason:null}}function ph(r,e,t){let i=Kn(e,r),n=Kn(t,r),a=Yn(i),s=Yn(n);if(a.reason==="multiple-values"||s.reason==="multiple-values")throw new Error("Attempted to sort a screen with multiple values. This should never happen. Please open a bug report.");if(a.reason==="raw-values"||s.reason==="raw-values")throw new Error("Attempted to sort a screen with raw values. This should never happen. Please open a bug report.");if(a.reason==="min-and-max"||s.reason==="min-and-max")throw new Error("Attempted to sort a screen with both min and max values. This should never happen. 
Please open a bug report.");let{min:o,max:l}=i.values[0],{min:c,max:f}=n.values[0];e.not&&([o,l]=[l,o]),t.not&&([c,f]=[f,c]),o=o===void 0?o:parseFloat(o),l=l===void 0?l:parseFloat(l),c=c===void 0?c:parseFloat(c),f=f===void 0?f:parseFloat(f);let[d,p]=r==="min"?[o,c]:[f,l];return d-p}function Kn(r,e){return typeof r=="object"?r:{name:"arbitrary-screen",values:[{[e]:r}]}}function dh({"min-width":r,min:e=r,max:t,raw:i}={}){return{min:e,max:t,raw:i}}var Xn=R(()=>{u()});function Jn(r,e){r.walkDecls(t=>{if(e.includes(t.prop)){t.remove();return}for(let i of e)t.value.includes(`/ var(${i})`)&&(t.value=t.value.replace(`/ var(${i})`,""))})}var hh=R(()=>{u()});var se,Xe,nt,ge,mh,gh=R(()=>{u();ft();et();Ot();sh();Qn();fr();oh();uh();Lr();ea();Kt();Si();fh();Be();Xn();Gs();hh();ct();Br();_i();se={childVariant:({addVariant:r})=>{r("*","& > *")},pseudoElementVariants:({addVariant:r})=>{r("first-letter","&::first-letter"),r("first-line","&::first-line"),r("marker",[({container:e})=>(Jn(e,["--tw-text-opacity"]),"& *::marker"),({container:e})=>(Jn(e,["--tw-text-opacity"]),"&::marker")]),r("selection",["& *::selection","&::selection"]),r("file","&::file-selector-button"),r("placeholder","&::placeholder"),r("backdrop","&::backdrop"),r("before",({container:e})=>(e.walkRules(t=>{let i=!1;t.walkDecls("content",()=>{i=!0}),i||t.prepend(ee.decl({prop:"content",value:"var(--tw-content)"}))}),"&::before")),r("after",({container:e})=>(e.walkRules(t=>{let i=!1;t.walkDecls("content",()=>{i=!0}),i||t.prepend(ee.decl({prop:"content",value:"var(--tw-content)"}))}),"&::after"))},pseudoClassVariants:({addVariant:r,matchVariant:e,config:t,prefix:i})=>{let n=[["first","&:first-child"],["last","&:last-child"],["only","&:only-child"],["odd","&:nth-child(odd)"],["even","&:nth-child(even)"],"first-of-type","last-of-type","only-of-type",["visited",({container:s})=>(Jn(s,["--tw-text-opacity","--tw-border-opacity","--tw-bg-opacity"]),"&:visited")],"target",["open","&[open]"],"default","checked","indeterminate","placeholder-shown","autofill","optional","required","valid","invalid","in-range","out-of-range","read-only","empty","focus-within",["hover",we(t(),"hoverOnlyWhenSupported")?"@media (hover: hover) and (pointer: fine) { &:hover }":"&:hover"],"focus","focus-visible","active","enabled","disabled"].map(s=>Array.isArray(s)?s:[s,`&:${s}`]);for(let[s,o]of n)r(s,l=>typeof o=="function"?o(l):o);let a={group:(s,{modifier:o})=>o?[`:merge(${i(".group")}\\/${Te(o)})`," &"]:[`:merge(${i(".group")})`," &"],peer:(s,{modifier:o})=>o?[`:merge(${i(".peer")}\\/${Te(o)})`," ~ &"]:[`:merge(${i(".peer")})`," ~ &"]};for(let[s,o]of Object.entries(a))e(s,(l="",c)=>{let f=K(typeof l=="function"?l(c):l);f.includes("&")||(f="&"+f);let[d,p]=o("",c),h=null,b=null,v=0;for(let y=0;y{r("ltr",'&:where([dir="ltr"], [dir="ltr"] *)'),r("rtl",'&:where([dir="rtl"], [dir="rtl"] *)')},reducedMotionVariants:({addVariant:r})=>{r("motion-safe","@media (prefers-reduced-motion: no-preference)"),r("motion-reduce","@media (prefers-reduced-motion: reduce)")},darkVariants:({config:r,addVariant:e})=>{let[t,i=".dark"]=[].concat(r("darkMode","media"));if(t===!1&&(t="media",G.warn("darkmode-false",["The `darkMode` option in your Tailwind CSS configuration is set to `false`, which now behaves the same as `media`.","Change `darkMode` to `media` or remove it entirely.","https://tailwindcss.com/docs/upgrade-guide#remove-dark-mode-configuration"])),t==="variant"){let n;if(Array.isArray(i)||typeof i=="function"?n=i:typeof i=="string"&&(n=[i]),Array.isArray(n))for(let a of 
n)a===".dark"?(t=!1,G.warn("darkmode-variant-without-selector",["When using `variant` for `darkMode`, you must provide a selector.",'Example: `darkMode: ["variant", ".your-selector &"]`'])):a.includes("&")||(t=!1,G.warn("darkmode-variant-without-ampersand",["When using `variant` for `darkMode`, your selector must contain `&`.",'Example `darkMode: ["variant", ".your-selector &"]`']));i=n}t==="selector"?e("dark",`&:where(${i}, ${i} *)`):t==="media"?e("dark","@media (prefers-color-scheme: dark)"):t==="variant"?e("dark",i):t==="class"&&e("dark",`&:is(${i} *)`)},printVariant:({addVariant:r})=>{r("print","@media print")},screenVariants:({theme:r,addVariant:e,matchVariant:t})=>{let i=r("screens")??{},n=Object.values(i).every(w=>typeof w=="string"),a=Rt(r("screens")),s=new Set([]);function o(w){return w.match(/(\D+)$/)?.[1]??"(none)"}function l(w){w!==void 0&&s.add(o(w))}function c(w){return l(w),s.size===1}for(let w of a)for(let k of w.values)l(k.min),l(k.max);let f=s.size<=1;function d(w){return Object.fromEntries(a.filter(k=>Yn(k).result).map(k=>{let{min:S,max:E}=k.values[0];if(w==="min"&&S!==void 0)return k;if(w==="min"&&E!==void 0)return{...k,not:!k.not};if(w==="max"&&E!==void 0)return k;if(w==="max"&&S!==void 0)return{...k,not:!k.not}}).map(k=>[k.name,k]))}function p(w){return(k,S)=>ph(w,k.value,S.value)}let h=p("max"),b=p("min");function v(w){return k=>{if(n)if(f){if(typeof k=="string"&&!c(k))return G.warn("minmax-have-mixed-units",["The `min-*` and `max-*` variants are not supported with a `screens` configuration containing mixed units."]),[]}else return G.warn("mixed-screen-units",["The `min-*` and `max-*` variants are not supported with a `screens` configuration containing mixed units."]),[];else return G.warn("complex-screen-config",["The `min-*` and `max-*` variants are not supported with a `screens` configuration containing objects."]),[];return[`@media ${Tt(Kn(k,w))}`]}}t("max",v("max"),{sort:h,values:n?d("max"):{}});let y="min-screens";for(let w of a)e(w.name,`@media ${Tt(w)}`,{id:y,sort:n&&f?b:void 0,value:w});t("min",v("min"),{id:y,sort:b})},supportsVariants:({matchVariant:r,theme:e})=>{r("supports",(t="")=>{let i=K(t),n=/^\w*\s*\(/.test(i);return i=n?i.replace(/\b(and|or|not)\b/g," $1 "):i,n?`@supports ${i}`:(i.includes(":")||(i=`${i}: var(--tw)`),i.startsWith("(")&&i.endsWith(")")||(i=`(${i})`),`@supports ${i}`)},{values:e("supports")??{}})},hasVariants:({matchVariant:r,prefix:e})=>{r("has",t=>`&:has(${K(t)})`,{values:{},[Pt]:{respectPrefix:!1}}),r("group-has",(t,{modifier:i})=>i?`:merge(${e(".group")}\\/${i}):has(${K(t)}) &`:`:merge(${e(".group")}):has(${K(t)}) &`,{values:{},[Pt]:{respectPrefix:!1}}),r("peer-has",(t,{modifier:i})=>i?`:merge(${e(".peer")}\\/${i}):has(${K(t)}) ~ &`:`:merge(${e(".peer")}):has(${K(t)}) ~ &`,{values:{},[Pt]:{respectPrefix:!1}})},ariaVariants:({matchVariant:r,theme:e})=>{r("aria",t=>`&[aria-${Ye(K(t))}]`,{values:e("aria")??{}}),r("group-aria",(t,{modifier:i})=>i?`:merge(.group\\/${i})[aria-${Ye(K(t))}] &`:`:merge(.group)[aria-${Ye(K(t))}] &`,{values:e("aria")??{}}),r("peer-aria",(t,{modifier:i})=>i?`:merge(.peer\\/${i})[aria-${Ye(K(t))}] ~ &`:`:merge(.peer)[aria-${Ye(K(t))}] ~ &`,{values:e("aria")??{}})},dataVariants:({matchVariant:r,theme:e})=>{r("data",t=>`&[data-${Ye(K(t))}]`,{values:e("data")??{}}),r("group-data",(t,{modifier:i})=>i?`:merge(.group\\/${i})[data-${Ye(K(t))}] &`:`:merge(.group)[data-${Ye(K(t))}] &`,{values:e("data")??{}}),r("peer-data",(t,{modifier:i})=>i?`:merge(.peer\\/${i})[data-${Ye(K(t))}] ~ &`:`:merge(.peer)[data-${Ye(K(t))}] 
~ &`,{values:e("data")??{}})},orientationVariants:({addVariant:r})=>{r("portrait","@media (orientation: portrait)"),r("landscape","@media (orientation: landscape)")},prefersContrastVariants:({addVariant:r})=>{r("contrast-more","@media (prefers-contrast: more)"),r("contrast-less","@media (prefers-contrast: less)")},forcedColorsVariants:({addVariant:r})=>{r("forced-colors","@media (forced-colors: active)")}},Xe=["translate(var(--tw-translate-x), var(--tw-translate-y))","rotate(var(--tw-rotate))","skewX(var(--tw-skew-x))","skewY(var(--tw-skew-y))","scaleX(var(--tw-scale-x))","scaleY(var(--tw-scale-y))"].join(" "),nt=["var(--tw-blur)","var(--tw-brightness)","var(--tw-contrast)","var(--tw-grayscale)","var(--tw-hue-rotate)","var(--tw-invert)","var(--tw-saturate)","var(--tw-sepia)","var(--tw-drop-shadow)"].join(" "),ge=["var(--tw-backdrop-blur)","var(--tw-backdrop-brightness)","var(--tw-backdrop-contrast)","var(--tw-backdrop-grayscale)","var(--tw-backdrop-hue-rotate)","var(--tw-backdrop-invert)","var(--tw-backdrop-opacity)","var(--tw-backdrop-saturate)","var(--tw-backdrop-sepia)"].join(" "),mh={preflight:({addBase:r})=>{let e=ee.parse(`*,::after,::before{box-sizing:border-box;border-width:0;border-style:solid;border-color:theme('borderColor.DEFAULT', currentColor)}::after,::before{--tw-content:''}:host,html{line-height:1.5;-webkit-text-size-adjust:100%;-moz-tab-size:4;tab-size:4;font-family:theme('fontFamily.sans', ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji");font-feature-settings:theme('fontFamily.sans[1].fontFeatureSettings', normal);font-variation-settings:theme('fontFamily.sans[1].fontVariationSettings', normal);-webkit-tap-highlight-color:transparent}body{margin:0;line-height:inherit}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,pre,samp{font-family:theme('fontFamily.mono', ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace);font-feature-settings:theme('fontFamily.mono[1].fontFeatureSettings', normal);font-variation-settings:theme('fontFamily.mono[1].fontVariationSettings', 
normal);font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-family:inherit;font-feature-settings:inherit;font-variation-settings:inherit;font-size:100%;font-weight:inherit;line-height:inherit;letter-spacing:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}button,input:where([type=button]),input:where([type=reset]),input:where([type=submit]){-webkit-appearance:button;background-color:transparent;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dd,dl,figure,h1,h2,h3,h4,h5,h6,hr,p,pre{margin:0}fieldset{margin:0;padding:0}legend{padding:0}menu,ol,ul{list-style:none;margin:0;padding:0}dialog{padding:0}textarea{resize:vertical}input::placeholder,textarea::placeholder{opacity:1;color:theme('colors.gray.4', #9ca3af)}[role=button],button{cursor:pointer}:disabled{cursor:default}audio,canvas,embed,iframe,img,object,svg,video{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}[hidden]:where(:not([hidden=until-found])){display:none}`);r([ee.comment({text:`! tailwindcss v${ch} | MIT License | https://tailwindcss.com`}),...e.nodes])},container:(()=>{function r(t=[]){return t.flatMap(i=>i.values.map(n=>n.min)).filter(i=>i!==void 0)}function e(t,i,n){if(typeof n=="undefined")return[];if(!(typeof n=="object"&&n!==null))return[{screen:"DEFAULT",minWidth:0,padding:n}];let a=[];n.DEFAULT&&a.push({screen:"DEFAULT",minWidth:0,padding:n.DEFAULT});for(let s of t)for(let o of i)for(let{min:l}of o.values)l===s&&a.push({minWidth:s,padding:n[o.name]});return a}return function({addComponents:t,theme:i}){let n=Rt(i("container.screens",i("screens"))),a=r(n),s=e(a,n,i("container.padding")),o=c=>{let f=s.find(d=>d.minWidth===c);return f?{paddingRight:f.padding,paddingLeft:f.padding}:{}},l=Array.from(new Set(a.slice().sort((c,f)=>parseInt(c)-parseInt(f)))).map(c=>({[`@media (min-width: ${c})`]:{".container":{"max-width":c,...o(c)}}}));t([{".container":Object.assign({width:"100%"},i("container.center",!1)?{marginRight:"auto",marginLeft:"auto"}:{},o(0))},...l])}})(),accessibility:({addUtilities:r})=>{r({".sr-only":{position:"absolute",width:"1px",height:"1px",padding:"0",margin:"-1px",overflow:"hidden",clip:"rect(0, 0, 0, 
0)",whiteSpace:"nowrap",borderWidth:"0"},".not-sr-only":{position:"static",width:"auto",height:"auto",padding:"0",margin:"0",overflow:"visible",clip:"auto",whiteSpace:"normal"}})},pointerEvents:({addUtilities:r})=>{r({".pointer-events-none":{"pointer-events":"none"},".pointer-events-auto":{"pointer-events":"auto"}})},visibility:({addUtilities:r})=>{r({".visible":{visibility:"visible"},".invisible":{visibility:"hidden"},".collapse":{visibility:"collapse"}})},position:({addUtilities:r})=>{r({".static":{position:"static"},".fixed":{position:"fixed"},".absolute":{position:"absolute"},".relative":{position:"relative"},".sticky":{position:"sticky"}})},inset:L("inset",[["inset",["inset"]],[["inset-x",["left","right"]],["inset-y",["top","bottom"]]],[["start",["inset-inline-start"]],["end",["inset-inline-end"]],["top",["top"]],["right",["right"]],["bottom",["bottom"]],["left",["left"]]]],{supportsNegativeValues:!0}),isolation:({addUtilities:r})=>{r({".isolate":{isolation:"isolate"},".isolation-auto":{isolation:"auto"}})},zIndex:L("zIndex",[["z",["zIndex"]]],{supportsNegativeValues:!0}),order:L("order",void 0,{supportsNegativeValues:!0}),gridColumn:L("gridColumn",[["col",["gridColumn"]]]),gridColumnStart:L("gridColumnStart",[["col-start",["gridColumnStart"]]],{supportsNegativeValues:!0}),gridColumnEnd:L("gridColumnEnd",[["col-end",["gridColumnEnd"]]],{supportsNegativeValues:!0}),gridRow:L("gridRow",[["row",["gridRow"]]]),gridRowStart:L("gridRowStart",[["row-start",["gridRowStart"]]],{supportsNegativeValues:!0}),gridRowEnd:L("gridRowEnd",[["row-end",["gridRowEnd"]]],{supportsNegativeValues:!0}),float:({addUtilities:r})=>{r({".float-start":{float:"inline-start"},".float-end":{float:"inline-end"},".float-right":{float:"right"},".float-left":{float:"left"},".float-none":{float:"none"}})},clear:({addUtilities:r})=>{r({".clear-start":{clear:"inline-start"},".clear-end":{clear:"inline-end"},".clear-left":{clear:"left"},".clear-right":{clear:"right"},".clear-both":{clear:"both"},".clear-none":{clear:"none"}})},margin:L("margin",[["m",["margin"]],[["mx",["margin-left","margin-right"]],["my",["margin-top","margin-bottom"]]],[["ms",["margin-inline-start"]],["me",["margin-inline-end"]],["mt",["margin-top"]],["mr",["margin-right"]],["mb",["margin-bottom"]],["ml",["margin-left"]]]],{supportsNegativeValues:!0}),boxSizing:({addUtilities:r})=>{r({".box-border":{"box-sizing":"border-box"},".box-content":{"box-sizing":"content-box"}})},lineClamp:({matchUtilities:r,addUtilities:e,theme:t})=>{r({"line-clamp":i=>({overflow:"hidden",display:"-webkit-box","-webkit-box-orient":"vertical","-webkit-line-clamp":`${i}`})},{values:t("lineClamp")}),e({".line-clamp-none":{overflow:"visible",display:"block","-webkit-box-orient":"horizontal","-webkit-line-clamp":"none"}})},display:({addUtilities:r})=>{r({".block":{display:"block"},".inline-block":{display:"inline-block"},".inline":{display:"inline"},".flex":{display:"flex"},".inline-flex":{display:"inline-flex"},".table":{display:"table"},".inline-table":{display:"inline-table"},".table-caption":{display:"table-caption"},".table-cell":{display:"table-cell"},".table-column":{display:"table-column"},".table-column-group":{display:"table-column-group"},".table-footer-group":{display:"table-footer-group"},".table-header-group":{display:"table-header-group"},".table-row-group":{display:"table-row-group"},".table-row":{display:"table-row"},".flow-root":{display:"flow-root"},".grid":{display:"grid"},".inline-grid":{display:"inline-grid"},".contents":{display:"contents"},".list-item":{displa
y:"list-item"},".hidden":{display:"none"}})},aspectRatio:L("aspectRatio",[["aspect",["aspect-ratio"]]]),size:L("size",[["size",["width","height"]]]),height:L("height",[["h",["height"]]]),maxHeight:L("maxHeight",[["max-h",["maxHeight"]]]),minHeight:L("minHeight",[["min-h",["minHeight"]]]),width:L("width",[["w",["width"]]]),minWidth:L("minWidth",[["min-w",["minWidth"]]]),maxWidth:L("maxWidth",[["max-w",["maxWidth"]]]),flex:L("flex"),flexShrink:L("flexShrink",[["flex-shrink",["flex-shrink"]],["shrink",["flex-shrink"]]]),flexGrow:L("flexGrow",[["flex-grow",["flex-grow"]],["grow",["flex-grow"]]]),flexBasis:L("flexBasis",[["basis",["flex-basis"]]]),tableLayout:({addUtilities:r})=>{r({".table-auto":{"table-layout":"auto"},".table-fixed":{"table-layout":"fixed"}})},captionSide:({addUtilities:r})=>{r({".caption-top":{"caption-side":"top"},".caption-bottom":{"caption-side":"bottom"}})},borderCollapse:({addUtilities:r})=>{r({".border-collapse":{"border-collapse":"collapse"},".border-separate":{"border-collapse":"separate"}})},borderSpacing:({addDefaults:r,matchUtilities:e,theme:t})=>{r("border-spacing",{"--tw-border-spacing-x":0,"--tw-border-spacing-y":0}),e({"border-spacing":i=>({"--tw-border-spacing-x":i,"--tw-border-spacing-y":i,"@defaults border-spacing":{},"border-spacing":"var(--tw-border-spacing-x) var(--tw-border-spacing-y)"}),"border-spacing-x":i=>({"--tw-border-spacing-x":i,"@defaults border-spacing":{},"border-spacing":"var(--tw-border-spacing-x) var(--tw-border-spacing-y)"}),"border-spacing-y":i=>({"--tw-border-spacing-y":i,"@defaults border-spacing":{},"border-spacing":"var(--tw-border-spacing-x) var(--tw-border-spacing-y)"})},{values:t("borderSpacing")})},transformOrigin:L("transformOrigin",[["origin",["transformOrigin"]]]),translate:L("translate",[[["translate-x",[["@defaults transform",{}],"--tw-translate-x",["transform",Xe]]],["translate-y",[["@defaults transform",{}],"--tw-translate-y",["transform",Xe]]]]],{supportsNegativeValues:!0}),rotate:L("rotate",[["rotate",[["@defaults transform",{}],"--tw-rotate",["transform",Xe]]]],{supportsNegativeValues:!0}),skew:L("skew",[[["skew-x",[["@defaults transform",{}],"--tw-skew-x",["transform",Xe]]],["skew-y",[["@defaults transform",{}],"--tw-skew-y",["transform",Xe]]]]],{supportsNegativeValues:!0}),scale:L("scale",[["scale",[["@defaults transform",{}],"--tw-scale-x","--tw-scale-y",["transform",Xe]]],[["scale-x",[["@defaults transform",{}],"--tw-scale-x",["transform",Xe]]],["scale-y",[["@defaults transform",{}],"--tw-scale-y",["transform",Xe]]]]],{supportsNegativeValues:!0}),transform:({addDefaults:r,addUtilities:e})=>{r("transform",{"--tw-translate-x":"0","--tw-translate-y":"0","--tw-rotate":"0","--tw-skew-x":"0","--tw-skew-y":"0","--tw-scale-x":"1","--tw-scale-y":"1"}),e({".transform":{"@defaults transform":{},transform:Xe},".transform-cpu":{transform:Xe},".transform-gpu":{transform:Xe.replace("translate(var(--tw-translate-x), var(--tw-translate-y))","translate3d(var(--tw-translate-x), var(--tw-translate-y), 0)")},".transform-none":{transform:"none"}})},animation:({matchUtilities:r,theme:e,config:t})=>{let i=a=>Te(t("prefix")+a),n=Object.fromEntries(Object.entries(e("keyframes")??{}).map(([a,s])=>[a,{[`@keyframes ${i(a)}`]:s}]));r({animate:a=>{let s=$o(a);return[...s.flatMap(o=>n[o.name]),{animation:s.map(({name:o,value:l})=>o===void 0||n[o]===void 0?l:l.replace(o,i(o))).join(", ")}]}},{values:e("animation")})},cursor:L("cursor"),touchAction:({addDefaults:r,addUtilities:e})=>{r("touch-action",{"--tw-pan-x":" ","--tw-pan-y":" 
","--tw-pinch-zoom":" "});let t="var(--tw-pan-x) var(--tw-pan-y) var(--tw-pinch-zoom)";e({".touch-auto":{"touch-action":"auto"},".touch-none":{"touch-action":"none"},".touch-pan-x":{"@defaults touch-action":{},"--tw-pan-x":"pan-x","touch-action":t},".touch-pan-left":{"@defaults touch-action":{},"--tw-pan-x":"pan-left","touch-action":t},".touch-pan-right":{"@defaults touch-action":{},"--tw-pan-x":"pan-right","touch-action":t},".touch-pan-y":{"@defaults touch-action":{},"--tw-pan-y":"pan-y","touch-action":t},".touch-pan-up":{"@defaults touch-action":{},"--tw-pan-y":"pan-up","touch-action":t},".touch-pan-down":{"@defaults touch-action":{},"--tw-pan-y":"pan-down","touch-action":t},".touch-pinch-zoom":{"@defaults touch-action":{},"--tw-pinch-zoom":"pinch-zoom","touch-action":t},".touch-manipulation":{"touch-action":"manipulation"}})},userSelect:({addUtilities:r})=>{r({".select-none":{"user-select":"none"},".select-text":{"user-select":"text"},".select-all":{"user-select":"all"},".select-auto":{"user-select":"auto"}})},resize:({addUtilities:r})=>{r({".resize-none":{resize:"none"},".resize-y":{resize:"vertical"},".resize-x":{resize:"horizontal"},".resize":{resize:"both"}})},scrollSnapType:({addDefaults:r,addUtilities:e})=>{r("scroll-snap-type",{"--tw-scroll-snap-strictness":"proximity"}),e({".snap-none":{"scroll-snap-type":"none"},".snap-x":{"@defaults scroll-snap-type":{},"scroll-snap-type":"x var(--tw-scroll-snap-strictness)"},".snap-y":{"@defaults scroll-snap-type":{},"scroll-snap-type":"y var(--tw-scroll-snap-strictness)"},".snap-both":{"@defaults scroll-snap-type":{},"scroll-snap-type":"both var(--tw-scroll-snap-strictness)"},".snap-mandatory":{"--tw-scroll-snap-strictness":"mandatory"},".snap-proximity":{"--tw-scroll-snap-strictness":"proximity"}})},scrollSnapAlign:({addUtilities:r})=>{r({".snap-start":{"scroll-snap-align":"start"},".snap-end":{"scroll-snap-align":"end"},".snap-center":{"scroll-snap-align":"center"},".snap-align-none":{"scroll-snap-align":"none"}})},scrollSnapStop:({addUtilities:r})=>{r({".snap-normal":{"scroll-snap-stop":"normal"},".snap-always":{"scroll-snap-stop":"always"}})},scrollMargin:L("scrollMargin",[["scroll-m",["scroll-margin"]],[["scroll-mx",["scroll-margin-left","scroll-margin-right"]],["scroll-my",["scroll-margin-top","scroll-margin-bottom"]]],[["scroll-ms",["scroll-margin-inline-start"]],["scroll-me",["scroll-margin-inline-end"]],["scroll-mt",["scroll-margin-top"]],["scroll-mr",["scroll-margin-right"]],["scroll-mb",["scroll-margin-bottom"]],["scroll-ml",["scroll-margin-left"]]]],{supportsNegativeValues:!0}),scrollPadding:L("scrollPadding",[["scroll-p",["scroll-padding"]],[["scroll-px",["scroll-padding-left","scroll-padding-right"]],["scroll-py",["scroll-padding-top","scroll-padding-bottom"]]],[["scroll-ps",["scroll-padding-inline-start"]],["scroll-pe",["scroll-padding-inline-end"]],["scroll-pt",["scroll-padding-top"]],["scroll-pr",["scroll-padding-right"]],["scroll-pb",["scroll-padding-bottom"]],["scroll-pl",["scroll-padding-left"]]]]),listStylePosition:({addUtilities:r})=>{r({".list-inside":{"list-style-position":"inside"},".list-outside":{"list-style-position":"outside"}})},listStyleType:L("listStyleType",[["list",["listStyleType"]]]),listStyleImage:L("listStyleImage",[["list-image",["listStyleImage"]]]),appearance:({addUtilities:r})=>{r({".appearance-none":{appearance:"none"},".appearance-auto":{appearance:"auto"}})},columns:L("columns",[["columns",["columns"]]]),breakBefore:({addUtilities:r})=>{r({".break-before-auto":{"break-before":"auto"},".break-before
-avoid":{"break-before":"avoid"},".break-before-all":{"break-before":"all"},".break-before-avoid-page":{"break-before":"avoid-page"},".break-before-page":{"break-before":"page"},".break-before-left":{"break-before":"left"},".break-before-right":{"break-before":"right"},".break-before-column":{"break-before":"column"}})},breakInside:({addUtilities:r})=>{r({".break-inside-auto":{"break-inside":"auto"},".break-inside-avoid":{"break-inside":"avoid"},".break-inside-avoid-page":{"break-inside":"avoid-page"},".break-inside-avoid-column":{"break-inside":"avoid-column"}})},breakAfter:({addUtilities:r})=>{r({".break-after-auto":{"break-after":"auto"},".break-after-avoid":{"break-after":"avoid"},".break-after-all":{"break-after":"all"},".break-after-avoid-page":{"break-after":"avoid-page"},".break-after-page":{"break-after":"page"},".break-after-left":{"break-after":"left"},".break-after-right":{"break-after":"right"},".break-after-column":{"break-after":"column"}})},gridAutoColumns:L("gridAutoColumns",[["auto-cols",["gridAutoColumns"]]]),gridAutoFlow:({addUtilities:r})=>{r({".grid-flow-row":{gridAutoFlow:"row"},".grid-flow-col":{gridAutoFlow:"column"},".grid-flow-dense":{gridAutoFlow:"dense"},".grid-flow-row-dense":{gridAutoFlow:"row dense"},".grid-flow-col-dense":{gridAutoFlow:"column dense"}})},gridAutoRows:L("gridAutoRows",[["auto-rows",["gridAutoRows"]]]),gridTemplateColumns:L("gridTemplateColumns",[["grid-cols",["gridTemplateColumns"]]]),gridTemplateRows:L("gridTemplateRows",[["grid-rows",["gridTemplateRows"]]]),flexDirection:({addUtilities:r})=>{r({".flex-row":{"flex-direction":"row"},".flex-row-reverse":{"flex-direction":"row-reverse"},".flex-col":{"flex-direction":"column"},".flex-col-reverse":{"flex-direction":"column-reverse"}})},flexWrap:({addUtilities:r})=>{r({".flex-wrap":{"flex-wrap":"wrap"},".flex-wrap-reverse":{"flex-wrap":"wrap-reverse"},".flex-nowrap":{"flex-wrap":"nowrap"}})},placeContent:({addUtilities:r})=>{r({".place-content-center":{"place-content":"center"},".place-content-start":{"place-content":"start"},".place-content-end":{"place-content":"end"},".place-content-between":{"place-content":"space-between"},".place-content-around":{"place-content":"space-around"},".place-content-evenly":{"place-content":"space-evenly"},".place-content-baseline":{"place-content":"baseline"},".place-content-stretch":{"place-content":"stretch"}})},placeItems:({addUtilities:r})=>{r({".place-items-start":{"place-items":"start"},".place-items-end":{"place-items":"end"},".place-items-center":{"place-items":"center"},".place-items-baseline":{"place-items":"baseline"},".place-items-stretch":{"place-items":"stretch"}})},alignContent:({addUtilities:r})=>{r({".content-normal":{"align-content":"normal"},".content-center":{"align-content":"center"},".content-start":{"align-content":"flex-start"},".content-end":{"align-content":"flex-end"},".content-between":{"align-content":"space-between"},".content-around":{"align-content":"space-around"},".content-evenly":{"align-content":"space-evenly"},".content-baseline":{"align-content":"baseline"},".content-stretch":{"align-content":"stretch"}})},alignItems:({addUtilities:r})=>{r({".items-start":{"align-items":"flex-start"},".items-end":{"align-items":"flex-end"},".items-center":{"align-items":"center"},".items-baseline":{"align-items":"baseline"},".items-stretch":{"align-items":"stretch"}})},justifyContent:({addUtilities:r})=>{r({".justify-normal":{"justify-content":"normal"},".justify-start":{"justify-content":"flex-start"},".justify-end":{"justify-content":"flex-
end"},".justify-center":{"justify-content":"center"},".justify-between":{"justify-content":"space-between"},".justify-around":{"justify-content":"space-around"},".justify-evenly":{"justify-content":"space-evenly"},".justify-stretch":{"justify-content":"stretch"}})},justifyItems:({addUtilities:r})=>{r({".justify-items-start":{"justify-items":"start"},".justify-items-end":{"justify-items":"end"},".justify-items-center":{"justify-items":"center"},".justify-items-stretch":{"justify-items":"stretch"}})},gap:L("gap",[["gap",["gap"]],[["gap-x",["columnGap"]],["gap-y",["rowGap"]]]]),space:({matchUtilities:r,addUtilities:e,theme:t})=>{r({"space-x":i=>(i=i==="0"?"0px":i,{"& > :not([hidden]) ~ :not([hidden])":{"--tw-space-x-reverse":"0","margin-right":`calc(${i} * var(--tw-space-x-reverse))`,"margin-left":`calc(${i} * calc(1 - var(--tw-space-x-reverse)))`}}),"space-y":i=>(i=i==="0"?"0px":i,{"& > :not([hidden]) ~ :not([hidden])":{"--tw-space-y-reverse":"0","margin-top":`calc(${i} * calc(1 - var(--tw-space-y-reverse)))`,"margin-bottom":`calc(${i} * var(--tw-space-y-reverse))`}})},{values:t("space"),supportsNegativeValues:!0}),e({".space-y-reverse > :not([hidden]) ~ :not([hidden])":{"--tw-space-y-reverse":"1"},".space-x-reverse > :not([hidden]) ~ :not([hidden])":{"--tw-space-x-reverse":"1"}})},divideWidth:({matchUtilities:r,addUtilities:e,theme:t})=>{r({"divide-x":i=>(i=i==="0"?"0px":i,{"& > :not([hidden]) ~ :not([hidden])":{"@defaults border-width":{},"--tw-divide-x-reverse":"0","border-right-width":`calc(${i} * var(--tw-divide-x-reverse))`,"border-left-width":`calc(${i} * calc(1 - var(--tw-divide-x-reverse)))`}}),"divide-y":i=>(i=i==="0"?"0px":i,{"& > :not([hidden]) ~ :not([hidden])":{"@defaults border-width":{},"--tw-divide-y-reverse":"0","border-top-width":`calc(${i} * calc(1 - var(--tw-divide-y-reverse)))`,"border-bottom-width":`calc(${i} * var(--tw-divide-y-reverse))`}})},{values:t("divideWidth"),type:["line-width","length","any"]}),e({".divide-y-reverse > :not([hidden]) ~ :not([hidden])":{"@defaults border-width":{},"--tw-divide-y-reverse":"1"},".divide-x-reverse > :not([hidden]) ~ :not([hidden])":{"@defaults border-width":{},"--tw-divide-x-reverse":"1"}})},divideStyle:({addUtilities:r})=>{r({".divide-solid > :not([hidden]) ~ :not([hidden])":{"border-style":"solid"},".divide-dashed > :not([hidden]) ~ :not([hidden])":{"border-style":"dashed"},".divide-dotted > :not([hidden]) ~ :not([hidden])":{"border-style":"dotted"},".divide-double > :not([hidden]) ~ :not([hidden])":{"border-style":"double"},".divide-none > :not([hidden]) ~ :not([hidden])":{"border-style":"none"}})},divideColor:({matchUtilities:r,theme:e,corePlugins:t})=>{r({divide:i=>t("divideOpacity")?{["& > :not([hidden]) ~ :not([hidden])"]:Ae({color:i,property:"border-color",variable:"--tw-divide-opacity"})}:{["& > :not([hidden]) ~ :not([hidden])"]:{"border-color":X(i)}}},{values:(({DEFAULT:i,...n})=>n)(xe(e("divideColor"))),type:["color","any"]})},divideOpacity:({matchUtilities:r,theme:e})=>{r({"divide-opacity":t=>({["& > :not([hidden]) ~ 
:not([hidden])"]:{"--tw-divide-opacity":t}})},{values:e("divideOpacity")})},placeSelf:({addUtilities:r})=>{r({".place-self-auto":{"place-self":"auto"},".place-self-start":{"place-self":"start"},".place-self-end":{"place-self":"end"},".place-self-center":{"place-self":"center"},".place-self-stretch":{"place-self":"stretch"}})},alignSelf:({addUtilities:r})=>{r({".self-auto":{"align-self":"auto"},".self-start":{"align-self":"flex-start"},".self-end":{"align-self":"flex-end"},".self-center":{"align-self":"center"},".self-stretch":{"align-self":"stretch"},".self-baseline":{"align-self":"baseline"}})},justifySelf:({addUtilities:r})=>{r({".justify-self-auto":{"justify-self":"auto"},".justify-self-start":{"justify-self":"start"},".justify-self-end":{"justify-self":"end"},".justify-self-center":{"justify-self":"center"},".justify-self-stretch":{"justify-self":"stretch"}})},overflow:({addUtilities:r})=>{r({".overflow-auto":{overflow:"auto"},".overflow-hidden":{overflow:"hidden"},".overflow-clip":{overflow:"clip"},".overflow-visible":{overflow:"visible"},".overflow-scroll":{overflow:"scroll"},".overflow-x-auto":{"overflow-x":"auto"},".overflow-y-auto":{"overflow-y":"auto"},".overflow-x-hidden":{"overflow-x":"hidden"},".overflow-y-hidden":{"overflow-y":"hidden"},".overflow-x-clip":{"overflow-x":"clip"},".overflow-y-clip":{"overflow-y":"clip"},".overflow-x-visible":{"overflow-x":"visible"},".overflow-y-visible":{"overflow-y":"visible"},".overflow-x-scroll":{"overflow-x":"scroll"},".overflow-y-scroll":{"overflow-y":"scroll"}})},overscrollBehavior:({addUtilities:r})=>{r({".overscroll-auto":{"overscroll-behavior":"auto"},".overscroll-contain":{"overscroll-behavior":"contain"},".overscroll-none":{"overscroll-behavior":"none"},".overscroll-y-auto":{"overscroll-behavior-y":"auto"},".overscroll-y-contain":{"overscroll-behavior-y":"contain"},".overscroll-y-none":{"overscroll-behavior-y":"none"},".overscroll-x-auto":{"overscroll-behavior-x":"auto"},".overscroll-x-contain":{"overscroll-behavior-x":"contain"},".overscroll-x-none":{"overscroll-behavior-x":"none"}})},scrollBehavior:({addUtilities:r})=>{r({".scroll-auto":{"scroll-behavior":"auto"},".scroll-smooth":{"scroll-behavior":"smooth"}})},textOverflow:({addUtilities:r})=>{r({".truncate":{overflow:"hidden","text-overflow":"ellipsis","white-space":"nowrap"},".overflow-ellipsis":{"text-overflow":"ellipsis"},".text-ellipsis":{"text-overflow":"ellipsis"},".text-clip":{"text-overflow":"clip"}})},hyphens:({addUtilities:r})=>{r({".hyphens-none":{hyphens:"none"},".hyphens-manual":{hyphens:"manual"},".hyphens-auto":{hyphens:"auto"}})},whitespace:({addUtilities:r})=>{r({".whitespace-normal":{"white-space":"normal"},".whitespace-nowrap":{"white-space":"nowrap"},".whitespace-pre":{"white-space":"pre"},".whitespace-pre-line":{"white-space":"pre-line"},".whitespace-pre-wrap":{"white-space":"pre-wrap"},".whitespace-break-spaces":{"white-space":"break-spaces"}})},textWrap:({addUtilities:r})=>{r({".text-wrap":{"text-wrap":"wrap"},".text-nowrap":{"text-wrap":"nowrap"},".text-balance":{"text-wrap":"balance"},".text-pretty":{"text-wrap":"pretty"}})},wordBreak:({addUtilities:r})=>{r({".break-normal":{"overflow-wrap":"normal","word-break":"normal"},".break-words":{"overflow-wrap":"break-word"},".break-all":{"word-break":"break-all"},".break-keep":{"word-break":"keep-all"}})},borderRadius:L("borderRadius",[["rounded",["border-radius"]],[["rounded-s",["border-start-start-radius","border-end-start-radius"]],["rounded-e",["border-start-end-radius","border-end-end-radius"]],["rounded-t",
["border-top-left-radius","border-top-right-radius"]],["rounded-r",["border-top-right-radius","border-bottom-right-radius"]],["rounded-b",["border-bottom-right-radius","border-bottom-left-radius"]],["rounded-l",["border-top-left-radius","border-bottom-left-radius"]]],[["rounded-ss",["border-start-start-radius"]],["rounded-se",["border-start-end-radius"]],["rounded-ee",["border-end-end-radius"]],["rounded-es",["border-end-start-radius"]],["rounded-tl",["border-top-left-radius"]],["rounded-tr",["border-top-right-radius"]],["rounded-br",["border-bottom-right-radius"]],["rounded-bl",["border-bottom-left-radius"]]]]),borderWidth:L("borderWidth",[["border",[["@defaults border-width",{}],"border-width"]],[["border-x",[["@defaults border-width",{}],"border-left-width","border-right-width"]],["border-y",[["@defaults border-width",{}],"border-top-width","border-bottom-width"]]],[["border-s",[["@defaults border-width",{}],"border-inline-start-width"]],["border-e",[["@defaults border-width",{}],"border-inline-end-width"]],["border-t",[["@defaults border-width",{}],"border-top-width"]],["border-r",[["@defaults border-width",{}],"border-right-width"]],["border-b",[["@defaults border-width",{}],"border-bottom-width"]],["border-l",[["@defaults border-width",{}],"border-left-width"]]]],{type:["line-width","length"]}),borderStyle:({addUtilities:r})=>{r({".border-solid":{"border-style":"solid"},".border-dashed":{"border-style":"dashed"},".border-dotted":{"border-style":"dotted"},".border-double":{"border-style":"double"},".border-hidden":{"border-style":"hidden"},".border-none":{"border-style":"none"}})},borderColor:({matchUtilities:r,theme:e,corePlugins:t})=>{r({border:i=>t("borderOpacity")?Ae({color:i,property:"border-color",variable:"--tw-border-opacity"}):{"border-color":X(i)}},{values:(({DEFAULT:i,...n})=>n)(xe(e("borderColor"))),type:["color","any"]}),r({"border-x":i=>t("borderOpacity")?Ae({color:i,property:["border-left-color","border-right-color"],variable:"--tw-border-opacity"}):{"border-left-color":X(i),"border-right-color":X(i)},"border-y":i=>t("borderOpacity")?Ae({color:i,property:["border-top-color","border-bottom-color"],variable:"--tw-border-opacity"}):{"border-top-color":X(i),"border-bottom-color":X(i)}},{values:(({DEFAULT:i,...n})=>n)(xe(e("borderColor"))),type:["color","any"]}),r({"border-s":i=>t("borderOpacity")?Ae({color:i,property:"border-inline-start-color",variable:"--tw-border-opacity"}):{"border-inline-start-color":X(i)},"border-e":i=>t("borderOpacity")?Ae({color:i,property:"border-inline-end-color",variable:"--tw-border-opacity"}):{"border-inline-end-color":X(i)},"border-t":i=>t("borderOpacity")?Ae({color:i,property:"border-top-color",variable:"--tw-border-opacity"}):{"border-top-color":X(i)},"border-r":i=>t("borderOpacity")?Ae({color:i,property:"border-right-color",variable:"--tw-border-opacity"}):{"border-right-color":X(i)},"border-b":i=>t("borderOpacity")?Ae({color:i,property:"border-bottom-color",variable:"--tw-border-opacity"}):{"border-bottom-color":X(i)},"border-l":i=>t("borderOpacity")?Ae({color:i,property:"border-left-color",variable:"--tw-border-opacity"}):{"border-left-color":X(i)}},{values:(({DEFAULT:i,...n})=>n)(xe(e("borderColor"))),type:["color","any"]})},borderOpacity:L("borderOpacity",[["border-opacity",["--tw-border-opacity"]]]),backgroundColor:({matchUtilities:r,theme:e,corePlugins:t})=>{r({bg:i=>t("backgroundOpacity")?Ae({color:i,property:"background-color",variable:"--tw-bg-opacity"}):{"background-color":X(i)}},{values:xe(e("backgroundColor")),type:["color","any"
]})},backgroundOpacity:L("backgroundOpacity",[["bg-opacity",["--tw-bg-opacity"]]]),backgroundImage:L("backgroundImage",[["bg",["background-image"]]],{type:["lookup","image","url"]}),gradientColorStops:(()=>{function r(e){return Ze(e,0,"rgb(255 255 255 / 0)")}return function({matchUtilities:e,theme:t,addDefaults:i}){i("gradient-color-stops",{"--tw-gradient-from-position":" ","--tw-gradient-via-position":" ","--tw-gradient-to-position":" "});let n={values:xe(t("gradientColorStops")),type:["color","any"]},a={values:t("gradientColorStopPositions"),type:["length","percentage"]};e({from:s=>{let o=r(s);return{"@defaults gradient-color-stops":{},"--tw-gradient-from":`${X(s)} var(--tw-gradient-from-position)`,"--tw-gradient-to":`${o} var(--tw-gradient-to-position)`,"--tw-gradient-stops":"var(--tw-gradient-from), var(--tw-gradient-to)"}}},n),e({from:s=>({"--tw-gradient-from-position":s})},a),e({via:s=>{let o=r(s);return{"@defaults gradient-color-stops":{},"--tw-gradient-to":`${o} var(--tw-gradient-to-position)`,"--tw-gradient-stops":`var(--tw-gradient-from), ${X(s)} var(--tw-gradient-via-position), var(--tw-gradient-to)`}}},n),e({via:s=>({"--tw-gradient-via-position":s})},a),e({to:s=>({"@defaults gradient-color-stops":{},"--tw-gradient-to":`${X(s)} var(--tw-gradient-to-position)`})},n),e({to:s=>({"--tw-gradient-to-position":s})},a)}})(),boxDecorationBreak:({addUtilities:r})=>{r({".decoration-slice":{"box-decoration-break":"slice"},".decoration-clone":{"box-decoration-break":"clone"},".box-decoration-slice":{"box-decoration-break":"slice"},".box-decoration-clone":{"box-decoration-break":"clone"}})},backgroundSize:L("backgroundSize",[["bg",["background-size"]]],{type:["lookup","length","percentage","size"]}),backgroundAttachment:({addUtilities:r})=>{r({".bg-fixed":{"background-attachment":"fixed"},".bg-local":{"background-attachment":"local"},".bg-scroll":{"background-attachment":"scroll"}})},backgroundClip:({addUtilities:r})=>{r({".bg-clip-border":{"background-clip":"border-box"},".bg-clip-padding":{"background-clip":"padding-box"},".bg-clip-content":{"background-clip":"content-box"},".bg-clip-text":{"background-clip":"text"}})},backgroundPosition:L("backgroundPosition",[["bg",["background-position"]]],{type:["lookup",["position",{preferOnConflict:!0}]]}),backgroundRepeat:({addUtilities:r})=>{r({".bg-repeat":{"background-repeat":"repeat"},".bg-no-repeat":{"background-repeat":"no-repeat"},".bg-repeat-x":{"background-repeat":"repeat-x"},".bg-repeat-y":{"background-repeat":"repeat-y"},".bg-repeat-round":{"background-repeat":"round"},".bg-repeat-space":{"background-repeat":"space"}})},backgroundOrigin:({addUtilities:r})=>{r({".bg-origin-border":{"background-origin":"border-box"},".bg-origin-padding":{"background-origin":"padding-box"},".bg-origin-content":{"background-origin":"content-box"}})},fill:({matchUtilities:r,theme:e})=>{r({fill:t=>({fill:X(t)})},{values:xe(e("fill")),type:["color","any"]})},stroke:({matchUtilities:r,theme:e})=>{r({stroke:t=>({stroke:X(t)})},{values:xe(e("stroke")),type:["color","url","any"]})},strokeWidth:L("strokeWidth",[["stroke",["stroke-width"]]],{type:["length","number","percentage"]}),objectFit:({addUtilities:r})=>{r({".object-contain":{"object-fit":"contain"},".object-cover":{"object-fit":"cover"},".object-fill":{"object-fit":"fill"},".object-none":{"object-fit":"none"},".object-scale-down":{"object-fit":"scale-down"}})},objectPosition:L("objectPosition",[["object",["object-position"]]]),padding:L("padding",[["p",["padding"]],[["px",["padding-left","padding-right"]],["py",
["padding-top","padding-bottom"]]],[["ps",["padding-inline-start"]],["pe",["padding-inline-end"]],["pt",["padding-top"]],["pr",["padding-right"]],["pb",["padding-bottom"]],["pl",["padding-left"]]]]),textAlign:({addUtilities:r})=>{r({".text-left":{"text-align":"left"},".text-center":{"text-align":"center"},".text-right":{"text-align":"right"},".text-justify":{"text-align":"justify"},".text-start":{"text-align":"start"},".text-end":{"text-align":"end"}})},textIndent:L("textIndent",[["indent",["text-indent"]]],{supportsNegativeValues:!0}),verticalAlign:({addUtilities:r,matchUtilities:e})=>{r({".align-baseline":{"vertical-align":"baseline"},".align-top":{"vertical-align":"top"},".align-middle":{"vertical-align":"middle"},".align-bottom":{"vertical-align":"bottom"},".align-text-top":{"vertical-align":"text-top"},".align-text-bottom":{"vertical-align":"text-bottom"},".align-sub":{"vertical-align":"sub"},".align-super":{"vertical-align":"super"}}),e({align:t=>({"vertical-align":t})})},fontFamily:({matchUtilities:r,theme:e})=>{r({font:t=>{let[i,n={}]=Array.isArray(t)&&ke(t[1])?t:[t],{fontFeatureSettings:a,fontVariationSettings:s}=n;return{"font-family":Array.isArray(i)?i.join(", "):i,...a===void 0?{}:{"font-feature-settings":a},...s===void 0?{}:{"font-variation-settings":s}}}},{values:e("fontFamily"),type:["lookup","generic-name","family-name"]})},fontSize:({matchUtilities:r,theme:e})=>{r({text:(t,{modifier:i})=>{let[n,a]=Array.isArray(t)?t:[t];if(i)return{"font-size":n,"line-height":i};let{lineHeight:s,letterSpacing:o,fontWeight:l}=ke(a)?a:{lineHeight:a};return{"font-size":n,...s===void 0?{}:{"line-height":s},...o===void 0?{}:{"letter-spacing":o},...l===void 0?{}:{"font-weight":l}}}},{values:e("fontSize"),modifiers:e("lineHeight"),type:["absolute-size","relative-size","length","percentage"]})},fontWeight:L("fontWeight",[["font",["fontWeight"]]],{type:["lookup","number","any"]}),textTransform:({addUtilities:r})=>{r({".uppercase":{"text-transform":"uppercase"},".lowercase":{"text-transform":"lowercase"},".capitalize":{"text-transform":"capitalize"},".normal-case":{"text-transform":"none"}})},fontStyle:({addUtilities:r})=>{r({".italic":{"font-style":"italic"},".not-italic":{"font-style":"normal"}})},fontVariantNumeric:({addDefaults:r,addUtilities:e})=>{let t="var(--tw-ordinal) var(--tw-slashed-zero) var(--tw-numeric-figure) var(--tw-numeric-spacing) var(--tw-numeric-fraction)";r("font-variant-numeric",{"--tw-ordinal":" ","--tw-slashed-zero":" ","--tw-numeric-figure":" ","--tw-numeric-spacing":" ","--tw-numeric-fraction":" "}),e({".normal-nums":{"font-variant-numeric":"normal"},".ordinal":{"@defaults font-variant-numeric":{},"--tw-ordinal":"ordinal","font-variant-numeric":t},".slashed-zero":{"@defaults font-variant-numeric":{},"--tw-slashed-zero":"slashed-zero","font-variant-numeric":t},".lining-nums":{"@defaults font-variant-numeric":{},"--tw-numeric-figure":"lining-nums","font-variant-numeric":t},".oldstyle-nums":{"@defaults font-variant-numeric":{},"--tw-numeric-figure":"oldstyle-nums","font-variant-numeric":t},".proportional-nums":{"@defaults font-variant-numeric":{},"--tw-numeric-spacing":"proportional-nums","font-variant-numeric":t},".tabular-nums":{"@defaults font-variant-numeric":{},"--tw-numeric-spacing":"tabular-nums","font-variant-numeric":t},".diagonal-fractions":{"@defaults font-variant-numeric":{},"--tw-numeric-fraction":"diagonal-fractions","font-variant-numeric":t},".stacked-fractions":{"@defaults 
font-variant-numeric":{},"--tw-numeric-fraction":"stacked-fractions","font-variant-numeric":t}})},lineHeight:L("lineHeight",[["leading",["lineHeight"]]]),letterSpacing:L("letterSpacing",[["tracking",["letterSpacing"]]],{supportsNegativeValues:!0}),textColor:({matchUtilities:r,theme:e,corePlugins:t})=>{r({text:i=>t("textOpacity")?Ae({color:i,property:"color",variable:"--tw-text-opacity"}):{color:X(i)}},{values:xe(e("textColor")),type:["color","any"]})},textOpacity:L("textOpacity",[["text-opacity",["--tw-text-opacity"]]]),textDecoration:({addUtilities:r})=>{r({".underline":{"text-decoration-line":"underline"},".overline":{"text-decoration-line":"overline"},".line-through":{"text-decoration-line":"line-through"},".no-underline":{"text-decoration-line":"none"}})},textDecorationColor:({matchUtilities:r,theme:e})=>{r({decoration:t=>({"text-decoration-color":X(t)})},{values:xe(e("textDecorationColor")),type:["color","any"]})},textDecorationStyle:({addUtilities:r})=>{r({".decoration-solid":{"text-decoration-style":"solid"},".decoration-double":{"text-decoration-style":"double"},".decoration-dotted":{"text-decoration-style":"dotted"},".decoration-dashed":{"text-decoration-style":"dashed"},".decoration-wavy":{"text-decoration-style":"wavy"}})},textDecorationThickness:L("textDecorationThickness",[["decoration",["text-decoration-thickness"]]],{type:["length","percentage"]}),textUnderlineOffset:L("textUnderlineOffset",[["underline-offset",["text-underline-offset"]]],{type:["length","percentage","any"]}),fontSmoothing:({addUtilities:r})=>{r({".antialiased":{"-webkit-font-smoothing":"antialiased","-moz-osx-font-smoothing":"grayscale"},".subpixel-antialiased":{"-webkit-font-smoothing":"auto","-moz-osx-font-smoothing":"auto"}})},placeholderColor:({matchUtilities:r,theme:e,corePlugins:t})=>{r({placeholder:i=>t("placeholderOpacity")?{"&::placeholder":Ae({color:i,property:"color",variable:"--tw-placeholder-opacity"})}:{"&::placeholder":{color:X(i)}}},{values:xe(e("placeholderColor")),type:["color","any"]})},placeholderOpacity:({matchUtilities:r,theme:e})=>{r({"placeholder-opacity":t=>({["&::placeholder"]:{"--tw-placeholder-opacity":t}})},{values:e("placeholderOpacity")})},caretColor:({matchUtilities:r,theme:e})=>{r({caret:t=>({"caret-color":X(t)})},{values:xe(e("caretColor")),type:["color","any"]})},accentColor:({matchUtilities:r,theme:e})=>{r({accent:t=>({"accent-color":X(t)})},{values:xe(e("accentColor")),type:["color","any"]})},opacity:L("opacity",[["opacity",["opacity"]]]),backgroundBlendMode:({addUtilities:r})=>{r({".bg-blend-normal":{"background-blend-mode":"normal"},".bg-blend-multiply":{"background-blend-mode":"multiply"},".bg-blend-screen":{"background-blend-mode":"screen"},".bg-blend-overlay":{"background-blend-mode":"overlay"},".bg-blend-darken":{"background-blend-mode":"darken"},".bg-blend-lighten":{"background-blend-mode":"lighten"},".bg-blend-color-dodge":{"background-blend-mode":"color-dodge"},".bg-blend-color-burn":{"background-blend-mode":"color-burn"},".bg-blend-hard-light":{"background-blend-mode":"hard-light"},".bg-blend-soft-light":{"background-blend-mode":"soft-light"},".bg-blend-difference":{"background-blend-mode":"difference"},".bg-blend-exclusion":{"background-blend-mode":"exclusion"},".bg-blend-hue":{"background-blend-mode":"hue"},".bg-blend-saturation":{"background-blend-mode":"saturation"},".bg-blend-color":{"background-blend-mode":"color"},".bg-blend-luminosity":{"background-blend-mode":"luminosity"}})},mixBlendMode:({addUtilities:r})=>{r({".mix-blend-normal":{"mix-blend-mode":"no
rmal"},".mix-blend-multiply":{"mix-blend-mode":"multiply"},".mix-blend-screen":{"mix-blend-mode":"screen"},".mix-blend-overlay":{"mix-blend-mode":"overlay"},".mix-blend-darken":{"mix-blend-mode":"darken"},".mix-blend-lighten":{"mix-blend-mode":"lighten"},".mix-blend-color-dodge":{"mix-blend-mode":"color-dodge"},".mix-blend-color-burn":{"mix-blend-mode":"color-burn"},".mix-blend-hard-light":{"mix-blend-mode":"hard-light"},".mix-blend-soft-light":{"mix-blend-mode":"soft-light"},".mix-blend-difference":{"mix-blend-mode":"difference"},".mix-blend-exclusion":{"mix-blend-mode":"exclusion"},".mix-blend-hue":{"mix-blend-mode":"hue"},".mix-blend-saturation":{"mix-blend-mode":"saturation"},".mix-blend-color":{"mix-blend-mode":"color"},".mix-blend-luminosity":{"mix-blend-mode":"luminosity"},".mix-blend-plus-darker":{"mix-blend-mode":"plus-darker"},".mix-blend-plus-lighter":{"mix-blend-mode":"plus-lighter"}})},boxShadow:(()=>{let r=mt("boxShadow"),e=["var(--tw-ring-offset-shadow, 0 0 #0000)","var(--tw-ring-shadow, 0 0 #0000)","var(--tw-shadow)"].join(", ");return function({matchUtilities:t,addDefaults:i,theme:n}){i("box-shadow",{"--tw-ring-offset-shadow":"0 0 #0000","--tw-ring-shadow":"0 0 #0000","--tw-shadow":"0 0 #0000","--tw-shadow-colored":"0 0 #0000"}),t({shadow:a=>{a=r(a);let s=Ji(a);for(let o of s)!o.valid||(o.color="var(--tw-shadow-color)");return{"@defaults box-shadow":{},"--tw-shadow":a==="none"?"0 0 #0000":a,"--tw-shadow-colored":a==="none"?"0 0 #0000":qf(s),"box-shadow":e}}},{values:n("boxShadow"),type:["shadow"]})}})(),boxShadowColor:({matchUtilities:r,theme:e})=>{r({shadow:t=>({"--tw-shadow-color":X(t),"--tw-shadow":"var(--tw-shadow-colored)"})},{values:xe(e("boxShadowColor")),type:["color","any"]})},outlineStyle:({addUtilities:r})=>{r({".outline-none":{outline:"2px solid transparent","outline-offset":"2px"},".outline":{"outline-style":"solid"},".outline-dashed":{"outline-style":"dashed"},".outline-dotted":{"outline-style":"dotted"},".outline-double":{"outline-style":"double"}})},outlineWidth:L("outlineWidth",[["outline",["outline-width"]]],{type:["length","number","percentage"]}),outlineOffset:L("outlineOffset",[["outline-offset",["outline-offset"]]],{type:["length","number","percentage","any"],supportsNegativeValues:!0}),outlineColor:({matchUtilities:r,theme:e})=>{r({outline:t=>({"outline-color":X(t)})},{values:xe(e("outlineColor")),type:["color","any"]})},ringWidth:({matchUtilities:r,addDefaults:e,addUtilities:t,theme:i,config:n})=>{let a=(()=>{if(we(n(),"respectDefaultRingColorOpacity"))return i("ringColor.DEFAULT");let s=i("ringOpacity.DEFAULT","0.5");return i("ringColor")?.DEFAULT?Ze(i("ringColor")?.DEFAULT,s,`rgb(147 197 253 / ${s})`):`rgb(147 197 253 / ${s})`})();e("ring-width",{"--tw-ring-inset":" ","--tw-ring-offset-width":i("ringOffsetWidth.DEFAULT","0px"),"--tw-ring-offset-color":i("ringOffsetColor.DEFAULT","#fff"),"--tw-ring-color":a,"--tw-ring-offset-shadow":"0 0 #0000","--tw-ring-shadow":"0 0 #0000","--tw-shadow":"0 0 #0000","--tw-shadow-colored":"0 0 #0000"}),r({ring:s=>({"@defaults ring-width":{},"--tw-ring-offset-shadow":"var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color)","--tw-ring-shadow":`var(--tw-ring-inset) 0 0 0 calc(${s} + var(--tw-ring-offset-width)) var(--tw-ring-color)`,"box-shadow":["var(--tw-ring-offset-shadow)","var(--tw-ring-shadow)","var(--tw-shadow, 0 0 #0000)"].join(", ")})},{values:i("ringWidth"),type:"length"}),t({".ring-inset":{"@defaults 
ring-width":{},"--tw-ring-inset":"inset"}})},ringColor:({matchUtilities:r,theme:e,corePlugins:t})=>{r({ring:i=>t("ringOpacity")?Ae({color:i,property:"--tw-ring-color",variable:"--tw-ring-opacity"}):{"--tw-ring-color":X(i)}},{values:Object.fromEntries(Object.entries(xe(e("ringColor"))).filter(([i])=>i!=="DEFAULT")),type:["color","any"]})},ringOpacity:r=>{let{config:e}=r;return L("ringOpacity",[["ring-opacity",["--tw-ring-opacity"]]],{filterDefault:!we(e(),"respectDefaultRingColorOpacity")})(r)},ringOffsetWidth:L("ringOffsetWidth",[["ring-offset",["--tw-ring-offset-width"]]],{type:"length"}),ringOffsetColor:({matchUtilities:r,theme:e})=>{r({"ring-offset":t=>({"--tw-ring-offset-color":X(t)})},{values:xe(e("ringOffsetColor")),type:["color","any"]})},blur:({matchUtilities:r,theme:e})=>{r({blur:t=>({"--tw-blur":t.trim()===""?" ":`blur(${t})`,"@defaults filter":{},filter:nt})},{values:e("blur")})},brightness:({matchUtilities:r,theme:e})=>{r({brightness:t=>({"--tw-brightness":`brightness(${t})`,"@defaults filter":{},filter:nt})},{values:e("brightness")})},contrast:({matchUtilities:r,theme:e})=>{r({contrast:t=>({"--tw-contrast":`contrast(${t})`,"@defaults filter":{},filter:nt})},{values:e("contrast")})},dropShadow:({matchUtilities:r,theme:e})=>{r({"drop-shadow":t=>({"--tw-drop-shadow":Array.isArray(t)?t.map(i=>`drop-shadow(${i})`).join(" "):`drop-shadow(${t})`,"@defaults filter":{},filter:nt})},{values:e("dropShadow")})},grayscale:({matchUtilities:r,theme:e})=>{r({grayscale:t=>({"--tw-grayscale":`grayscale(${t})`,"@defaults filter":{},filter:nt})},{values:e("grayscale")})},hueRotate:({matchUtilities:r,theme:e})=>{r({"hue-rotate":t=>({"--tw-hue-rotate":`hue-rotate(${t})`,"@defaults filter":{},filter:nt})},{values:e("hueRotate"),supportsNegativeValues:!0})},invert:({matchUtilities:r,theme:e})=>{r({invert:t=>({"--tw-invert":`invert(${t})`,"@defaults filter":{},filter:nt})},{values:e("invert")})},saturate:({matchUtilities:r,theme:e})=>{r({saturate:t=>({"--tw-saturate":`saturate(${t})`,"@defaults filter":{},filter:nt})},{values:e("saturate")})},sepia:({matchUtilities:r,theme:e})=>{r({sepia:t=>({"--tw-sepia":`sepia(${t})`,"@defaults filter":{},filter:nt})},{values:e("sepia")})},filter:({addDefaults:r,addUtilities:e})=>{r("filter",{"--tw-blur":" ","--tw-brightness":" ","--tw-contrast":" ","--tw-grayscale":" ","--tw-hue-rotate":" ","--tw-invert":" ","--tw-saturate":" ","--tw-sepia":" ","--tw-drop-shadow":" "}),e({".filter":{"@defaults filter":{},filter:nt},".filter-none":{filter:"none"}})},backdropBlur:({matchUtilities:r,theme:e})=>{r({"backdrop-blur":t=>({"--tw-backdrop-blur":t.trim()===""?" 
":`blur(${t})`,"@defaults backdrop-filter":{},"-webkit-backdrop-filter":ge,"backdrop-filter":ge})},{values:e("backdropBlur")})},backdropBrightness:({matchUtilities:r,theme:e})=>{r({"backdrop-brightness":t=>({"--tw-backdrop-brightness":`brightness(${t})`,"@defaults backdrop-filter":{},"-webkit-backdrop-filter":ge,"backdrop-filter":ge})},{values:e("backdropBrightness")})},backdropContrast:({matchUtilities:r,theme:e})=>{r({"backdrop-contrast":t=>({"--tw-backdrop-contrast":`contrast(${t})`,"@defaults backdrop-filter":{},"-webkit-backdrop-filter":ge,"backdrop-filter":ge})},{values:e("backdropContrast")})},backdropGrayscale:({matchUtilities:r,theme:e})=>{r({"backdrop-grayscale":t=>({"--tw-backdrop-grayscale":`grayscale(${t})`,"@defaults backdrop-filter":{},"-webkit-backdrop-filter":ge,"backdrop-filter":ge})},{values:e("backdropGrayscale")})},backdropHueRotate:({matchUtilities:r,theme:e})=>{r({"backdrop-hue-rotate":t=>({"--tw-backdrop-hue-rotate":`hue-rotate(${t})`,"@defaults backdrop-filter":{},"-webkit-backdrop-filter":ge,"backdrop-filter":ge})},{values:e("backdropHueRotate"),supportsNegativeValues:!0})},backdropInvert:({matchUtilities:r,theme:e})=>{r({"backdrop-invert":t=>({"--tw-backdrop-invert":`invert(${t})`,"@defaults backdrop-filter":{},"-webkit-backdrop-filter":ge,"backdrop-filter":ge})},{values:e("backdropInvert")})},backdropOpacity:({matchUtilities:r,theme:e})=>{r({"backdrop-opacity":t=>({"--tw-backdrop-opacity":`opacity(${t})`,"@defaults backdrop-filter":{},"-webkit-backdrop-filter":ge,"backdrop-filter":ge})},{values:e("backdropOpacity")})},backdropSaturate:({matchUtilities:r,theme:e})=>{r({"backdrop-saturate":t=>({"--tw-backdrop-saturate":`saturate(${t})`,"@defaults backdrop-filter":{},"-webkit-backdrop-filter":ge,"backdrop-filter":ge})},{values:e("backdropSaturate")})},backdropSepia:({matchUtilities:r,theme:e})=>{r({"backdrop-sepia":t=>({"--tw-backdrop-sepia":`sepia(${t})`,"@defaults backdrop-filter":{},"-webkit-backdrop-filter":ge,"backdrop-filter":ge})},{values:e("backdropSepia")})},backdropFilter:({addDefaults:r,addUtilities:e})=>{r("backdrop-filter",{"--tw-backdrop-blur":" ","--tw-backdrop-brightness":" ","--tw-backdrop-contrast":" ","--tw-backdrop-grayscale":" ","--tw-backdrop-hue-rotate":" ","--tw-backdrop-invert":" ","--tw-backdrop-opacity":" ","--tw-backdrop-saturate":" ","--tw-backdrop-sepia":" "}),e({".backdrop-filter":{"@defaults backdrop-filter":{},"-webkit-backdrop-filter":ge,"backdrop-filter":ge},".backdrop-filter-none":{"-webkit-backdrop-filter":"none","backdrop-filter":"none"}})},transitionProperty:({matchUtilities:r,theme:e})=>{let t=e("transitionTimingFunction.DEFAULT"),i=e("transitionDuration.DEFAULT");r({transition:n=>({"transition-property":n,...n==="none"?{}:{"transition-timing-function":t,"transition-duration":i}})},{values:e("transitionProperty")})},transitionDelay:L("transitionDelay",[["delay",["transitionDelay"]]]),transitionDuration:L("transitionDuration",[["duration",["transitionDuration"]]],{filterDefault:!0}),transitionTimingFunction:L("transitionTimingFunction",[["ease",["transitionTimingFunction"]]],{filterDefault:!0}),willChange:L("willChange",[["will-change",["will-change"]]]),contain:({addDefaults:r,addUtilities:e})=>{let t="var(--tw-contain-size) var(--tw-contain-layout) var(--tw-contain-paint) var(--tw-contain-style)";r("contain",{"--tw-contain-size":" ","--tw-contain-layout":" ","--tw-contain-paint":" ","--tw-contain-style":" 
"}),e({".contain-none":{contain:"none"},".contain-content":{contain:"content"},".contain-strict":{contain:"strict"},".contain-size":{"@defaults contain":{},"--tw-contain-size":"size",contain:t},".contain-inline-size":{"@defaults contain":{},"--tw-contain-size":"inline-size",contain:t},".contain-layout":{"@defaults contain":{},"--tw-contain-layout":"layout",contain:t},".contain-paint":{"@defaults contain":{},"--tw-contain-paint":"paint",contain:t},".contain-style":{"@defaults contain":{},"--tw-contain-style":"style",contain:t}})},content:L("content",[["content",["--tw-content",["content","var(--tw-content)"]]]]),forcedColorAdjust:({addUtilities:r})=>{r({".forced-color-adjust-auto":{"forced-color-adjust":"auto"},".forced-color-adjust-none":{"forced-color-adjust":"none"}})}}});function p_(r){if(r===void 0)return!1;if(r==="true"||r==="1")return!0;if(r==="false"||r==="0")return!1;if(r==="*")return!0;let e=r.split(",").map(t=>t.split(":")[0]);return e.includes("-tailwindcss")?!1:!!e.includes("tailwindcss")}var Je,yh,bh,Zn,Lo,gt,Ei,It=R(()=>{u();Je=typeof m!="undefined"?{NODE_ENV:"production",DEBUG:p_(m.env.DEBUG)}:{NODE_ENV:"production",DEBUG:!1},yh=new Map,bh=new Map,Zn=new Map,Lo=new Map,gt=new String("*"),Ei=Symbol("__NONE__")});function cr(r){let e=[],t=!1;for(let i=0;i0)}var wh,vh,d_,Mo=R(()=>{u();wh=new Map([["{","}"],["[","]"],["(",")"]]),vh=new Map(Array.from(wh.entries()).map(([r,e])=>[e,r])),d_=new Set(['"',"'","`"])});function pr(r){let[e]=xh(r);return e.forEach(([t,i])=>t.removeChild(i)),r.nodes.push(...e.map(([,t])=>t)),r}function xh(r){let e=[],t=null;for(let i of r.nodes)if(i.type==="combinator")e=e.filter(([,n])=>Bo(n).includes("jumpable")),t=null;else if(i.type==="pseudo"){h_(i)?(t=i,e.push([r,i,null])):t&&m_(i,t)?e.push([r,i,t]):t=null;for(let n of i.nodes??[]){let[a,s]=xh(n);t=s||t,e.push(...a)}}return[e,t]}function kh(r){return r.value.startsWith("::")||No[r.value]!==void 0}function h_(r){return kh(r)&&Bo(r).includes("terminal")}function m_(r,e){return r.type!=="pseudo"||kh(r)?!1:Bo(e).includes("actionable")}function Bo(r){return No[r.value]??No.__default__}var No,es=R(()=>{u();No={"::after":["terminal","jumpable"],"::backdrop":["terminal","jumpable"],"::before":["terminal","jumpable"],"::cue":["terminal"],"::cue-region":["terminal"],"::first-letter":["terminal","jumpable"],"::first-line":["terminal","jumpable"],"::grammar-error":["terminal"],"::marker":["terminal","jumpable"],"::part":["terminal","actionable"],"::placeholder":["terminal","jumpable"],"::selection":["terminal","jumpable"],"::slotted":["terminal"],"::spelling-error":["terminal"],"::target-text":["terminal"],"::file-selector-button":["terminal","actionable"],"::deep":["actionable"],"::v-deep":["actionable"],"::ng-deep":["actionable"],":after":["terminal","jumpable"],":before":["terminal","jumpable"],":first-letter":["terminal","jumpable"],":first-line":["terminal","jumpable"],":where":[],":is":[],":has":[],__default__:["terminal","actionable"]}});function dr(r,{context:e,candidate:t}){let i=e?.tailwindConfig.prefix??"",n=r.map(s=>{let o=(0,st.default)().astSync(s.format);return{...s,ast:s.respectPrefix?ur(i,o):o}}),a=st.default.root({nodes:[st.default.selector({nodes:[st.default.className({value:Te(t)})]})]});for(let{ast:s}of n)[a,s]=y_(a,s),s.walkNesting(o=>o.replaceWith(...a.nodes[0].nodes)),a=s;return a}function Ah(r){let e=[];for(;r.prev()&&r.prev().type!=="combinator";)r=r.prev();for(;r&&r.type!=="combinator";)e.push(r),r=r.next();return e}function g_(r){return 
r.sort((e,t)=>e.type==="tag"&&t.type==="class"?-1:e.type==="class"&&t.type==="tag"?1:e.type==="class"&&t.type==="pseudo"&&t.value.startsWith("::")?-1:e.type==="pseudo"&&e.value.startsWith("::")&&t.type==="class"?1:r.index(e)-r.index(t)),r}function jo(r,e){let t=!1;r.walk(i=>{if(i.type==="class"&&i.value===e)return t=!0,!1}),t||r.remove()}function ts(r,e,{context:t,candidate:i,base:n}){let a=t?.tailwindConfig?.separator??":";n=n??ve(i,a).pop();let s=(0,st.default)().astSync(r);if(s.walkClasses(f=>{f.raws&&f.value.includes(n)&&(f.raws.value=Te((0,Sh.default)(f.raws.value)))}),s.each(f=>jo(f,n)),s.length===0)return null;let o=Array.isArray(e)?dr(e,{context:t,candidate:i}):e;if(o===null)return s.toString();let l=st.default.comment({value:"/*__simple__*/"}),c=st.default.comment({value:"/*__simple__*/"});return s.walkClasses(f=>{if(f.value!==n)return;let d=f.parent,p=o.nodes[0].nodes;if(d.nodes.length===1){f.replaceWith(...p);return}let h=Ah(f);d.insertBefore(h[0],l),d.insertAfter(h[h.length-1],c);for(let v of p)d.insertBefore(h[0],v.clone());f.remove(),h=Ah(l);let b=d.index(l);d.nodes.splice(b,h.length,...g_(st.default.selector({nodes:h})).nodes),l.remove(),c.remove()}),s.walkPseudos(f=>{f.value===Fo&&f.replaceWith(f.nodes)}),s.each(f=>pr(f)),s.toString()}function y_(r,e){let t=[];return r.walkPseudos(i=>{i.value===Fo&&t.push({pseudo:i,value:i.nodes[0].toString()})}),e.walkPseudos(i=>{if(i.value!==Fo)return;let n=i.nodes[0].toString(),a=t.find(c=>c.value===n);if(!a)return;let s=[],o=i.next();for(;o&&o.type!=="combinator";)s.push(o),o=o.next();let l=o;a.pseudo.parent.insertAfter(a.pseudo,st.default.selector({nodes:s.map(c=>c.clone())})),i.remove(),s.forEach(c=>c.remove()),l&&l.type==="combinator"&&l.remove()}),[r,e]}var st,Sh,Fo,zo=R(()=>{u();st=pe(it()),Sh=pe(Rn());fr();Wn();es();zt();Fo=":merge"});function rs(r,e){let t=(0,Uo.default)().astSync(r);return t.each(i=>{i.nodes.some(a=>a.type==="combinator")&&(i.nodes=[Uo.default.pseudo({value:":is",nodes:[i.clone()]})]),pr(i)}),`${e} ${t.toString()}`}var Uo,Vo=R(()=>{u();Uo=pe(it());es()});function Ho(r){return b_.transformSync(r)}function*w_(r){let e=1/0;for(;e>=0;){let t,i=!1;if(e===1/0&&r.endsWith("]")){let s=r.indexOf("[");r[s-1]==="-"?t=s-1:r[s-1]==="/"?(t=s-1,i=!0):t=-1}else e===1/0&&r.includes("/")?(t=r.lastIndexOf("/"),i=!0):t=r.lastIndexOf("-",e);if(t<0)break;let n=r.slice(0,t),a=r.slice(i?t:t+1);e=t-1,!(n===""||a==="/")&&(yield[n,a])}}function v_(r,e){if(r.length===0||e.tailwindConfig.prefix==="")return r;for(let t of r){let[i]=t;if(i.options.respectPrefix){let n=ee.root({nodes:[t[1].clone()]}),a=t[1].raws.tailwind.classCandidate;n.walkRules(s=>{let o=a.startsWith("-");s.selector=ur(e.tailwindConfig.prefix,s.selector,o)}),t[1]=n.nodes[0]}}return r}function x_(r,e){if(r.length===0)return r;let t=[];function i(n){return n.parent&&n.parent.type==="atrule"&&n.parent.name==="keyframes"}for(let[n,a]of r){let s=ee.root({nodes:[a.clone()]});s.walkRules(o=>{if(i(o))return;let l=(0,is.default)().astSync(o.selector);l.each(c=>jo(c,e)),Wf(l,c=>c===e?`!${c}`:c),o.selector=l.toString(),o.walkDecls(c=>c.important=!0)}),t.push([{...n,important:!0},s.nodes[0]])}return t}function k_(r,e,t){if(e.length===0)return e;let i={modifier:null,value:Ei};{let[n,...a]=ve(r,"/");if(a.length>1&&(n=n+"/"+a.slice(0,-1).join("/"),a=a.slice(-1)),a.length&&!t.variantMap.has(r)&&(r=n,i.modifier=a[0],!we(t.tailwindConfig,"generalizedModifiers")))return[]}if(r.endsWith("]")&&!r.startsWith("[")){let 
n=/(.)(-?)\[(.*)\]/g.exec(r);if(n){let[,a,s,o]=n;if(a==="@"&&s==="-")return[];if(a!=="@"&&s==="")return[];r=r.replace(`${s}[${o}]`,""),i.value=o}}if(Qo(r)&&!t.variantMap.has(r)){let n=t.offsets.recordVariant(r),a=K(r.slice(1,-1)),s=ve(a,",");if(s.length>1)return[];if(!s.every(os))return[];let o=s.map((l,c)=>[t.offsets.applyParallelOffset(n,c),Oi(l.trim())]);t.variantMap.set(r,o)}if(t.variantMap.has(r)){let n=Qo(r),a=t.variantOptions.get(r)?.[Pt]??{},s=t.variantMap.get(r).slice(),o=[],l=(()=>!(n||a.respectPrefix===!1))();for(let[c,f]of e){if(c.layer==="user")continue;let d=ee.root({nodes:[f.clone()]});for(let[p,h,b]of s){let w=function(){v.raws.neededBackup||(v.raws.neededBackup=!0,v.walkRules(O=>O.raws.originalSelector=O.selector))},k=function(O){return w(),v.each(B=>{B.type==="rule"&&(B.selectors=B.selectors.map(N=>O({get className(){return Ho(N)},selector:N})))}),v},v=(b??d).clone(),y=[],S=h({get container(){return w(),v},separator:t.tailwindConfig.separator,modifySelectors:k,wrap(O){let B=v.nodes;v.removeAll(),O.append(B),v.append(O)},format(O){y.push({format:O,respectPrefix:l})},args:i});if(Array.isArray(S)){for(let[O,B]of S.entries())s.push([t.offsets.applyParallelOffset(p,O),B,v.clone()]);continue}if(typeof S=="string"&&y.push({format:S,respectPrefix:l}),S===null)continue;v.raws.neededBackup&&(delete v.raws.neededBackup,v.walkRules(O=>{let B=O.raws.originalSelector;if(!B||(delete O.raws.originalSelector,B===O.selector))return;let N=O.selector,T=(0,is.default)(F=>{F.walkClasses(Y=>{Y.value=`${r}${t.tailwindConfig.separator}${Y.value}`})}).processSync(B);y.push({format:N.replace(T,"&"),respectPrefix:l}),O.selector=B})),v.nodes[0].raws.tailwind={...v.nodes[0].raws.tailwind,parentLayer:c.layer};let E=[{...c,sort:t.offsets.applyVariantOffset(c.sort,p,Object.assign(i,t.variantOptions.get(r))),collectedFormats:(c.collectedFormats??[]).concat(y)},v.nodes[0]];o.push(E)}}return o}return[]}function Wo(r,e,t={}){return!ke(r)&&!Array.isArray(r)?[[r],t]:Array.isArray(r)?Wo(r[0],e,r[1]):(e.has(r)||e.set(r,lr(r)),[e.get(r),t])}function A_(r){return S_.test(r)}function C_(r){if(!r.includes("://"))return!1;try{let e=new URL(r);return e.scheme!==""&&e.host!==""}catch(e){return!1}}function Ch(r){let e=!0;return r.walkDecls(t=>{if(!_h(t.prop,t.value))return e=!1,!1}),e}function _h(r,e){if(C_(`${r}:${e}`))return!1;try{return ee.parse(`a{${r}:${e}}`).toResult(),!0}catch(t){return!1}}function __(r,e){let[,t,i]=r.match(/^\[([a-zA-Z0-9-_]+):(\S+)\]$/)??[];if(i===void 0||!A_(t)||!cr(i))return null;let n=K(i,{property:t});return _h(t,n)?[[{sort:e.offsets.arbitraryProperty(r),layer:"utilities",options:{respectImportant:!0}},()=>({[Do(r)]:{[t]:n}})]]:null}function*E_(r,e){e.candidateRuleMap.has(r)&&(yield[e.candidateRuleMap.get(r),"DEFAULT"]),yield*function*(o){o!==null&&(yield[o,"DEFAULT"])}(__(r,e));let t=r,i=!1,n=e.tailwindConfig.prefix,a=n.length,s=t.startsWith(n)||t.startsWith(`-${n}`);t[a]==="-"&&s&&(i=!0,t=n+t.slice(a+1)),i&&e.candidateRuleMap.has(t)&&(yield[e.candidateRuleMap.get(t),"-DEFAULT"]);for(let[o,l]of w_(t))e.candidateRuleMap.has(o)&&(yield[e.candidateRuleMap.get(o),i?`-${l}`:l])}function O_(r,e){return r===gt?[gt]:ve(r,e)}function*T_(r,e){for(let t of r)t[1].raws.tailwind={...t[1].raws.tailwind,classCandidate:e,preserveSource:t[0].options?.preserveSource??!1},yield t}function*Go(r,e){let t=e.tailwindConfig.separator,[i,...n]=O_(r,t).reverse(),a=!1;i.startsWith("!")&&(a=!0,i=i.slice(1));for(let s of E_(i,e)){let o=[],l=new Map,[c,f]=s,d=c.length===1;for(let[p,h]of c){let b=[];if(typeof 
h=="function")for(let v of[].concat(h(f,{isOnlyPlugin:d}))){let[y,w]=Wo(v,e.postCssNodeCache);for(let k of y)b.push([{...p,options:{...p.options,...w}},k])}else if(f==="DEFAULT"||f==="-DEFAULT"){let v=h,[y,w]=Wo(v,e.postCssNodeCache);for(let k of y)b.push([{...p,options:{...p.options,...w}},k])}if(b.length>0){let v=Array.from(Zs(p.options?.types??[],f,p.options??{},e.tailwindConfig)).map(([y,w])=>w);v.length>0&&l.set(b,v),o.push(b)}}if(Qo(f)){if(o.length>1){let b=function(y){return y.length===1?y[0]:y.find(w=>{let k=l.get(w);return w.some(([{options:S},E])=>Ch(E)?S.types.some(({type:O,preferOnConflict:B})=>k.includes(O)&&B):!1)})},[p,h]=o.reduce((y,w)=>(w.some(([{options:S}])=>S.types.some(({type:E})=>E==="any"))?y[0].push(w):y[1].push(w),y),[[],[]]),v=b(h)??b(p);if(v)o=[v];else{let y=o.map(k=>new Set([...l.get(k)??[]]));for(let k of y)for(let S of k){let E=!1;for(let O of y)k!==O&&O.has(S)&&(O.delete(S),E=!0);E&&k.delete(S)}let w=[];for(let[k,S]of y.entries())for(let E of S){let O=o[k].map(([,B])=>B).flat().map(B=>B.toString().split(` -`).slice(1,-1).map(N=>N.trim()).map(N=>` ${N}`).join(` -`)).join(` - -`);w.push(` Use \`${r.replace("[",`[${E}:`)}\` for \`${O.trim()}\``);break}G.warn([`The class \`${r}\` is ambiguous and matches multiple utilities.`,...w,`If this is content and not a class, replace it with \`${r.replace("[","[").replace("]","]")}\` to silence this warning.`]);continue}}o=o.map(p=>p.filter(h=>Ch(h[1])))}o=o.flat(),o=Array.from(T_(o,i)),o=v_(o,e),a&&(o=x_(o,i));for(let p of n)o=k_(p,o,e);for(let p of o)p[1].raws.tailwind={...p[1].raws.tailwind,candidate:r},p=R_(p,{context:e,candidate:r}),p!==null&&(yield p)}}function R_(r,{context:e,candidate:t}){if(!r[0].collectedFormats)return r;let i=!0,n;try{n=dr(r[0].collectedFormats,{context:e,candidate:t})}catch{return null}let a=ee.root({nodes:[r[1].clone()]});return a.walkRules(s=>{if(!ns(s))try{let o=ts(s.selector,n,{candidate:t,context:e});if(o===null){s.remove();return}s.selector=o}catch{return i=!1,!1}}),!i||a.nodes.length===0?null:(r[1]=a.nodes[0],r)}function ns(r){return r.parent&&r.parent.type==="atrule"&&r.parent.name==="keyframes"}function P_(r){if(r===!0)return e=>{ns(e)||e.walkDecls(t=>{t.parent.type==="rule"&&!ns(t.parent)&&(t.important=!0)})};if(typeof r=="string")return e=>{ns(e)||(e.selectors=e.selectors.map(t=>rs(t,r)))}}function ss(r,e,t=!1){let i=[],n=P_(e.tailwindConfig.important);for(let a of r){if(e.notClassCache.has(a))continue;if(e.candidateRuleCache.has(a)){i=i.concat(Array.from(e.candidateRuleCache.get(a)));continue}let s=Array.from(Go(a,e));if(s.length===0){e.notClassCache.add(a);continue}e.classCache.set(a,s);let o=e.candidateRuleCache.get(a)??new Set;e.candidateRuleCache.set(a,o);for(let l of s){let[{sort:c,options:f},d]=l;if(f.respectImportant&&n){let h=ee.root({nodes:[d.clone()]});h.walkRules(n),d=h.nodes[0]}let p=[c,t?d.clone():d];o.add(p),e.ruleCache.add(p),i.push(p)}}return i}function Qo(r){return r.startsWith("[")&&r.endsWith("]")}var is,b_,S_,as=R(()=>{u();Ot();is=pe(it());Io();Kt();Wn();Fr();Be();It();zo();qo();Br();_i();Mo();zt();ct();Vo();b_=(0,is.default)(r=>r.first.filter(({type:e})=>e==="class").pop().value);S_=/^[a-z_-]/});var Eh,Oh=R(()=>{u();Eh={}});function I_(r){try{return Eh.createHash("md5").update(r,"utf-8").digest("binary")}catch(e){return""}}function Th(r,e){let t=e.toString();if(!t.includes("@tailwind"))return!1;let i=Lo.get(r),n=I_(t),a=i!==n;return Lo.set(r,n),a}var Rh=R(()=>{u();Oh();It()});function ls(r){return(r>0n)-(r<0n)}var Ph=R(()=>{u()});function Ih(r,e){let 
t=0n,i=0n;for(let[n,a]of e)r&n&&(t=t|n,i=i|a);return r&~t|i}var Dh=R(()=>{u()});function qh(r){let e=null;for(let t of r)e=e??t,e=e>t?e:t;return e}function D_(r,e){let t=r.length,i=e.length,n=t{u();Ph();Dh();Yo=class{constructor(){this.offsets={defaults:0n,base:0n,components:0n,utilities:0n,variants:0n,user:0n},this.layerPositions={defaults:0n,base:1n,components:2n,utilities:3n,user:4n,variants:5n},this.reservedVariantBits=0n,this.variantOffsets=new Map}create(e){return{layer:e,parentLayer:e,arbitrary:0n,variants:0n,parallelIndex:0n,index:this.offsets[e]++,propertyOffset:0n,property:"",options:[]}}arbitraryProperty(e){return{...this.create("utilities"),arbitrary:1n,property:e}}forVariant(e,t=0){let i=this.variantOffsets.get(e);if(i===void 0)throw new Error(`Cannot find offset for unknown variant ${e}`);return{...this.create("variants"),variants:i<n.startsWith("[")).sort(([n],[a])=>D_(n,a)),t=e.map(([,n])=>n).sort((n,a)=>ls(n-a));return e.map(([,n],a)=>[n,t[a]]).filter(([n,a])=>n!==a)}remapArbitraryVariantOffsets(e){let t=this.recalculateVariantOffsets();return t.length===0?e:e.map(i=>{let[n,a]=i;return n={...n,variants:Ih(n.variants,t)},[n,a]})}sortArbitraryProperties(e){let t=new Set;for(let[s]of e)s.arbitrary===1n&&t.add(s.property);if(t.size===0)return e;let i=Array.from(t).sort(),n=new Map,a=1n;for(let s of i)n.set(s,a++);return e.map(s=>{let[o,l]=s;return o={...o,propertyOffset:n.get(o.property)??0n},[o,l]})}sort(e){return e=this.remapArbitraryVariantOffsets(e),e=this.sortArbitraryProperties(e),e.sort(([t],[i])=>ls(this.compare(t,i)))}}});function Zo(r,e){let t=r.tailwindConfig.prefix;return typeof t=="function"?t(e):t+e}function Mh({type:r="any",...e}){let t=[].concat(r);return{...e,types:t.map(i=>Array.isArray(i)?{type:i[0],...i[1]}:{type:i,preferOnConflict:!1})}}function q_(r){let e=[],t="",i=0;for(let n=0;n0&&e.push(t.trim()),e=e.filter(n=>n!==""),e}function $_(r,e,{before:t=[]}={}){if(t=[].concat(t),t.length<=0){r.push(e);return}let i=r.length-1;for(let n of t){let a=r.indexOf(n);a!==-1&&(i=Math.min(i,a))}r.splice(i,0,e)}function Nh(r){return Array.isArray(r)?r.flatMap(e=>!Array.isArray(e)&&!ke(e)?e:lr(e)):Nh([r])}function L_(r,e){return(0,Ko.default)(i=>{let n=[];return e&&e(i),i.walkClasses(a=>{n.push(a.value)}),n}).transformSync(r)}function M_(r){r.walkPseudos(e=>{e.value===":not"&&e.remove()})}function N_(r,e={containsNonOnDemandable:!1},t=0){let i=[],n=[];r.type==="rule"?n.push(...r.selectors):r.type==="atrule"&&r.walkRules(a=>n.push(...a.selectors));for(let a of n){let s=L_(a,M_);s.length===0&&(e.containsNonOnDemandable=!0);for(let o of s)i.push(o)}return t===0?[e.containsNonOnDemandable||i.length===0,i]:i}function us(r){return Nh(r).flatMap(e=>{let t=new Map,[i,n]=N_(e);return i&&n.unshift(gt),n.map(a=>(t.has(e)||t.set(e,e),[a,t.get(e)]))})}function os(r){return r.startsWith("@")||r.includes("&")}function Oi(r){r=r.replace(/\n+/g,"").replace(/\s{1,}/g," ").trim();let e=q_(r).map(t=>{if(!t.startsWith("@"))return({format:a})=>a(t);let[,i,n]=/@(\S*)( .+|[({].*)?/g.exec(t);return({wrap:a})=>a(ee.atRule({name:i,params:n?.trim()??""}))}).reverse();return t=>{for(let i of e)i(t)}}function B_(r,e,{variantList:t,variantMap:i,offsets:n,classList:a}){function s(p,h){return p?(0,Lh.default)(r,p,h):r}function o(p){return ur(r.prefix,p)}function l(p,h){return p===gt?gt:h.respectPrefix?e.tailwindConfig.prefix+p:p}function c(p,h,b={}){let v=kt(p),y=s(["theme",...v],h);return mt(v[0])(y,b)}let 
f=0,d={postcss:ee,prefix:o,e:Te,config:s,theme:c,corePlugins:p=>Array.isArray(r.corePlugins)?r.corePlugins.includes(p):s(["corePlugins",p],!0),variants:()=>[],addBase(p){for(let[h,b]of us(p)){let v=l(h,{}),y=n.create("base");e.candidateRuleMap.has(v)||e.candidateRuleMap.set(v,[]),e.candidateRuleMap.get(v).push([{sort:y,layer:"base"},b])}},addDefaults(p,h){let b={[`@defaults ${p}`]:h};for(let[v,y]of us(b)){let w=l(v,{});e.candidateRuleMap.has(w)||e.candidateRuleMap.set(w,[]),e.candidateRuleMap.get(w).push([{sort:n.create("defaults"),layer:"defaults"},y])}},addComponents(p,h){h=Object.assign({},{preserveSource:!1,respectPrefix:!0,respectImportant:!1},Array.isArray(h)?{}:h);for(let[v,y]of us(p)){let w=l(v,h);a.add(w),e.candidateRuleMap.has(w)||e.candidateRuleMap.set(w,[]),e.candidateRuleMap.get(w).push([{sort:n.create("components"),layer:"components",options:h},y])}},addUtilities(p,h){h=Object.assign({},{preserveSource:!1,respectPrefix:!0,respectImportant:!0},Array.isArray(h)?{}:h);for(let[v,y]of us(p)){let w=l(v,h);a.add(w),e.candidateRuleMap.has(w)||e.candidateRuleMap.set(w,[]),e.candidateRuleMap.get(w).push([{sort:n.create("utilities"),layer:"utilities",options:h},y])}},matchUtilities:function(p,h){h=Mh({...{respectPrefix:!0,respectImportant:!0,modifiers:!1},...h});let v=n.create("utilities");for(let y in p){let S=function(O,{isOnlyPlugin:B}){let[N,T,F]=Js(h.types,O,h,r);if(N===void 0)return[];if(!h.types.some(({type:U})=>U===T))if(B)G.warn([`Unnecessary typehint \`${T}\` in \`${y}-${O}\`.`,`You can safely update it to \`${y}-${O.replace(T+":","")}\`.`]);else return[];if(!cr(N))return[];let Y={get modifier(){return h.modifiers||G.warn(`modifier-used-without-options-for-${y}`,["Your plugin must set `modifiers: true` in its options to support modifiers."]),F}},_=we(r,"generalizedModifiers");return[].concat(_?k(N,Y):k(N)).filter(Boolean).map(U=>({[Gn(y,O)]:U}))},w=l(y,h),k=p[y];a.add([w,h]);let E=[{sort:v,layer:"utilities",options:h},S];e.candidateRuleMap.has(w)||e.candidateRuleMap.set(w,[]),e.candidateRuleMap.get(w).push(E)}},matchComponents:function(p,h){h=Mh({...{respectPrefix:!0,respectImportant:!1,modifiers:!1},...h});let v=n.create("components");for(let y in p){let S=function(O,{isOnlyPlugin:B}){let[N,T,F]=Js(h.types,O,h,r);if(N===void 0)return[];if(!h.types.some(({type:U})=>U===T))if(B)G.warn([`Unnecessary typehint \`${T}\` in \`${y}-${O}\`.`,`You can safely update it to \`${y}-${O.replace(T+":","")}\`.`]);else return[];if(!cr(N))return[];let Y={get modifier(){return h.modifiers||G.warn(`modifier-used-without-options-for-${y}`,["Your plugin must set `modifiers: true` in its options to support modifiers."]),F}},_=we(r,"generalizedModifiers");return[].concat(_?k(N,Y):k(N)).filter(Boolean).map(U=>({[Gn(y,O)]:U}))},w=l(y,h),k=p[y];a.add([w,h]);let E=[{sort:v,layer:"components",options:h},S];e.candidateRuleMap.has(w)||e.candidateRuleMap.set(w,[]),e.candidateRuleMap.get(w).push(E)}},addVariant(p,h,b={}){h=[].concat(h).map(v=>{if(typeof v!="string")return(y={})=>{let{args:w,modifySelectors:k,container:S,separator:E,wrap:O,format:B}=y,N=v(Object.assign({modifySelectors:k,container:S,separator:E},b.type===Xo.MatchVariant&&{args:w,wrap:O,format:B}));if(typeof N=="string"&&!os(N))throw new Error(`Your custom variant \`${p}\` has an invalid format string. 
Make sure it's an at-rule or contains a \`&\` placeholder.`);return Array.isArray(N)?N.filter(T=>typeof T=="string").map(T=>Oi(T)):N&&typeof N=="string"&&Oi(N)(y)};if(!os(v))throw new Error(`Your custom variant \`${p}\` has an invalid format string. Make sure it's an at-rule or contains a \`&\` placeholder.`);return Oi(v)}),$_(t,p,b),i.set(p,h),e.variantOptions.set(p,b)},matchVariant(p,h,b){let v=b?.id??++f,y=p==="@",w=we(r,"generalizedModifiers");for(let[S,E]of Object.entries(b?.values??{}))S!=="DEFAULT"&&d.addVariant(y?`${p}${S}`:`${p}-${S}`,({args:O,container:B})=>h(E,w?{modifier:O?.modifier,container:B}:{container:B}),{...b,value:E,id:v,type:Xo.MatchVariant,variantInfo:Jo.Base});let k="DEFAULT"in(b?.values??{});d.addVariant(p,({args:S,container:E})=>S?.value===Ei&&!k?null:h(S?.value===Ei?b.values.DEFAULT:S?.value??(typeof S=="string"?S:""),w?{modifier:S?.modifier,container:E}:{container:E}),{...b,id:v,type:Xo.MatchVariant,variantInfo:Jo.Dynamic})}};return d}function fs(r){return el.has(r)||el.set(r,new Map),el.get(r)}function Bh(r,e){let t=!1,i=new Map;for(let n of r){if(!n)continue;let a=sa.parse(n),s=a.hash?a.href.replace(a.hash,""):a.href;s=a.search?s.replace(a.search,""):s;let o=be.statSync(decodeURIComponent(s),{throwIfNoEntry:!1})?.mtimeMs;!o||((!e.has(n)||o>e.get(n))&&(t=!0),i.set(n,o))}return[t,i]}function Fh(r){r.walkAtRules(e=>{["responsive","variants"].includes(e.name)&&(Fh(e),e.before(e.nodes),e.remove())})}function F_(r){let e=[];return r.each(t=>{t.type==="atrule"&&["responsive","variants"].includes(t.name)&&(t.name="layer",t.params="utilities")}),r.walkAtRules("layer",t=>{if(Fh(t),t.params==="base"){for(let i of t.nodes)e.push(function({addBase:n}){n(i,{respectPrefix:!1})});t.remove()}else if(t.params==="components"){for(let i of t.nodes)e.push(function({addComponents:n}){n(i,{respectPrefix:!1,preserveSource:!0})});t.remove()}else if(t.params==="utilities"){for(let i of t.nodes)e.push(function({addUtilities:n}){n(i,{respectPrefix:!1,preserveSource:!0})});t.remove()}}),e}function j_(r,e){let t=Object.entries({...se,...mh}).map(([l,c])=>r.tailwindConfig.corePlugins.includes(l)?c:null).filter(Boolean),i=r.tailwindConfig.plugins.map(l=>(l.__isOptionsFunction&&(l=l()),typeof l=="function"?l:l.handler)),n=F_(e),a=[se.childVariant,se.pseudoElementVariants,se.pseudoClassVariants,se.hasVariants,se.ariaVariants,se.dataVariants],s=[se.supportsVariants,se.reducedMotionVariants,se.prefersContrastVariants,se.screenVariants,se.orientationVariants,se.directionVariants,se.darkVariants,se.forcedColorsVariants,se.printVariant];return(r.tailwindConfig.darkMode==="class"||Array.isArray(r.tailwindConfig.darkMode)&&r.tailwindConfig.darkMode[0]==="class")&&(s=[se.supportsVariants,se.reducedMotionVariants,se.prefersContrastVariants,se.darkVariants,se.screenVariants,se.orientationVariants,se.directionVariants,se.forcedColorsVariants,se.printVariant]),[...t,...a,...i,...s,...n]}function z_(r,e){let t=[],i=new Map;e.variantMap=i;let n=new Yo;e.offsets=n;let a=new Set,s=B_(e.tailwindConfig,e,{variantList:t,variantMap:i,offsets:n,classList:a});for(let f of r)if(Array.isArray(f))for(let d of f)d(s);else f?.(s);n.recordVariants(t,f=>i.get(f).length);for(let[f,d]of i.entries())e.variantMap.set(f,d.map((p,h)=>[n.forVariant(f,h),p]));let o=(e.tailwindConfig.safelist??[]).filter(Boolean);if(o.length>0){let f=[];for(let d of o){if(typeof d=="string"){e.changedContent.push({content:d,extension:"html"});continue}if(d instanceof RegExp){G.warn("root-regex",["Regular expressions in `safelist` work differently 
in Tailwind CSS v3.0.","Update your `safelist` configuration to eliminate this warning.","https://tailwindcss.com/docs/content-configuration#safelisting-classes"]);continue}f.push(d)}if(f.length>0){let d=new Map,p=e.tailwindConfig.prefix.length,h=f.some(b=>b.pattern.source.includes("!"));for(let b of a){let v=Array.isArray(b)?(()=>{let[y,w]=b,S=Object.keys(w?.values??{}).map(E=>Ci(y,E));return w?.supportsNegativeValues&&(S=[...S,...S.map(E=>"-"+E)],S=[...S,...S.map(E=>E.slice(0,p)+"-"+E.slice(p))]),w.types.some(({type:E})=>E==="color")&&(S=[...S,...S.flatMap(E=>Object.keys(e.tailwindConfig.theme.opacity).map(O=>`${E}/${O}`))]),h&&w?.respectImportant&&(S=[...S,...S.map(E=>"!"+E)]),S})():[b];for(let y of v)for(let{pattern:w,variants:k=[]}of f)if(w.lastIndex=0,d.has(w)||d.set(w,0),!!w.test(y)){d.set(w,d.get(w)+1),e.changedContent.push({content:y,extension:"html"});for(let S of k)e.changedContent.push({content:S+e.tailwindConfig.separator+y,extension:"html"})}}for(let[b,v]of d.entries())v===0&&G.warn([`The safelist pattern \`${b}\` doesn't match any Tailwind CSS classes.`,"Fix this pattern or remove it from your `safelist` configuration.","https://tailwindcss.com/docs/content-configuration#safelisting-classes"])}}let l=[].concat(e.tailwindConfig.darkMode??"media")[1]??"dark",c=[Zo(e,l),Zo(e,"group"),Zo(e,"peer")];e.getClassOrder=function(d){let p=[...d].sort((y,w)=>y===w?0:y[y,null])),b=ss(new Set(p),e,!0);b=e.offsets.sort(b);let v=BigInt(c.length);for(let[,y]of b){let w=y.raws.tailwind.candidate;h.set(w,h.get(w)??v++)}return d.map(y=>{let w=h.get(y)??null,k=c.indexOf(y);return w===null&&k!==-1&&(w=BigInt(k)),[y,w]})},e.getClassList=function(d={}){let p=[];for(let h of a)if(Array.isArray(h)){let[b,v]=h,y=[],w=Object.keys(v?.modifiers??{});v?.types?.some(({type:E})=>E==="color")&&w.push(...Object.keys(e.tailwindConfig.theme.opacity??{}));let k={modifiers:w},S=d.includeMetadata&&w.length>0;for(let[E,O]of Object.entries(v?.values??{})){if(O==null)continue;let B=Ci(b,E);if(p.push(S?[B,k]:B),v?.supportsNegativeValues&&xt(O)){let N=Ci(b,`-${E}`);y.push(S?[N,k]:N)}}p.push(...y)}else p.push(h);return p},e.getVariants=function(){let d=Math.random().toString(36).substring(7).toUpperCase(),p=[];for(let[h,b]of e.variantOptions.entries())b.variantInfo!==Jo.Base&&p.push({name:h,isArbitrary:b.type===Symbol.for("MATCH_VARIANT"),values:Object.keys(b.values??{}),hasDash:h!=="@",selectors({modifier:v,value:y}={}){let w=`TAILWINDPLACEHOLDER${d}`,k=ee.rule({selector:`.${w}`}),S=ee.root({nodes:[k.clone()]}),E=S.toString(),O=(e.variantMap.get(h)??[]).flatMap(([oe,A])=>A),B=[];for(let oe of O){let A=[],C={args:{modifier:v,value:b.values?.[y]??y},separator:e.tailwindConfig.separator,modifySelectors(V){return S.each(Ee=>{Ee.type==="rule"&&(Ee.selectors=Ee.selectors.map(Ie=>V({get className(){return Ho(Ie)},selector:Ie})))}),S},format(V){A.push(V)},wrap(V){A.push(`@${V.name} ${V.params} { & }`)},container:S},he=oe(C);if(A.length>0&&B.push(A),Array.isArray(he))for(let V of he)A=[],V(C),B.push(A)}let N=[],T=S.toString();E!==T&&(S.walkRules(oe=>{let A=oe.selector,C=(0,Ko.default)(he=>{he.walkClasses(V=>{V.value=`${h}${e.tailwindConfig.separator}${V.value}`})}).processSync(A);N.push(A.replace(C,"&").replace(w,"&"))}),S.walkAtRules(oe=>{N.push(`@${oe.name} (${oe.params}) { & }`)}));let F=!(y in(b.values??{})),Y=b[Pt]??{},_=(()=>!(F||Y.respectPrefix===!1))();B=B.map(oe=>oe.map(A=>({format:A,respectPrefix:_}))),N=N.map(oe=>({format:oe,respectPrefix:_}));let 
Q={candidate:w,context:e},U=B.map(oe=>ts(`.${w}`,dr(oe,Q),Q).replace(`.${w}`,"&").replace("{ & }","").trim());return N.length>0&&U.push(dr(N,Q).toString().replace(`.${w}`,"&")),U}});return p}}function jh(r,e){!r.classCache.has(e)||(r.notClassCache.add(e),r.classCache.delete(e),r.applyClassCache.delete(e),r.candidateRuleMap.delete(e),r.candidateRuleCache.delete(e),r.stylesheetCache=null)}function U_(r,e){let t=e.raws.tailwind.candidate;if(!!t){for(let i of r.ruleCache)i[1].raws.tailwind.candidate===t&&r.ruleCache.delete(i);jh(r,t)}}function tl(r,e=[],t=ee.root()){let i={disposables:[],ruleCache:new Set,candidateRuleCache:new Map,classCache:new Map,applyClassCache:new Map,notClassCache:new Set(r.blocklist??[]),postCssNodeCache:new Map,candidateRuleMap:new Map,tailwindConfig:r,changedContent:e,variantMap:new Map,stylesheetCache:null,variantOptions:new Map,markInvalidUtilityCandidate:a=>jh(i,a),markInvalidUtilityNode:a=>U_(i,a)},n=j_(i,t);return z_(n,i),i}function zh(r,e,t,i,n,a){let s=e.opts.from,o=i!==null;Je.DEBUG&&console.log("Source path:",s);let l;if(o&&hr.has(s))l=hr.get(s);else if(Ti.has(n)){let p=Ti.get(n);Dt.get(p).add(s),hr.set(s,p),l=p}let c=Th(s,r);if(l){let[p,h]=Bh([...a],fs(l));if(!p&&!c)return[l,!1,h]}if(hr.has(s)){let p=hr.get(s);if(Dt.has(p)&&(Dt.get(p).delete(s),Dt.get(p).size===0)){Dt.delete(p);for(let[h,b]of Ti)b===p&&Ti.delete(h);for(let h of p.disposables.splice(0))h(p)}}Je.DEBUG&&console.log("Setting up new context...");let f=tl(t,[],r);Object.assign(f,{userConfigPath:i});let[,d]=Bh([...a],fs(f));return Ti.set(n,f),hr.set(s,f),Dt.has(f)||Dt.set(f,new Set),Dt.get(f).add(s),[f,!0,d]}var Lh,Ko,Pt,Xo,Jo,el,hr,Ti,Dt,_i=R(()=>{u();ft();aa();Ot();Lh=pe(Oa()),Ko=pe(it());Si();Io();Wn();Kt();fr();qo();Fr();gh();It();It();Gi();Be();Hi();Mo();as();Rh();$h();ct();zo();Pt=Symbol(),Xo={AddVariant:Symbol.for("ADD_VARIANT"),MatchVariant:Symbol.for("MATCH_VARIANT")},Jo={Base:1<<0,Dynamic:1<<1};el=new WeakMap;hr=yh,Ti=bh,Dt=Zn});function rl(r){return r.ignore?[]:r.glob?m.env.ROLLUP_WATCH==="true"?[{type:"dependency",file:r.base}]:[{type:"dir-dependency",dir:r.base,glob:r.glob}]:[{type:"dependency",file:r.base}]}var Uh=R(()=>{u()});function Vh(r,e){return{handler:r,config:e}}var Hh,Wh=R(()=>{u();Vh.withOptions=function(r,e=()=>({})){let t=function(i){return{__options:i,handler:r(i),config:e(i)}};return t.__isOptionsFunction=!0,t.__pluginFunction=r,t.__configFunction=e,t};Hh=Vh});var il={};Ge(il,{default:()=>V_});var V_,nl=R(()=>{u();Wh();V_=Hh});var Qh=x((F4,Gh)=>{u();var H_=(nl(),il).default,W_={overflow:"hidden",display:"-webkit-box","-webkit-box-orient":"vertical"},G_=H_(function({matchUtilities:r,addUtilities:e,theme:t,variants:i}){let n=t("lineClamp");r({"line-clamp":a=>({...W_,"-webkit-line-clamp":`${a}`})},{values:n}),e([{".line-clamp-none":{"-webkit-line-clamp":"unset"}}],i("lineClamp"))},{theme:{lineClamp:{1:"1",2:"2",3:"3",4:"4",5:"5",6:"6"}},variants:{lineClamp:["responsive"]}});Gh.exports=G_});function sl(r){r.content.files.length===0&&G.warn("content-problems",["The `content` option in your Tailwind CSS configuration is missing or empty.","Configure your content sources or your generated CSS will be missing styles.","https://tailwindcss.com/docs/content-configuration"]);try{let e=Qh();r.plugins.includes(e)&&(G.warn("line-clamp-in-core",["As of Tailwind CSS v3.3, the `@tailwindcss/line-clamp` plugin is now included by default.","Remove it from the `plugins` array in your configuration to eliminate this warning."]),r.plugins=r.plugins.filter(t=>t!==e))}catch{}return r}var 
Yh=R(()=>{u();Be()});var Kh,Xh=R(()=>{u();Kh=()=>!1});var cs,Jh=R(()=>{u();cs={sync:r=>[].concat(r),generateTasks:r=>[{dynamic:!1,base:".",negative:[],positive:[].concat(r),patterns:[].concat(r)}],escapePath:r=>r}});var al,Zh=R(()=>{u();al=r=>r});var em,tm=R(()=>{u();em=()=>""});function rm(r){let e=r,t=em(r);return t!=="."&&(e=r.substr(t.length),e.charAt(0)==="/"&&(e=e.substr(1))),e.substr(0,2)==="./"?e=e.substr(2):e.charAt(0)==="/"&&(e=e.substr(1)),{base:t,glob:e}}var im=R(()=>{u();tm()});var ps=x(Ve=>{u();"use strict";Ve.isInteger=r=>typeof r=="number"?Number.isInteger(r):typeof r=="string"&&r.trim()!==""?Number.isInteger(Number(r)):!1;Ve.find=(r,e)=>r.nodes.find(t=>t.type===e);Ve.exceedsLimit=(r,e,t=1,i)=>i===!1||!Ve.isInteger(r)||!Ve.isInteger(e)?!1:(Number(e)-Number(r))/Number(t)>=i;Ve.escapeNode=(r,e=0,t)=>{let i=r.nodes[e];!i||(t&&i.type===t||i.type==="open"||i.type==="close")&&i.escaped!==!0&&(i.value="\\"+i.value,i.escaped=!0)};Ve.encloseBrace=r=>r.type!=="brace"?!1:r.commas>>0+r.ranges>>0==0?(r.invalid=!0,!0):!1;Ve.isInvalidBrace=r=>r.type!=="brace"?!1:r.invalid===!0||r.dollar?!0:r.commas>>0+r.ranges>>0==0||r.open!==!0||r.close!==!0?(r.invalid=!0,!0):!1;Ve.isOpenOrClose=r=>r.type==="open"||r.type==="close"?!0:r.open===!0||r.close===!0;Ve.reduce=r=>r.reduce((e,t)=>(t.type==="text"&&e.push(t.value),t.type==="range"&&(t.type="text"),e),[]);Ve.flatten=(...r)=>{let e=[],t=i=>{for(let n=0;n{u();"use strict";var nm=ps();sm.exports=(r,e={})=>{let t=(i,n={})=>{let a=e.escapeInvalid&&nm.isInvalidBrace(n),s=i.invalid===!0&&e.escapeInvalid===!0,o="";if(i.value)return(a||s)&&nm.isOpenOrClose(i)?"\\"+i.value:i.value;if(i.value)return i.value;if(i.nodes)for(let l of i.nodes)o+=t(l);return o};return t(r)}});var om=x((X4,am)=>{u();"use strict";am.exports=function(r){return typeof r=="number"?r-r==0:typeof r=="string"&&r.trim()!==""?Number.isFinite?Number.isFinite(+r):isFinite(+r):!1}});var gm=x((J4,mm)=>{u();"use strict";var lm=om(),Wt=(r,e,t)=>{if(lm(r)===!1)throw new TypeError("toRegexRange: expected the first argument to be a number");if(e===void 0||r===e)return String(r);if(lm(e)===!1)throw new TypeError("toRegexRange: expected the second argument to be a number.");let i={relaxZeros:!0,...t};typeof i.strictZeros=="boolean"&&(i.relaxZeros=i.strictZeros===!1);let n=String(i.relaxZeros),a=String(i.shorthand),s=String(i.capture),o=String(i.wrap),l=r+":"+e+"="+n+a+s+o;if(Wt.cache.hasOwnProperty(l))return Wt.cache[l].result;let c=Math.min(r,e),f=Math.max(r,e);if(Math.abs(c-f)===1){let v=r+"|"+e;return i.capture?`(${v})`:i.wrap===!1?v:`(?:${v})`}let d=hm(r)||hm(e),p={min:r,max:e,a:c,b:f},h=[],b=[];if(d&&(p.isPadded=d,p.maxLen=String(p.max).length),c<0){let v=f<0?Math.abs(f):1;b=um(v,Math.abs(c),p,i),c=p.a=0}return f>=0&&(h=um(c,f,p,i)),p.negatives=b,p.positives=h,p.result=Q_(b,h,i),i.capture===!0?p.result=`(${p.result})`:i.wrap!==!1&&h.length+b.length>1&&(p.result=`(?:${p.result})`),Wt.cache[l]=p,p.result};function Q_(r,e,t){let i=ol(r,e,"-",!1,t)||[],n=ol(e,r,"",!1,t)||[],a=ol(r,e,"-?",!0,t)||[];return i.concat(a).concat(n).join("|")}function Y_(r,e){let t=1,i=1,n=cm(r,t),a=new Set([e]);for(;r<=n&&n<=e;)a.add(n),t+=1,n=cm(r,t);for(n=pm(e+1,i)-1;r1&&o.count.pop(),o.count.push(f.count[0]),o.string=o.pattern+dm(o.count),s=c+1;continue}t.isPadded&&(d=eE(c,t,i)),f.string=d+f.pattern+dm(f.count),a.push(f),s=c+1,o=f}return a}function ol(r,e,t,i,n){let a=[];for(let s of r){let{string:o}=s;!i&&!fm(e,"string",o)&&a.push(t+o),i&&fm(e,"string",o)&&a.push(t+o)}return a}function X_(r,e){let t=[];for(let 
i=0;ie?1:e>r?-1:0}function fm(r,e,t){return r.some(i=>i[e]===t)}function cm(r,e){return Number(String(r).slice(0,-e)+"9".repeat(e))}function pm(r,e){return r-r%Math.pow(10,e)}function dm(r){let[e=0,t=""]=r;return t||e>1?`{${e+(t?","+t:"")}}`:""}function Z_(r,e,t){return`[${r}${e-r==1?"":"-"}${e}]`}function hm(r){return/^-?(0+)\d/.test(r)}function eE(r,e,t){if(!e.isPadded)return r;let i=Math.abs(e.maxLen-String(r).length),n=t.relaxZeros!==!1;switch(i){case 0:return"";case 1:return n?"0?":"0";case 2:return n?"0{0,2}":"00";default:return n?`0{0,${i}}`:`0{${i}}`}}Wt.cache={};Wt.clearCache=()=>Wt.cache={};mm.exports=Wt});var fl=x((Z4,Am)=>{u();"use strict";var tE=(Bn(),Nn),ym=gm(),bm=r=>r!==null&&typeof r=="object"&&!Array.isArray(r),rE=r=>e=>r===!0?Number(e):String(e),ll=r=>typeof r=="number"||typeof r=="string"&&r!=="",Ri=r=>Number.isInteger(+r),ul=r=>{let e=`${r}`,t=-1;if(e[0]==="-"&&(e=e.slice(1)),e==="0")return!1;for(;e[++t]==="0";);return t>0},iE=(r,e,t)=>typeof r=="string"||typeof e=="string"?!0:t.stringify===!0,nE=(r,e,t)=>{if(e>0){let i=r[0]==="-"?"-":"";i&&(r=r.slice(1)),r=i+r.padStart(i?e-1:e,"0")}return t===!1?String(r):r},wm=(r,e)=>{let t=r[0]==="-"?"-":"";for(t&&(r=r.slice(1),e--);r.length{r.negatives.sort((s,o)=>so?1:0),r.positives.sort((s,o)=>so?1:0);let t=e.capture?"":"?:",i="",n="",a;return r.positives.length&&(i=r.positives.join("|")),r.negatives.length&&(n=`-(${t}${r.negatives.join("|")})`),i&&n?a=`${i}|${n}`:a=i||n,e.wrap?`(${t}${a})`:a},vm=(r,e,t,i)=>{if(t)return ym(r,e,{wrap:!1,...i});let n=String.fromCharCode(r);if(r===e)return n;let a=String.fromCharCode(e);return`[${n}-${a}]`},xm=(r,e,t)=>{if(Array.isArray(r)){let i=t.wrap===!0,n=t.capture?"":"?:";return i?`(${n}${r.join("|")})`:r.join("|")}return ym(r,e,t)},km=(...r)=>new RangeError("Invalid range arguments: "+tE.inspect(...r)),Sm=(r,e,t)=>{if(t.strictRanges===!0)throw km([r,e]);return[]},aE=(r,e)=>{if(e.strictRanges===!0)throw new TypeError(`Expected step "${r}" to be a number`);return[]},oE=(r,e,t=1,i={})=>{let n=Number(r),a=Number(e);if(!Number.isInteger(n)||!Number.isInteger(a)){if(i.strictRanges===!0)throw km([r,e]);return[]}n===0&&(n=0),a===0&&(a=0);let s=n>a,o=String(r),l=String(e),c=String(t);t=Math.max(Math.abs(t),1);let f=ul(o)||ul(l)||ul(c),d=f?Math.max(o.length,l.length,c.length):0,p=f===!1&&iE(r,e,i)===!1,h=i.transform||rE(p);if(i.toRegex&&t===1)return vm(wm(r,d),wm(e,d),!0,i);let b={negatives:[],positives:[]},v=k=>b[k<0?"negatives":"positives"].push(Math.abs(k)),y=[],w=0;for(;s?n>=a:n<=a;)i.toRegex===!0&&t>1?v(n):y.push(nE(h(n,w),d,p)),n=s?n-t:n+t,w++;return i.toRegex===!0?t>1?sE(b,i):xm(y,null,{wrap:!1,...i}):y},lE=(r,e,t=1,i={})=>{if(!Ri(r)&&r.length>1||!Ri(e)&&e.length>1)return Sm(r,e,i);let n=i.transform||(p=>String.fromCharCode(p)),a=`${r}`.charCodeAt(0),s=`${e}`.charCodeAt(0),o=a>s,l=Math.min(a,s),c=Math.max(a,s);if(i.toRegex&&t===1)return vm(l,c,!1,i);let f=[],d=0;for(;o?a>=s:a<=s;)f.push(n(a,d)),a=o?a-t:a+t,d++;return i.toRegex===!0?xm(f,null,{wrap:!1,options:i}):f},hs=(r,e,t,i={})=>{if(e==null&&ll(r))return[r];if(!ll(r)||!ll(e))return Sm(r,e,i);if(typeof t=="function")return hs(r,e,1,{transform:t});if(bm(t))return hs(r,e,0,t);let n={...i};return n.capture===!0&&(n.wrap=!0),t=t||n.step||1,Ri(t)?Ri(r)&&Ri(e)?oE(r,e,t,n):lE(r,e,Math.max(Math.abs(t),1),n):t!=null&&!bm(t)?aE(t,n):hs(r,e,1,t)};Am.exports=hs});var Em=x((e6,_m)=>{u();"use strict";var uE=fl(),Cm=ps(),fE=(r,e={})=>{let t=(i,n={})=>{let 
a=Cm.isInvalidBrace(n),s=i.invalid===!0&&e.escapeInvalid===!0,o=a===!0||s===!0,l=e.escapeInvalid===!0?"\\":"",c="";if(i.isOpen===!0||i.isClose===!0)return l+i.value;if(i.type==="open")return o?l+i.value:"(";if(i.type==="close")return o?l+i.value:")";if(i.type==="comma")return i.prev.type==="comma"?"":o?i.value:"|";if(i.value)return i.value;if(i.nodes&&i.ranges>0){let f=Cm.reduce(i.nodes),d=uE(...f,{...e,wrap:!1,toRegex:!0});if(d.length!==0)return f.length>1&&d.length>1?`(${d})`:d}if(i.nodes)for(let f of i.nodes)c+=t(f,i);return c};return t(r)};_m.exports=fE});var Rm=x((t6,Tm)=>{u();"use strict";var cE=fl(),Om=ds(),mr=ps(),Gt=(r="",e="",t=!1)=>{let i=[];if(r=[].concat(r),e=[].concat(e),!e.length)return r;if(!r.length)return t?mr.flatten(e).map(n=>`{${n}}`):e;for(let n of r)if(Array.isArray(n))for(let a of n)i.push(Gt(a,e,t));else for(let a of e)t===!0&&typeof a=="string"&&(a=`{${a}}`),i.push(Array.isArray(a)?Gt(n,a,t):n+a);return mr.flatten(i)},pE=(r,e={})=>{let t=e.rangeLimit===void 0?1e3:e.rangeLimit,i=(n,a={})=>{n.queue=[];let s=a,o=a.queue;for(;s.type!=="brace"&&s.type!=="root"&&s.parent;)s=s.parent,o=s.queue;if(n.invalid||n.dollar){o.push(Gt(o.pop(),Om(n,e)));return}if(n.type==="brace"&&n.invalid!==!0&&n.nodes.length===2){o.push(Gt(o.pop(),["{}"]));return}if(n.nodes&&n.ranges>0){let d=mr.reduce(n.nodes);if(mr.exceedsLimit(...d,e.step,t))throw new RangeError("expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.");let p=cE(...d,e);p.length===0&&(p=Om(n,e)),o.push(Gt(o.pop(),p)),n.nodes=[];return}let l=mr.encloseBrace(n),c=n.queue,f=n;for(;f.type!=="brace"&&f.type!=="root"&&f.parent;)f=f.parent,c=f.queue;for(let d=0;d{u();"use strict";Pm.exports={MAX_LENGTH:1024*64,CHAR_0:"0",CHAR_9:"9",CHAR_UPPERCASE_A:"A",CHAR_LOWERCASE_A:"a",CHAR_UPPERCASE_Z:"Z",CHAR_LOWERCASE_Z:"z",CHAR_LEFT_PARENTHESES:"(",CHAR_RIGHT_PARENTHESES:")",CHAR_ASTERISK:"*",CHAR_AMPERSAND:"&",CHAR_AT:"@",CHAR_BACKSLASH:"\\",CHAR_BACKTICK:"`",CHAR_CARRIAGE_RETURN:"\r",CHAR_CIRCUMFLEX_ACCENT:"^",CHAR_COLON:":",CHAR_COMMA:",",CHAR_DOLLAR:"$",CHAR_DOT:".",CHAR_DOUBLE_QUOTE:'"',CHAR_EQUAL:"=",CHAR_EXCLAMATION_MARK:"!",CHAR_FORM_FEED:"\f",CHAR_FORWARD_SLASH:"/",CHAR_HASH:"#",CHAR_HYPHEN_MINUS:"-",CHAR_LEFT_ANGLE_BRACKET:"<",CHAR_LEFT_CURLY_BRACE:"{",CHAR_LEFT_SQUARE_BRACKET:"[",CHAR_LINE_FEED:` -`,CHAR_NO_BREAK_SPACE:"\xA0",CHAR_PERCENT:"%",CHAR_PLUS:"+",CHAR_QUESTION_MARK:"?",CHAR_RIGHT_ANGLE_BRACKET:">",CHAR_RIGHT_CURLY_BRACE:"}",CHAR_RIGHT_SQUARE_BRACKET:"]",CHAR_SEMICOLON:";",CHAR_SINGLE_QUOTE:"'",CHAR_SPACE:" ",CHAR_TAB:" ",CHAR_UNDERSCORE:"_",CHAR_VERTICAL_LINE:"|",CHAR_ZERO_WIDTH_NOBREAK_SPACE:"\uFEFF"}});var Mm=x((i6,Lm)=>{u();"use strict";var dE=ds(),{MAX_LENGTH:Dm,CHAR_BACKSLASH:cl,CHAR_BACKTICK:hE,CHAR_COMMA:mE,CHAR_DOT:gE,CHAR_LEFT_PARENTHESES:yE,CHAR_RIGHT_PARENTHESES:bE,CHAR_LEFT_CURLY_BRACE:wE,CHAR_RIGHT_CURLY_BRACE:vE,CHAR_LEFT_SQUARE_BRACKET:qm,CHAR_RIGHT_SQUARE_BRACKET:$m,CHAR_DOUBLE_QUOTE:xE,CHAR_SINGLE_QUOTE:kE,CHAR_NO_BREAK_SPACE:SE,CHAR_ZERO_WIDTH_NOBREAK_SPACE:AE}=Im(),CE=(r,e={})=>{if(typeof r!="string")throw new TypeError("Expected a string");let t=e||{},i=typeof t.maxLength=="number"?Math.min(Dm,t.maxLength):Dm;if(r.length>i)throw new SyntaxError(`Input length (${r.length}), exceeds max characters (${i})`);let n={type:"root",input:r,nodes:[]},a=[n],s=n,o=n,l=0,c=r.length,f=0,d=0,p,h={},b=()=>r[f++],v=y=>{if(y.type==="text"&&o.type==="dot"&&(o.type="text"),o&&o.type==="text"&&y.type==="text"){o.value+=y.value;return}return 
s.nodes.push(y),y.parent=s,y.prev=o,o=y,y};for(v({type:"bos"});f0){if(s.ranges>0){s.ranges=0;let y=s.nodes.shift();s.nodes=[y,{type:"text",value:dE(s)}]}v({type:"comma",value:p}),s.commas++;continue}if(p===gE&&d>0&&s.commas===0){let y=s.nodes;if(d===0||y.length===0){v({type:"text",value:p});continue}if(o.type==="dot"){if(s.range=[],o.value+=p,o.type="range",s.nodes.length!==3&&s.nodes.length!==5){s.invalid=!0,s.ranges=0,o.type="text";continue}s.ranges++,s.args=[];continue}if(o.type==="range"){y.pop();let w=y[y.length-1];w.value+=o.value+p,o=w,s.ranges--;continue}v({type:"dot",value:p});continue}v({type:"text",value:p})}do if(s=a.pop(),s.type!=="root"){s.nodes.forEach(k=>{k.nodes||(k.type==="open"&&(k.isOpen=!0),k.type==="close"&&(k.isClose=!0),k.nodes||(k.type="text"),k.invalid=!0)});let y=a[a.length-1],w=y.nodes.indexOf(s);y.nodes.splice(w,1,...s.nodes)}while(a.length>0);return v({type:"eos"}),n};Lm.exports=CE});var Fm=x((n6,Bm)=>{u();"use strict";var Nm=ds(),_E=Em(),EE=Rm(),OE=Mm(),Le=(r,e={})=>{let t=[];if(Array.isArray(r))for(let i of r){let n=Le.create(i,e);Array.isArray(n)?t.push(...n):t.push(n)}else t=[].concat(Le.create(r,e));return e&&e.expand===!0&&e.nodupes===!0&&(t=[...new Set(t)]),t};Le.parse=(r,e={})=>OE(r,e);Le.stringify=(r,e={})=>typeof r=="string"?Nm(Le.parse(r,e),e):Nm(r,e);Le.compile=(r,e={})=>(typeof r=="string"&&(r=Le.parse(r,e)),_E(r,e));Le.expand=(r,e={})=>{typeof r=="string"&&(r=Le.parse(r,e));let t=EE(r,e);return e.noempty===!0&&(t=t.filter(Boolean)),e.nodupes===!0&&(t=[...new Set(t)]),t};Le.create=(r,e={})=>r===""||r.length<3?[r]:e.expand!==!0?Le.compile(r,e):Le.expand(r,e);Bm.exports=Le});var Pi=x((s6,Hm)=>{u();"use strict";var TE=(et(),Ur),at="\\\\/",jm=`[^${at}]`,yt="\\.",RE="\\+",PE="\\?",ms="\\/",IE="(?=.)",zm="[^/]",pl=`(?:${ms}|$)`,Um=`(?:^|${ms})`,dl=`${yt}{1,2}${pl}`,DE=`(?!${yt})`,qE=`(?!${Um}${dl})`,$E=`(?!${yt}{0,1}${pl})`,LE=`(?!${dl})`,ME=`[^.${ms}]`,NE=`${zm}*?`,Vm={DOT_LITERAL:yt,PLUS_LITERAL:RE,QMARK_LITERAL:PE,SLASH_LITERAL:ms,ONE_CHAR:IE,QMARK:zm,END_ANCHOR:pl,DOTS_SLASH:dl,NO_DOT:DE,NO_DOTS:qE,NO_DOT_SLASH:$E,NO_DOTS_SLASH:LE,QMARK_NO_DOT:ME,STAR:NE,START_ANCHOR:Um},BE={...Vm,SLASH_LITERAL:`[${at}]`,QMARK:jm,STAR:`${jm}*?`,DOTS_SLASH:`${yt}{1,2}(?:[${at}]|$)`,NO_DOT:`(?!${yt})`,NO_DOTS:`(?!(?:^|[${at}])${yt}{1,2}(?:[${at}]|$))`,NO_DOT_SLASH:`(?!${yt}{0,1}(?:[${at}]|$))`,NO_DOTS_SLASH:`(?!${yt}{1,2}(?:[${at}]|$))`,QMARK_NO_DOT:`[^.${at}]`,START_ANCHOR:`(?:^|[${at}])`,END_ANCHOR:`(?:[${at}]|$)`},FE={alnum:"a-zA-Z0-9",alpha:"a-zA-Z",ascii:"\\x00-\\x7F",blank:" \\t",cntrl:"\\x00-\\x1F\\x7F",digit:"0-9",graph:"\\x21-\\x7E",lower:"a-z",print:"\\x20-\\x7E ",punct:"\\-!\"#$%&'()\\*+,./:;<=>?@[\\]^_`{|}~",space:" 
\\t\\r\\n\\v\\f",upper:"A-Z",word:"A-Za-z0-9_",xdigit:"A-Fa-f0-9"};Hm.exports={MAX_LENGTH:1024*64,POSIX_REGEX_SOURCE:FE,REGEX_BACKSLASH:/\\(?![*+?^${}(|)[\]])/g,REGEX_NON_SPECIAL_CHARS:/^[^@![\].,$*+?^{}()|\\/]+/,REGEX_SPECIAL_CHARS:/[-*+?.^${}(|)[\]]/,REGEX_SPECIAL_CHARS_BACKREF:/(\\?)((\W)(\3*))/g,REGEX_SPECIAL_CHARS_GLOBAL:/([-*+?.^${}(|)[\]])/g,REGEX_REMOVE_BACKSLASH:/(?:\[.*?[^\\]\]|\\(?=.))/g,REPLACEMENTS:{"***":"*","**/**":"**","**/**/**":"**"},CHAR_0:48,CHAR_9:57,CHAR_UPPERCASE_A:65,CHAR_LOWERCASE_A:97,CHAR_UPPERCASE_Z:90,CHAR_LOWERCASE_Z:122,CHAR_LEFT_PARENTHESES:40,CHAR_RIGHT_PARENTHESES:41,CHAR_ASTERISK:42,CHAR_AMPERSAND:38,CHAR_AT:64,CHAR_BACKWARD_SLASH:92,CHAR_CARRIAGE_RETURN:13,CHAR_CIRCUMFLEX_ACCENT:94,CHAR_COLON:58,CHAR_COMMA:44,CHAR_DOT:46,CHAR_DOUBLE_QUOTE:34,CHAR_EQUAL:61,CHAR_EXCLAMATION_MARK:33,CHAR_FORM_FEED:12,CHAR_FORWARD_SLASH:47,CHAR_GRAVE_ACCENT:96,CHAR_HASH:35,CHAR_HYPHEN_MINUS:45,CHAR_LEFT_ANGLE_BRACKET:60,CHAR_LEFT_CURLY_BRACE:123,CHAR_LEFT_SQUARE_BRACKET:91,CHAR_LINE_FEED:10,CHAR_NO_BREAK_SPACE:160,CHAR_PERCENT:37,CHAR_PLUS:43,CHAR_QUESTION_MARK:63,CHAR_RIGHT_ANGLE_BRACKET:62,CHAR_RIGHT_CURLY_BRACE:125,CHAR_RIGHT_SQUARE_BRACKET:93,CHAR_SEMICOLON:59,CHAR_SINGLE_QUOTE:39,CHAR_SPACE:32,CHAR_TAB:9,CHAR_UNDERSCORE:95,CHAR_VERTICAL_LINE:124,CHAR_ZERO_WIDTH_NOBREAK_SPACE:65279,SEP:TE.sep,extglobChars(r){return{"!":{type:"negate",open:"(?:(?!(?:",close:`))${r.STAR})`},"?":{type:"qmark",open:"(?:",close:")?"},"+":{type:"plus",open:"(?:",close:")+"},"*":{type:"star",open:"(?:",close:")*"},"@":{type:"at",open:"(?:",close:")"}}},globChars(r){return r===!0?BE:Vm}}});var Ii=x(Re=>{u();"use strict";var jE=(et(),Ur),zE=m.platform==="win32",{REGEX_BACKSLASH:UE,REGEX_REMOVE_BACKSLASH:VE,REGEX_SPECIAL_CHARS:HE,REGEX_SPECIAL_CHARS_GLOBAL:WE}=Pi();Re.isObject=r=>r!==null&&typeof r=="object"&&!Array.isArray(r);Re.hasRegexChars=r=>HE.test(r);Re.isRegexChar=r=>r.length===1&&Re.hasRegexChars(r);Re.escapeRegex=r=>r.replace(WE,"\\$1");Re.toPosixSlashes=r=>r.replace(UE,"/");Re.removeBackslashes=r=>r.replace(VE,e=>e==="\\"?"":e);Re.supportsLookbehinds=()=>{let r=m.version.slice(1).split(".").map(Number);return r.length===3&&r[0]>=9||r[0]===8&&r[1]>=10};Re.isWindows=r=>r&&typeof r.windows=="boolean"?r.windows:zE===!0||jE.sep==="\\";Re.escapeLast=(r,e,t)=>{let i=r.lastIndexOf(e,t);return i===-1?r:r[i-1]==="\\"?Re.escapeLast(r,e,i-1):`${r.slice(0,i)}\\${r.slice(i)}`};Re.removePrefix=(r,e={})=>{let t=r;return t.startsWith("./")&&(t=t.slice(2),e.prefix="./"),t};Re.wrapOutput=(r,e={},t={})=>{let i=t.contains?"":"^",n=t.contains?"":"$",a=`${i}(?:${r})${n}`;return e.negated===!0&&(a=`(?:^(?!${a}).*$)`),a}});var Zm=x((o6,Jm)=>{u();"use strict";var Wm=Ii(),{CHAR_ASTERISK:hl,CHAR_AT:GE,CHAR_BACKWARD_SLASH:Di,CHAR_COMMA:QE,CHAR_DOT:ml,CHAR_EXCLAMATION_MARK:gl,CHAR_FORWARD_SLASH:Gm,CHAR_LEFT_CURLY_BRACE:yl,CHAR_LEFT_PARENTHESES:bl,CHAR_LEFT_SQUARE_BRACKET:YE,CHAR_PLUS:KE,CHAR_QUESTION_MARK:Qm,CHAR_RIGHT_CURLY_BRACE:XE,CHAR_RIGHT_PARENTHESES:Ym,CHAR_RIGHT_SQUARE_BRACKET:JE}=Pi(),Km=r=>r===Gm||r===Di,Xm=r=>{r.isPrefix!==!0&&(r.depth=r.isGlobstar?1/0:1)},ZE=(r,e)=>{let 
t=e||{},i=r.length-1,n=t.parts===!0||t.scanToEnd===!0,a=[],s=[],o=[],l=r,c=-1,f=0,d=0,p=!1,h=!1,b=!1,v=!1,y=!1,w=!1,k=!1,S=!1,E=!1,O=!1,B=0,N,T,F={value:"",depth:0,isGlob:!1},Y=()=>c>=i,_=()=>l.charCodeAt(c+1),Q=()=>(N=T,l.charCodeAt(++c));for(;c0&&(oe=l.slice(0,f),l=l.slice(f),d-=f),U&&b===!0&&d>0?(U=l.slice(0,d),A=l.slice(d)):b===!0?(U="",A=l):U=l,U&&U!==""&&U!=="/"&&U!==l&&Km(U.charCodeAt(U.length-1))&&(U=U.slice(0,-1)),t.unescape===!0&&(A&&(A=Wm.removeBackslashes(A)),U&&k===!0&&(U=Wm.removeBackslashes(U)));let C={prefix:oe,input:r,start:f,base:U,glob:A,isBrace:p,isBracket:h,isGlob:b,isExtglob:v,isGlobstar:y,negated:S,negatedExtglob:E};if(t.tokens===!0&&(C.maxDepth=0,Km(T)||s.push(F),C.tokens=s),t.parts===!0||t.tokens===!0){let he;for(let V=0;V{u();"use strict";var gs=Pi(),Me=Ii(),{MAX_LENGTH:ys,POSIX_REGEX_SOURCE:e2,REGEX_NON_SPECIAL_CHARS:t2,REGEX_SPECIAL_CHARS_BACKREF:r2,REPLACEMENTS:eg}=gs,i2=(r,e)=>{if(typeof e.expandRange=="function")return e.expandRange(...r,e);r.sort();let t=`[${r.join("-")}]`;try{new RegExp(t)}catch(i){return r.map(n=>Me.escapeRegex(n)).join("..")}return t},gr=(r,e)=>`Missing ${r}: "${e}" - use "\\\\${e}" to match literal characters`,wl=(r,e)=>{if(typeof r!="string")throw new TypeError("Expected a string");r=eg[r]||r;let t={...e},i=typeof t.maxLength=="number"?Math.min(ys,t.maxLength):ys,n=r.length;if(n>i)throw new SyntaxError(`Input length: ${n}, exceeds maximum allowed length: ${i}`);let a={type:"bos",value:"",output:t.prepend||""},s=[a],o=t.capture?"":"?:",l=Me.isWindows(e),c=gs.globChars(l),f=gs.extglobChars(c),{DOT_LITERAL:d,PLUS_LITERAL:p,SLASH_LITERAL:h,ONE_CHAR:b,DOTS_SLASH:v,NO_DOT:y,NO_DOT_SLASH:w,NO_DOTS_SLASH:k,QMARK:S,QMARK_NO_DOT:E,STAR:O,START_ANCHOR:B}=c,N=q=>`(${o}(?:(?!${B}${q.dot?v:d}).)*?)`,T=t.dot?"":y,F=t.dot?S:E,Y=t.bash===!0?N(t):O;t.capture&&(Y=`(${Y})`),typeof t.noext=="boolean"&&(t.noextglob=t.noext);let _={input:r,index:-1,start:0,dot:t.dot===!0,consumed:"",output:"",prefix:"",backtrack:!1,negated:!1,brackets:0,braces:0,parens:0,quotes:0,globstar:!1,tokens:s};r=Me.removePrefix(r,_),n=r.length;let Q=[],U=[],oe=[],A=a,C,he=()=>_.index===n-1,V=_.peek=(q=1)=>r[_.index+q],Ee=_.advance=()=>r[++_.index]||"",Ie=()=>r.slice(_.index+1),De=(q="",ae=0)=>{_.consumed+=q,_.index+=ae},Bi=q=>{_.output+=q.output!=null?q.output:q.value,De(q.value)},Rv=()=>{let q=1;for(;V()==="!"&&(V(2)!=="("||V(3)==="?");)Ee(),_.start++,q++;return q%2==0?!1:(_.negated=!0,_.start++,!0)},Fi=q=>{_[q]++,oe.push(q)},Ft=q=>{_[q]--,oe.pop()},W=q=>{if(A.type==="globstar"){let ae=_.braces>0&&(q.type==="comma"||q.type==="brace"),I=q.extglob===!0||Q.length&&(q.type==="pipe"||q.type==="paren");q.type!=="slash"&&q.type!=="paren"&&!ae&&!I&&(_.output=_.output.slice(0,-A.output.length),A.type="star",A.value="*",A.output=Y,_.output+=A.output)}if(Q.length&&q.type!=="paren"&&(Q[Q.length-1].inner+=q.value),(q.value||q.output)&&Bi(q),A&&A.type==="text"&&q.type==="text"){A.value+=q.value,A.output=(A.output||"")+q.value;return}q.prev=A,s.push(q),A=q},ji=(q,ae)=>{let I={...f[ae],conditions:1,inner:""};I.prev=A,I.parens=_.parens,I.output=_.output;let H=(t.capture?"(":"")+I.open;Fi("parens"),W({type:q,value:ae,output:_.output?"":b}),W({type:"paren",extglob:!0,value:Ee(),output:H}),Q.push(I)},Pv=q=>{let ae=q.close+(t.capture?")":""),I;if(q.type==="negate"){let H=Y;if(q.inner&&q.inner.length>1&&q.inner.includes("/")&&(H=N(t)),(H!==Y||he()||/^\)+$/.test(Ie()))&&(ae=q.close=`)$))${H}`),q.inner.includes("*")&&(I=Ie())&&/^\.[^\\/.]+$/.test(I)){let 
ce=wl(I,{...e,fastpaths:!1}).output;ae=q.close=`)${ce})${H})`}q.prev.type==="bos"&&(_.negatedExtglob=!0)}W({type:"paren",extglob:!0,value:C,output:ae}),Ft("parens")};if(t.fastpaths!==!1&&!/(^[*!]|[/()[\]{}"])/.test(r)){let q=!1,ae=r.replace(r2,(I,H,ce,Ce,ye,Ms)=>Ce==="\\"?(q=!0,I):Ce==="?"?H?H+Ce+(ye?S.repeat(ye.length):""):Ms===0?F+(ye?S.repeat(ye.length):""):S.repeat(ce.length):Ce==="."?d.repeat(ce.length):Ce==="*"?H?H+Ce+(ye?Y:""):Y:H?I:`\\${I}`);return q===!0&&(t.unescape===!0?ae=ae.replace(/\\/g,""):ae=ae.replace(/\\+/g,I=>I.length%2==0?"\\\\":I?"\\":"")),ae===r&&t.contains===!0?(_.output=r,_):(_.output=Me.wrapOutput(ae,_,e),_)}for(;!he();){if(C=Ee(),C==="\0")continue;if(C==="\\"){let I=V();if(I==="/"&&t.bash!==!0||I==="."||I===";")continue;if(!I){C+="\\",W({type:"text",value:C});continue}let H=/^\\+/.exec(Ie()),ce=0;if(H&&H[0].length>2&&(ce=H[0].length,_.index+=ce,ce%2!=0&&(C+="\\")),t.unescape===!0?C=Ee():C+=Ee(),_.brackets===0){W({type:"text",value:C});continue}}if(_.brackets>0&&(C!=="]"||A.value==="["||A.value==="[^")){if(t.posix!==!1&&C===":"){let I=A.value.slice(1);if(I.includes("[")&&(A.posix=!0,I.includes(":"))){let H=A.value.lastIndexOf("["),ce=A.value.slice(0,H),Ce=A.value.slice(H+2),ye=e2[Ce];if(ye){A.value=ce+ye,_.backtrack=!0,Ee(),!a.output&&s.indexOf(A)===1&&(a.output=b);continue}}}(C==="["&&V()!==":"||C==="-"&&V()==="]")&&(C=`\\${C}`),C==="]"&&(A.value==="["||A.value==="[^")&&(C=`\\${C}`),t.posix===!0&&C==="!"&&A.value==="["&&(C="^"),A.value+=C,Bi({value:C});continue}if(_.quotes===1&&C!=='"'){C=Me.escapeRegex(C),A.value+=C,Bi({value:C});continue}if(C==='"'){_.quotes=_.quotes===1?0:1,t.keepQuotes===!0&&W({type:"text",value:C});continue}if(C==="("){Fi("parens"),W({type:"paren",value:C});continue}if(C===")"){if(_.parens===0&&t.strictBrackets===!0)throw new SyntaxError(gr("opening","("));let I=Q[Q.length-1];if(I&&_.parens===I.parens+1){Pv(Q.pop());continue}W({type:"paren",value:C,output:_.parens?")":"\\)"}),Ft("parens");continue}if(C==="["){if(t.nobracket===!0||!Ie().includes("]")){if(t.nobracket!==!0&&t.strictBrackets===!0)throw new SyntaxError(gr("closing","]"));C=`\\${C}`}else Fi("brackets");W({type:"bracket",value:C});continue}if(C==="]"){if(t.nobracket===!0||A&&A.type==="bracket"&&A.value.length===1){W({type:"text",value:C,output:`\\${C}`});continue}if(_.brackets===0){if(t.strictBrackets===!0)throw new SyntaxError(gr("opening","["));W({type:"text",value:C,output:`\\${C}`});continue}Ft("brackets");let I=A.value.slice(1);if(A.posix!==!0&&I[0]==="^"&&!I.includes("/")&&(C=`/${C}`),A.value+=C,Bi({value:C}),t.literalBrackets===!1||Me.hasRegexChars(I))continue;let H=Me.escapeRegex(A.value);if(_.output=_.output.slice(0,-A.value.length),t.literalBrackets===!0){_.output+=H,A.value=H;continue}A.value=`(${o}${H}|${A.value})`,_.output+=A.value;continue}if(C==="{"&&t.nobrace!==!0){Fi("braces");let I={type:"brace",value:C,output:"(",outputIndex:_.output.length,tokensIndex:_.tokens.length};U.push(I),W(I);continue}if(C==="}"){let I=U[U.length-1];if(t.nobrace===!0||!I){W({type:"text",value:C,output:C});continue}let H=")";if(I.dots===!0){let ce=s.slice(),Ce=[];for(let ye=ce.length-1;ye>=0&&(s.pop(),ce[ye].type!=="brace");ye--)ce[ye].type!=="dots"&&Ce.unshift(ce[ye].value);H=i2(Ce,t),_.backtrack=!0}if(I.comma!==!0&&I.dots!==!0){let ce=_.output.slice(0,I.outputIndex),Ce=_.tokens.slice(I.tokensIndex);I.value=I.output="\\{",C=H="\\}",_.output=ce;for(let ye of 
Ce)_.output+=ye.output||ye.value}W({type:"brace",value:C,output:H}),Ft("braces"),U.pop();continue}if(C==="|"){Q.length>0&&Q[Q.length-1].conditions++,W({type:"text",value:C});continue}if(C===","){let I=C,H=U[U.length-1];H&&oe[oe.length-1]==="braces"&&(H.comma=!0,I="|"),W({type:"comma",value:C,output:I});continue}if(C==="/"){if(A.type==="dot"&&_.index===_.start+1){_.start=_.index+1,_.consumed="",_.output="",s.pop(),A=a;continue}W({type:"slash",value:C,output:h});continue}if(C==="."){if(_.braces>0&&A.type==="dot"){A.value==="."&&(A.output=d);let I=U[U.length-1];A.type="dots",A.output+=C,A.value+=C,I.dots=!0;continue}if(_.braces+_.parens===0&&A.type!=="bos"&&A.type!=="slash"){W({type:"text",value:C,output:d});continue}W({type:"dot",value:C,output:d});continue}if(C==="?"){if(!(A&&A.value==="(")&&t.noextglob!==!0&&V()==="("&&V(2)!=="?"){ji("qmark",C);continue}if(A&&A.type==="paren"){let H=V(),ce=C;if(H==="<"&&!Me.supportsLookbehinds())throw new Error("Node.js v10 or higher is required for regex lookbehinds");(A.value==="("&&!/[!=<:]/.test(H)||H==="<"&&!/<([!=]|\w+>)/.test(Ie()))&&(ce=`\\${C}`),W({type:"text",value:C,output:ce});continue}if(t.dot!==!0&&(A.type==="slash"||A.type==="bos")){W({type:"qmark",value:C,output:E});continue}W({type:"qmark",value:C,output:S});continue}if(C==="!"){if(t.noextglob!==!0&&V()==="("&&(V(2)!=="?"||!/[!=<:]/.test(V(3)))){ji("negate",C);continue}if(t.nonegate!==!0&&_.index===0){Rv();continue}}if(C==="+"){if(t.noextglob!==!0&&V()==="("&&V(2)!=="?"){ji("plus",C);continue}if(A&&A.value==="("||t.regex===!1){W({type:"plus",value:C,output:p});continue}if(A&&(A.type==="bracket"||A.type==="paren"||A.type==="brace")||_.parens>0){W({type:"plus",value:C});continue}W({type:"plus",value:p});continue}if(C==="@"){if(t.noextglob!==!0&&V()==="("&&V(2)!=="?"){W({type:"at",extglob:!0,value:C,output:""});continue}W({type:"text",value:C});continue}if(C!=="*"){(C==="$"||C==="^")&&(C=`\\${C}`);let I=t2.exec(Ie());I&&(C+=I[0],_.index+=I[0].length),W({type:"text",value:C});continue}if(A&&(A.type==="globstar"||A.star===!0)){A.type="star",A.star=!0,A.value+=C,A.output=Y,_.backtrack=!0,_.globstar=!0,De(C);continue}let q=Ie();if(t.noextglob!==!0&&/^\([^?]/.test(q)){ji("star",C);continue}if(A.type==="star"){if(t.noglobstar===!0){De(C);continue}let I=A.prev,H=I.prev,ce=I.type==="slash"||I.type==="bos",Ce=H&&(H.type==="star"||H.type==="globstar");if(t.bash===!0&&(!ce||q[0]&&q[0]!=="/")){W({type:"star",value:C,output:""});continue}let ye=_.braces>0&&(I.type==="comma"||I.type==="brace"),Ms=Q.length&&(I.type==="pipe"||I.type==="paren");if(!ce&&I.type!=="paren"&&!ye&&!Ms){W({type:"star",value:C,output:""});continue}for(;q.slice(0,3)==="/**";){let zi=r[_.index+4];if(zi&&zi!=="/")break;q=q.slice(3),De("/**",3)}if(I.type==="bos"&&he()){A.type="globstar",A.value+=C,A.output=N(t),_.output=A.output,_.globstar=!0,De(C);continue}if(I.type==="slash"&&I.prev.type!=="bos"&&!Ce&&he()){_.output=_.output.slice(0,-(I.output+A.output).length),I.output=`(?:${I.output}`,A.type="globstar",A.output=N(t)+(t.strictSlashes?")":"|$)"),A.value+=C,_.globstar=!0,_.output+=I.output+A.output,De(C);continue}if(I.type==="slash"&&I.prev.type!=="bos"&&q[0]==="/"){let zi=q[1]!==void 
0?"|$":"";_.output=_.output.slice(0,-(I.output+A.output).length),I.output=`(?:${I.output}`,A.type="globstar",A.output=`${N(t)}${h}|${h}${zi})`,A.value+=C,_.output+=I.output+A.output,_.globstar=!0,De(C+Ee()),W({type:"slash",value:"/",output:""});continue}if(I.type==="bos"&&q[0]==="/"){A.type="globstar",A.value+=C,A.output=`(?:^|${h}|${N(t)}${h})`,_.output=A.output,_.globstar=!0,De(C+Ee()),W({type:"slash",value:"/",output:""});continue}_.output=_.output.slice(0,-A.output.length),A.type="globstar",A.output=N(t),A.value+=C,_.output+=A.output,_.globstar=!0,De(C);continue}let ae={type:"star",value:C,output:Y};if(t.bash===!0){ae.output=".*?",(A.type==="bos"||A.type==="slash")&&(ae.output=T+ae.output),W(ae);continue}if(A&&(A.type==="bracket"||A.type==="paren")&&t.regex===!0){ae.output=C,W(ae);continue}(_.index===_.start||A.type==="slash"||A.type==="dot")&&(A.type==="dot"?(_.output+=w,A.output+=w):t.dot===!0?(_.output+=k,A.output+=k):(_.output+=T,A.output+=T),V()!=="*"&&(_.output+=b,A.output+=b)),W(ae)}for(;_.brackets>0;){if(t.strictBrackets===!0)throw new SyntaxError(gr("closing","]"));_.output=Me.escapeLast(_.output,"["),Ft("brackets")}for(;_.parens>0;){if(t.strictBrackets===!0)throw new SyntaxError(gr("closing",")"));_.output=Me.escapeLast(_.output,"("),Ft("parens")}for(;_.braces>0;){if(t.strictBrackets===!0)throw new SyntaxError(gr("closing","}"));_.output=Me.escapeLast(_.output,"{"),Ft("braces")}if(t.strictSlashes!==!0&&(A.type==="star"||A.type==="bracket")&&W({type:"maybe_slash",value:"",output:`${h}?`}),_.backtrack===!0){_.output="";for(let q of _.tokens)_.output+=q.output!=null?q.output:q.value,q.suffix&&(_.output+=q.suffix)}return _};wl.fastpaths=(r,e)=>{let t={...e},i=typeof t.maxLength=="number"?Math.min(ys,t.maxLength):ys,n=r.length;if(n>i)throw new SyntaxError(`Input length: ${n}, exceeds maximum allowed length: ${i}`);r=eg[r]||r;let a=Me.isWindows(e),{DOT_LITERAL:s,SLASH_LITERAL:o,ONE_CHAR:l,DOTS_SLASH:c,NO_DOT:f,NO_DOTS:d,NO_DOTS_SLASH:p,STAR:h,START_ANCHOR:b}=gs.globChars(a),v=t.dot?d:f,y=t.dot?p:f,w=t.capture?"":"?:",k={negated:!1,prefix:""},S=t.bash===!0?".*?":h;t.capture&&(S=`(${S})`);let E=T=>T.noglobstar===!0?S:`(${w}(?:(?!${b}${T.dot?c:s}).)*?)`,O=T=>{switch(T){case"*":return`${v}${l}${S}`;case".*":return`${s}${l}${S}`;case"*.*":return`${v}${S}${s}${l}${S}`;case"*/*":return`${v}${S}${o}${l}${y}${S}`;case"**":return v+E(t);case"**/*":return`(?:${v}${E(t)}${o})?${y}${l}${S}`;case"**/*.*":return`(?:${v}${E(t)}${o})?${y}${S}${s}${l}${S}`;case"**/.*":return`(?:${v}${E(t)}${o})?${s}${l}${S}`;default:{let F=/^(.*?)\.(\w+)$/.exec(T);if(!F)return;let Y=O(F[1]);return Y?Y+s+F[2]:void 0}}},B=Me.removePrefix(r,k),N=O(B);return N&&t.strictSlashes!==!0&&(N+=`${o}?`),N};tg.exports=wl});var ng=x((u6,ig)=>{u();"use strict";var n2=(et(),Ur),s2=Zm(),vl=rg(),xl=Ii(),a2=Pi(),o2=r=>r&&typeof r=="object"&&!Array.isArray(r),de=(r,e,t=!1)=>{if(Array.isArray(r)){let f=r.map(p=>de(p,e,t));return p=>{for(let h of f){let b=h(p);if(b)return b}return!1}}let i=o2(r)&&r.tokens&&r.input;if(r===""||typeof r!="string"&&!i)throw new TypeError("Expected pattern to be a non-empty string");let n=e||{},a=xl.isWindows(e),s=i?de.compileRe(r,e):de.makeRe(r,e,!1,!0),o=s.state;delete s.state;let l=()=>!1;if(n.ignore){let f={...e,ignore:null,onMatch:null,onResult:null};l=de(n.ignore,f,t)}let c=(f,d=!1)=>{let{isMatch:p,match:h,output:b}=de.test(f,s,e,{glob:r,posix:a}),v={glob:r,state:o,regex:s,posix:a,input:f,output:b,match:h,isMatch:p};return typeof 
n.onResult=="function"&&n.onResult(v),p===!1?(v.isMatch=!1,d?v:!1):l(f)?(typeof n.onIgnore=="function"&&n.onIgnore(v),v.isMatch=!1,d?v:!1):(typeof n.onMatch=="function"&&n.onMatch(v),d?v:!0)};return t&&(c.state=o),c};de.test=(r,e,t,{glob:i,posix:n}={})=>{if(typeof r!="string")throw new TypeError("Expected input to be a string");if(r==="")return{isMatch:!1,output:""};let a=t||{},s=a.format||(n?xl.toPosixSlashes:null),o=r===i,l=o&&s?s(r):r;return o===!1&&(l=s?s(r):r,o=l===i),(o===!1||a.capture===!0)&&(a.matchBase===!0||a.basename===!0?o=de.matchBase(r,e,t,n):o=e.exec(l)),{isMatch:Boolean(o),match:o,output:l}};de.matchBase=(r,e,t,i=xl.isWindows(t))=>(e instanceof RegExp?e:de.makeRe(e,t)).test(n2.basename(r));de.isMatch=(r,e,t)=>de(e,t)(r);de.parse=(r,e)=>Array.isArray(r)?r.map(t=>de.parse(t,e)):vl(r,{...e,fastpaths:!1});de.scan=(r,e)=>s2(r,e);de.compileRe=(r,e,t=!1,i=!1)=>{if(t===!0)return r.output;let n=e||{},a=n.contains?"":"^",s=n.contains?"":"$",o=`${a}(?:${r.output})${s}`;r&&r.negated===!0&&(o=`^(?!${o}).*$`);let l=de.toRegex(o,e);return i===!0&&(l.state=r),l};de.makeRe=(r,e={},t=!1,i=!1)=>{if(!r||typeof r!="string")throw new TypeError("Expected a non-empty string");let n={negated:!1,fastpaths:!0};return e.fastpaths!==!1&&(r[0]==="."||r[0]==="*")&&(n.output=vl.fastpaths(r,e)),n.output||(n=vl(r,e)),de.compileRe(n,e,t,i)};de.toRegex=(r,e)=>{try{let t=e||{};return new RegExp(r,t.flags||(t.nocase?"i":""))}catch(t){if(e&&e.debug===!0)throw t;return/$^/}};de.constants=a2;ig.exports=de});var ag=x((f6,sg)=>{u();"use strict";sg.exports=ng()});var cg=x((c6,fg)=>{u();"use strict";var og=(Bn(),Nn),lg=Fm(),ot=ag(),kl=Ii(),ug=r=>r===""||r==="./",fe=(r,e,t)=>{e=[].concat(e),r=[].concat(r);let i=new Set,n=new Set,a=new Set,s=0,o=f=>{a.add(f.output),t&&t.onResult&&t.onResult(f)};for(let f=0;f!i.has(f));if(t&&c.length===0){if(t.failglob===!0)throw new Error(`No matches found for "${e.join(", ")}"`);if(t.nonull===!0||t.nullglob===!0)return t.unescape?e.map(f=>f.replace(/\\/g,"")):e}return c};fe.match=fe;fe.matcher=(r,e)=>ot(r,e);fe.isMatch=(r,e,t)=>ot(e,t)(r);fe.any=fe.isMatch;fe.not=(r,e,t={})=>{e=[].concat(e).map(String);let i=new Set,n=[],a=o=>{t.onResult&&t.onResult(o),n.push(o.output)},s=new Set(fe(r,e,{...t,onResult:a}));for(let o of n)s.has(o)||i.add(o);return[...i]};fe.contains=(r,e,t)=>{if(typeof r!="string")throw new TypeError(`Expected a string: "${og.inspect(r)}"`);if(Array.isArray(e))return e.some(i=>fe.contains(r,i,t));if(typeof e=="string"){if(ug(r)||ug(e))return!1;if(r.includes(e)||r.startsWith("./")&&r.slice(2).includes(e))return!0}return fe.isMatch(r,e,{...t,contains:!0})};fe.matchKeys=(r,e,t)=>{if(!kl.isObject(r))throw new TypeError("Expected the first argument to be an object");let i=fe(Object.keys(r),e,t),n={};for(let a of i)n[a]=r[a];return n};fe.some=(r,e,t)=>{let i=[].concat(r);for(let n of[].concat(e)){let a=ot(String(n),t);if(i.some(s=>a(s)))return!0}return!1};fe.every=(r,e,t)=>{let i=[].concat(r);for(let n of[].concat(e)){let a=ot(String(n),t);if(!i.every(s=>a(s)))return!1}return!0};fe.all=(r,e,t)=>{if(typeof r!="string")throw new TypeError(`Expected a string: "${og.inspect(r)}"`);return[].concat(e).every(i=>ot(i,t)(r))};fe.capture=(r,e,t)=>{let i=kl.isWindows(t),a=ot.makeRe(String(r),{...t,capture:!0}).exec(i?kl.toPosixSlashes(e):e);if(a)return a.slice(1).map(s=>s===void 0?"":s)};fe.makeRe=(...r)=>ot.makeRe(...r);fe.scan=(...r)=>ot.scan(...r);fe.parse=(r,e)=>{let t=[];for(let i of[].concat(r||[]))for(let n of lg(String(i),e))t.push(ot.parse(n,e));return 
t};fe.braces=(r,e)=>{if(typeof r!="string")throw new TypeError("Expected a string");return e&&e.nobrace===!0||!/\{.*\}/.test(r)?[r]:lg(r,e)};fe.braceExpand=(r,e)=>{if(typeof r!="string")throw new TypeError("Expected a string");return fe.braces(r,{...e,expand:!0})};fg.exports=fe});function dg(r,e){let t=e.content.files;t=t.filter(o=>typeof o=="string"),t=t.map(al);let i=cs.generateTasks(t),n=[],a=[];for(let o of i)n.push(...o.positive.map(l=>hg(l,!1))),a.push(...o.negative.map(l=>hg(l,!0)));let s=[...n,...a];return s=u2(r,s),s=s.flatMap(f2),s=s.map(l2),s}function hg(r,e){let t={original:r,base:r,ignore:e,pattern:r,glob:null};return Kh(r)&&Object.assign(t,rm(r)),t}function l2(r){let e=al(r.base);return e=cs.escapePath(e),r.pattern=r.glob?`${e}/${r.glob}`:e,r.pattern=r.ignore?`!${r.pattern}`:r.pattern,r}function u2(r,e){let t=[];return r.userConfigPath&&r.tailwindConfig.content.relative&&(t=[me.dirname(r.userConfigPath)]),e.map(i=>(i.base=me.resolve(...t,i.base),i))}function f2(r){let e=[r];try{let t=be.realpathSync(r.base);t!==r.base&&e.push({...r,base:t})}catch{}return e}function mg(r,e,t){let i=r.tailwindConfig.content.files.filter(s=>typeof s.raw=="string").map(({raw:s,extension:o="html"})=>({content:s,extension:o})),[n,a]=p2(e,t);for(let s of n){let o=me.extname(s).slice(1);i.push({file:s,extension:o})}return[i,a]}function c2(r){if(!r.some(a=>a.includes("**")&&!yg.test(a)))return()=>{};let t=[],i=[];for(let a of r){let s=pg.default.matcher(a);yg.test(a)&&i.push(s),t.push(s)}let n=!1;return a=>{if(n||i.some(f=>f(a)))return;let s=t.findIndex(f=>f(a));if(s===-1)return;let o=r[s],l=me.relative(m.cwd(),o);l[0]!=="."&&(l=`./${l}`);let c=gg.find(f=>a.includes(f));c&&(n=!0,G.warn("broad-content-glob-pattern",[`Your \`content\` configuration includes a pattern which looks like it's accidentally matching all of \`${c}\` and can cause serious performance issues.`,`Pattern: \`${l}\``,"See our documentation for recommendations:","https://tailwindcss.com/docs/content-configuration#pattern-recommendations"]))}}function p2(r,e){let t=r.map(o=>o.pattern),i=new Map,n=c2(t),a=new Set;Je.DEBUG&&console.time("Finding changed files");let s=cs.sync(t,{absolute:!0});for(let o of s){n(o);let l=e.get(o)||-1/0,c=be.statSync(o).mtimeMs;c>l&&(a.add(o),i.set(o,c))}return Je.DEBUG&&console.timeEnd("Finding changed files"),[a,i]}var pg,gg,yg,bg=R(()=>{u();ft();et();Xh();Jh();Zh();im();It();Be();pg=pe(cg());gg=["node_modules"],yg=new RegExp(`(${gg.map(r=>String.raw`\b${r}\b`).join("|")})`)});function wg(){}var vg=R(()=>{u()});function g2(r,e){for(let t of e){let i=`${r}${t}`;if(be.existsSync(i)&&be.statSync(i).isFile())return i}for(let t of e){let i=`${r}/index${t}`;if(be.existsSync(i))return i}return null}function*xg(r,e,t,i=me.extname(r)){let n=g2(me.resolve(e,r),d2.includes(i)?h2:m2);if(n===null||t.has(n))return;t.add(n),yield n,e=me.dirname(n),i=me.extname(n);let a=be.readFileSync(n,"utf-8");for(let s of[...a.matchAll(/import[\s\S]*?['"](.{3,}?)['"]/gi),...a.matchAll(/import[\s\S]*from[\s\S]*?['"](.{3,}?)['"]/gi),...a.matchAll(/require\(['"`](.+)['"`]\)/gi)])!s[1].startsWith(".")||(yield*xg(s[1],e,t,i))}function Sl(r){return r===null?new Set:new Set(xg(r,me.dirname(r),new Set))}var d2,h2,m2,kg=R(()=>{u();ft();et();d2=[".js",".cjs",".mjs"],h2=["",".js",".cjs",".mjs",".ts",".cts",".mts",".jsx",".tsx"],m2=["",".ts",".cts",".mts",".tsx",".js",".cjs",".mjs",".jsx"]});function y2(r,e){if(Al.has(r))return Al.get(r);let t=dg(r,e);return Al.set(r,t).get(r)}function b2(r){let 
e=na(r);if(e!==null){let[i,n,a,s]=Ag.get(e)||[],o=Sl(e),l=!1,c=new Map;for(let p of o){let h=be.statSync(p).mtimeMs;c.set(p,h),(!s||!s.has(p)||h>s.get(p))&&(l=!0)}if(!l)return[i,e,n,a];for(let p of o)delete pf.cache[p];let f=sl(zr(wg(e))),d=Vi(f);return Ag.set(e,[f,d,o,c]),[f,e,d,o]}let t=zr(r?.config??r??{});return t=sl(t),[t,null,Vi(t),[]]}function Cl(r){return({tailwindDirectives:e,registerDependency:t})=>(i,n)=>{let[a,s,o,l]=b2(r),c=new Set(l);if(e.size>0){c.add(n.opts.from);for(let b of n.messages)b.type==="dependency"&&c.add(b.file)}let[f,,d]=zh(i,n,a,s,o,c),p=fs(f),h=y2(f,a);if(e.size>0){for(let y of h)for(let w of rl(y))t(w);let[b,v]=mg(f,h,p);for(let y of b)f.changedContent.push(y);for(let[y,w]of v.entries())d.set(y,w)}for(let b of l)t({type:"dependency",file:b});for(let[b,v]of d.entries())p.set(b,v);return f}}var Sg,Ag,Al,Cg=R(()=>{u();ft();Sg=pe(Ns());yf();ia();sc();_i();Uh();Yh();bg();vg();kg();Ag=new Sg.default({maxSize:100}),Al=new WeakMap});function _l(r){let e=new Set,t=new Set,i=new Set;if(r.walkAtRules(n=>{n.name==="apply"&&i.add(n),n.name==="import"&&(n.params==='"tailwindcss/base"'||n.params==="'tailwindcss/base'"?(n.name="tailwind",n.params="base"):n.params==='"tailwindcss/components"'||n.params==="'tailwindcss/components'"?(n.name="tailwind",n.params="components"):n.params==='"tailwindcss/utilities"'||n.params==="'tailwindcss/utilities'"?(n.name="tailwind",n.params="utilities"):(n.params==='"tailwindcss/screens"'||n.params==="'tailwindcss/screens'"||n.params==='"tailwindcss/variants"'||n.params==="'tailwindcss/variants'")&&(n.name="tailwind",n.params="variants")),n.name==="tailwind"&&(n.params==="screens"&&(n.params="variants"),e.add(n.params)),["layer","responsive","variants"].includes(n.name)&&(["responsive","variants"].includes(n.name)&&G.warn(`${n.name}-at-rule-deprecated`,[`The \`@${n.name}\` directive has been deprecated in Tailwind CSS v3.0.`,"Use `@layer utilities` or `@layer components` instead.","https://tailwindcss.com/docs/upgrade-guide#replace-variants-with-layer"]),t.add(n))}),!e.has("base")||!e.has("components")||!e.has("utilities")){for(let n of t)if(n.name==="layer"&&["base","components","utilities"].includes(n.params)){if(!e.has(n.params))throw n.error(`\`@layer ${n.params}\` is used but no matching \`@tailwind ${n.params}\` directive is present.`)}else if(n.name==="responsive"){if(!e.has("utilities"))throw n.error("`@responsive` is used but `@tailwind utilities` is missing.")}else if(n.name==="variants"&&!e.has("utilities"))throw n.error("`@variants` is used but `@tailwind utilities` is missing.")}return{tailwindDirectives:e,applyDirectives:i}}var _g=R(()=>{u();Be()});function Qt(r,e=void 0,t=void 0){return r.map(i=>{let n=i.clone();return t!==void 0&&(n.raws.tailwind={...n.raws.tailwind,...t}),e!==void 0&&Eg(n,a=>{if(a.raws.tailwind?.preserveSource===!0&&a.source)return!1;a.source=e}),n})}function Eg(r,e){e(r)!==!1&&r.each?.(t=>Eg(t,e))}var Og=R(()=>{u()});function El(r){return r=Array.isArray(r)?r:[r],r=r.map(e=>e instanceof RegExp?e.source:e),r.join("")}function Ne(r){return new RegExp(El(r),"g")}function qt(r){return`(?:${r.map(El).join("|")})`}function Ol(r){return`(?:${El(r)})?`}function Rg(r){return r&&w2.test(r)?r.replace(Tg,"\\$&"):r||""}var Tg,w2,Pg=R(()=>{u();Tg=/[\\^$.*+?()[\]{}|]/g,w2=RegExp(Tg.source)});function Ig(r){let e=Array.from(v2(r));return t=>{let i=[];for(let n of e)for(let a of t.match(n)??[])i.push(S2(a));for(let n of i.slice()){let a=ve(n,".");for(let s=0;s=a.length-1){i.push(o);continue}let 
l=Number(a[s+1]);isNaN(l)?i.push(o):s++}}return i}}function*v2(r){let e=r.tailwindConfig.separator,t=r.tailwindConfig.prefix!==""?Ol(Ne([/-?/,Rg(r.tailwindConfig.prefix)])):"",i=qt([/\[[^\s:'"`]+:[^\s\[\]]+\]/,/\[[^\s:'"`\]]+:[^\s]+?\[[^\s]+\][^\s]+?\]/,Ne([qt([/-?(?:\w+)/,/@(?:\w+)/]),Ol(qt([Ne([qt([/-(?:\w+-)*\['[^\s]+'\]/,/-(?:\w+-)*\["[^\s]+"\]/,/-(?:\w+-)*\[`[^\s]+`\]/,/-(?:\w+-)*\[(?:[^\s\[\]]+\[[^\s\[\]]+\])*[^\s:\[\]]+\]/]),/(?![{([]])/,/(?:\/[^\s'"`\\><$]*)?/]),Ne([qt([/-(?:\w+-)*\['[^\s]+'\]/,/-(?:\w+-)*\["[^\s]+"\]/,/-(?:\w+-)*\[`[^\s]+`\]/,/-(?:\w+-)*\[(?:[^\s\[\]]+\[[^\s\[\]]+\])*[^\s\[\]]+\]/]),/(?![{([]])/,/(?:\/[^\s'"`\\$]*)?/]),/[-\/][^\s'"`\\$={><]*/]))])]),n=[qt([Ne([/@\[[^\s"'`]+\](\/[^\s"'`]+)?/,e]),Ne([/([^\s"'`\[\\]+-)?\[[^\s"'`]+\]\/[\w_-]+/,e]),Ne([/([^\s"'`\[\\]+-)?\[[^\s"'`]+\]/,e]),Ne([/[^\s"'`\[\\]+/,e])]),qt([Ne([/([^\s"'`\[\\]+-)?\[[^\s`]+\]\/[\w_-]+/,e]),Ne([/([^\s"'`\[\\]+-)?\[[^\s`]+\]/,e]),Ne([/[^\s`\[\\]+/,e])])];for(let a of n)yield Ne(["((?=((",a,")+))\\2)?",/!?/,t,i]);yield/[^<>"'`\s.(){}[\]#=%$][^<>"'`\s(){}[\]#=%$]*[^<>"'`\s.(){}[\]#=%:$]/g}function S2(r){if(!r.includes("-["))return r;let e=0,t=[],i=r.matchAll(x2);i=Array.from(i).flatMap(n=>{let[,...a]=n;return a.map((s,o)=>Object.assign([],n,{index:n.index+o,0:s}))});for(let n of i){let a=n[0],s=t[t.length-1];if(a===s?t.pop():(a==="'"||a==='"'||a==="`")&&t.push(a),!s){if(a==="["){e++;continue}else if(a==="]"){e--;continue}if(e<0)return r.substring(0,n.index-1);if(e===0&&!k2.test(a))return r.substring(0,n.index)}}return r}var x2,k2,Dg=R(()=>{u();Pg();zt();x2=/([\[\]'"`])([^\[\]'"`])?/g,k2=/[^"'`\s<>\]]+/});function A2(r,e){let t=r.tailwindConfig.content.extract;return t[e]||t.DEFAULT||$g[e]||$g.DEFAULT(r)}function C2(r,e){let t=r.content.transform;return t[e]||t.DEFAULT||Lg[e]||Lg.DEFAULT}function _2(r,e,t,i){qi.has(e)||qi.set(e,new qg.default({maxSize:25e3}));for(let n of r.split(` -`))if(n=n.trim(),!i.has(n))if(i.add(n),qi.get(e).has(n))for(let a of qi.get(e).get(n))t.add(a);else{let a=e(n).filter(o=>o!=="!*"),s=new Set(a);for(let o of s)t.add(o);qi.get(e).set(n,s)}}function E2(r,e){let t=e.offsets.sort(r),i={base:new Set,defaults:new Set,components:new Set,utilities:new Set,variants:new Set};for(let[n,a]of t)i[n.layer].add(a);return i}function Tl(r){return async e=>{let t={base:null,components:null,utilities:null,variants:null};if(e.walkAtRules(y=>{y.name==="tailwind"&&Object.keys(t).includes(y.params)&&(t[y.params]=y)}),Object.values(t).every(y=>y===null))return e;let i=new Set([...r.candidates??[],gt]),n=new Set;bt.DEBUG&&console.time("Reading changed files");let a=[];for(let y of r.changedContent){let w=C2(r.tailwindConfig,y.extension),k=A2(r,y.extension);a.push([y,{transformer:w,extractor:k}])}let s=500;for(let y=0;y{S=k?await be.promises.readFile(k,"utf8"):S,_2(E(S),O,i,n)}))}bt.DEBUG&&console.timeEnd("Reading changed files");let o=r.classCache.size;bt.DEBUG&&console.time("Generate rules"),bt.DEBUG&&console.time("Sorting candidates");let l=new Set([...i].sort((y,w)=>y===w?0:y{let w=y.raws.tailwind?.parentLayer;return w==="components"?t.components!==null:w==="utilities"?t.utilities!==null:!0});t.variants?(t.variants.before(Qt(b,t.variants.source,{layer:"variants"})),t.variants.remove()):b.length>0&&e.append(Qt(b,e.source,{layer:"variants"})),e.source.end=e.source.end??e.source.start;let v=b.some(y=>y.raws.tailwind?.parentLayer==="utilities");t.utilities&&p.size===0&&!v&&G.warn("content-problems",["No utility classes were detected in your source files. 
If this is unexpected, double-check the `content` option in your Tailwind CSS configuration.","https://tailwindcss.com/docs/content-configuration"]),bt.DEBUG&&(console.log("Potential classes: ",i.size),console.log("Active contexts: ",Zn.size)),r.changedContent=[],e.walkAtRules("layer",y=>{Object.keys(t).includes(y.params)&&y.remove()})}}var qg,bt,$g,Lg,qi,Mg=R(()=>{u();ft();qg=pe(Ns());It();as();Be();Og();Dg();bt=Je,$g={DEFAULT:Ig},Lg={DEFAULT:r=>r,svelte:r=>r.replace(/(?:^|\s)class:/g," ")};qi=new WeakMap});function ws(r){let e=new Map;ee.root({nodes:[r.clone()]}).walkRules(a=>{(0,bs.default)(s=>{s.walkClasses(o=>{let l=o.parent.toString(),c=e.get(l);c||e.set(l,c=new Set),c.add(o.value)})}).processSync(a.selector)});let i=Array.from(e.values(),a=>Array.from(a)),n=i.flat();return Object.assign(n,{groups:i})}function Rl(r){return O2.astSync(r)}function Ng(r,e){let t=new Set;for(let i of r)t.add(i.split(e).pop());return Array.from(t)}function Bg(r,e){let t=r.tailwindConfig.prefix;return typeof t=="function"?t(e):t+e}function*Fg(r){for(yield r;r.parent;)yield r.parent,r=r.parent}function T2(r,e={}){let t=r.nodes;r.nodes=[];let i=r.clone(e);return r.nodes=t,i}function R2(r){for(let e of Fg(r))if(r!==e){if(e.type==="root")break;r=T2(e,{nodes:[r]})}return r}function P2(r,e){let t=new Map;return r.walkRules(i=>{for(let s of Fg(i))if(s.raws.tailwind?.layer!==void 0)return;let n=R2(i),a=e.offsets.create("user");for(let s of ws(i)){let o=t.get(s)||[];t.set(s,o),o.push([{layer:"user",sort:a,important:!1},n])}}),t}function I2(r,e){for(let t of r){if(e.notClassCache.has(t)||e.applyClassCache.has(t))continue;if(e.classCache.has(t)){e.applyClassCache.set(t,e.classCache.get(t).map(([n,a])=>[n,a.clone()]));continue}let i=Array.from(Go(t,e));if(i.length===0){e.notClassCache.add(t);continue}e.applyClassCache.set(t,i)}return e.applyClassCache}function D2(r){let e=null;return{get:t=>(e=e||r(),e.get(t)),has:t=>(e=e||r(),e.has(t))}}function q2(r){return{get:e=>r.flatMap(t=>t.get(e)||[]),has:e=>r.some(t=>t.has(e))}}function jg(r){let e=r.split(/[\s\t\n]+/g);return e[e.length-1]==="!important"?[e.slice(0,-1),!0]:[e,!1]}function zg(r,e,t){let i=new Set,n=[];if(r.walkAtRules("apply",l=>{let[c]=jg(l.params);for(let f of c)i.add(f);n.push(l)}),n.length===0)return;let a=q2([t,I2(i,e)]);function s(l,c,f){let d=Rl(l),p=Rl(c),b=Rl(`.${Te(f)}`).nodes[0].nodes[0];return d.each(v=>{let y=new Set;p.each(w=>{let k=!1;w=w.clone(),w.walkClasses(S=>{S.value===b.value&&(k||(S.replaceWith(...v.nodes.map(E=>E.clone())),y.add(w),k=!0))})});for(let w of y){let k=[[]];for(let S of w.nodes)S.type==="combinator"?(k.push(S),k.push([])):k[k.length-1].push(S);w.nodes=[];for(let S of k)Array.isArray(S)&&S.sort((E,O)=>E.type==="tag"&&O.type==="class"?-1:E.type==="class"&&O.type==="tag"?1:E.type==="class"&&O.type==="pseudo"&&O.value.startsWith("::")?-1:E.type==="pseudo"&&E.value.startsWith("::")&&O.type==="class"?1:0),w.nodes=w.nodes.concat(S)}v.replaceWith(...y)}),d.toString()}let o=new Map;for(let l of n){let[c]=o.get(l.parent)||[[],l.source];o.set(l.parent,[c,l.source]);let[f,d]=jg(l.params);if(l.parent.type==="atrule"){if(l.parent.name==="screen"){let p=l.parent.params;throw l.error(`@apply is not supported within nested at-rules like @screen. We suggest you write this as @apply ${f.map(h=>`${p}:${h}`).join(" ")} instead.`)}throw l.error(`@apply is not supported within nested at-rules like @${l.parent.name}. 
You can fix this by un-nesting @${l.parent.name}.`)}for(let p of f){if([Bg(e,"group"),Bg(e,"peer")].includes(p))throw l.error(`@apply should not be used with the '${p}' utility`);if(!a.has(p))throw l.error(`The \`${p}\` class does not exist. If \`${p}\` is a custom class, make sure it is defined within a \`@layer\` directive.`);let h=a.get(p);for(let[,b]of h)b.type!=="atrule"&&b.walkRules(()=>{throw l.error([`The \`${p}\` class cannot be used with \`@apply\` because \`@apply\` does not currently support nested CSS.`,"Rewrite the selector without nesting or configure the `tailwindcss/nesting` plugin:","https://tailwindcss.com/docs/using-with-preprocessors#nesting"].join(` -`))});c.push([p,d,h])}}for(let[l,[c,f]]of o){let d=[];for(let[h,b,v]of c){let y=[h,...Ng([h],e.tailwindConfig.separator)];for(let[w,k]of v){let S=ws(l),E=ws(k);if(E=E.groups.filter(T=>T.some(F=>y.includes(F))).flat(),E=E.concat(Ng(E,e.tailwindConfig.separator)),S.some(T=>E.includes(T)))throw k.error(`You cannot \`@apply\` the \`${h}\` utility here because it creates a circular dependency.`);let B=ee.root({nodes:[k.clone()]});B.walk(T=>{T.source=f}),(k.type!=="atrule"||k.type==="atrule"&&k.name!=="keyframes")&&B.walkRules(T=>{if(!ws(T).some(U=>U===h)){T.remove();return}let F=typeof e.tailwindConfig.important=="string"?e.tailwindConfig.important:null,_=l.raws.tailwind!==void 0&&F&&l.selector.indexOf(F)===0?l.selector.slice(F.length):l.selector;_===""&&(_=l.selector),T.selector=s(_,T.selector,h),F&&_!==l.selector&&(T.selector=rs(T.selector,F)),T.walkDecls(U=>{U.important=w.important||b});let Q=(0,bs.default)().astSync(T.selector);Q.each(U=>pr(U)),T.selector=Q.toString()}),!!B.nodes[0]&&d.push([w.sort,B.nodes[0]])}}let p=e.offsets.sort(d).map(h=>h[1]);l.after(p)}for(let l of n)l.parent.nodes.length>1?l.remove():l.parent.remove();zg(r,e,t)}function Pl(r){return e=>{let t=D2(()=>P2(e,r));zg(e,r,t)}}var bs,O2,Ug=R(()=>{u();Ot();bs=pe(it());as();fr();Vo();es();O2=(0,bs.default)()});var Vg=x((rq,vs)=>{u();(function(){"use strict";function r(i,n,a){if(!i)return null;r.caseSensitive||(i=i.toLowerCase());var s=r.threshold===null?null:r.threshold*i.length,o=r.thresholdAbsolute,l;s!==null&&o!==null?l=Math.min(s,o):s!==null?l=s:o!==null?l=o:l=null;var c,f,d,p,h,b=n.length;for(h=0;ha)return a+1;var l=[],c,f,d,p,h;for(c=0;c<=o;c++)l[c]=[c];for(f=0;f<=s;f++)l[0][f]=f;for(c=1;c<=o;c++){for(d=e,p=1,c>a&&(p=c-a),h=o+1,h>a+c&&(h=a+c),f=1;f<=s;f++)fh?l[c][f]=a+1:n.charAt(c-1)===i.charAt(f-1)?l[c][f]=l[c-1][f-1]:l[c][f]=Math.min(l[c-1][f-1]+1,Math.min(l[c][f-1]+1,l[c-1][f]+1)),l[c][f]a)return a+1}return l[o][s]}})()});var Wg=x((iq,Hg)=>{u();var Il="(".charCodeAt(0),Dl=")".charCodeAt(0),xs="'".charCodeAt(0),ql='"'.charCodeAt(0),$l="\\".charCodeAt(0),yr="/".charCodeAt(0),Ll=",".charCodeAt(0),Ml=":".charCodeAt(0),ks="*".charCodeAt(0),$2="u".charCodeAt(0),L2="U".charCodeAt(0),M2="+".charCodeAt(0),N2=/^[a-f0-9?-]+$/i;Hg.exports=function(r){for(var e=[],t=r,i,n,a,s,o,l,c,f,d=0,p=t.charCodeAt(d),h=t.length,b=[{nodes:e}],v=0,y,w="",k="",S="";d{u();Gg.exports=function r(e,t,i){var n,a,s,o;for(n=0,a=e.length;n{u();function Yg(r,e){var t=r.type,i=r.value,n,a;return e&&(a=e(r))!==void 0?a:t==="word"||t==="space"?i:t==="string"?(n=r.quote||"",n+i+(r.unclosed?"":n)):t==="comment"?"/*"+i+(r.unclosed?"":"*/"):t==="div"?(r.before||"")+i+(r.after||""):Array.isArray(r.nodes)?(n=Kg(r.nodes,e),t!=="function"?n:i+"("+(r.before||"")+n+(r.after||"")+(r.unclosed?"":")")):i}function Kg(r,e){var t,i;if(Array.isArray(r)){for(t="",i=r.length-1;~i;i-=1)t=Yg(r[i],e)+t;return 
t}return Yg(r,e)}Xg.exports=Kg});var ey=x((aq,Zg)=>{u();var Ss="-".charCodeAt(0),As="+".charCodeAt(0),Nl=".".charCodeAt(0),B2="e".charCodeAt(0),F2="E".charCodeAt(0);function j2(r){var e=r.charCodeAt(0),t;if(e===As||e===Ss){if(t=r.charCodeAt(1),t>=48&&t<=57)return!0;var i=r.charCodeAt(2);return t===Nl&&i>=48&&i<=57}return e===Nl?(t=r.charCodeAt(1),t>=48&&t<=57):e>=48&&e<=57}Zg.exports=function(r){var e=0,t=r.length,i,n,a;if(t===0||!j2(r))return!1;for(i=r.charCodeAt(e),(i===As||i===Ss)&&e++;e57));)e+=1;if(i=r.charCodeAt(e),n=r.charCodeAt(e+1),i===Nl&&n>=48&&n<=57)for(e+=2;e57));)e+=1;if(i=r.charCodeAt(e),n=r.charCodeAt(e+1),a=r.charCodeAt(e+2),(i===B2||i===F2)&&(n>=48&&n<=57||(n===As||n===Ss)&&a>=48&&a<=57))for(e+=n===As||n===Ss?3:2;e57));)e+=1;return{number:r.slice(0,e),unit:r.slice(e)}}});var ny=x((oq,iy)=>{u();var z2=Wg(),ty=Qg(),ry=Jg();function $t(r){return this instanceof $t?(this.nodes=z2(r),this):new $t(r)}$t.prototype.toString=function(){return Array.isArray(this.nodes)?ry(this.nodes):""};$t.prototype.walk=function(r,e){return ty(this.nodes,r,e),this};$t.unit=ey();$t.walk=ty;$t.stringify=ry;iy.exports=$t});function Fl(r){return typeof r=="object"&&r!==null}function U2(r,e){let t=kt(e);do if(t.pop(),(0,$i.default)(r,t)!==void 0)break;while(t.length);return t.length?t:void 0}function br(r){return typeof r=="string"?r:r.reduce((e,t,i)=>t.includes(".")?`${e}[${t}]`:i===0?t:`${e}.${t}`,"")}function ay(r){return r.map(e=>`'${e}'`).join(", ")}function oy(r){return ay(Object.keys(r))}function jl(r,e,t,i={}){let n=Array.isArray(e)?br(e):e.replace(/^['"]+|['"]+$/g,""),a=Array.isArray(e)?e:kt(n),s=(0,$i.default)(r.theme,a,t);if(s===void 0){let l=`'${n}' does not exist in your theme config.`,c=a.slice(0,-1),f=(0,$i.default)(r.theme,c);if(Fl(f)){let d=Object.keys(f).filter(h=>jl(r,[...c,h]).isValid),p=(0,sy.default)(a[a.length-1],d);p?l+=` Did you mean '${br([...c,p])}'?`:d.length>0&&(l+=` '${br(c)}' has the following valid keys: ${ay(d)}`)}else{let d=U2(r.theme,n);if(d){let p=(0,$i.default)(r.theme,d);Fl(p)?l+=` '${br(d)}' has the following keys: ${oy(p)}`:l+=` '${br(d)}' is not an object.`}else l+=` Your theme has the following top-level keys: ${oy(r.theme)}`}return{isValid:!1,error:l}}if(!(typeof s=="string"||typeof s=="number"||typeof s=="function"||s instanceof String||s instanceof Number||Array.isArray(s))){let l=`'${n}' was found but does not resolve to a string.`;if(Fl(s)){let c=Object.keys(s).filter(f=>jl(r,[...a,f]).isValid);c.length&&(l+=` Did you mean something like '${br([...a,c[0]])}'?`)}return{isValid:!1,error:l}}let[o]=a;return{isValid:!0,value:mt(o)(s,i)}}function V2(r,e,t){e=e.map(n=>ly(r,n,t));let i=[""];for(let n of e)n.type==="div"&&n.value===","?i.push(""):i[i.length-1]+=Bl.default.stringify(n);return i}function ly(r,e,t){if(e.type==="function"&&t[e.value]!==void 0){let i=V2(r,e.nodes,t);e.type="word",e.value=t[e.value](r,...i)}return e}function H2(r,e,t){return Object.keys(t).some(n=>e.includes(`${n}(`))?(0,Bl.default)(e).walk(n=>{ly(r,n,t)}).toString():e}function*G2(r){r=r.replace(/^['"]+|['"]+$/g,"");let e=r.match(/^([^\s]+)(?![^\[]*\])(?:\s*\/\s*([^\/\s]+))$/),t;yield[r,void 0],e&&(r=e[1],t=e[2],yield[r,t])}function Q2(r,e,t){let i=Array.from(G2(e)).map(([n,a])=>Object.assign(jl(r,n,t,{opacityValue:a}),{resolvedPath:n,alpha:a}));return i.find(n=>n.isValid)??i[0]}function uy(r){let e=r.tailwindConfig,t={theme:(i,n,...a)=>{let{isValid:s,value:o,error:l,alpha:c}=Q2(e,n,a.length?a:void 0);if(!s){let p=i.parent,h=p?.raws.tailwind?.candidate;if(p&&h!==void 
0){r.markInvalidUtilityNode(p),p.remove(),G.warn("invalid-theme-key-in-class",[`The utility \`${h}\` contains an invalid theme value and was not generated.`]);return}throw i.error(l)}let f=Xt(o),d=f!==void 0&&typeof f=="function";return(c!==void 0||d)&&(c===void 0&&(c=1),o=Ze(f,c,f)),o},screen:(i,n)=>{n=n.replace(/^['"]+/g,"").replace(/['"]+$/g,"");let s=Rt(e.theme.screens).find(({name:o})=>o===n);if(!s)throw i.error(`The '${n}' screen does not exist in your theme.`);return Tt(s)}};return i=>{i.walk(n=>{let a=W2[n.type];a!==void 0&&(n[a]=H2(n,n[a],t))})}}var $i,sy,Bl,W2,fy=R(()=>{u();$i=pe(Oa()),sy=pe(Vg());Si();Bl=pe(ny());Xn();Qn();Gi();Lr();Fr();Be();W2={atrule:"params",decl:"value"}});function cy({tailwindConfig:{theme:r}}){return function(e){e.walkAtRules("screen",t=>{let i=t.params,a=Rt(r.screens).find(({name:s})=>s===i);if(!a)throw t.error(`No \`${i}\` screen found.`);t.name="media",t.params=Tt(a)})}}var py=R(()=>{u();Xn();Qn()});function Y2(r){let e=r.filter(o=>o.type!=="pseudo"||o.nodes.length>0?!0:o.value.startsWith("::")||[":before",":after",":first-line",":first-letter"].includes(o.value)).reverse(),t=new Set(["tag","class","id","attribute"]),i=e.findIndex(o=>t.has(o.type));if(i===-1)return e.reverse().join("").trim();let n=e[i],a=dy[n.type]?dy[n.type](n):n;e=e.slice(0,i);let s=e.findIndex(o=>o.type==="combinator"&&o.value===">");return s!==-1&&(e.splice(0,s),e.unshift(Cs.default.universal())),[a,...e.reverse()].join("").trim()}function X2(r){return zl.has(r)||zl.set(r,K2.transformSync(r)),zl.get(r)}function Ul({tailwindConfig:r}){return e=>{let t=new Map,i=new Set;if(e.walkAtRules("defaults",n=>{if(n.nodes&&n.nodes.length>0){i.add(n);return}let a=n.params;t.has(a)||t.set(a,new Set),t.get(a).add(n.parent),n.remove()}),we(r,"optimizeUniversalDefaults"))for(let n of i){let a=new Map,s=t.get(n.params)??[];for(let o of s)for(let l of X2(o.selector)){let c=l.includes(":-")||l.includes("::-")||l.includes(":has")?l:"__DEFAULT__",f=a.get(c)??new Set;a.set(c,f),f.add(l)}if(a.size===0){n.remove();continue}for(let[,o]of a){let l=ee.rule({source:n.source});l.selectors=[...o],l.append(n.nodes.map(c=>c.clone())),n.before(l)}n.remove()}else if(i.size){let n=ee.rule({selectors:["*","::before","::after"]});for(let s of i)n.append(s.nodes),n.parent||s.before(n),n.source||(n.source=s.source),s.remove();let a=n.clone({selectors:["::backdrop"]});n.after(a)}}}var Cs,dy,K2,zl,hy=R(()=>{u();Ot();Cs=pe(it());ct();dy={id(r){return Cs.default.attribute({attribute:"id",operator:"=",value:r.value,quoteMark:'"'})}};K2=(0,Cs.default)(r=>r.map(e=>{let t=e.split(i=>i.type==="combinator"&&i.value===" ").pop();return Y2(t)})),zl=new Map});function Vl(){function r(e){let t=null;e.each(i=>{if(!J2.has(i.type)){t=null;return}if(t===null){t=i;return}let n=my[i.type];i.type==="atrule"&&i.name==="font-face"?t=i:n.every(a=>(i[a]??"").replace(/\s+/g," ")===(t[a]??"").replace(/\s+/g," "))?(i.nodes&&t.append(i.nodes),i.remove()):t=i}),e.each(i=>{i.type==="atrule"&&r(i)})}return e=>{r(e)}}var my,J2,gy=R(()=>{u();my={atrule:["name","params"],rule:["selector"]},J2=new Set(Object.keys(my))});function Hl(){return r=>{r.walkRules(e=>{let t=new Map,i=new Set([]),n=new Map;e.walkDecls(a=>{if(a.parent===e){if(t.has(a.prop)){if(t.get(a.prop).value===a.value){i.add(t.get(a.prop)),t.set(a.prop,a);return}n.has(a.prop)||n.set(a.prop,new Set),n.get(a.prop).add(t.get(a.prop)),n.get(a.prop).add(a)}t.set(a.prop,a)}});for(let a of i)a.remove();for(let a of n.values()){let s=new Map;for(let o of a){let 
l=eO(o.value);l!==null&&(s.has(l)||s.set(l,new Set),s.get(l).add(o))}for(let o of s.values()){let l=Array.from(o).slice(0,-1);for(let c of l)c.remove()}}})}}function eO(r){let e=/^-?\d*.?\d+([\w%]+)?$/g.exec(r);return e?e[1]??Z2:null}var Z2,yy=R(()=>{u();Z2=Symbol("unitless-number")});function tO(r){if(!r.walkAtRules)return;let e=new Set;if(r.walkAtRules("apply",t=>{e.add(t.parent)}),e.size!==0)for(let t of e){let i=[],n=[];for(let a of t.nodes)a.type==="atrule"&&a.name==="apply"?(n.length>0&&(i.push(n),n=[]),i.push([a])):n.push(a);if(n.length>0&&i.push(n),i.length!==1){for(let a of[...i].reverse()){let s=t.clone({nodes:[]});s.append(a),t.after(s)}t.remove()}}}function _s(){return r=>{tO(r)}}var by=R(()=>{u()});function Es(r){return async function(e,t){let{tailwindDirectives:i,applyDirectives:n}=_l(e);_s()(e,t);let a=r({tailwindDirectives:i,applyDirectives:n,registerDependency(s){t.messages.push({plugin:"tailwindcss",parent:t.opts.from,...s})},createContext(s,o){return tl(s,o,e)}})(e,t);if(a.tailwindConfig.separator==="-")throw new Error("The '-' character cannot be used as a custom separator in JIT mode due to parsing ambiguity. Please use another character like '_' instead.");Of(a.tailwindConfig),await Tl(a)(e,t),_s()(e,t),Pl(a)(e,t),uy(a)(e,t),cy(a)(e,t),Ul(a)(e,t),Vl(a)(e,t),Hl(a)(e,t)}}var wy=R(()=>{u();_g();Mg();Ug();fy();py();hy();gy();yy();by();_i();ct()});function vy(r,e){let t=null,i=null;return r.walkAtRules("config",n=>{if(i=n.source?.input.file??e.opts.from??null,i===null)throw n.error("The `@config` directive cannot be used without setting `from` in your PostCSS config.");if(t)throw n.error("Only one `@config` directive is allowed per file.");let a=n.params.match(/(['"])(.*?)\1/);if(!a)throw n.error("A path is required when using the `@config` directive.");let s=a[2];if(me.isAbsolute(s))throw n.error("The `@config` directive cannot be used with an absolute path.");if(t=me.resolve(me.dirname(i),s),!be.existsSync(t))throw n.error(`The config file at "${s}" does not exist. 
Make sure the path is correct and the file exists.`);n.remove()}),t||null}var xy=R(()=>{u();ft();et()});var ky=x((Vq,Wl)=>{u();Cg();wy();It();xy();Wl.exports=function(e){return{postcssPlugin:"tailwindcss",plugins:[Je.DEBUG&&function(t){return console.log(` -`),console.time("JIT TOTAL"),t},async function(t,i){e=vy(t,i)??e;let n=Cl(e);if(t.type==="document"){let a=t.nodes.filter(s=>s.type==="root");for(let s of a)s.type==="root"&&await Es(n)(s,i);return}await Es(n)(t,i)},Je.DEBUG&&function(t){return console.timeEnd("JIT TOTAL"),console.log(` -`),t}].filter(Boolean)}};Wl.exports.postcss=!0});var Ay=x((Hq,Sy)=>{u();Sy.exports=ky()});var Gl=x((Wq,Cy)=>{u();Cy.exports=()=>["and_chr 114","and_uc 15.5","chrome 114","chrome 113","chrome 109","edge 114","firefox 114","ios_saf 16.5","ios_saf 16.4","ios_saf 16.3","ios_saf 16.1","opera 99","safari 16.5","samsung 21"]});var Os={};Ge(Os,{agents:()=>rO,feature:()=>iO});function iO(){return{status:"cr",title:"CSS Feature Queries",stats:{ie:{"6":"n","7":"n","8":"n","9":"n","10":"n","11":"n","5.5":"n"},edge:{"12":"y","13":"y","14":"y","15":"y","16":"y","17":"y","18":"y","79":"y","80":"y","81":"y","83":"y","84":"y","85":"y","86":"y","87":"y","88":"y","89":"y","90":"y","91":"y","92":"y","93":"y","94":"y","95":"y","96":"y","97":"y","98":"y","99":"y","100":"y","101":"y","102":"y","103":"y","104":"y","105":"y","106":"y","107":"y","108":"y","109":"y","110":"y","111":"y","112":"y","113":"y","114":"y"},firefox:{"2":"n","3":"n","4":"n","5":"n","6":"n","7":"n","8":"n","9":"n","10":"n","11":"n","12":"n","13":"n","14":"n","15":"n","16":"n","17":"n","18":"n","19":"n","20":"n","21":"n","22":"y","23":"y","24":"y","25":"y","26":"y","27":"y","28":"y","29":"y","30":"y","31":"y","32":"y","33":"y","34":"y","35":"y","36":"y","37":"y","38":"y","39":"y","40":"y","41":"y","42":"y","43":"y","44":"y","45":"y","46":"y","47":"y","48":"y","49":"y","50":"y","51":"y","52":"y","53":"y","54":"y","55":"y","56":"y","57":"y","58":"y","59":"y","60":"y","61":"y","62":"y","63":"y","64":"y","65":"y","66":"y","67":"y","68":"y","69":"y","70":"y","71":"y","72":"y","73":"y","74":"y","75":"y","76":"y","77":"y","78":"y","79":"y","80":"y","81":"y","82":"y","83":"y","84":"y","85":"y","86":"y","87":"y","88":"y","89":"y","90":"y","91":"y","92":"y","93":"y","94":"y","95":"y","96":"y","97":"y","98":"y","99":"y","100":"y","101":"y","102":"y","103":"y","104":"y","105":"y","106":"y","107":"y","108":"y","109":"y","110":"y","111":"y","112":"y","113":"y","114":"y","115":"y","116":"y","117":"y","3.5":"n","3.6":"n"},chrome:{"4":"n","5":"n","6":"n","7":"n","8":"n","9":"n","10":"n","11":"n","12":"n","13":"n","14":"n","15":"n","16":"n","17":"n","18":"n","19":"n","20":"n","21":"n","22":"n","23":"n","24":"n","25":"n","26":"n","27":"n","28":"y","29":"y","30":"y","31":"y","32":"y","33":"y","34":"y","35":"y","36":"y","37":"y","38":"y","39":"y","40":"y","41":"y","42":"y","43":"y","44":"y","45":"y","46":"y","47":"y","48":"y","49":"y","50":"y","51":"y","52":"y","53":"y","54":"y","55":"y","56":"y","57":"y","58":"y","59":"y","60":"y","61":"y","62":"y","63":"y","64":"y","65":"y","66":"y","67":"y","68":"y","69":"y","70":"y","71":"y","72":"y","73":"y","74":"y","75":"y","76":"y","77":"y","78":"y","79":"y","80":"y","81":"y","83":"y","84":"y","85":"y","86":"y","87":"y","88":"y","89":"y","90":"y","91":"y","92":"y","93":"y","94":"y","95":"y","96":"y","97":"y","98":"y","99":"y","100":"y","101":"y","102":"y","103":"y","104":"y","105":"y","106":"y","107":"y","108":"y","109":"y","110":"y","111":"y","112":"y","113":"y","114":"y","115":"y","116
":"y","117":"y"},safari:{"4":"n","5":"n","6":"n","7":"n","8":"n","9":"y","10":"y","11":"y","12":"y","13":"y","14":"y","15":"y","17":"y","9.1":"y","10.1":"y","11.1":"y","12.1":"y","13.1":"y","14.1":"y","15.1":"y","15.2-15.3":"y","15.4":"y","15.5":"y","15.6":"y","16.0":"y","16.1":"y","16.2":"y","16.3":"y","16.4":"y","16.5":"y","16.6":"y",TP:"y","3.1":"n","3.2":"n","5.1":"n","6.1":"n","7.1":"n"},opera:{"9":"n","11":"n","12":"n","15":"y","16":"y","17":"y","18":"y","19":"y","20":"y","21":"y","22":"y","23":"y","24":"y","25":"y","26":"y","27":"y","28":"y","29":"y","30":"y","31":"y","32":"y","33":"y","34":"y","35":"y","36":"y","37":"y","38":"y","39":"y","40":"y","41":"y","42":"y","43":"y","44":"y","45":"y","46":"y","47":"y","48":"y","49":"y","50":"y","51":"y","52":"y","53":"y","54":"y","55":"y","56":"y","57":"y","58":"y","60":"y","62":"y","63":"y","64":"y","65":"y","66":"y","67":"y","68":"y","69":"y","70":"y","71":"y","72":"y","73":"y","74":"y","75":"y","76":"y","77":"y","78":"y","79":"y","80":"y","81":"y","82":"y","83":"y","84":"y","85":"y","86":"y","87":"y","88":"y","89":"y","90":"y","91":"y","92":"y","93":"y","94":"y","95":"y","96":"y","97":"y","98":"y","99":"y","100":"y","12.1":"y","9.5-9.6":"n","10.0-10.1":"n","10.5":"n","10.6":"n","11.1":"n","11.5":"n","11.6":"n"},ios_saf:{"8":"n","17":"y","9.0-9.2":"y","9.3":"y","10.0-10.2":"y","10.3":"y","11.0-11.2":"y","11.3-11.4":"y","12.0-12.1":"y","12.2-12.5":"y","13.0-13.1":"y","13.2":"y","13.3":"y","13.4-13.7":"y","14.0-14.4":"y","14.5-14.8":"y","15.0-15.1":"y","15.2-15.3":"y","15.4":"y","15.5":"y","15.6":"y","16.0":"y","16.1":"y","16.2":"y","16.3":"y","16.4":"y","16.5":"y","16.6":"y","3.2":"n","4.0-4.1":"n","4.2-4.3":"n","5.0-5.1":"n","6.0-6.1":"n","7.0-7.1":"n","8.1-8.4":"n"},op_mini:{all:"y"},android:{"3":"n","4":"n","114":"y","4.4":"y","4.4.3-4.4.4":"y","2.1":"n","2.2":"n","2.3":"n","4.1":"n","4.2-4.3":"n"},bb:{"7":"n","10":"n"},op_mob:{"10":"n","11":"n","12":"n","73":"y","11.1":"n","11.5":"n","12.1":"n"},and_chr:{"114":"y"},and_ff:{"115":"y"},ie_mob:{"10":"n","11":"n"},and_uc:{"15.5":"y"},samsung:{"4":"y","20":"y","21":"y","5.0-5.4":"y","6.2-6.4":"y","7.2-7.4":"y","8.2":"y","9.2":"y","10.1":"y","11.1-11.2":"y","12.0":"y","13.0":"y","14.0":"y","15.0":"y","16.0":"y","17.0":"y","18.0":"y","19.0":"y"},and_qq:{"13.1":"y"},baidu:{"13.18":"y"},kaios:{"2.5":"y","3.0-3.1":"y"}}}}var rO,Ts=R(()=>{u();rO={ie:{prefix:"ms"},edge:{prefix:"webkit",prefix_exceptions:{"12":"ms","13":"ms","14":"ms","15":"ms","16":"ms","17":"ms","18":"ms"}},firefox:{prefix:"moz"},chrome:{prefix:"webkit"},safari:{prefix:"webkit"},opera:{prefix:"webkit",prefix_exceptions:{"9":"o","11":"o","12":"o","9.5-9.6":"o","10.0-10.1":"o","10.5":"o","10.6":"o","11.1":"o","11.5":"o","11.6":"o","12.1":"o"}},ios_saf:{prefix:"webkit"},op_mini:{prefix:"o"},android:{prefix:"webkit"},bb:{prefix:"webkit"},op_mob:{prefix:"o",prefix_exceptions:{"73":"webkit"}},and_chr:{prefix:"webkit"},and_ff:{prefix:"moz"},ie_mob:{prefix:"ms"},and_uc:{prefix:"webkit",prefix_exceptions:{"15.5":"webkit"}},samsung:{prefix:"webkit"},and_qq:{prefix:"webkit"},baidu:{prefix:"webkit"},kaios:{prefix:"moz"}}});var _y=x(()=>{u()});var _e=x((Yq,Lt)=>{u();var{list:Ql}=$e();Lt.exports.error=function(r){let e=new Error(r);throw e.autoprefixer=!0,e};Lt.exports.uniq=function(r){return[...new Set(r)]};Lt.exports.removeNote=function(r){return r.includes(" ")?r.split(" ")[0]:r};Lt.exports.escapeRegexp=function(r){return r.replace(/[$()*+-.?[\\\]^{|}]/g,"\\$&")};Lt.exports.regexp=function(r,e=!0){return e&&(r=this.escapeRegexp(r)),new 
RegExp(`(^|[\\s,(])(${r}($|[\\s(,]))`,"gi")};Lt.exports.editList=function(r,e){let t=Ql.comma(r),i=e(t,[]);if(t===i)return r;let n=r.match(/,\s*/);return n=n?n[0]:", ",i.join(n)};Lt.exports.splitSelector=function(r){return Ql.comma(r).map(e=>Ql.space(e).map(t=>t.split(/(?=\.|#)/g)))}});var Mt=x((Kq,Ty)=>{u();var nO=Gl(),Ey=(Ts(),Os).agents,sO=_e(),Oy=class{static prefixes(){if(this.prefixesCache)return this.prefixesCache;this.prefixesCache=[];for(let e in Ey)this.prefixesCache.push(`-${Ey[e].prefix}-`);return this.prefixesCache=sO.uniq(this.prefixesCache).sort((e,t)=>t.length-e.length),this.prefixesCache}static withPrefix(e){return this.prefixesRegexp||(this.prefixesRegexp=new RegExp(this.prefixes().join("|"))),this.prefixesRegexp.test(e)}constructor(e,t,i,n){this.data=e,this.options=i||{},this.browserslistOpts=n||{},this.selected=this.parse(t)}parse(e){let t={};for(let i in this.browserslistOpts)t[i]=this.browserslistOpts[i];return t.path=this.options.from,nO(e,t)}prefix(e){let[t,i]=e.split(" "),n=this.data[t],a=n.prefix_exceptions&&n.prefix_exceptions[i];return a||(a=n.prefix),`-${a}-`}isSelected(e){return this.selected.includes(e)}};Ty.exports=Oy});var Li=x((Xq,Ry)=>{u();Ry.exports={prefix(r){let e=r.match(/^(-\w+-)/);return e?e[0]:""},unprefixed(r){return r.replace(/^-\w+-/,"")}}});var wr=x((Jq,Iy)=>{u();var aO=Mt(),Py=Li(),oO=_e();function Yl(r,e){let t=new r.constructor;for(let i of Object.keys(r||{})){let n=r[i];i==="parent"&&typeof n=="object"?e&&(t[i]=e):i==="source"||i===null?t[i]=n:Array.isArray(n)?t[i]=n.map(a=>Yl(a,t)):i!=="_autoprefixerPrefix"&&i!=="_autoprefixerValues"&&i!=="proxyCache"&&(typeof n=="object"&&n!==null&&(n=Yl(n,t)),t[i]=n)}return t}var Rs=class{static hack(e){return this.hacks||(this.hacks={}),e.names.map(t=>(this.hacks[t]=e,this.hacks[t]))}static load(e,t,i){let n=this.hacks&&this.hacks[e];return n?new n(e,t,i):new this(e,t,i)}static clone(e,t){let i=Yl(e);for(let n in t)i[n]=t[n];return i}constructor(e,t,i){this.prefixes=t,this.name=e,this.all=i}parentPrefix(e){let t;return typeof e._autoprefixerPrefix!="undefined"?t=e._autoprefixerPrefix:e.type==="decl"&&e.prop[0]==="-"?t=Py.prefix(e.prop):e.type==="root"?t=!1:e.type==="rule"&&e.selector.includes(":-")&&/:(-\w+-)/.test(e.selector)?t=e.selector.match(/:(-\w+-)/)[1]:e.type==="atrule"&&e.name[0]==="-"?t=Py.prefix(e.name):t=this.parentPrefix(e.parent),aO.prefixes().includes(t)||(t=!1),e._autoprefixerPrefix=t,e._autoprefixerPrefix}process(e,t){if(!this.check(e))return;let i=this.parentPrefix(e),n=this.prefixes.filter(s=>!i||i===oO.removeNote(s)),a=[];for(let s of n)this.add(e,s,a.concat([s]),t)&&a.push(s);return a}clone(e,t){return Rs.clone(e,t)}};Iy.exports=Rs});var j=x((Zq,$y)=>{u();var lO=wr(),uO=Mt(),Dy=_e(),qy=class extends lO{check(){return!0}prefixed(e,t){return t+e}normalize(e){return e}otherPrefixes(e,t){for(let i of uO.prefixes())if(i!==t&&e.includes(i))return!0;return!1}set(e,t){return e.prop=this.prefixed(e.prop,t),e}needCascade(e){return e._autoprefixerCascade||(e._autoprefixerCascade=this.all.options.cascade!==!1&&e.raw("before").includes(` -`)),e._autoprefixerCascade}maxPrefixed(e,t){if(t._autoprefixerMax)return t._autoprefixerMax;let i=0;for(let n of e)n=Dy.removeNote(n),n.length>i&&(i=n.length);return t._autoprefixerMax=i,t._autoprefixerMax}calcBefore(e,t,i=""){let a=this.maxPrefixed(e,t)-Dy.removeNote(i).length,s=t.raw("before");return a>0&&(s+=Array(a).fill(" ").join("")),s}restoreBefore(e){let t=e.raw("before").split(` -`),i=t[t.length-1];this.all.group(e).up(n=>{let a=n.raw("before").split(` 
-`),s=a[a.length-1];s.lengths.prop===n.prop&&s.value===n.value)))return this.needCascade(e)&&(n.raws.before=this.calcBefore(i,e,t)),e.parent.insertBefore(e,n)}isAlready(e,t){let i=this.all.group(e).up(n=>n.prop===t);return i||(i=this.all.group(e).down(n=>n.prop===t)),i}add(e,t,i,n){let a=this.prefixed(e.prop,t);if(!(this.isAlready(e,a)||this.otherPrefixes(e.value,t)))return this.insert(e,t,i,n)}process(e,t){if(!this.needCascade(e)){super.process(e,t);return}let i=super.process(e,t);!i||!i.length||(this.restoreBefore(e),e.raws.before=this.calcBefore(i,e))}old(e,t){return[this.prefixed(e,t)]}};$y.exports=qy});var My=x((e$,Ly)=>{u();Ly.exports=function r(e){return{mul:t=>new r(e*t),div:t=>new r(e/t),simplify:()=>new r(e),toString:()=>e.toString()}}});var Fy=x((t$,By)=>{u();var fO=My(),cO=wr(),Kl=_e(),pO=/(min|max)-resolution\s*:\s*\d*\.?\d+(dppx|dpcm|dpi|x)/gi,dO=/(min|max)-resolution(\s*:\s*)(\d*\.?\d+)(dppx|dpcm|dpi|x)/i,Ny=class extends cO{prefixName(e,t){return e==="-moz-"?t+"--moz-device-pixel-ratio":e+t+"-device-pixel-ratio"}prefixQuery(e,t,i,n,a){return n=new fO(n),a==="dpi"?n=n.div(96):a==="dpcm"&&(n=n.mul(2.54).div(96)),n=n.simplify(),e==="-o-"&&(n=n.n+"/"+n.d),this.prefixName(e,t)+i+n}clean(e){if(!this.bad){this.bad=[];for(let t of this.prefixes)this.bad.push(this.prefixName(t,"min")),this.bad.push(this.prefixName(t,"max"))}e.params=Kl.editList(e.params,t=>t.filter(i=>this.bad.every(n=>!i.includes(n))))}process(e){let t=this.parentPrefix(e),i=t?[t]:this.prefixes;e.params=Kl.editList(e.params,(n,a)=>{for(let s of n){if(!s.includes("min-resolution")&&!s.includes("max-resolution")){a.push(s);continue}for(let o of i){let l=s.replace(pO,c=>{let f=c.match(dO);return this.prefixQuery(o,f[1],f[2],f[3],f[4])});a.push(l)}a.push(s)}return Kl.uniq(a)})}};By.exports=Ny});var zy=x((r$,jy)=>{u();var Xl="(".charCodeAt(0),Jl=")".charCodeAt(0),Ps="'".charCodeAt(0),Zl='"'.charCodeAt(0),eu="\\".charCodeAt(0),vr="/".charCodeAt(0),tu=",".charCodeAt(0),ru=":".charCodeAt(0),Is="*".charCodeAt(0),hO="u".charCodeAt(0),mO="U".charCodeAt(0),gO="+".charCodeAt(0),yO=/^[a-f0-9?-]+$/i;jy.exports=function(r){for(var e=[],t=r,i,n,a,s,o,l,c,f,d=0,p=t.charCodeAt(d),h=t.length,b=[{nodes:e}],v=0,y,w="",k="",S="";d{u();Uy.exports=function r(e,t,i){var n,a,s,o;for(n=0,a=e.length;n{u();function Hy(r,e){var t=r.type,i=r.value,n,a;return e&&(a=e(r))!==void 0?a:t==="word"||t==="space"?i:t==="string"?(n=r.quote||"",n+i+(r.unclosed?"":n)):t==="comment"?"/*"+i+(r.unclosed?"":"*/"):t==="div"?(r.before||"")+i+(r.after||""):Array.isArray(r.nodes)?(n=Wy(r.nodes,e),t!=="function"?n:i+"("+(r.before||"")+n+(r.after||"")+(r.unclosed?"":")")):i}function Wy(r,e){var t,i;if(Array.isArray(r)){for(t="",i=r.length-1;~i;i-=1)t=Hy(r[i],e)+t;return t}return Hy(r,e)}Gy.exports=Wy});var Ky=x((s$,Yy)=>{u();var Ds="-".charCodeAt(0),qs="+".charCodeAt(0),iu=".".charCodeAt(0),bO="e".charCodeAt(0),wO="E".charCodeAt(0);function vO(r){var e=r.charCodeAt(0),t;if(e===qs||e===Ds){if(t=r.charCodeAt(1),t>=48&&t<=57)return!0;var i=r.charCodeAt(2);return t===iu&&i>=48&&i<=57}return e===iu?(t=r.charCodeAt(1),t>=48&&t<=57):e>=48&&e<=57}Yy.exports=function(r){var e=0,t=r.length,i,n,a;if(t===0||!vO(r))return!1;for(i=r.charCodeAt(e),(i===qs||i===Ds)&&e++;e57));)e+=1;if(i=r.charCodeAt(e),n=r.charCodeAt(e+1),i===iu&&n>=48&&n<=57)for(e+=2;e57));)e+=1;if(i=r.charCodeAt(e),n=r.charCodeAt(e+1),a=r.charCodeAt(e+2),(i===bO||i===wO)&&(n>=48&&n<=57||(n===qs||n===Ds)&&a>=48&&a<=57))for(e+=n===qs||n===Ds?3:2;e57));)e+=1;return{number:r.slice(0,e),unit:r.slice(e)}}});var 
$s=x((a$,Zy)=>{u();var xO=zy(),Xy=Vy(),Jy=Qy();function Nt(r){return this instanceof Nt?(this.nodes=xO(r),this):new Nt(r)}Nt.prototype.toString=function(){return Array.isArray(this.nodes)?Jy(this.nodes):""};Nt.prototype.walk=function(r,e){return Xy(this.nodes,r,e),this};Nt.unit=Ky();Nt.walk=Xy;Nt.stringify=Jy;Zy.exports=Nt});var nb=x((o$,ib)=>{u();var{list:kO}=$e(),eb=$s(),SO=Mt(),tb=Li(),rb=class{constructor(e){this.props=["transition","transition-property"],this.prefixes=e}add(e,t){let i,n,a=this.prefixes.add[e.prop],s=this.ruleVendorPrefixes(e),o=s||a&&a.prefixes||[],l=this.parse(e.value),c=l.map(h=>this.findProp(h)),f=[];if(c.some(h=>h[0]==="-"))return;for(let h of l){if(n=this.findProp(h),n[0]==="-")continue;let b=this.prefixes.add[n];if(!(!b||!b.prefixes))for(i of b.prefixes){if(s&&!s.some(y=>i.includes(y)))continue;let v=this.prefixes.prefixed(n,i);v!=="-ms-transform"&&!c.includes(v)&&(this.disabled(n,i)||f.push(this.clone(n,v,h)))}}l=l.concat(f);let d=this.stringify(l),p=this.stringify(this.cleanFromUnprefixed(l,"-webkit-"));if(o.includes("-webkit-")&&this.cloneBefore(e,`-webkit-${e.prop}`,p),this.cloneBefore(e,e.prop,p),o.includes("-o-")){let h=this.stringify(this.cleanFromUnprefixed(l,"-o-"));this.cloneBefore(e,`-o-${e.prop}`,h)}for(i of o)if(i!=="-webkit-"&&i!=="-o-"){let h=this.stringify(this.cleanOtherPrefixes(l,i));this.cloneBefore(e,i+e.prop,h)}d!==e.value&&!this.already(e,e.prop,d)&&(this.checkForWarning(t,e),e.cloneBefore(),e.value=d)}findProp(e){let t=e[0].value;if(/^\d/.test(t)){for(let[i,n]of e.entries())if(i!==0&&n.type==="word")return n.value}return t}already(e,t,i){return e.parent.some(n=>n.prop===t&&n.value===i)}cloneBefore(e,t,i){this.already(e,t,i)||e.cloneBefore({prop:t,value:i})}checkForWarning(e,t){if(t.prop!=="transition-property")return;let i=!1,n=!1;t.parent.each(a=>{if(a.type!=="decl"||a.prop.indexOf("transition-")!==0)return;let s=kO.comma(a.value);if(a.prop==="transition-property"){s.forEach(o=>{let l=this.prefixes.add[o];l&&l.prefixes&&l.prefixes.length>0&&(i=!0)});return}return n=n||s.length>1,!1}),i&&n&&t.warn(e,"Replace transition-property to transition, because Autoprefixer could not support any cases of transition-property and other transition-*")}remove(e){let t=this.parse(e.value);t=t.filter(s=>{let o=this.prefixes.remove[this.findProp(s)];return!o||!o.remove});let i=this.stringify(t);if(e.value===i)return;if(t.length===0){e.remove();return}let n=e.parent.some(s=>s.prop===e.prop&&s.value===i),a=e.parent.some(s=>s!==e&&s.prop===e.prop&&s.value.length>i.length);if(n||a){e.remove();return}e.value=i}parse(e){let t=eb(e),i=[],n=[];for(let a of t.nodes)n.push(a),a.type==="div"&&a.value===","&&(i.push(n),n=[]);return i.push(n),i.filter(a=>a.length>0)}stringify(e){if(e.length===0)return"";let t=[];for(let i of e)i[i.length-1].type!=="div"&&i.push(this.div(e)),t=t.concat(i);return t[0].type==="div"&&(t=t.slice(1)),t[t.length-1].type==="div"&&(t=t.slice(0,-2+1||void 0)),eb.stringify({nodes:t})}clone(e,t,i){let n=[],a=!1;for(let s of i)!a&&s.type==="word"&&s.value===e?(n.push({type:"word",value:t}),a=!0):n.push(s);return n}div(e){for(let t of e)for(let i of t)if(i.type==="div"&&i.value===",")return i;return{type:"div",value:",",after:" "}}cleanOtherPrefixes(e,t){return e.filter(i=>{let n=tb.prefix(this.findProp(i));return n===""||n===t})}cleanFromUnprefixed(e,t){let i=e.map(a=>this.findProp(a)).filter(a=>a.slice(0,t.length)===t).map(a=>this.prefixes.unprefixed(a)),n=[];for(let a of e){let 
s=this.findProp(a),o=tb.prefix(s);!i.includes(s)&&(o===t||o==="")&&n.push(a)}return n}disabled(e,t){let i=["order","justify-content","align-self","align-content"];if(e.includes("flex")||i.includes(e)){if(this.prefixes.options.flexbox===!1)return!0;if(this.prefixes.options.flexbox==="no-2009")return t.includes("2009")}}ruleVendorPrefixes(e){let{parent:t}=e;if(t.type!=="rule")return!1;if(!t.selector.includes(":-"))return!1;let i=SO.prefixes().filter(n=>t.selector.includes(":"+n));return i.length>0?i:!1}};ib.exports=rb});var xr=x((l$,ab)=>{u();var AO=_e(),sb=class{constructor(e,t,i,n){this.unprefixed=e,this.prefixed=t,this.string=i||t,this.regexp=n||AO.regexp(t)}check(e){return e.includes(this.string)?!!e.match(this.regexp):!1}};ab.exports=sb});var He=x((u$,lb)=>{u();var CO=wr(),_O=xr(),EO=Li(),OO=_e(),ob=class extends CO{static save(e,t){let i=t.prop,n=[];for(let a in t._autoprefixerValues){let s=t._autoprefixerValues[a];if(s===t.value)continue;let o,l=EO.prefix(i);if(l==="-pie-")continue;if(l===a){o=t.value=s,n.push(o);continue}let c=e.prefixed(i,a),f=t.parent;if(!f.every(b=>b.prop!==c)){n.push(o);continue}let d=s.replace(/\s+/," ");if(f.some(b=>b.prop===t.prop&&b.value.replace(/\s+/," ")===d)){n.push(o);continue}let h=this.clone(t,{value:s});o=t.parent.insertBefore(t,h),n.push(o)}return n}check(e){let t=e.value;return t.includes(this.name)?!!t.match(this.regexp()):!1}regexp(){return this.regexpCache||(this.regexpCache=OO.regexp(this.name))}replace(e,t){return e.replace(this.regexp(),`$1${t}$2`)}value(e){return e.raws.value&&e.raws.value.value===e.value?e.raws.value.raw:e.value}add(e,t){e._autoprefixerValues||(e._autoprefixerValues={});let i=e._autoprefixerValues[t]||this.value(e),n;do if(n=i,i=this.replace(i,t),i===!1)return;while(i!==n);e._autoprefixerValues[t]=i}old(e){return new _O(this.name,e+this.name)}};lb.exports=ob});var Bt=x((f$,ub)=>{u();ub.exports={}});var su=x((c$,pb)=>{u();var fb=$s(),TO=He(),RO=Bt().insertAreas,PO=/(^|[^-])linear-gradient\(\s*(top|left|right|bottom)/i,IO=/(^|[^-])radial-gradient\(\s*\d+(\w*|%)\s+\d+(\w*|%)\s*,/i,DO=/(!\s*)?autoprefixer:\s*ignore\s+next/i,qO=/(!\s*)?autoprefixer\s*grid:\s*(on|off|(no-)?autoplace)/i,$O=["width","height","min-width","max-width","min-height","max-height","inline-size","min-inline-size","max-inline-size","block-size","min-block-size","max-block-size"];function nu(r){return r.parent.some(e=>e.prop==="grid-template"||e.prop==="grid-template-areas")}function LO(r){let e=r.parent.some(i=>i.prop==="grid-template-rows"),t=r.parent.some(i=>i.prop==="grid-template-columns");return e&&t}var cb=class{constructor(e){this.prefixes=e}add(e,t){let i=this.prefixes.add["@resolution"],n=this.prefixes.add["@keyframes"],a=this.prefixes.add["@viewport"],s=this.prefixes.add["@supports"];e.walkAtRules(f=>{if(f.name==="keyframes"){if(!this.disabled(f,t))return n&&n.process(f)}else if(f.name==="viewport"){if(!this.disabled(f,t))return a&&a.process(f)}else if(f.name==="supports"){if(this.prefixes.options.supports!==!1&&!this.disabled(f,t))return s.process(f)}else if(f.name==="media"&&f.params.includes("-resolution")&&!this.disabled(f,t))return i&&i.process(f)}),e.walkRules(f=>{if(!this.disabled(f,t))return this.prefixes.add.selectors.map(d=>d.process(f,t))});function o(f){return f.parent.nodes.some(d=>{if(d.type!=="decl")return!1;let p=d.prop==="display"&&/(inline-)?grid/.test(d.value),h=d.prop.startsWith("grid-template"),b=/^grid-([A-z]+-)?gap/.test(d.prop);return p||h||b})}function l(f){return 
f.parent.some(d=>d.prop==="display"&&/(inline-)?flex/.test(d.value))}let c=this.gridStatus(e,t)&&this.prefixes.add["grid-area"]&&this.prefixes.add["grid-area"].prefixes;return e.walkDecls(f=>{if(this.disabledDecl(f,t))return;let d=f.parent,p=f.prop,h=f.value;if(p==="grid-row-span"){t.warn("grid-row-span is not part of final Grid Layout. Use grid-row.",{node:f});return}else if(p==="grid-column-span"){t.warn("grid-column-span is not part of final Grid Layout. Use grid-column.",{node:f});return}else if(p==="display"&&h==="box"){t.warn("You should write display: flex by final spec instead of display: box",{node:f});return}else if(p==="text-emphasis-position")(h==="under"||h==="over")&&t.warn("You should use 2 values for text-emphasis-position For example, `under left` instead of just `under`.",{node:f});else if(/^(align|justify|place)-(items|content)$/.test(p)&&l(f))(h==="start"||h==="end")&&t.warn(`${h} value has mixed support, consider using flex-${h} instead`,{node:f});else if(p==="text-decoration-skip"&&h==="ink")t.warn("Replace text-decoration-skip: ink to text-decoration-skip-ink: auto, because spec had been changed",{node:f});else{if(c&&this.gridStatus(f,t))if(f.value==="subgrid"&&t.warn("IE does not support subgrid",{node:f}),/^(align|justify|place)-items$/.test(p)&&o(f)){let v=p.replace("-items","-self");t.warn(`IE does not support ${p} on grid containers. Try using ${v} on child elements instead: ${f.parent.selector} > * { ${v}: ${f.value} }`,{node:f})}else if(/^(align|justify|place)-content$/.test(p)&&o(f))t.warn(`IE does not support ${f.prop} on grid containers`,{node:f});else if(p==="display"&&f.value==="contents"){t.warn("Please do not use display: contents; if you have grid setting enabled",{node:f});return}else if(f.prop==="grid-gap"){let v=this.gridStatus(f,t);v==="autoplace"&&!LO(f)&&!nu(f)?t.warn("grid-gap only works if grid-template(-areas) is being used or both rows and columns have been declared and cells have not been manually placed inside the explicit grid",{node:f}):(v===!0||v==="no-autoplace")&&!nu(f)&&t.warn("grid-gap only works if grid-template(-areas) is being used",{node:f})}else if(p==="grid-auto-columns"){t.warn("grid-auto-columns is not supported by IE",{node:f});return}else if(p==="grid-auto-rows"){t.warn("grid-auto-rows is not supported by IE",{node:f});return}else if(p==="grid-auto-flow"){let v=d.some(w=>w.prop==="grid-template-rows"),y=d.some(w=>w.prop==="grid-template-columns");nu(f)?t.warn("grid-auto-flow is not supported by IE",{node:f}):h.includes("dense")?t.warn("grid-auto-flow: dense is not supported by IE",{node:f}):!v&&!y&&t.warn("grid-auto-flow works only if grid-template-rows and grid-template-columns are present in the same rule",{node:f});return}else if(h.includes("auto-fit")){t.warn("auto-fit value is not supported by IE",{node:f,word:"auto-fit"});return}else if(h.includes("auto-fill")){t.warn("auto-fill value is not supported by IE",{node:f,word:"auto-fill"});return}else p.startsWith("grid-template")&&h.includes("[")&&t.warn("Autoprefixer currently does not support line names. Try using grid-template-areas instead.",{node:f,word:"["});if(h.includes("radial-gradient"))if(IO.test(f.value))t.warn("Gradient has outdated direction syntax. New syntax is like `closest-side at 0 0` instead of `0 0, closest-side`.",{node:f});else{let v=fb(h);for(let y of v.nodes)if(y.type==="function"&&y.value==="radial-gradient")for(let w of y.nodes)w.type==="word"&&(w.value==="cover"?t.warn("Gradient has outdated direction syntax. 
Replace `cover` to `farthest-corner`.",{node:f}):w.value==="contain"&&t.warn("Gradient has outdated direction syntax. Replace `contain` to `closest-side`.",{node:f}))}h.includes("linear-gradient")&&PO.test(h)&&t.warn("Gradient has outdated direction syntax. New syntax is like `to left` instead of `right`.",{node:f})}$O.includes(f.prop)&&(f.value.includes("-fill-available")||(f.value.includes("fill-available")?t.warn("Replace fill-available to stretch, because spec had been changed",{node:f}):f.value.includes("fill")&&fb(h).nodes.some(y=>y.type==="word"&&y.value==="fill")&&t.warn("Replace fill to stretch, because spec had been changed",{node:f})));let b;if(f.prop==="transition"||f.prop==="transition-property")return this.prefixes.transition.add(f,t);if(f.prop==="align-self"){if(this.displayType(f)!=="grid"&&this.prefixes.options.flexbox!==!1&&(b=this.prefixes.add["align-self"],b&&b.prefixes&&b.process(f)),this.gridStatus(f,t)!==!1&&(b=this.prefixes.add["grid-row-align"],b&&b.prefixes))return b.process(f,t)}else if(f.prop==="justify-self"){if(this.gridStatus(f,t)!==!1&&(b=this.prefixes.add["grid-column-align"],b&&b.prefixes))return b.process(f,t)}else if(f.prop==="place-self"){if(b=this.prefixes.add["place-self"],b&&b.prefixes&&this.gridStatus(f,t)!==!1)return b.process(f,t)}else if(b=this.prefixes.add[f.prop],b&&b.prefixes)return b.process(f,t)}),this.gridStatus(e,t)&&RO(e,this.disabled),e.walkDecls(f=>{if(this.disabledValue(f,t))return;let d=this.prefixes.unprefixed(f.prop),p=this.prefixes.values("add",d);if(Array.isArray(p))for(let h of p)h.process&&h.process(f,t);TO.save(this.prefixes,f)})}remove(e,t){let i=this.prefixes.remove["@resolution"];e.walkAtRules((n,a)=>{this.prefixes.remove[`@${n.name}`]?this.disabled(n,t)||n.parent.removeChild(a):n.name==="media"&&n.params.includes("-resolution")&&i&&i.clean(n)});for(let n of this.prefixes.remove.selectors)e.walkRules((a,s)=>{n.check(a)&&(this.disabled(a,t)||a.parent.removeChild(s))});return e.walkDecls((n,a)=>{if(this.disabled(n,t))return;let s=n.parent,o=this.prefixes.unprefixed(n.prop);if((n.prop==="transition"||n.prop==="transition-property")&&this.prefixes.transition.remove(n),this.prefixes.remove[n.prop]&&this.prefixes.remove[n.prop].remove){let l=this.prefixes.group(n).down(c=>this.prefixes.normalize(c.prop)===o);if(o==="flex-flow"&&(l=!0),n.prop==="-webkit-box-orient"){let c={"flex-direction":!0,"flex-flow":!0};if(!n.parent.some(f=>c[f.prop]))return}if(l&&!this.withHackValue(n)){n.raw("before").includes(` -`)&&this.reduceSpaces(n),s.removeChild(a);return}}for(let l of this.prefixes.values("remove",o)){if(!l.check||!l.check(n.value))continue;if(o=l.unprefixed,this.prefixes.group(n).down(f=>f.value.includes(o))){s.removeChild(a);return}}})}withHackValue(e){return e.prop==="-webkit-background-clip"&&e.value==="text"}disabledValue(e,t){return this.gridStatus(e,t)===!1&&e.type==="decl"&&e.prop==="display"&&e.value.includes("grid")||this.prefixes.options.flexbox===!1&&e.type==="decl"&&e.prop==="display"&&e.value.includes("flex")||e.type==="decl"&&e.prop==="content"?!0:this.disabled(e,t)}disabledDecl(e,t){if(this.gridStatus(e,t)===!1&&e.type==="decl"&&(e.prop.includes("grid")||e.prop==="justify-items"))return!0;if(this.prefixes.options.flexbox===!1&&e.type==="decl"){let i=["order","justify-content","align-items","align-content"];if(e.prop.includes("flex")||i.includes(e.prop))return!0}return this.disabled(e,t)}disabled(e,t){if(!e)return!1;if(e._autoprefixerDisabled!==void 0)return e._autoprefixerDisabled;if(e.parent){let 
n=e.prev();if(n&&n.type==="comment"&&DO.test(n.text))return e._autoprefixerDisabled=!0,e._autoprefixerSelfDisabled=!0,!0}let i=null;if(e.nodes){let n;e.each(a=>{a.type==="comment"&&/(!\s*)?autoprefixer:\s*(off|on)/i.test(a.text)&&(typeof n!="undefined"?t.warn("Second Autoprefixer control comment was ignored. Autoprefixer applies control comment to whole block, not to next rules.",{node:a}):n=/on/i.test(a.text))}),n!==void 0&&(i=!n)}if(!e.nodes||i===null)if(e.parent){let n=this.disabled(e.parent,t);e.parent._autoprefixerSelfDisabled===!0?i=!1:i=n}else i=!1;return e._autoprefixerDisabled=i,i}reduceSpaces(e){let t=!1;if(this.prefixes.group(e).up(()=>(t=!0,!0)),t)return;let i=e.raw("before").split(` -`),n=i[i.length-1].length,a=!1;this.prefixes.group(e).down(s=>{i=s.raw("before").split(` -`);let o=i.length-1;i[o].length>n&&(a===!1&&(a=i[o].length-n),i[o]=i[o].slice(0,-a),s.raws.before=i.join(` -`))})}displayType(e){for(let t of e.parent.nodes)if(t.prop==="display"){if(t.value.includes("flex"))return"flex";if(t.value.includes("grid"))return"grid"}return!1}gridStatus(e,t){if(!e)return!1;if(e._autoprefixerGridStatus!==void 0)return e._autoprefixerGridStatus;let i=null;if(e.nodes){let n;e.each(a=>{if(a.type==="comment"&&qO.test(a.text)){let s=/:\s*autoplace/i.test(a.text),o=/no-autoplace/i.test(a.text);typeof n!="undefined"?t.warn("Second Autoprefixer grid control comment was ignored. Autoprefixer applies control comments to the whole block, not to the next rules.",{node:a}):s?n="autoplace":o?n=!0:n=/on/i.test(a.text)}}),n!==void 0&&(i=n)}if(e.type==="atrule"&&e.name==="supports"){let n=e.params;n.includes("grid")&&n.includes("auto")&&(i=!1)}if(!e.nodes||i===null)if(e.parent){let n=this.gridStatus(e.parent,t);e.parent._autoprefixerSelfDisabled===!0?i=!1:i=n}else typeof this.prefixes.options.grid!="undefined"?i=this.prefixes.options.grid:typeof m.env.AUTOPREFIXER_GRID!="undefined"?m.env.AUTOPREFIXER_GRID==="autoplace"?i="autoplace":i=!0:i=!1;return e._autoprefixerGridStatus=i,i}};pb.exports=cb});var hb=x((p$,db)=>{u();db.exports={A:{A:{"2":"K E F G A B JC"},B:{"1":"C L M H N D O P Q R S T U V W X Y Z a b c d e f g h i j n o p q r s t u v w x y z I"},C:{"1":"2 3 4 5 6 7 8 9 AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB 0B dB 1B eB fB gB hB iB jB kB lB mB nB oB m pB qB rB sB tB P Q R 2B S T U V W X Y Z a b c d e f g h i j n o p q r s t u v w x y z I uB 3B 4B","2":"0 1 KC zB J K E F G A B C L M H N D O k l LC MC"},D:{"1":"8 9 AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB 0B dB 1B eB fB gB hB iB jB kB lB mB nB oB m pB qB rB sB tB P Q R S T U V W X Y Z a b c d e f g h i j n o p q r s t u v w x y z I uB 3B 4B","2":"0 1 2 3 4 5 6 7 J K E F G A B C L M H N D O k l"},E:{"1":"G A B C L M H D RC 6B vB wB 7B SC TC 8B 9B xB AC yB BC CC DC EC FC GC UC","2":"0 J K E F NC 5B OC PC QC"},F:{"1":"1 2 3 4 5 6 7 8 9 H N D O k l AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB m pB qB rB sB tB P Q R 2B S T U V W X Y Z a b c d e f g h i j wB","2":"G B C VC WC XC YC vB HC ZC"},G:{"1":"D fC gC hC iC jC kC lC mC nC oC pC qC rC sC tC 8B 9B xB AC yB BC CC DC EC FC GC","2":"F 5B aC IC bC cC dC eC"},H:{"1":"uC"},I:{"1":"I zC 0C","2":"zB J vC wC xC yC IC"},J:{"2":"E A"},K:{"1":"m","2":"A B C vB HC wB"},L:{"1":"I"},M:{"1":"uB"},N:{"2":"A B"},O:{"1":"xB"},P:{"1":"J k l 1C 2C 3C 4C 5C 6B 6C 7C 8C 9C AD yB BD CD DD"},Q:{"1":"7B"},R:{"1":"ED"},S:{"1":"FD GD"}},B:4,C:"CSS Feature 
Queries"}});var bb=x((d$,yb)=>{u();function mb(r){return r[r.length-1]}var gb={parse(r){let e=[""],t=[e];for(let i of r){if(i==="("){e=[""],mb(t).push(e),t.push(e);continue}if(i===")"){t.pop(),e=mb(t),e.push("");continue}e[e.length-1]+=i}return t[0]},stringify(r){let e="";for(let t of r){if(typeof t=="object"){e+=`(${gb.stringify(t)})`;continue}e+=t}return e}};yb.exports=gb});var Sb=x((h$,kb)=>{u();var MO=hb(),{feature:NO}=(Ts(),Os),{parse:BO}=$e(),FO=Mt(),au=bb(),jO=He(),zO=_e(),wb=NO(MO),vb=[];for(let r in wb.stats){let e=wb.stats[r];for(let t in e){let i=e[t];/y/.test(i)&&vb.push(r+" "+t)}}var xb=class{constructor(e,t){this.Prefixes=e,this.all=t}prefixer(){if(this.prefixerCache)return this.prefixerCache;let e=this.all.browsers.selected.filter(i=>vb.includes(i)),t=new FO(this.all.browsers.data,e,this.all.options);return this.prefixerCache=new this.Prefixes(this.all.data,t,this.all.options),this.prefixerCache}parse(e){let t=e.split(":"),i=t[0],n=t[1];return n||(n=""),[i.trim(),n.trim()]}virtual(e){let[t,i]=this.parse(e),n=BO("a{}").first;return n.append({prop:t,value:i,raws:{before:""}}),n}prefixed(e){let t=this.virtual(e);if(this.disabled(t.first))return t.nodes;let i={warn:()=>null},n=this.prefixer().add[t.first.prop];n&&n.process&&n.process(t.first,i);for(let a of t.nodes){for(let s of this.prefixer().values("add",t.first.prop))s.process(a);jO.save(this.all,a)}return t.nodes}isNot(e){return typeof e=="string"&&/not\s*/i.test(e)}isOr(e){return typeof e=="string"&&/\s*or\s*/i.test(e)}isProp(e){return typeof e=="object"&&e.length===1&&typeof e[0]=="string"}isHack(e,t){return!new RegExp(`(\\(|\\s)${zO.escapeRegexp(t)}:`).test(e)}toRemove(e,t){let[i,n]=this.parse(e),a=this.all.unprefixed(i),s=this.all.cleaner();if(s.remove[i]&&s.remove[i].remove&&!this.isHack(t,a))return!0;for(let o of s.values("remove",a))if(o.check(n))return!0;return!1}remove(e,t){let i=0;for(;itypeof t!="object"?t:t.length===1&&typeof t[0]=="object"?this.cleanBrackets(t[0]):this.cleanBrackets(t))}convert(e){let t=[""];for(let i of e)t.push([`${i.prop}: ${i.value}`]),t.push(" or ");return t[t.length-1]="",t}normalize(e){if(typeof e!="object")return e;if(e=e.filter(t=>t!==""),typeof e[0]=="string"){let t=e[0].trim();if(t.includes(":")||t==="selector"||t==="not selector")return[au.stringify(e)]}return e.map(t=>this.normalize(t))}add(e,t){return e.map(i=>{if(this.isProp(i)){let n=this.prefixed(i[0]);return n.length>1?this.convert(n):i}return typeof i=="object"?this.add(i,t):i})}process(e){let t=au.parse(e.params);t=this.normalize(t),t=this.remove(t,e.params),t=this.add(t,e.params),t=this.cleanBrackets(t),e.params=au.stringify(t)}disabled(e){if(!this.all.options.grid&&(e.prop==="display"&&e.value.includes("grid")||e.prop.includes("grid")||e.prop==="justify-items"))return!0;if(this.all.options.flexbox===!1){if(e.prop==="display"&&e.value.includes("flex"))return!0;let t=["order","justify-content","align-items","align-content"];if(e.prop.includes("flex")||t.includes(e.prop))return!0}return!1}};kb.exports=xb});var _b=x((m$,Cb)=>{u();var Ab=class{constructor(e,t){this.prefix=t,this.prefixed=e.prefixed(this.prefix),this.regexp=e.regexp(this.prefix),this.prefixeds=e.possible().map(i=>[e.prefixed(i),e.regexp(i)]),this.unprefixed=e.name,this.nameRegexp=e.regexp()}isHack(e){let t=e.parent.index(e)+1,i=e.parent.nodes;for(;t{u();var{list:UO}=$e(),VO=_b(),HO=wr(),WO=Mt(),GO=_e(),Eb=class extends HO{constructor(e,t,i){super(e,t,i);this.regexpCache=new Map}check(e){return 
e.selector.includes(this.name)?!!e.selector.match(this.regexp()):!1}prefixed(e){return this.name.replace(/^(\W*)/,`$1${e}`)}regexp(e){if(!this.regexpCache.has(e)){let t=e?this.prefixed(e):this.name;this.regexpCache.set(e,new RegExp(`(^|[^:"'=])${GO.escapeRegexp(t)}`,"gi"))}return this.regexpCache.get(e)}possible(){return WO.prefixes()}prefixeds(e){if(e._autoprefixerPrefixeds){if(e._autoprefixerPrefixeds[this.name])return e._autoprefixerPrefixeds}else e._autoprefixerPrefixeds={};let t={};if(e.selector.includes(",")){let n=UO.comma(e.selector).filter(a=>a.includes(this.name));for(let a of this.possible())t[a]=n.map(s=>this.replace(s,a)).join(", ")}else for(let i of this.possible())t[i]=this.replace(e.selector,i);return e._autoprefixerPrefixeds[this.name]=t,e._autoprefixerPrefixeds}already(e,t,i){let n=e.parent.index(e)-1;for(;n>=0;){let a=e.parent.nodes[n];if(a.type!=="rule")return!1;let s=!1;for(let o in t[this.name]){let l=t[this.name][o];if(a.selector===l){if(i===o)return!0;s=!0;break}}if(!s)return!1;n-=1}return!1}replace(e,t){return e.replace(this.regexp(),`$1${this.prefixed(t)}`)}add(e,t){let i=this.prefixeds(e);if(this.already(e,i,t))return;let n=this.clone(e,{selector:i[this.name][t]});e.parent.insertBefore(e,n)}old(e){return new VO(this,e)}};Ob.exports=Eb});var Pb=x((y$,Rb)=>{u();var QO=wr(),Tb=class extends QO{add(e,t){let i=t+e.name;if(e.parent.some(s=>s.name===i&&s.params===e.params))return;let a=this.clone(e,{name:i});return e.parent.insertBefore(e,a)}process(e){let t=this.parentPrefix(e);for(let i of this.prefixes)(!t||t===i)&&this.add(e,i)}};Rb.exports=Tb});var Db=x((b$,Ib)=>{u();var YO=kr(),ou=class extends YO{prefixed(e){return e==="-webkit-"?":-webkit-full-screen":e==="-moz-"?":-moz-full-screen":`:${e}fullscreen`}};ou.names=[":fullscreen"];Ib.exports=ou});var $b=x((w$,qb)=>{u();var KO=kr(),lu=class extends KO{possible(){return super.possible().concat(["-moz- old","-ms- old"])}prefixed(e){return e==="-webkit-"?"::-webkit-input-placeholder":e==="-ms-"?"::-ms-input-placeholder":e==="-ms- old"?":-ms-input-placeholder":e==="-moz- old"?":-moz-placeholder":`::${e}placeholder`}};lu.names=["::placeholder"];qb.exports=lu});var Mb=x((v$,Lb)=>{u();var XO=kr(),uu=class extends XO{prefixed(e){return e==="-ms-"?":-ms-input-placeholder":`:${e}placeholder-shown`}};uu.names=[":placeholder-shown"];Lb.exports=uu});var Bb=x((x$,Nb)=>{u();var JO=kr(),ZO=_e(),fu=class extends JO{constructor(e,t,i){super(e,t,i);this.prefixes&&(this.prefixes=ZO.uniq(this.prefixes.map(n=>"-webkit-")))}prefixed(e){return e==="-webkit-"?"::-webkit-file-upload-button":`::${e}file-selector-button`}};fu.names=["::file-selector-button"];Nb.exports=fu});var Pe=x((k$,Fb)=>{u();Fb.exports=function(r){let e;return r==="-webkit- 2009"||r==="-moz-"?e=2009:r==="-ms-"?e=2012:r==="-webkit-"&&(e="final"),r==="-webkit- 2009"&&(r="-webkit-"),[e,r]}});var Vb=x((S$,Ub)=>{u();var jb=$e().list,zb=Pe(),eT=j(),Sr=class extends eT{prefixed(e,t){let i;return[i,t]=zb(t),i===2009?t+"box-flex":super.prefixed(e,t)}normalize(){return"flex"}set(e,t){let i=zb(t)[0];if(i===2009)return e.value=jb.space(e.value)[0],e.value=Sr.oldValues[e.value]||e.value,super.set(e,t);if(i===2012){let n=jb.space(e.value);n.length===3&&n[2]==="0"&&(e.value=n.slice(0,2).concat("0px").join(" "))}return super.set(e,t)}};Sr.names=["flex","box-flex"];Sr.oldValues={auto:"1",none:"0"};Ub.exports=Sr});var Gb=x((A$,Wb)=>{u();var Hb=Pe(),tT=j(),cu=class extends tT{prefixed(e,t){let 
i;return[i,t]=Hb(t),i===2009?t+"box-ordinal-group":i===2012?t+"flex-order":super.prefixed(e,t)}normalize(){return"order"}set(e,t){return Hb(t)[0]===2009&&/\d/.test(e.value)?(e.value=(parseInt(e.value)+1).toString(),super.set(e,t)):super.set(e,t)}};cu.names=["order","flex-order","box-ordinal-group"];Wb.exports=cu});var Yb=x((C$,Qb)=>{u();var rT=j(),pu=class extends rT{check(e){let t=e.value;return!t.toLowerCase().includes("alpha(")&&!t.includes("DXImageTransform.Microsoft")&&!t.includes("data:image/svg+xml")}};pu.names=["filter"];Qb.exports=pu});var Xb=x((_$,Kb)=>{u();var iT=j(),du=class extends iT{insert(e,t,i,n){if(t!=="-ms-")return super.insert(e,t,i);let a=this.clone(e),s=e.prop.replace(/end$/,"start"),o=t+e.prop.replace(/end$/,"span");if(!e.parent.some(l=>l.prop===o)){if(a.prop=o,e.value.includes("span"))a.value=e.value.replace(/span\s/i,"");else{let l;if(e.parent.walkDecls(s,c=>{l=c}),l){let c=Number(e.value)-Number(l.value)+"";a.value=c}else e.warn(n,`Can not prefix ${e.prop} (${s} is not found)`)}e.cloneBefore(a)}}};du.names=["grid-row-end","grid-column-end"];Kb.exports=du});var Zb=x((E$,Jb)=>{u();var nT=j(),hu=class extends nT{check(e){return!e.value.split(/\s+/).some(t=>{let i=t.toLowerCase();return i==="reverse"||i==="alternate-reverse"})}};hu.names=["animation","animation-direction"];Jb.exports=hu});var tw=x((O$,ew)=>{u();var sT=Pe(),aT=j(),mu=class extends aT{insert(e,t,i){let n;if([n,t]=sT(t),n!==2009)return super.insert(e,t,i);let a=e.value.split(/\s+/).filter(d=>d!=="wrap"&&d!=="nowrap"&&"wrap-reverse");if(a.length===0||e.parent.some(d=>d.prop===t+"box-orient"||d.prop===t+"box-direction"))return;let o=a[0],l=o.includes("row")?"horizontal":"vertical",c=o.includes("reverse")?"reverse":"normal",f=this.clone(e);return f.prop=t+"box-orient",f.value=l,this.needCascade(e)&&(f.raws.before=this.calcBefore(i,e,t)),e.parent.insertBefore(e,f),f=this.clone(e),f.prop=t+"box-direction",f.value=c,this.needCascade(e)&&(f.raws.before=this.calcBefore(i,e,t)),e.parent.insertBefore(e,f)}};mu.names=["flex-flow","box-direction","box-orient"];ew.exports=mu});var iw=x((T$,rw)=>{u();var oT=Pe(),lT=j(),gu=class extends lT{normalize(){return"flex"}prefixed(e,t){let i;return[i,t]=oT(t),i===2009?t+"box-flex":i===2012?t+"flex-positive":super.prefixed(e,t)}};gu.names=["flex-grow","flex-positive"];rw.exports=gu});var sw=x((R$,nw)=>{u();var uT=Pe(),fT=j(),yu=class extends fT{set(e,t){if(uT(t)[0]!==2009)return super.set(e,t)}};yu.names=["flex-wrap"];nw.exports=yu});var ow=x((P$,aw)=>{u();var cT=j(),Ar=Bt(),bu=class extends cT{insert(e,t,i,n){if(t!=="-ms-")return super.insert(e,t,i);let a=Ar.parse(e),[s,o]=Ar.translate(a,0,2),[l,c]=Ar.translate(a,1,3);[["grid-row",s],["grid-row-span",o],["grid-column",l],["grid-column-span",c]].forEach(([f,d])=>{Ar.insertDecl(e,f,d)}),Ar.warnTemplateSelectorNotFound(e,n),Ar.warnIfGridRowColumnExists(e,n)}};bu.names=["grid-area"];aw.exports=bu});var uw=x((I$,lw)=>{u();var pT=j(),Mi=Bt(),wu=class extends pT{insert(e,t,i){if(t!=="-ms-")return super.insert(e,t,i);if(e.parent.some(s=>s.prop==="-ms-grid-row-align"))return;let[[n,a]]=Mi.parse(e);a?(Mi.insertDecl(e,"grid-row-align",n),Mi.insertDecl(e,"grid-column-align",a)):(Mi.insertDecl(e,"grid-row-align",n),Mi.insertDecl(e,"grid-column-align",n))}};wu.names=["place-self"];lw.exports=wu});var cw=x((D$,fw)=>{u();var dT=j(),vu=class extends dT{check(e){let t=e.value;return!t.includes("/")||t.includes("span")}normalize(e){return e.replace("-start","")}prefixed(e,t){let i=super.prefixed(e,t);return 
t==="-ms-"&&(i=i.replace("-start","")),i}};vu.names=["grid-row-start","grid-column-start"];fw.exports=vu});var hw=x((q$,dw)=>{u();var pw=Pe(),hT=j(),Cr=class extends hT{check(e){return e.parent&&!e.parent.some(t=>t.prop&&t.prop.startsWith("grid-"))}prefixed(e,t){let i;return[i,t]=pw(t),i===2012?t+"flex-item-align":super.prefixed(e,t)}normalize(){return"align-self"}set(e,t){let i=pw(t)[0];if(i===2012)return e.value=Cr.oldValues[e.value]||e.value,super.set(e,t);if(i==="final")return super.set(e,t)}};Cr.names=["align-self","flex-item-align"];Cr.oldValues={"flex-end":"end","flex-start":"start"};dw.exports=Cr});var gw=x(($$,mw)=>{u();var mT=j(),gT=_e(),xu=class extends mT{constructor(e,t,i){super(e,t,i);this.prefixes&&(this.prefixes=gT.uniq(this.prefixes.map(n=>n==="-ms-"?"-webkit-":n)))}};xu.names=["appearance"];mw.exports=xu});var ww=x((L$,bw)=>{u();var yw=Pe(),yT=j(),ku=class extends yT{normalize(){return"flex-basis"}prefixed(e,t){let i;return[i,t]=yw(t),i===2012?t+"flex-preferred-size":super.prefixed(e,t)}set(e,t){let i;if([i,t]=yw(t),i===2012||i==="final")return super.set(e,t)}};ku.names=["flex-basis","flex-preferred-size"];bw.exports=ku});var xw=x((M$,vw)=>{u();var bT=j(),Su=class extends bT{normalize(){return this.name.replace("box-image","border")}prefixed(e,t){let i=super.prefixed(e,t);return t==="-webkit-"&&(i=i.replace("border","box-image")),i}};Su.names=["mask-border","mask-border-source","mask-border-slice","mask-border-width","mask-border-outset","mask-border-repeat","mask-box-image","mask-box-image-source","mask-box-image-slice","mask-box-image-width","mask-box-image-outset","mask-box-image-repeat"];vw.exports=Su});var Sw=x((N$,kw)=>{u();var wT=j(),lt=class extends wT{insert(e,t,i){let n=e.prop==="mask-composite",a;n?a=e.value.split(","):a=e.value.match(lt.regexp)||[],a=a.map(c=>c.trim()).filter(c=>c);let s=a.length,o;if(s&&(o=this.clone(e),o.value=a.map(c=>lt.oldValues[c]||c).join(", "),a.includes("intersect")&&(o.value+=", xor"),o.prop=t+"mask-composite"),n)return s?(this.needCascade(e)&&(o.raws.before=this.calcBefore(i,e,t)),e.parent.insertBefore(e,o)):void 0;let l=this.clone(e);return l.prop=t+l.prop,s&&(l.value=l.value.replace(lt.regexp,"")),this.needCascade(e)&&(l.raws.before=this.calcBefore(i,e,t)),e.parent.insertBefore(e,l),s?(this.needCascade(e)&&(o.raws.before=this.calcBefore(i,e,t)),e.parent.insertBefore(e,o)):e}};lt.names=["mask","mask-composite"];lt.oldValues={add:"source-over",subtract:"source-out",intersect:"source-in",exclude:"xor"};lt.regexp=new RegExp(`\\s+(${Object.keys(lt.oldValues).join("|")})\\b(?!\\))\\s*(?=[,])`,"ig");kw.exports=lt});var _w=x((B$,Cw)=>{u();var Aw=Pe(),vT=j(),_r=class extends vT{prefixed(e,t){let i;return[i,t]=Aw(t),i===2009?t+"box-align":i===2012?t+"flex-align":super.prefixed(e,t)}normalize(){return"align-items"}set(e,t){let i=Aw(t)[0];return(i===2009||i===2012)&&(e.value=_r.oldValues[e.value]||e.value),super.set(e,t)}};_r.names=["align-items","flex-align","box-align"];_r.oldValues={"flex-end":"end","flex-start":"start"};Cw.exports=_r});var Ow=x((F$,Ew)=>{u();var xT=j(),Au=class extends xT{set(e,t){return t==="-ms-"&&e.value==="contain"&&(e.value="element"),super.set(e,t)}insert(e,t,i){if(!(e.value==="all"&&t==="-ms-"))return super.insert(e,t,i)}};Au.names=["user-select"];Ew.exports=Au});var Pw=x((j$,Rw)=>{u();var Tw=Pe(),kT=j(),Cu=class extends kT{normalize(){return"flex-shrink"}prefixed(e,t){let i;return[i,t]=Tw(t),i===2012?t+"flex-negative":super.prefixed(e,t)}set(e,t){let i;if([i,t]=Tw(t),i===2012||i==="final")return 
super.set(e,t)}};Cu.names=["flex-shrink","flex-negative"];Rw.exports=Cu});var Dw=x((z$,Iw)=>{u();var ST=j(),_u=class extends ST{prefixed(e,t){return`${t}column-${e}`}normalize(e){return e.includes("inside")?"break-inside":e.includes("before")?"break-before":"break-after"}set(e,t){return(e.prop==="break-inside"&&e.value==="avoid-column"||e.value==="avoid-page")&&(e.value="avoid"),super.set(e,t)}insert(e,t,i){if(e.prop!=="break-inside")return super.insert(e,t,i);if(!(/region/i.test(e.value)||/page/i.test(e.value)))return super.insert(e,t,i)}};_u.names=["break-inside","page-break-inside","column-break-inside","break-before","page-break-before","column-break-before","break-after","page-break-after","column-break-after"];Iw.exports=_u});var $w=x((U$,qw)=>{u();var AT=j(),Eu=class extends AT{prefixed(e,t){return t+"print-color-adjust"}normalize(){return"color-adjust"}};Eu.names=["color-adjust","print-color-adjust"];qw.exports=Eu});var Mw=x((V$,Lw)=>{u();var CT=j(),Er=class extends CT{insert(e,t,i){if(t==="-ms-"){let n=this.set(this.clone(e),t);this.needCascade(e)&&(n.raws.before=this.calcBefore(i,e,t));let a="ltr";return e.parent.nodes.forEach(s=>{s.prop==="direction"&&(s.value==="rtl"||s.value==="ltr")&&(a=s.value)}),n.value=Er.msValues[a][e.value]||e.value,e.parent.insertBefore(e,n)}return super.insert(e,t,i)}};Er.names=["writing-mode"];Er.msValues={ltr:{"horizontal-tb":"lr-tb","vertical-rl":"tb-rl","vertical-lr":"tb-lr"},rtl:{"horizontal-tb":"rl-tb","vertical-rl":"bt-rl","vertical-lr":"bt-lr"}};Lw.exports=Er});var Bw=x((H$,Nw)=>{u();var _T=j(),Ou=class extends _T{set(e,t){return e.value=e.value.replace(/\s+fill(\s)/,"$1"),super.set(e,t)}};Ou.names=["border-image"];Nw.exports=Ou});var zw=x((W$,jw)=>{u();var Fw=Pe(),ET=j(),Or=class extends ET{prefixed(e,t){let i;return[i,t]=Fw(t),i===2012?t+"flex-line-pack":super.prefixed(e,t)}normalize(){return"align-content"}set(e,t){let i=Fw(t)[0];if(i===2012)return e.value=Or.oldValues[e.value]||e.value,super.set(e,t);if(i==="final")return super.set(e,t)}};Or.names=["align-content","flex-line-pack"];Or.oldValues={"flex-end":"end","flex-start":"start","space-between":"justify","space-around":"distribute"};jw.exports=Or});var Vw=x((G$,Uw)=>{u();var OT=j(),We=class extends OT{prefixed(e,t){return t==="-moz-"?t+(We.toMozilla[e]||e):super.prefixed(e,t)}normalize(e){return We.toNormal[e]||e}};We.names=["border-radius"];We.toMozilla={};We.toNormal={};for(let r of["top","bottom"])for(let e of["left","right"]){let t=`border-${r}-${e}-radius`,i=`border-radius-${r}${e}`;We.names.push(t),We.names.push(i),We.toMozilla[t]=i,We.toNormal[i]=t}Uw.exports=We});var Ww=x((Q$,Hw)=>{u();var TT=j(),Tu=class extends TT{prefixed(e,t){return e.includes("-start")?t+e.replace("-block-start","-before"):t+e.replace("-block-end","-after")}normalize(e){return e.includes("-before")?e.replace("-before","-block-start"):e.replace("-after","-block-end")}};Tu.names=["border-block-start","border-block-end","margin-block-start","margin-block-end","padding-block-start","padding-block-end","border-before","border-after","margin-before","margin-after","padding-before","padding-after"];Hw.exports=Tu});var Qw=x((Y$,Gw)=>{u();var RT=j(),{parseTemplate:PT,warnMissedAreas:IT,getGridGap:DT,warnGridGap:qT,inheritGridGap:$T}=Bt(),Ru=class extends RT{insert(e,t,i,n){if(t!=="-ms-")return super.insert(e,t,i);if(e.parent.some(h=>h.prop==="-ms-grid-rows"))return;let a=DT(e),s=$T(e,a),{rows:o,columns:l,areas:c}=PT({decl:e,gap:s||a}),f=Object.keys(c).length>0,d=Boolean(o),p=Boolean(l);return 
qT({gap:a,hasColumns:p,decl:e,result:n}),IT(c,e,n),(d&&p||f)&&e.cloneBefore({prop:"-ms-grid-rows",value:o,raws:{}}),p&&e.cloneBefore({prop:"-ms-grid-columns",value:l,raws:{}}),e}};Ru.names=["grid-template"];Gw.exports=Ru});var Kw=x((K$,Yw)=>{u();var LT=j(),Pu=class extends LT{prefixed(e,t){return t+e.replace("-inline","")}normalize(e){return e.replace(/(margin|padding|border)-(start|end)/,"$1-inline-$2")}};Pu.names=["border-inline-start","border-inline-end","margin-inline-start","margin-inline-end","padding-inline-start","padding-inline-end","border-start","border-end","margin-start","margin-end","padding-start","padding-end"];Yw.exports=Pu});var Jw=x((X$,Xw)=>{u();var MT=j(),Iu=class extends MT{check(e){return!e.value.includes("flex-")&&e.value!=="baseline"}prefixed(e,t){return t+"grid-row-align"}normalize(){return"align-self"}};Iu.names=["grid-row-align"];Xw.exports=Iu});var e0=x((J$,Zw)=>{u();var NT=j(),Tr=class extends NT{keyframeParents(e){let{parent:t}=e;for(;t;){if(t.type==="atrule"&&t.name==="keyframes")return!0;({parent:t}=t)}return!1}contain3d(e){if(e.prop==="transform-origin")return!1;for(let t of Tr.functions3d)if(e.value.includes(`${t}(`))return!0;return!1}set(e,t){return e=super.set(e,t),t==="-ms-"&&(e.value=e.value.replace(/rotatez/gi,"rotate")),e}insert(e,t,i){if(t==="-ms-"){if(!this.contain3d(e)&&!this.keyframeParents(e))return super.insert(e,t,i)}else if(t==="-o-"){if(!this.contain3d(e))return super.insert(e,t,i)}else return super.insert(e,t,i)}};Tr.names=["transform","transform-origin"];Tr.functions3d=["matrix3d","translate3d","translateZ","scale3d","scaleZ","rotate3d","rotateX","rotateY","perspective"];Zw.exports=Tr});var i0=x((Z$,r0)=>{u();var t0=Pe(),BT=j(),Du=class extends BT{normalize(){return"flex-direction"}insert(e,t,i){let n;if([n,t]=t0(t),n!==2009)return super.insert(e,t,i);if(e.parent.some(f=>f.prop===t+"box-orient"||f.prop===t+"box-direction"))return;let s=e.value,o,l;s==="inherit"||s==="initial"||s==="unset"?(o=s,l=s):(o=s.includes("row")?"horizontal":"vertical",l=s.includes("reverse")?"reverse":"normal");let c=this.clone(e);return c.prop=t+"box-orient",c.value=o,this.needCascade(e)&&(c.raws.before=this.calcBefore(i,e,t)),e.parent.insertBefore(e,c),c=this.clone(e),c.prop=t+"box-direction",c.value=l,this.needCascade(e)&&(c.raws.before=this.calcBefore(i,e,t)),e.parent.insertBefore(e,c)}old(e,t){let i;return[i,t]=t0(t),i===2009?[t+"box-orient",t+"box-direction"]:super.old(e,t)}};Du.names=["flex-direction","box-direction","box-orient"];r0.exports=Du});var s0=x((eL,n0)=>{u();var FT=j(),qu=class extends FT{check(e){return e.value==="pixelated"}prefixed(e,t){return t==="-ms-"?"-ms-interpolation-mode":super.prefixed(e,t)}set(e,t){return t!=="-ms-"?super.set(e,t):(e.prop="-ms-interpolation-mode",e.value="nearest-neighbor",e)}normalize(){return"image-rendering"}process(e,t){return super.process(e,t)}};qu.names=["image-rendering","interpolation-mode"];n0.exports=qu});var o0=x((tL,a0)=>{u();var jT=j(),zT=_e(),$u=class extends jT{constructor(e,t,i){super(e,t,i);this.prefixes&&(this.prefixes=zT.uniq(this.prefixes.map(n=>n==="-ms-"?"-webkit-":n)))}};$u.names=["backdrop-filter"];a0.exports=$u});var u0=x((rL,l0)=>{u();var UT=j(),VT=_e(),Lu=class extends UT{constructor(e,t,i){super(e,t,i);this.prefixes&&(this.prefixes=VT.uniq(this.prefixes.map(n=>n==="-ms-"?"-webkit-":n)))}check(e){return e.value.toLowerCase()==="text"}};Lu.names=["background-clip"];l0.exports=Lu});var c0=x((iL,f0)=>{u();var 
HT=j(),WT=["none","underline","overline","line-through","blink","inherit","initial","unset"],Mu=class extends HT{check(e){return e.value.split(/\s+/).some(t=>!WT.includes(t))}};Mu.names=["text-decoration"];f0.exports=Mu});var h0=x((nL,d0)=>{u();var p0=Pe(),GT=j(),Rr=class extends GT{prefixed(e,t){let i;return[i,t]=p0(t),i===2009?t+"box-pack":i===2012?t+"flex-pack":super.prefixed(e,t)}normalize(){return"justify-content"}set(e,t){let i=p0(t)[0];if(i===2009||i===2012){let n=Rr.oldValues[e.value]||e.value;if(e.value=n,i!==2009||n!=="distribute")return super.set(e,t)}else if(i==="final")return super.set(e,t)}};Rr.names=["justify-content","flex-pack","box-pack"];Rr.oldValues={"flex-end":"end","flex-start":"start","space-between":"justify","space-around":"distribute"};d0.exports=Rr});var g0=x((sL,m0)=>{u();var QT=j(),Nu=class extends QT{set(e,t){let i=e.value.toLowerCase();return t==="-webkit-"&&!i.includes(" ")&&i!=="contain"&&i!=="cover"&&(e.value=e.value+" "+e.value),super.set(e,t)}};Nu.names=["background-size"];m0.exports=Nu});var b0=x((aL,y0)=>{u();var YT=j(),Bu=Bt(),Fu=class extends YT{insert(e,t,i){if(t!=="-ms-")return super.insert(e,t,i);let n=Bu.parse(e),[a,s]=Bu.translate(n,0,1);n[0]&&n[0].includes("span")&&(s=n[0].join("").replace(/\D/g,"")),[[e.prop,a],[`${e.prop}-span`,s]].forEach(([l,c])=>{Bu.insertDecl(e,l,c)})}};Fu.names=["grid-row","grid-column"];y0.exports=Fu});var x0=x((oL,v0)=>{u();var KT=j(),{prefixTrackProp:w0,prefixTrackValue:XT,autoplaceGridItems:JT,getGridGap:ZT,inheritGridGap:eR}=Bt(),tR=su(),ju=class extends KT{prefixed(e,t){return t==="-ms-"?w0({prop:e,prefix:t}):super.prefixed(e,t)}normalize(e){return e.replace(/^grid-(rows|columns)/,"grid-template-$1")}insert(e,t,i,n){if(t!=="-ms-")return super.insert(e,t,i);let{parent:a,prop:s,value:o}=e,l=s.includes("rows"),c=s.includes("columns"),f=a.some(k=>k.prop==="grid-template"||k.prop==="grid-template-areas");if(f&&l)return!1;let d=new tR({options:{}}),p=d.gridStatus(a,n),h=ZT(e);h=eR(e,h)||h;let b=l?h.row:h.column;(p==="no-autoplace"||p===!0)&&!f&&(b=null);let v=XT({value:o,gap:b});e.cloneBefore({prop:w0({prop:s,prefix:t}),value:v});let y=a.nodes.find(k=>k.prop==="grid-auto-flow"),w="row";if(y&&!d.disabled(y,n)&&(w=y.value.trim()),p==="autoplace"){let k=a.nodes.find(E=>E.prop==="grid-template-rows");if(!k&&f)return;if(!k&&!f){e.warn(n,"Autoplacement does not work without grid-template-rows property");return}!a.nodes.find(E=>E.prop==="grid-template-columns")&&!f&&e.warn(n,"Autoplacement does not work without grid-template-columns property"),c&&!f&&JT(e,n,h,w)}}};ju.names=["grid-template-rows","grid-template-columns","grid-rows","grid-columns"];v0.exports=ju});var S0=x((lL,k0)=>{u();var rR=j(),zu=class extends rR{check(e){return!e.value.includes("flex-")&&e.value!=="baseline"}prefixed(e,t){return t+"grid-column-align"}normalize(){return"justify-self"}};zu.names=["grid-column-align"];k0.exports=zu});var C0=x((uL,A0)=>{u();var iR=j(),Uu=class extends iR{prefixed(e,t){return t+"scroll-chaining"}normalize(){return"overscroll-behavior"}set(e,t){return e.value==="auto"?e.value="chained":(e.value==="none"||e.value==="contain")&&(e.value="none"),super.set(e,t)}};Uu.names=["overscroll-behavior","scroll-chaining"];A0.exports=Uu});var O0=x((fL,E0)=>{u();var nR=j(),{parseGridAreas:sR,warnMissedAreas:aR,prefixTrackProp:oR,prefixTrackValue:_0,getGridGap:lR,warnGridGap:uR,inheritGridGap:fR}=Bt();function cR(r){return r.trim().slice(1,-1).split(/["']\s*["']?/g)}var Vu=class extends nR{insert(e,t,i,n){if(t!=="-ms-")return 
super.insert(e,t,i);let a=!1,s=!1,o=e.parent,l=lR(e);l=fR(e,l)||l,o.walkDecls(/-ms-grid-rows/,d=>d.remove()),o.walkDecls(/grid-template-(rows|columns)/,d=>{if(d.prop==="grid-template-rows"){s=!0;let{prop:p,value:h}=d;d.cloneBefore({prop:oR({prop:p,prefix:t}),value:_0({value:h,gap:l.row})})}else a=!0});let c=cR(e.value);a&&!s&&l.row&&c.length>1&&e.cloneBefore({prop:"-ms-grid-rows",value:_0({value:`repeat(${c.length}, auto)`,gap:l.row}),raws:{}}),uR({gap:l,hasColumns:a,decl:e,result:n});let f=sR({rows:c,gap:l});return aR(f,e,n),e}};Vu.names=["grid-template-areas"];E0.exports=Vu});var R0=x((cL,T0)=>{u();var pR=j(),Hu=class extends pR{set(e,t){return t==="-webkit-"&&(e.value=e.value.replace(/\s*(right|left)\s*/i,"")),super.set(e,t)}};Hu.names=["text-emphasis-position"];T0.exports=Hu});var I0=x((pL,P0)=>{u();var dR=j(),Wu=class extends dR{set(e,t){return e.prop==="text-decoration-skip-ink"&&e.value==="auto"?(e.prop=t+"text-decoration-skip",e.value="ink",e):super.set(e,t)}};Wu.names=["text-decoration-skip-ink","text-decoration-skip"];P0.exports=Wu});var N0=x((dL,M0)=>{u();"use strict";M0.exports={wrap:D0,limit:q0,validate:$0,test:Gu,curry:hR,name:L0};function D0(r,e,t){var i=e-r;return((t-r)%i+i)%i+r}function q0(r,e,t){return Math.max(r,Math.min(e,t))}function $0(r,e,t,i,n){if(!Gu(r,e,t,i,n))throw new Error(t+" is outside of range ["+r+","+e+")");return t}function Gu(r,e,t,i,n){return!(te||n&&t===e||i&&t===r)}function L0(r,e,t,i){return(t?"(":"[")+r+","+e+(i?")":"]")}function hR(r,e,t,i){var n=L0.bind(null,r,e,t,i);return{wrap:D0.bind(null,r,e),limit:q0.bind(null,r,e),validate:function(a){return $0(r,e,a,t,i)},test:function(a){return Gu(r,e,a,t,i)},toString:n,name:n}}});var j0=x((hL,F0)=>{u();var Qu=$s(),mR=N0(),gR=xr(),yR=He(),bR=_e(),B0=/top|left|right|bottom/gi,wt=class extends yR{replace(e,t){let i=Qu(e);for(let n of i.nodes)if(n.type==="function"&&n.value===this.name)if(n.nodes=this.newDirection(n.nodes),n.nodes=this.normalize(n.nodes),t==="-webkit- old"){if(!this.oldWebkit(n))return!1}else n.nodes=this.convertDirection(n.nodes),n.value=t+n.value;return i.toString()}replaceFirst(e,...t){return t.map(n=>n===" "?{type:"space",value:n}:{type:"word",value:n}).concat(e.slice(1))}normalizeUnit(e,t){return`${parseFloat(e)/t*360}deg`}normalize(e){if(!e[0])return e;if(/-?\d+(.\d+)?grad/.test(e[0].value))e[0].value=this.normalizeUnit(e[0].value,400);else if(/-?\d+(.\d+)?rad/.test(e[0].value))e[0].value=this.normalizeUnit(e[0].value,2*Math.PI);else if(/-?\d+(.\d+)?turn/.test(e[0].value))e[0].value=this.normalizeUnit(e[0].value,1);else if(e[0].value.includes("deg")){let t=parseFloat(e[0].value);t=mR.wrap(0,360,t),e[0].value=`${t}deg`}return e[0].value==="0deg"?e=this.replaceFirst(e,"to"," ","top"):e[0].value==="90deg"?e=this.replaceFirst(e,"to"," ","right"):e[0].value==="180deg"?e=this.replaceFirst(e,"to"," ","bottom"):e[0].value==="270deg"&&(e=this.replaceFirst(e,"to"," ","left")),e}newDirection(e){if(e[0].value==="to"||(B0.lastIndex=0,!B0.test(e[0].value)))return e;e.unshift({type:"word",value:"to"},{type:"space",value:" "});for(let t=2;t0&&(e[0].value==="to"?this.fixDirection(e):e[0].value.includes("deg")?this.fixAngle(e):this.isRadial(e)&&this.fixRadial(e)),e}fixDirection(e){e.splice(0,2);for(let t of e){if(t.type==="div")break;t.type==="word"&&(t.value=this.revertDirection(t.value))}}fixAngle(e){let t=e[0].value;t=parseFloat(t),t=Math.abs(450-t)%360,t=this.roundFloat(t,3),e[0].value=`${t}deg`}fixRadial(e){let t=[],i=[],n,a,s,o,l;for(o=0;o{u();var wR=xr(),vR=He();function z0(r){return new 
RegExp(`(^|[\\s,(])(${r}($|[\\s),]))`,"gi")}var Yu=class extends vR{regexp(){return this.regexpCache||(this.regexpCache=z0(this.name)),this.regexpCache}isStretch(){return this.name==="stretch"||this.name==="fill"||this.name==="fill-available"}replace(e,t){return t==="-moz-"&&this.isStretch()?e.replace(this.regexp(),"$1-moz-available$3"):t==="-webkit-"&&this.isStretch()?e.replace(this.regexp(),"$1-webkit-fill-available$3"):super.replace(e,t)}old(e){let t=e+this.name;return this.isStretch()&&(e==="-moz-"?t="-moz-available":e==="-webkit-"&&(t="-webkit-fill-available")),new wR(this.name,t,t,z0(t))}add(e,t){if(!(e.prop.includes("grid")&&t!=="-webkit-"))return super.add(e,t)}};Yu.names=["max-content","min-content","fit-content","fill","fill-available","stretch"];U0.exports=Yu});var G0=x((gL,W0)=>{u();var H0=xr(),xR=He(),Ku=class extends xR{replace(e,t){return t==="-webkit-"?e.replace(this.regexp(),"$1-webkit-optimize-contrast"):t==="-moz-"?e.replace(this.regexp(),"$1-moz-crisp-edges"):super.replace(e,t)}old(e){return e==="-webkit-"?new H0(this.name,"-webkit-optimize-contrast"):e==="-moz-"?new H0(this.name,"-moz-crisp-edges"):super.old(e)}};Ku.names=["pixelated"];W0.exports=Ku});var Y0=x((yL,Q0)=>{u();var kR=He(),Xu=class extends kR{replace(e,t){let i=super.replace(e,t);return t==="-webkit-"&&(i=i.replace(/("[^"]+"|'[^']+')(\s+\d+\w)/gi,"url($1)$2")),i}};Xu.names=["image-set"];Q0.exports=Xu});var X0=x((bL,K0)=>{u();var SR=$e().list,AR=He(),Ju=class extends AR{replace(e,t){return SR.space(e).map(i=>{if(i.slice(0,+this.name.length+1)!==this.name+"(")return i;let n=i.lastIndexOf(")"),a=i.slice(n+1),s=i.slice(this.name.length+1,n);if(t==="-webkit-"){let o=s.match(/\d*.?\d+%?/);o?(s=s.slice(o[0].length).trim(),s+=`, ${o[0]}`):s+=", 0.5"}return t+this.name+"("+s+")"+a}).join(" ")}};Ju.names=["cross-fade"];K0.exports=Ju});var Z0=x((wL,J0)=>{u();var CR=Pe(),_R=xr(),ER=He(),Zu=class extends ER{constructor(e,t){super(e,t);e==="display-flex"&&(this.name="flex")}check(e){return e.prop==="display"&&e.value===this.name}prefixed(e){let t,i;return[t,e]=CR(e),t===2009?this.name==="flex"?i="box":i="inline-box":t===2012?this.name==="flex"?i="flexbox":i="inline-flexbox":t==="final"&&(i=this.name),e+i}replace(e,t){return this.prefixed(t)}old(e){let t=this.prefixed(e);if(!!t)return new _R(this.name,t)}};Zu.names=["display-flex","inline-flex"];J0.exports=Zu});var tv=x((vL,ev)=>{u();var OR=He(),ef=class extends OR{constructor(e,t){super(e,t);e==="display-grid"&&(this.name="grid")}check(e){return e.prop==="display"&&e.value===this.name}};ef.names=["display-grid","inline-grid"];ev.exports=ef});var iv=x((xL,rv)=>{u();var TR=He(),tf=class extends TR{constructor(e,t){super(e,t);e==="filter-function"&&(this.name="filter")}};tf.names=["filter","filter-function"];rv.exports=tf});var ov=x((kL,av)=>{u();var 
nv=Li(),z=j(),sv=Fy(),RR=nb(),PR=su(),IR=Sb(),rf=Mt(),Pr=kr(),DR=Pb(),ut=He(),Ir=_e(),qR=Db(),$R=$b(),LR=Mb(),MR=Bb(),NR=Vb(),BR=Gb(),FR=Yb(),jR=Xb(),zR=Zb(),UR=tw(),VR=iw(),HR=sw(),WR=ow(),GR=uw(),QR=cw(),YR=hw(),KR=gw(),XR=ww(),JR=xw(),ZR=Sw(),e5=_w(),t5=Ow(),r5=Pw(),i5=Dw(),n5=$w(),s5=Mw(),a5=Bw(),o5=zw(),l5=Vw(),u5=Ww(),f5=Qw(),c5=Kw(),p5=Jw(),d5=e0(),h5=i0(),m5=s0(),g5=o0(),y5=u0(),b5=c0(),w5=h0(),v5=g0(),x5=b0(),k5=x0(),S5=S0(),A5=C0(),C5=O0(),_5=R0(),E5=I0(),O5=j0(),T5=V0(),R5=G0(),P5=Y0(),I5=X0(),D5=Z0(),q5=tv(),$5=iv();Pr.hack(qR);Pr.hack($R);Pr.hack(LR);Pr.hack(MR);z.hack(NR);z.hack(BR);z.hack(FR);z.hack(jR);z.hack(zR);z.hack(UR);z.hack(VR);z.hack(HR);z.hack(WR);z.hack(GR);z.hack(QR);z.hack(YR);z.hack(KR);z.hack(XR);z.hack(JR);z.hack(ZR);z.hack(e5);z.hack(t5);z.hack(r5);z.hack(i5);z.hack(n5);z.hack(s5);z.hack(a5);z.hack(o5);z.hack(l5);z.hack(u5);z.hack(f5);z.hack(c5);z.hack(p5);z.hack(d5);z.hack(h5);z.hack(m5);z.hack(g5);z.hack(y5);z.hack(b5);z.hack(w5);z.hack(v5);z.hack(x5);z.hack(k5);z.hack(S5);z.hack(A5);z.hack(C5);z.hack(_5);z.hack(E5);ut.hack(O5);ut.hack(T5);ut.hack(R5);ut.hack(P5);ut.hack(I5);ut.hack(D5);ut.hack(q5);ut.hack($5);var nf=new Map,Ni=class{constructor(e,t,i={}){this.data=e,this.browsers=t,this.options=i,[this.add,this.remove]=this.preprocess(this.select(this.data)),this.transition=new RR(this),this.processor=new PR(this)}cleaner(){if(this.cleanerCache)return this.cleanerCache;if(this.browsers.selected.length){let e=new rf(this.browsers.data,[]);this.cleanerCache=new Ni(this.data,e,this.options)}else return this;return this.cleanerCache}select(e){let t={add:{},remove:{}};for(let i in e){let n=e[i],a=n.browsers.map(l=>{let c=l.split(" ");return{browser:`${c[0]} ${c[1]}`,note:c[2]}}),s=a.filter(l=>l.note).map(l=>`${this.browsers.prefix(l.browser)} ${l.note}`);s=Ir.uniq(s),a=a.filter(l=>this.browsers.isSelected(l.browser)).map(l=>{let c=this.browsers.prefix(l.browser);return l.note?`${c} ${l.note}`:c}),a=this.sort(Ir.uniq(a)),this.options.flexbox==="no-2009"&&(a=a.filter(l=>!l.includes("2009")));let o=n.browsers.map(l=>this.browsers.prefix(l));n.mistakes&&(o=o.concat(n.mistakes)),o=o.concat(s),o=Ir.uniq(o),a.length?(t.add[i]=a,a.length!a.includes(l)))):t.remove[i]=o}return t}sort(e){return e.sort((t,i)=>{let n=Ir.removeNote(t).length,a=Ir.removeNote(i).length;return n===a?i.length-t.length:a-n})}preprocess(e){let t={selectors:[],"@supports":new IR(Ni,this)};for(let n in e.add){let a=e.add[n];if(n==="@keyframes"||n==="@viewport")t[n]=new DR(n,a,this);else if(n==="@resolution")t[n]=new sv(n,a,this);else if(this.data[n].selector)t.selectors.push(Pr.load(n,a,this));else{let s=this.data[n].props;if(s){let o=ut.load(n,a,this);for(let l of s)t[l]||(t[l]={values:[]}),t[l].values.push(o)}else{let o=t[n]&&t[n].values||[];t[n]=z.load(n,a,this),t[n].values=o}}}let i={selectors:[]};for(let n in e.remove){let a=e.remove[n];if(this.data[n].selector){let s=Pr.load(n,a);for(let o of a)i.selectors.push(s.old(o))}else if(n==="@keyframes"||n==="@viewport")for(let s of a){let o=`@${s}${n.slice(1)}`;i[o]={remove:!0}}else if(n==="@resolution")i[n]=new sv(n,a,this);else{let s=this.data[n].props;if(s){let o=ut.load(n,[],this);for(let l of a){let c=o.old(l);if(c)for(let f of s)i[f]||(i[f]={}),i[f].values||(i[f].values=[]),i[f].values.push(c)}}else for(let o of a){let l=this.decl(n).old(n,o);if(n==="align-self"){let c=t[n]&&t[n].prefixes;if(c){if(o==="-webkit- 2009"&&c.includes("-webkit-"))continue;if(o==="-webkit-"&&c.includes("-webkit- 2009"))continue}}for(let c of 
l)i[c]||(i[c]={}),i[c].remove=!0}}}return[t,i]}decl(e){return nf.has(e)||nf.set(e,z.load(e)),nf.get(e)}unprefixed(e){let t=this.normalize(nv.unprefixed(e));return t==="flex-direction"&&(t="flex-flow"),t}normalize(e){return this.decl(e).normalize(e)}prefixed(e,t){return e=nv.unprefixed(e),this.decl(e).prefixed(e,t)}values(e,t){let i=this[e],n=i["*"]&&i["*"].values,a=i[t]&&i[t].values;return n&&a?Ir.uniq(n.concat(a)):n||a||[]}group(e){let t=e.parent,i=t.index(e),{length:n}=t.nodes,a=this.unprefixed(e.prop),s=(o,l)=>{for(i+=o;i>=0&&i{u();lv.exports={"backdrop-filter":{feature:"css-backdrop-filter",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5","safari 16.5"]},element:{props:["background","background-image","border-image","mask","list-style","list-style-image","content","mask-image"],feature:"css-element-function",browsers:["firefox 114"]},"user-select":{mistakes:["-khtml-"],feature:"user-select-none",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5","safari 16.5"]},"background-clip":{feature:"background-clip-text",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},hyphens:{feature:"css-hyphens",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5","safari 16.5"]},fill:{props:["width","min-width","max-width","height","min-height","max-height","inline-size","min-inline-size","max-inline-size","block-size","min-block-size","max-block-size","grid","grid-template","grid-template-rows","grid-template-columns","grid-auto-columns","grid-auto-rows"],feature:"intrinsic-width",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"fill-available":{props:["width","min-width","max-width","height","min-height","max-height","inline-size","min-inline-size","max-inline-size","block-size","min-block-size","max-block-size","grid","grid-template","grid-template-rows","grid-template-columns","grid-auto-columns","grid-auto-rows"],feature:"intrinsic-width",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},stretch:{props:["width","min-width","max-width","height","min-height","max-height","inline-size","min-inline-size","max-inline-size","block-size","min-block-size","max-block-size","grid","grid-template","grid-template-rows","grid-template-columns","grid-auto-columns","grid-auto-rows"],feature:"intrinsic-width",browsers:["firefox 114"]},"fit-content":{props:["width","min-width","max-width","height","min-height","max-height","inline-size","min-inline-size","max-inline-size","block-size","min-block-size","max-block-size","grid","grid-template","grid-template-rows","grid-template-columns","grid-auto-columns","grid-auto-rows"],feature:"intrinsic-width",browsers:["firefox 114"]},"text-decoration-style":{feature:"text-decoration",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5"]},"text-decoration-color":{feature:"text-decoration",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5"]},"text-decoration-line":{feature:"text-decoration",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5"]},"text-decoration":{feature:"text-decoration",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5"]},"text-decoration-skip":{feature:"text-decoration",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5"]},"text-decoration-skip-ink":{feature:"text-decoration",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 
16.4","ios_saf 16.5"]},"text-size-adjust":{feature:"text-size-adjust",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5"]},"mask-clip":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-composite":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-image":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-origin":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-repeat":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-border-repeat":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-border-source":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},mask:{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-position":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-size":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-border":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-border-outset":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-border-width":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"mask-border-slice":{feature:"css-masks",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},"clip-path":{feature:"css-clip-path",browsers:["samsung 21"]},"box-decoration-break":{feature:"css-boxdecorationbreak",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5","opera 99","safari 16.5","samsung 21"]},appearance:{feature:"css-appearance",browsers:["samsung 21"]},"image-set":{props:["background","background-image","border-image","cursor","mask","mask-image","list-style","list-style-image","content"],feature:"css-image-set",browsers:["and_uc 15.5","chrome 109","samsung 21"]},"cross-fade":{props:["background","background-image","border-image","mask","list-style","list-style-image","content","mask-image"],feature:"css-cross-fade",browsers:["and_chr 114","and_uc 15.5","chrome 109","chrome 113","chrome 114","edge 114","opera 99","samsung 21"]},isolate:{props:["unicode-bidi"],feature:"css-unicode-bidi",browsers:["ios_saf 16.1","ios_saf 16.3","ios_saf 16.4","ios_saf 16.5","safari 16.5"]},"color-adjust":{feature:"css-color-adjust",browsers:["chrome 109","chrome 113","chrome 114","edge 114","opera 99"]}}});var cv=x((AL,fv)=>{u();fv.exports={}});var mv=x((CL,hv)=>{u();var L5=Gl(),{agents:M5}=(Ts(),Os),sf=_y(),N5=Mt(),B5=ov(),F5=uv(),j5=cv(),pv={browsers:M5,prefixes:F5},dv=` - Replace 
Autoprefixer \`browsers\` option to Browserslist config. - Use \`browserslist\` key in \`package.json\` or \`.browserslistrc\` file. - - Using \`browsers\` option can cause errors. Browserslist config can - be used for Babel, Autoprefixer, postcss-normalize and other tools. - - If you really need to use option, rename it to \`overrideBrowserslist\`. - - Learn more at: - https://github.com/browserslist/browserslist#readme - https://twitter.com/browserslist - -`;function z5(r){return Object.prototype.toString.apply(r)==="[object Object]"}var af=new Map;function U5(r,e){e.browsers.selected.length!==0&&(e.add.selectors.length>0||Object.keys(e.add).length>2||r.warn(`Autoprefixer target browsers do not need any prefixes.You do not need Autoprefixer anymore. -Check your Browserslist config to be sure that your targets are set up correctly. - - Learn more at: - https://github.com/postcss/autoprefixer#readme - https://github.com/browserslist/browserslist#readme - -`))}hv.exports=Dr;function Dr(...r){let e;if(r.length===1&&z5(r[0])?(e=r[0],r=void 0):r.length===0||r.length===1&&!r[0]?r=void 0:r.length<=2&&(Array.isArray(r[0])||!r[0])?(e=r[1],r=r[0]):typeof r[r.length-1]=="object"&&(e=r.pop()),e||(e={}),e.browser)throw new Error("Change `browser` option to `overrideBrowserslist` in Autoprefixer");if(e.browserslist)throw new Error("Change `browserslist` option to `overrideBrowserslist` in Autoprefixer");e.overrideBrowserslist?r=e.overrideBrowserslist:e.browsers&&(typeof console!="undefined"&&console.warn&&(sf.red?console.warn(sf.red(dv.replace(/`[^`]+`/g,n=>sf.yellow(n.slice(1,-1))))):console.warn(dv)),r=e.browsers);let t={ignoreUnknownVersions:e.ignoreUnknownVersions,stats:e.stats,env:e.env};function i(n){let a=pv,s=new N5(a.browsers,r,n,t),o=s.selected.join(", ")+JSON.stringify(e);return af.has(o)||af.set(o,new B5(a.prefixes,s,e)),af.get(o)}return{postcssPlugin:"autoprefixer",prepare(n){let a=i({from:n.opts.from,env:e.env});return{OnceExit(s){U5(n,a),e.remove!==!1&&a.processor.remove(s,n),e.add!==!1&&a.processor.add(s,n)}}},info(n){return n=n||{},n.from=n.from||m.cwd(),j5(i(n))},options:e,browsers:r}}Dr.postcss=!0;Dr.data=pv;Dr.defaults=L5.defaults;Dr.info=()=>Dr().info()});var gv={};Ge(gv,{default:()=>V5});var V5,yv=R(()=>{u();V5=[]});var wv={};Ge(wv,{default:()=>H5});var bv,H5,vv=R(()=>{u();Yi();bv=pe(en()),H5=St(bv.default.theme)});var kv={};Ge(kv,{default:()=>W5});var xv,W5,Sv=R(()=>{u();Yi();xv=pe(en()),W5=St(xv.default)});u();"use strict";var G5=vt(Ay()),Q5=vt($e()),Y5=vt(mv()),K5=vt((yv(),gv)),X5=vt((vv(),wv)),J5=vt((Sv(),kv)),Z5=vt((zs(),Af)),eP=vt((nl(),il)),tP=vt((ia(),ic));function vt(r){return r&&r.__esModule?r:{default:r}}console.warn("cdn.tailwindcss.com should not be used in production. 
To use Tailwind CSS in production, install it as a PostCSS plugin or use the Tailwind CLI: https://tailwindcss.com/docs/installation");var Ls="tailwind",of="text/tailwindcss",Av="/template.html",Yt,Cv=!0,_v=0,lf=new Set,uf,Ev="",Ov=(r=!1)=>({get(e,t){return(!r||t==="config")&&typeof e[t]=="object"&&e[t]!==null?new Proxy(e[t],Ov()):e[t]},set(e,t,i){return e[t]=i,(!r||t==="config")&&ff(!0),!0}});window[Ls]=new Proxy({config:{},defaultTheme:X5.default,defaultConfig:J5.default,colors:Z5.default,plugin:eP.default,resolveConfig:tP.default},Ov(!0));function Tv(r){uf.observe(r,{attributes:!0,attributeFilter:["type"],characterData:!0,subtree:!0,childList:!0})}new MutationObserver(async r=>{let e=!1;if(!uf){uf=new MutationObserver(async()=>await ff(!0));for(let t of document.querySelectorAll(`style[type="${of}"]`))Tv(t)}for(let t of r)for(let i of t.addedNodes)i.nodeType===1&&i.tagName==="STYLE"&&i.getAttribute("type")===of&&(Tv(i),e=!0);await ff(e)}).observe(document.documentElement,{attributes:!0,attributeFilter:["class"],childList:!0,subtree:!0});async function ff(r=!1){r&&(_v++,lf.clear());let e="";for(let i of document.querySelectorAll(`style[type="${of}"]`))e+=i.textContent;let t=new Set;for(let i of document.querySelectorAll("[class]"))for(let n of i.classList)lf.has(n)||t.add(n);if(document.body&&(Cv||t.size>0||e!==Ev||!Yt||!Yt.isConnected)){for(let n of t)lf.add(n);Cv=!1,Ev=e,self[Av]=Array.from(t).join(" ");let{css:i}=await(0,Q5.default)([(0,G5.default)({...window[Ls].config,_hash:_v,content:{files:[Av],extract:{html:n=>n.split(" ")}},plugins:[...K5.default,...Array.isArray(window[Ls].config.plugins)?window[Ls].config.plugins:[]]}),(0,Y5.default)({remove:!1})]).process(`@tailwind base;@tailwind components;@tailwind utilities;${e}`);(!Yt||!Yt.isConnected)&&(Yt=document.createElement("style"),document.head.append(Yt)),Yt.textContent=i}}})(); -/*! - * fill-range - * - * Copyright (c) 2014-present, Jon Schlinkert. - * Licensed under the MIT License. - */ -/*! - * is-number - * - * Copyright (c) 2014-present, Jon Schlinkert. - * Released under the MIT License. - */ -/*! - * to-regex-range - * - * Copyright (c) 2015-present, Jon Schlinkert. - * Released under the MIT License. - */ -/*! https://mths.be/cssesc v3.0.0 by @mathias */ - diff --git a/examples/server/public/deps_vue.esm-browser.js b/examples/server/public/deps_vue.esm-browser.js deleted file mode 100644 index 4679d9614a765..0000000000000 --- a/examples/server/public/deps_vue.esm-browser.js +++ /dev/null @@ -1,18160 +0,0 @@ -/** -* vue v3.5.12 -* (c) 2018-present Yuxi (Evan) You and Vue contributors -* @license MIT -**/ -/*! 
#__NO_SIDE_EFFECTS__ */ -// @__NO_SIDE_EFFECTS__ -function makeMap(str) { - const map = /* @__PURE__ */ Object.create(null); - for (const key of str.split(",")) map[key] = 1; - return (val) => val in map; -} - -const EMPTY_OBJ = Object.freeze({}) ; -const EMPTY_ARR = Object.freeze([]) ; -const NOOP = () => { -}; -const NO = () => false; -const isOn = (key) => key.charCodeAt(0) === 111 && key.charCodeAt(1) === 110 && // uppercase letter -(key.charCodeAt(2) > 122 || key.charCodeAt(2) < 97); -const isModelListener = (key) => key.startsWith("onUpdate:"); -const extend = Object.assign; -const remove = (arr, el) => { - const i = arr.indexOf(el); - if (i > -1) { - arr.splice(i, 1); - } -}; -const hasOwnProperty$1 = Object.prototype.hasOwnProperty; -const hasOwn = (val, key) => hasOwnProperty$1.call(val, key); -const isArray = Array.isArray; -const isMap = (val) => toTypeString(val) === "[object Map]"; -const isSet = (val) => toTypeString(val) === "[object Set]"; -const isDate = (val) => toTypeString(val) === "[object Date]"; -const isRegExp = (val) => toTypeString(val) === "[object RegExp]"; -const isFunction = (val) => typeof val === "function"; -const isString = (val) => typeof val === "string"; -const isSymbol = (val) => typeof val === "symbol"; -const isObject = (val) => val !== null && typeof val === "object"; -const isPromise = (val) => { - return (isObject(val) || isFunction(val)) && isFunction(val.then) && isFunction(val.catch); -}; -const objectToString = Object.prototype.toString; -const toTypeString = (value) => objectToString.call(value); -const toRawType = (value) => { - return toTypeString(value).slice(8, -1); -}; -const isPlainObject = (val) => toTypeString(val) === "[object Object]"; -const isIntegerKey = (key) => isString(key) && key !== "NaN" && key[0] !== "-" && "" + parseInt(key, 10) === key; -const isReservedProp = /* @__PURE__ */ makeMap( - // the leading comma is intentional so empty string "" is also included - ",key,ref,ref_for,ref_key,onVnodeBeforeMount,onVnodeMounted,onVnodeBeforeUpdate,onVnodeUpdated,onVnodeBeforeUnmount,onVnodeUnmounted" -); -const isBuiltInDirective = /* @__PURE__ */ makeMap( - "bind,cloak,else-if,else,for,html,if,model,on,once,pre,show,slot,text,memo" -); -const cacheStringFunction = (fn) => { - const cache = /* @__PURE__ */ Object.create(null); - return (str) => { - const hit = cache[str]; - return hit || (cache[str] = fn(str)); - }; -}; -const camelizeRE = /-(\w)/g; -const camelize = cacheStringFunction( - (str) => { - return str.replace(camelizeRE, (_, c) => c ? c.toUpperCase() : ""); - } -); -const hyphenateRE = /\B([A-Z])/g; -const hyphenate = cacheStringFunction( - (str) => str.replace(hyphenateRE, "-$1").toLowerCase() -); -const capitalize = cacheStringFunction((str) => { - return str.charAt(0).toUpperCase() + str.slice(1); -}); -const toHandlerKey = cacheStringFunction( - (str) => { - const s = str ? `on${capitalize(str)}` : ``; - return s; - } -); -const hasChanged = (value, oldValue) => !Object.is(value, oldValue); -const invokeArrayFns = (fns, ...arg) => { - for (let i = 0; i < fns.length; i++) { - fns[i](...arg); - } -}; -const def = (obj, key, value, writable = false) => { - Object.defineProperty(obj, key, { - configurable: true, - enumerable: false, - writable, - value - }); -}; -const looseToNumber = (val) => { - const n = parseFloat(val); - return isNaN(n) ? val : n; -}; -const toNumber = (val) => { - const n = isString(val) ? Number(val) : NaN; - return isNaN(n) ? 
val : n; -}; -let _globalThis; -const getGlobalThis = () => { - return _globalThis || (_globalThis = typeof globalThis !== "undefined" ? globalThis : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : typeof global !== "undefined" ? global : {}); -}; -function genCacheKey(source, options) { - return source + JSON.stringify( - options, - (_, val) => typeof val === "function" ? val.toString() : val - ); -} - -const PatchFlagNames = { - [1]: `TEXT`, - [2]: `CLASS`, - [4]: `STYLE`, - [8]: `PROPS`, - [16]: `FULL_PROPS`, - [32]: `NEED_HYDRATION`, - [64]: `STABLE_FRAGMENT`, - [128]: `KEYED_FRAGMENT`, - [256]: `UNKEYED_FRAGMENT`, - [512]: `NEED_PATCH`, - [1024]: `DYNAMIC_SLOTS`, - [2048]: `DEV_ROOT_FRAGMENT`, - [-1]: `HOISTED`, - [-2]: `BAIL` -}; - -const slotFlagsText = { - [1]: "STABLE", - [2]: "DYNAMIC", - [3]: "FORWARDED" -}; - -const GLOBALS_ALLOWED = "Infinity,undefined,NaN,isFinite,isNaN,parseFloat,parseInt,decodeURI,decodeURIComponent,encodeURI,encodeURIComponent,Math,Number,Date,Array,Object,Boolean,String,RegExp,Map,Set,JSON,Intl,BigInt,console,Error,Symbol"; -const isGloballyAllowed = /* @__PURE__ */ makeMap(GLOBALS_ALLOWED); - -const range = 2; -function generateCodeFrame(source, start = 0, end = source.length) { - start = Math.max(0, Math.min(start, source.length)); - end = Math.max(0, Math.min(end, source.length)); - if (start > end) return ""; - let lines = source.split(/(\r?\n)/); - const newlineSequences = lines.filter((_, idx) => idx % 2 === 1); - lines = lines.filter((_, idx) => idx % 2 === 0); - let count = 0; - const res = []; - for (let i = 0; i < lines.length; i++) { - count += lines[i].length + (newlineSequences[i] && newlineSequences[i].length || 0); - if (count >= start) { - for (let j = i - range; j <= i + range || end > count; j++) { - if (j < 0 || j >= lines.length) continue; - const line = j + 1; - res.push( - `${line}${" ".repeat(Math.max(3 - String(line).length, 0))}| ${lines[j]}` - ); - const lineLength = lines[j].length; - const newLineSeqLength = newlineSequences[j] && newlineSequences[j].length || 0; - if (j === i) { - const pad = start - (count - (lineLength + newLineSeqLength)); - const length = Math.max( - 1, - end > count ? lineLength - pad : end - start - ); - res.push(` | ` + " ".repeat(pad) + "^".repeat(length)); - } else if (j > i) { - if (end > count) { - const length = Math.max(Math.min(end - count, lineLength), 1); - res.push(` | ` + "^".repeat(length)); - } - count += lineLength + newLineSeqLength; - } - } - break; - } - } - return res.join("\n"); -} - -function normalizeStyle(value) { - if (isArray(value)) { - const res = {}; - for (let i = 0; i < value.length; i++) { - const item = value[i]; - const normalized = isString(item) ? 
parseStringStyle(item) : normalizeStyle(item); - if (normalized) { - for (const key in normalized) { - res[key] = normalized[key]; - } - } - } - return res; - } else if (isString(value) || isObject(value)) { - return value; - } -} -const listDelimiterRE = /;(?![^(]*\))/g; -const propertyDelimiterRE = /:([^]+)/; -const styleCommentRE = /\/\*[^]*?\*\//g; -function parseStringStyle(cssText) { - const ret = {}; - cssText.replace(styleCommentRE, "").split(listDelimiterRE).forEach((item) => { - if (item) { - const tmp = item.split(propertyDelimiterRE); - tmp.length > 1 && (ret[tmp[0].trim()] = tmp[1].trim()); - } - }); - return ret; -} -function stringifyStyle(styles) { - let ret = ""; - if (!styles || isString(styles)) { - return ret; - } - for (const key in styles) { - const value = styles[key]; - if (isString(value) || typeof value === "number") { - const normalizedKey = key.startsWith(`--`) ? key : hyphenate(key); - ret += `${normalizedKey}:${value};`; - } - } - return ret; -} -function normalizeClass(value) { - let res = ""; - if (isString(value)) { - res = value; - } else if (isArray(value)) { - for (let i = 0; i < value.length; i++) { - const normalized = normalizeClass(value[i]); - if (normalized) { - res += normalized + " "; - } - } - } else if (isObject(value)) { - for (const name in value) { - if (value[name]) { - res += name + " "; - } - } - } - return res.trim(); -} -function normalizeProps(props) { - if (!props) return null; - let { class: klass, style } = props; - if (klass && !isString(klass)) { - props.class = normalizeClass(klass); - } - if (style) { - props.style = normalizeStyle(style); - } - return props; -} - -const HTML_TAGS = "html,body,base,head,link,meta,style,title,address,article,aside,footer,header,hgroup,h1,h2,h3,h4,h5,h6,nav,section,div,dd,dl,dt,figcaption,figure,picture,hr,img,li,main,ol,p,pre,ul,a,b,abbr,bdi,bdo,br,cite,code,data,dfn,em,i,kbd,mark,q,rp,rt,ruby,s,samp,small,span,strong,sub,sup,time,u,var,wbr,area,audio,map,track,video,embed,object,param,source,canvas,script,noscript,del,ins,caption,col,colgroup,table,thead,tbody,td,th,tr,button,datalist,fieldset,form,input,label,legend,meter,optgroup,option,output,progress,select,textarea,details,dialog,menu,summary,template,blockquote,iframe,tfoot"; -const SVG_TAGS = "svg,animate,animateMotion,animateTransform,circle,clipPath,color-profile,defs,desc,discard,ellipse,feBlend,feColorMatrix,feComponentTransfer,feComposite,feConvolveMatrix,feDiffuseLighting,feDisplacementMap,feDistantLight,feDropShadow,feFlood,feFuncA,feFuncB,feFuncG,feFuncR,feGaussianBlur,feImage,feMerge,feMergeNode,feMorphology,feOffset,fePointLight,feSpecularLighting,feSpotLight,feTile,feTurbulence,filter,foreignObject,g,hatch,hatchpath,image,line,linearGradient,marker,mask,mesh,meshgradient,meshpatch,meshrow,metadata,mpath,path,pattern,polygon,polyline,radialGradient,rect,set,solidcolor,stop,switch,symbol,text,textPath,title,tspan,unknown,use,view"; -const MATH_TAGS = "annotation,annotation-xml,maction,maligngroup,malignmark,math,menclose,merror,mfenced,mfrac,mfraction,mglyph,mi,mlabeledtr,mlongdiv,mmultiscripts,mn,mo,mover,mpadded,mphantom,mprescripts,mroot,mrow,ms,mscarries,mscarry,msgroup,msline,mspace,msqrt,msrow,mstack,mstyle,msub,msubsup,msup,mtable,mtd,mtext,mtr,munder,munderover,none,semantics"; -const VOID_TAGS = "area,base,br,col,embed,hr,img,input,link,meta,param,source,track,wbr"; -const isHTMLTag = /* @__PURE__ */ makeMap(HTML_TAGS); -const isSVGTag = /* @__PURE__ */ makeMap(SVG_TAGS); -const isMathMLTag = /* @__PURE__ */ 
makeMap(MATH_TAGS); -const isVoidTag = /* @__PURE__ */ makeMap(VOID_TAGS); - -const specialBooleanAttrs = `itemscope,allowfullscreen,formnovalidate,ismap,nomodule,novalidate,readonly`; -const isSpecialBooleanAttr = /* @__PURE__ */ makeMap(specialBooleanAttrs); -const isBooleanAttr = /* @__PURE__ */ makeMap( - specialBooleanAttrs + `,async,autofocus,autoplay,controls,default,defer,disabled,hidden,inert,loop,open,required,reversed,scoped,seamless,checked,muted,multiple,selected` -); -function includeBooleanAttr(value) { - return !!value || value === ""; -} -const isKnownHtmlAttr = /* @__PURE__ */ makeMap( - `accept,accept-charset,accesskey,action,align,allow,alt,async,autocapitalize,autocomplete,autofocus,autoplay,background,bgcolor,border,buffered,capture,challenge,charset,checked,cite,class,code,codebase,color,cols,colspan,content,contenteditable,contextmenu,controls,coords,crossorigin,csp,data,datetime,decoding,default,defer,dir,dirname,disabled,download,draggable,dropzone,enctype,enterkeyhint,for,form,formaction,formenctype,formmethod,formnovalidate,formtarget,headers,height,hidden,high,href,hreflang,http-equiv,icon,id,importance,inert,integrity,ismap,itemprop,keytype,kind,label,lang,language,loading,list,loop,low,manifest,max,maxlength,minlength,media,min,multiple,muted,name,novalidate,open,optimum,pattern,ping,placeholder,poster,preload,radiogroup,readonly,referrerpolicy,rel,required,reversed,rows,rowspan,sandbox,scope,scoped,selected,shape,size,sizes,slot,span,spellcheck,src,srcdoc,srclang,srcset,start,step,style,summary,tabindex,target,title,translate,type,usemap,value,width,wrap` -); -const isKnownSvgAttr = /* @__PURE__ */ makeMap( - `xmlns,accent-height,accumulate,additive,alignment-baseline,alphabetic,amplitude,arabic-form,ascent,attributeName,attributeType,azimuth,baseFrequency,baseline-shift,baseProfile,bbox,begin,bias,by,calcMode,cap-height,class,clip,clipPathUnits,clip-path,clip-rule,color,color-interpolation,color-interpolation-filters,color-profile,color-rendering,contentScriptType,contentStyleType,crossorigin,cursor,cx,cy,d,decelerate,descent,diffuseConstant,direction,display,divisor,dominant-baseline,dur,dx,dy,edgeMode,elevation,enable-background,end,exponent,fill,fill-opacity,fill-rule,filter,filterRes,filterUnits,flood-color,flood-opacity,font-family,font-size,font-size-adjust,font-stretch,font-style,font-variant,font-weight,format,from,fr,fx,fy,g1,g2,glyph-name,glyph-orientation-horizontal,glyph-orientation-vertical,glyphRef,gradientTransform,gradientUnits,hanging,height,href,hreflang,horiz-adv-x,horiz-origin-x,id,ideographic,image-rendering,in,in2,intercept,k,k1,k2,k3,k4,kernelMatrix,kernelUnitLength,kerning,keyPoints,keySplines,keyTimes,lang,lengthAdjust,letter-spacing,lighting-color,limitingConeAngle,local,marker-end,marker-mid,marker-start,markerHeight,markerUnits,markerWidth,mask,maskContentUnits,maskUnits,mathematical,max,media,method,min,mode,name,numOctaves,offset,opacity,operator,order,orient,orientation,origin,overflow,overline-position,overline-thickness,panose-1,paint-order,path,pathLength,patternContentUnits,patternTransform,patternUnits,ping,pointer-events,points,pointsAtX,pointsAtY,pointsAtZ,preserveAlpha,preserveAspectRatio,primitiveUnits,r,radius,referrerPolicy,refX,refY,rel,rendering-intent,repeatCount,repeatDur,requiredExtensions,requiredFeatures,restart,result,rotate,rx,ry,scale,seed,shape-rendering,slope,spacing,specularConstant,specularExponent,speed,spreadMethod,startOffset,stdDeviation,stemh,stemv,stitchTiles,stop-color,stop-opacity,strikethrough-
position,strikethrough-thickness,string,stroke,stroke-dasharray,stroke-dashoffset,stroke-linecap,stroke-linejoin,stroke-miterlimit,stroke-opacity,stroke-width,style,surfaceScale,systemLanguage,tabindex,tableValues,target,targetX,targetY,text-anchor,text-decoration,text-rendering,textLength,to,transform,transform-origin,type,u1,u2,underline-position,underline-thickness,unicode,unicode-bidi,unicode-range,units-per-em,v-alphabetic,v-hanging,v-ideographic,v-mathematical,values,vector-effect,version,vert-adv-y,vert-origin-x,vert-origin-y,viewBox,viewTarget,visibility,width,widths,word-spacing,writing-mode,x,x-height,x1,x2,xChannelSelector,xlink:actuate,xlink:arcrole,xlink:href,xlink:role,xlink:show,xlink:title,xlink:type,xmlns:xlink,xml:base,xml:lang,xml:space,y,y1,y2,yChannelSelector,z,zoomAndPan` -); -function isRenderableAttrValue(value) { - if (value == null) { - return false; - } - const type = typeof value; - return type === "string" || type === "number" || type === "boolean"; -} - -const cssVarNameEscapeSymbolsRE = /[ !"#$%&'()*+,./:;<=>?@[\\\]^`{|}~]/g; -function getEscapedCssVarName(key, doubleEscape) { - return key.replace( - cssVarNameEscapeSymbolsRE, - (s) => `\\${s}` - ); -} - -function looseCompareArrays(a, b) { - if (a.length !== b.length) return false; - let equal = true; - for (let i = 0; equal && i < a.length; i++) { - equal = looseEqual(a[i], b[i]); - } - return equal; -} -function looseEqual(a, b) { - if (a === b) return true; - let aValidType = isDate(a); - let bValidType = isDate(b); - if (aValidType || bValidType) { - return aValidType && bValidType ? a.getTime() === b.getTime() : false; - } - aValidType = isSymbol(a); - bValidType = isSymbol(b); - if (aValidType || bValidType) { - return a === b; - } - aValidType = isArray(a); - bValidType = isArray(b); - if (aValidType || bValidType) { - return aValidType && bValidType ? looseCompareArrays(a, b) : false; - } - aValidType = isObject(a); - bValidType = isObject(b); - if (aValidType || bValidType) { - if (!aValidType || !bValidType) { - return false; - } - const aKeysCount = Object.keys(a).length; - const bKeysCount = Object.keys(b).length; - if (aKeysCount !== bKeysCount) { - return false; - } - for (const key in a) { - const aHasKey = a.hasOwnProperty(key); - const bHasKey = b.hasOwnProperty(key); - if (aHasKey && !bHasKey || !aHasKey && bHasKey || !looseEqual(a[key], b[key])) { - return false; - } - } - } - return String(a) === String(b); -} -function looseIndexOf(arr, val) { - return arr.findIndex((item) => looseEqual(item, val)); -} - -const isRef$1 = (val) => { - return !!(val && val["__v_isRef"] === true); -}; -const toDisplayString = (val) => { - return isString(val) ? val : val == null ? "" : isArray(val) || isObject(val) && (val.toString === objectToString || !isFunction(val.toString)) ? isRef$1(val) ? 
toDisplayString(val.value) : JSON.stringify(val, replacer, 2) : String(val); -}; -const replacer = (_key, val) => { - if (isRef$1(val)) { - return replacer(_key, val.value); - } else if (isMap(val)) { - return { - [`Map(${val.size})`]: [...val.entries()].reduce( - (entries, [key, val2], i) => { - entries[stringifySymbol(key, i) + " =>"] = val2; - return entries; - }, - {} - ) - }; - } else if (isSet(val)) { - return { - [`Set(${val.size})`]: [...val.values()].map((v) => stringifySymbol(v)) - }; - } else if (isSymbol(val)) { - return stringifySymbol(val); - } else if (isObject(val) && !isArray(val) && !isPlainObject(val)) { - return String(val); - } - return val; -}; -const stringifySymbol = (v, i = "") => { - var _a; - return ( - // Symbol.description in es2019+ so we need to cast here to pass - // the lib: es2016 check - isSymbol(v) ? `Symbol(${(_a = v.description) != null ? _a : i})` : v - ); -}; - -function warn$2(msg, ...args) { - console.warn(`[Vue warn] ${msg}`, ...args); -} - -let activeEffectScope; -class EffectScope { - constructor(detached = false) { - this.detached = detached; - /** - * @internal - */ - this._active = true; - /** - * @internal - */ - this.effects = []; - /** - * @internal - */ - this.cleanups = []; - this._isPaused = false; - this.parent = activeEffectScope; - if (!detached && activeEffectScope) { - this.index = (activeEffectScope.scopes || (activeEffectScope.scopes = [])).push( - this - ) - 1; - } - } - get active() { - return this._active; - } - pause() { - if (this._active) { - this._isPaused = true; - let i, l; - if (this.scopes) { - for (i = 0, l = this.scopes.length; i < l; i++) { - this.scopes[i].pause(); - } - } - for (i = 0, l = this.effects.length; i < l; i++) { - this.effects[i].pause(); - } - } - } - /** - * Resumes the effect scope, including all child scopes and effects. 
- */ - resume() { - if (this._active) { - if (this._isPaused) { - this._isPaused = false; - let i, l; - if (this.scopes) { - for (i = 0, l = this.scopes.length; i < l; i++) { - this.scopes[i].resume(); - } - } - for (i = 0, l = this.effects.length; i < l; i++) { - this.effects[i].resume(); - } - } - } - } - run(fn) { - if (this._active) { - const currentEffectScope = activeEffectScope; - try { - activeEffectScope = this; - return fn(); - } finally { - activeEffectScope = currentEffectScope; - } - } else { - warn$2(`cannot run an inactive effect scope.`); - } - } - /** - * This should only be called on non-detached scopes - * @internal - */ - on() { - activeEffectScope = this; - } - /** - * This should only be called on non-detached scopes - * @internal - */ - off() { - activeEffectScope = this.parent; - } - stop(fromParent) { - if (this._active) { - let i, l; - for (i = 0, l = this.effects.length; i < l; i++) { - this.effects[i].stop(); - } - for (i = 0, l = this.cleanups.length; i < l; i++) { - this.cleanups[i](); - } - if (this.scopes) { - for (i = 0, l = this.scopes.length; i < l; i++) { - this.scopes[i].stop(true); - } - } - if (!this.detached && this.parent && !fromParent) { - const last = this.parent.scopes.pop(); - if (last && last !== this) { - this.parent.scopes[this.index] = last; - last.index = this.index; - } - } - this.parent = void 0; - this._active = false; - } - } -} -function effectScope(detached) { - return new EffectScope(detached); -} -function getCurrentScope() { - return activeEffectScope; -} -function onScopeDispose(fn, failSilently = false) { - if (activeEffectScope) { - activeEffectScope.cleanups.push(fn); - } else if (!failSilently) { - warn$2( - `onScopeDispose() is called when there is no active effect scope to be associated with.` - ); - } -} - -let activeSub; -const pausedQueueEffects = /* @__PURE__ */ new WeakSet(); -class ReactiveEffect { - constructor(fn) { - this.fn = fn; - /** - * @internal - */ - this.deps = void 0; - /** - * @internal - */ - this.depsTail = void 0; - /** - * @internal - */ - this.flags = 1 | 4; - /** - * @internal - */ - this.next = void 0; - /** - * @internal - */ - this.cleanup = void 0; - this.scheduler = void 0; - if (activeEffectScope && activeEffectScope.active) { - activeEffectScope.effects.push(this); - } - } - pause() { - this.flags |= 64; - } - resume() { - if (this.flags & 64) { - this.flags &= ~64; - if (pausedQueueEffects.has(this)) { - pausedQueueEffects.delete(this); - this.trigger(); - } - } - } - /** - * @internal - */ - notify() { - if (this.flags & 2 && !(this.flags & 32)) { - return; - } - if (!(this.flags & 8)) { - batch(this); - } - } - run() { - if (!(this.flags & 1)) { - return this.fn(); - } - this.flags |= 2; - cleanupEffect(this); - prepareDeps(this); - const prevEffect = activeSub; - const prevShouldTrack = shouldTrack; - activeSub = this; - shouldTrack = true; - try { - return this.fn(); - } finally { - if (activeSub !== this) { - warn$2( - "Active effect was not restored correctly - this is likely a Vue internal bug." 
- ); - } - cleanupDeps(this); - activeSub = prevEffect; - shouldTrack = prevShouldTrack; - this.flags &= ~2; - } - } - stop() { - if (this.flags & 1) { - for (let link = this.deps; link; link = link.nextDep) { - removeSub(link); - } - this.deps = this.depsTail = void 0; - cleanupEffect(this); - this.onStop && this.onStop(); - this.flags &= ~1; - } - } - trigger() { - if (this.flags & 64) { - pausedQueueEffects.add(this); - } else if (this.scheduler) { - this.scheduler(); - } else { - this.runIfDirty(); - } - } - /** - * @internal - */ - runIfDirty() { - if (isDirty(this)) { - this.run(); - } - } - get dirty() { - return isDirty(this); - } -} -let batchDepth = 0; -let batchedSub; -let batchedComputed; -function batch(sub, isComputed = false) { - sub.flags |= 8; - if (isComputed) { - sub.next = batchedComputed; - batchedComputed = sub; - return; - } - sub.next = batchedSub; - batchedSub = sub; -} -function startBatch() { - batchDepth++; -} -function endBatch() { - if (--batchDepth > 0) { - return; - } - if (batchedComputed) { - let e = batchedComputed; - batchedComputed = void 0; - while (e) { - const next = e.next; - e.next = void 0; - e.flags &= ~8; - e = next; - } - } - let error; - while (batchedSub) { - let e = batchedSub; - batchedSub = void 0; - while (e) { - const next = e.next; - e.next = void 0; - e.flags &= ~8; - if (e.flags & 1) { - try { - ; - e.trigger(); - } catch (err) { - if (!error) error = err; - } - } - e = next; - } - } - if (error) throw error; -} -function prepareDeps(sub) { - for (let link = sub.deps; link; link = link.nextDep) { - link.version = -1; - link.prevActiveLink = link.dep.activeLink; - link.dep.activeLink = link; - } -} -function cleanupDeps(sub) { - let head; - let tail = sub.depsTail; - let link = tail; - while (link) { - const prev = link.prevDep; - if (link.version === -1) { - if (link === tail) tail = prev; - removeSub(link); - removeDep(link); - } else { - head = link; - } - link.dep.activeLink = link.prevActiveLink; - link.prevActiveLink = void 0; - link = prev; - } - sub.deps = head; - sub.depsTail = tail; -} -function isDirty(sub) { - for (let link = sub.deps; link; link = link.nextDep) { - if (link.dep.version !== link.version || link.dep.computed && (refreshComputed(link.dep.computed) || link.dep.version !== link.version)) { - return true; - } - } - if (sub._dirty) { - return true; - } - return false; -} -function refreshComputed(computed) { - if (computed.flags & 4 && !(computed.flags & 16)) { - return; - } - computed.flags &= ~16; - if (computed.globalVersion === globalVersion) { - return; - } - computed.globalVersion = globalVersion; - const dep = computed.dep; - computed.flags |= 2; - if (dep.version > 0 && !computed.isSSR && computed.deps && !isDirty(computed)) { - computed.flags &= ~2; - return; - } - const prevSub = activeSub; - const prevShouldTrack = shouldTrack; - activeSub = computed; - shouldTrack = true; - try { - prepareDeps(computed); - const value = computed.fn(computed._value); - if (dep.version === 0 || hasChanged(value, computed._value)) { - computed._value = value; - dep.version++; - } - } catch (err) { - dep.version++; - throw err; - } finally { - activeSub = prevSub; - shouldTrack = prevShouldTrack; - cleanupDeps(computed); - computed.flags &= ~2; - } -} -function removeSub(link, soft = false) { - const { dep, prevSub, nextSub } = link; - if (prevSub) { - prevSub.nextSub = nextSub; - link.prevSub = void 0; - } - if (nextSub) { - nextSub.prevSub = prevSub; - link.nextSub = void 0; - } - if (dep.subsHead === link) { - 
dep.subsHead = nextSub; - } - if (dep.subs === link) { - dep.subs = prevSub; - if (!prevSub && dep.computed) { - dep.computed.flags &= ~4; - for (let l = dep.computed.deps; l; l = l.nextDep) { - removeSub(l, true); - } - } - } - if (!soft && !--dep.sc && dep.map) { - dep.map.delete(dep.key); - } -} -function removeDep(link) { - const { prevDep, nextDep } = link; - if (prevDep) { - prevDep.nextDep = nextDep; - link.prevDep = void 0; - } - if (nextDep) { - nextDep.prevDep = prevDep; - link.nextDep = void 0; - } -} -function effect(fn, options) { - if (fn.effect instanceof ReactiveEffect) { - fn = fn.effect.fn; - } - const e = new ReactiveEffect(fn); - if (options) { - extend(e, options); - } - try { - e.run(); - } catch (err) { - e.stop(); - throw err; - } - const runner = e.run.bind(e); - runner.effect = e; - return runner; -} -function stop(runner) { - runner.effect.stop(); -} -let shouldTrack = true; -const trackStack = []; -function pauseTracking() { - trackStack.push(shouldTrack); - shouldTrack = false; -} -function resetTracking() { - const last = trackStack.pop(); - shouldTrack = last === void 0 ? true : last; -} -function cleanupEffect(e) { - const { cleanup } = e; - e.cleanup = void 0; - if (cleanup) { - const prevSub = activeSub; - activeSub = void 0; - try { - cleanup(); - } finally { - activeSub = prevSub; - } - } -} - -let globalVersion = 0; -class Link { - constructor(sub, dep) { - this.sub = sub; - this.dep = dep; - this.version = dep.version; - this.nextDep = this.prevDep = this.nextSub = this.prevSub = this.prevActiveLink = void 0; - } -} -class Dep { - constructor(computed) { - this.computed = computed; - this.version = 0; - /** - * Link between this dep and the current active effect - */ - this.activeLink = void 0; - /** - * Doubly linked list representing the subscribing effects (tail) - */ - this.subs = void 0; - /** - * For object property deps cleanup - */ - this.map = void 0; - this.key = void 0; - /** - * Subscriber counter - */ - this.sc = 0; - { - this.subsHead = void 0; - } - } - track(debugInfo) { - if (!activeSub || !shouldTrack || activeSub === this.computed) { - return; - } - let link = this.activeLink; - if (link === void 0 || link.sub !== activeSub) { - link = this.activeLink = new Link(activeSub, this); - if (!activeSub.deps) { - activeSub.deps = activeSub.depsTail = link; - } else { - link.prevDep = activeSub.depsTail; - activeSub.depsTail.nextDep = link; - activeSub.depsTail = link; - } - addSub(link); - } else if (link.version === -1) { - link.version = this.version; - if (link.nextDep) { - const next = link.nextDep; - next.prevDep = link.prevDep; - if (link.prevDep) { - link.prevDep.nextDep = next; - } - link.prevDep = activeSub.depsTail; - link.nextDep = void 0; - activeSub.depsTail.nextDep = link; - activeSub.depsTail = link; - if (activeSub.deps === link) { - activeSub.deps = next; - } - } - } - if (activeSub.onTrack) { - activeSub.onTrack( - extend( - { - effect: activeSub - }, - debugInfo - ) - ); - } - return link; - } - trigger(debugInfo) { - this.version++; - globalVersion++; - this.notify(debugInfo); - } - notify(debugInfo) { - startBatch(); - try { - if (true) { - for (let head = this.subsHead; head; head = head.nextSub) { - if (head.sub.onTrigger && !(head.sub.flags & 8)) { - head.sub.onTrigger( - extend( - { - effect: head.sub - }, - debugInfo - ) - ); - } - } - } - for (let link = this.subs; link; link = link.prevSub) { - if (link.sub.notify()) { - ; - link.sub.dep.notify(); - } - } - } finally { - endBatch(); - } - } -} -function 
addSub(link) { - link.dep.sc++; - if (link.sub.flags & 4) { - const computed = link.dep.computed; - if (computed && !link.dep.subs) { - computed.flags |= 4 | 16; - for (let l = computed.deps; l; l = l.nextDep) { - addSub(l); - } - } - const currentTail = link.dep.subs; - if (currentTail !== link) { - link.prevSub = currentTail; - if (currentTail) currentTail.nextSub = link; - } - if (link.dep.subsHead === void 0) { - link.dep.subsHead = link; - } - link.dep.subs = link; - } -} -const targetMap = /* @__PURE__ */ new WeakMap(); -const ITERATE_KEY = Symbol( - "Object iterate" -); -const MAP_KEY_ITERATE_KEY = Symbol( - "Map keys iterate" -); -const ARRAY_ITERATE_KEY = Symbol( - "Array iterate" -); -function track(target, type, key) { - if (shouldTrack && activeSub) { - let depsMap = targetMap.get(target); - if (!depsMap) { - targetMap.set(target, depsMap = /* @__PURE__ */ new Map()); - } - let dep = depsMap.get(key); - if (!dep) { - depsMap.set(key, dep = new Dep()); - dep.map = depsMap; - dep.key = key; - } - { - dep.track({ - target, - type, - key - }); - } - } -} -function trigger(target, type, key, newValue, oldValue, oldTarget) { - const depsMap = targetMap.get(target); - if (!depsMap) { - globalVersion++; - return; - } - const run = (dep) => { - if (dep) { - { - dep.trigger({ - target, - type, - key, - newValue, - oldValue, - oldTarget - }); - } - } - }; - startBatch(); - if (type === "clear") { - depsMap.forEach(run); - } else { - const targetIsArray = isArray(target); - const isArrayIndex = targetIsArray && isIntegerKey(key); - if (targetIsArray && key === "length") { - const newLength = Number(newValue); - depsMap.forEach((dep, key2) => { - if (key2 === "length" || key2 === ARRAY_ITERATE_KEY || !isSymbol(key2) && key2 >= newLength) { - run(dep); - } - }); - } else { - if (key !== void 0 || depsMap.has(void 0)) { - run(depsMap.get(key)); - } - if (isArrayIndex) { - run(depsMap.get(ARRAY_ITERATE_KEY)); - } - switch (type) { - case "add": - if (!targetIsArray) { - run(depsMap.get(ITERATE_KEY)); - if (isMap(target)) { - run(depsMap.get(MAP_KEY_ITERATE_KEY)); - } - } else if (isArrayIndex) { - run(depsMap.get("length")); - } - break; - case "delete": - if (!targetIsArray) { - run(depsMap.get(ITERATE_KEY)); - if (isMap(target)) { - run(depsMap.get(MAP_KEY_ITERATE_KEY)); - } - } - break; - case "set": - if (isMap(target)) { - run(depsMap.get(ITERATE_KEY)); - } - break; - } - } - } - endBatch(); -} -function getDepFromReactive(object, key) { - const depMap = targetMap.get(object); - return depMap && depMap.get(key); -} - -function reactiveReadArray(array) { - const raw = toRaw(array); - if (raw === array) return raw; - track(raw, "iterate", ARRAY_ITERATE_KEY); - return isShallow(array) ? raw : raw.map(toReactive); -} -function shallowReadArray(arr) { - track(arr = toRaw(arr), "iterate", ARRAY_ITERATE_KEY); - return arr; -} -const arrayInstrumentations = { - __proto__: null, - [Symbol.iterator]() { - return iterator(this, Symbol.iterator, toReactive); - }, - concat(...args) { - return reactiveReadArray(this).concat( - ...args.map((x) => isArray(x) ? 
reactiveReadArray(x) : x) - ); - }, - entries() { - return iterator(this, "entries", (value) => { - value[1] = toReactive(value[1]); - return value; - }); - }, - every(fn, thisArg) { - return apply(this, "every", fn, thisArg, void 0, arguments); - }, - filter(fn, thisArg) { - return apply(this, "filter", fn, thisArg, (v) => v.map(toReactive), arguments); - }, - find(fn, thisArg) { - return apply(this, "find", fn, thisArg, toReactive, arguments); - }, - findIndex(fn, thisArg) { - return apply(this, "findIndex", fn, thisArg, void 0, arguments); - }, - findLast(fn, thisArg) { - return apply(this, "findLast", fn, thisArg, toReactive, arguments); - }, - findLastIndex(fn, thisArg) { - return apply(this, "findLastIndex", fn, thisArg, void 0, arguments); - }, - // flat, flatMap could benefit from ARRAY_ITERATE but are not straight-forward to implement - forEach(fn, thisArg) { - return apply(this, "forEach", fn, thisArg, void 0, arguments); - }, - includes(...args) { - return searchProxy(this, "includes", args); - }, - indexOf(...args) { - return searchProxy(this, "indexOf", args); - }, - join(separator) { - return reactiveReadArray(this).join(separator); - }, - // keys() iterator only reads `length`, no optimisation required - lastIndexOf(...args) { - return searchProxy(this, "lastIndexOf", args); - }, - map(fn, thisArg) { - return apply(this, "map", fn, thisArg, void 0, arguments); - }, - pop() { - return noTracking(this, "pop"); - }, - push(...args) { - return noTracking(this, "push", args); - }, - reduce(fn, ...args) { - return reduce(this, "reduce", fn, args); - }, - reduceRight(fn, ...args) { - return reduce(this, "reduceRight", fn, args); - }, - shift() { - return noTracking(this, "shift"); - }, - // slice could use ARRAY_ITERATE but also seems to beg for range tracking - some(fn, thisArg) { - return apply(this, "some", fn, thisArg, void 0, arguments); - }, - splice(...args) { - return noTracking(this, "splice", args); - }, - toReversed() { - return reactiveReadArray(this).toReversed(); - }, - toSorted(comparer) { - return reactiveReadArray(this).toSorted(comparer); - }, - toSpliced(...args) { - return reactiveReadArray(this).toSpliced(...args); - }, - unshift(...args) { - return noTracking(this, "unshift", args); - }, - values() { - return iterator(this, "values", toReactive); - } -}; -function iterator(self, method, wrapValue) { - const arr = shallowReadArray(self); - const iter = arr[method](); - if (arr !== self && !isShallow(self)) { - iter._next = iter.next; - iter.next = () => { - const result = iter._next(); - if (result.value) { - result.value = wrapValue(result.value); - } - return result; - }; - } - return iter; -} -const arrayProto = Array.prototype; -function apply(self, method, fn, thisArg, wrappedRetFn, args) { - const arr = shallowReadArray(self); - const needsWrap = arr !== self && !isShallow(self); - const methodFn = arr[method]; - if (methodFn !== arrayProto[method]) { - const result2 = methodFn.apply(self, args); - return needsWrap ? toReactive(result2) : result2; - } - let wrappedFn = fn; - if (arr !== self) { - if (needsWrap) { - wrappedFn = function(item, index) { - return fn.call(this, toReactive(item), index, self); - }; - } else if (fn.length > 2) { - wrappedFn = function(item, index) { - return fn.call(this, item, index, self); - }; - } - } - const result = methodFn.call(arr, wrappedFn, thisArg); - return needsWrap && wrappedRetFn ? 
wrappedRetFn(result) : result; -} -function reduce(self, method, fn, args) { - const arr = shallowReadArray(self); - let wrappedFn = fn; - if (arr !== self) { - if (!isShallow(self)) { - wrappedFn = function(acc, item, index) { - return fn.call(this, acc, toReactive(item), index, self); - }; - } else if (fn.length > 3) { - wrappedFn = function(acc, item, index) { - return fn.call(this, acc, item, index, self); - }; - } - } - return arr[method](wrappedFn, ...args); -} -function searchProxy(self, method, args) { - const arr = toRaw(self); - track(arr, "iterate", ARRAY_ITERATE_KEY); - const res = arr[method](...args); - if ((res === -1 || res === false) && isProxy(args[0])) { - args[0] = toRaw(args[0]); - return arr[method](...args); - } - return res; -} -function noTracking(self, method, args = []) { - pauseTracking(); - startBatch(); - const res = toRaw(self)[method].apply(self, args); - endBatch(); - resetTracking(); - return res; -} - -const isNonTrackableKeys = /* @__PURE__ */ makeMap(`__proto__,__v_isRef,__isVue`); -const builtInSymbols = new Set( - /* @__PURE__ */ Object.getOwnPropertyNames(Symbol).filter((key) => key !== "arguments" && key !== "caller").map((key) => Symbol[key]).filter(isSymbol) -); -function hasOwnProperty(key) { - if (!isSymbol(key)) key = String(key); - const obj = toRaw(this); - track(obj, "has", key); - return obj.hasOwnProperty(key); -} -class BaseReactiveHandler { - constructor(_isReadonly = false, _isShallow = false) { - this._isReadonly = _isReadonly; - this._isShallow = _isShallow; - } - get(target, key, receiver) { - const isReadonly2 = this._isReadonly, isShallow2 = this._isShallow; - if (key === "__v_isReactive") { - return !isReadonly2; - } else if (key === "__v_isReadonly") { - return isReadonly2; - } else if (key === "__v_isShallow") { - return isShallow2; - } else if (key === "__v_raw") { - if (receiver === (isReadonly2 ? isShallow2 ? shallowReadonlyMap : readonlyMap : isShallow2 ? shallowReactiveMap : reactiveMap).get(target) || // receiver is not the reactive proxy, but has the same prototype - // this means the receiver is a user proxy of the reactive proxy - Object.getPrototypeOf(target) === Object.getPrototypeOf(receiver)) { - return target; - } - return; - } - const targetIsArray = isArray(target); - if (!isReadonly2) { - let fn; - if (targetIsArray && (fn = arrayInstrumentations[key])) { - return fn; - } - if (key === "hasOwnProperty") { - return hasOwnProperty; - } - } - const res = Reflect.get( - target, - key, - // if this is a proxy wrapping a ref, return methods using the raw ref - // as receiver so that we don't have to call `toRaw` on the ref in all - // its class methods - isRef(target) ? target : receiver - ); - if (isSymbol(key) ? builtInSymbols.has(key) : isNonTrackableKeys(key)) { - return res; - } - if (!isReadonly2) { - track(target, "get", key); - } - if (isShallow2) { - return res; - } - if (isRef(res)) { - return targetIsArray && isIntegerKey(key) ? res : res.value; - } - if (isObject(res)) { - return isReadonly2 ? 
readonly(res) : reactive(res); - } - return res; - } -} -class MutableReactiveHandler extends BaseReactiveHandler { - constructor(isShallow2 = false) { - super(false, isShallow2); - } - set(target, key, value, receiver) { - let oldValue = target[key]; - if (!this._isShallow) { - const isOldValueReadonly = isReadonly(oldValue); - if (!isShallow(value) && !isReadonly(value)) { - oldValue = toRaw(oldValue); - value = toRaw(value); - } - if (!isArray(target) && isRef(oldValue) && !isRef(value)) { - if (isOldValueReadonly) { - return false; - } else { - oldValue.value = value; - return true; - } - } - } - const hadKey = isArray(target) && isIntegerKey(key) ? Number(key) < target.length : hasOwn(target, key); - const result = Reflect.set( - target, - key, - value, - isRef(target) ? target : receiver - ); - if (target === toRaw(receiver)) { - if (!hadKey) { - trigger(target, "add", key, value); - } else if (hasChanged(value, oldValue)) { - trigger(target, "set", key, value, oldValue); - } - } - return result; - } - deleteProperty(target, key) { - const hadKey = hasOwn(target, key); - const oldValue = target[key]; - const result = Reflect.deleteProperty(target, key); - if (result && hadKey) { - trigger(target, "delete", key, void 0, oldValue); - } - return result; - } - has(target, key) { - const result = Reflect.has(target, key); - if (!isSymbol(key) || !builtInSymbols.has(key)) { - track(target, "has", key); - } - return result; - } - ownKeys(target) { - track( - target, - "iterate", - isArray(target) ? "length" : ITERATE_KEY - ); - return Reflect.ownKeys(target); - } -} -class ReadonlyReactiveHandler extends BaseReactiveHandler { - constructor(isShallow2 = false) { - super(true, isShallow2); - } - set(target, key) { - { - warn$2( - `Set operation on key "${String(key)}" failed: target is readonly.`, - target - ); - } - return true; - } - deleteProperty(target, key) { - { - warn$2( - `Delete operation on key "${String(key)}" failed: target is readonly.`, - target - ); - } - return true; - } -} -const mutableHandlers = /* @__PURE__ */ new MutableReactiveHandler(); -const readonlyHandlers = /* @__PURE__ */ new ReadonlyReactiveHandler(); -const shallowReactiveHandlers = /* @__PURE__ */ new MutableReactiveHandler(true); -const shallowReadonlyHandlers = /* @__PURE__ */ new ReadonlyReactiveHandler(true); - -const toShallow = (value) => value; -const getProto = (v) => Reflect.getPrototypeOf(v); -function createIterableMethod(method, isReadonly2, isShallow2) { - return function(...args) { - const target = this["__v_raw"]; - const rawTarget = toRaw(target); - const targetIsMap = isMap(rawTarget); - const isPair = method === "entries" || method === Symbol.iterator && targetIsMap; - const isKeyOnly = method === "keys" && targetIsMap; - const innerIterator = target[method](...args); - const wrap = isShallow2 ? toShallow : isReadonly2 ? toReadonly : toReactive; - !isReadonly2 && track( - rawTarget, - "iterate", - isKeyOnly ? MAP_KEY_ITERATE_KEY : ITERATE_KEY - ); - return { - // iterator protocol - next() { - const { value, done } = innerIterator.next(); - return done ? { value, done } : { - value: isPair ? [wrap(value[0]), wrap(value[1])] : wrap(value), - done - }; - }, - // iterable protocol - [Symbol.iterator]() { - return this; - } - }; - }; -} -function createReadonlyMethod(type) { - return function(...args) { - { - const key = args[0] ? `on key "${args[0]}" ` : ``; - warn$2( - `${capitalize(type)} operation ${key}failed: target is readonly.`, - toRaw(this) - ); - } - return type === "delete" ? 
false : type === "clear" ? void 0 : this; - }; -} -function createInstrumentations(readonly, shallow) { - const instrumentations = { - get(key) { - const target = this["__v_raw"]; - const rawTarget = toRaw(target); - const rawKey = toRaw(key); - if (!readonly) { - if (hasChanged(key, rawKey)) { - track(rawTarget, "get", key); - } - track(rawTarget, "get", rawKey); - } - const { has } = getProto(rawTarget); - const wrap = shallow ? toShallow : readonly ? toReadonly : toReactive; - if (has.call(rawTarget, key)) { - return wrap(target.get(key)); - } else if (has.call(rawTarget, rawKey)) { - return wrap(target.get(rawKey)); - } else if (target !== rawTarget) { - target.get(key); - } - }, - get size() { - const target = this["__v_raw"]; - !readonly && track(toRaw(target), "iterate", ITERATE_KEY); - return Reflect.get(target, "size", target); - }, - has(key) { - const target = this["__v_raw"]; - const rawTarget = toRaw(target); - const rawKey = toRaw(key); - if (!readonly) { - if (hasChanged(key, rawKey)) { - track(rawTarget, "has", key); - } - track(rawTarget, "has", rawKey); - } - return key === rawKey ? target.has(key) : target.has(key) || target.has(rawKey); - }, - forEach(callback, thisArg) { - const observed = this; - const target = observed["__v_raw"]; - const rawTarget = toRaw(target); - const wrap = shallow ? toShallow : readonly ? toReadonly : toReactive; - !readonly && track(rawTarget, "iterate", ITERATE_KEY); - return target.forEach((value, key) => { - return callback.call(thisArg, wrap(value), wrap(key), observed); - }); - } - }; - extend( - instrumentations, - readonly ? { - add: createReadonlyMethod("add"), - set: createReadonlyMethod("set"), - delete: createReadonlyMethod("delete"), - clear: createReadonlyMethod("clear") - } : { - add(value) { - if (!shallow && !isShallow(value) && !isReadonly(value)) { - value = toRaw(value); - } - const target = toRaw(this); - const proto = getProto(target); - const hadKey = proto.has.call(target, value); - if (!hadKey) { - target.add(value); - trigger(target, "add", value, value); - } - return this; - }, - set(key, value) { - if (!shallow && !isShallow(value) && !isReadonly(value)) { - value = toRaw(value); - } - const target = toRaw(this); - const { has, get } = getProto(target); - let hadKey = has.call(target, key); - if (!hadKey) { - key = toRaw(key); - hadKey = has.call(target, key); - } else { - checkIdentityKeys(target, has, key); - } - const oldValue = get.call(target, key); - target.set(key, value); - if (!hadKey) { - trigger(target, "add", key, value); - } else if (hasChanged(value, oldValue)) { - trigger(target, "set", key, value, oldValue); - } - return this; - }, - delete(key) { - const target = toRaw(this); - const { has, get } = getProto(target); - let hadKey = has.call(target, key); - if (!hadKey) { - key = toRaw(key); - hadKey = has.call(target, key); - } else { - checkIdentityKeys(target, has, key); - } - const oldValue = get ? get.call(target, key) : void 0; - const result = target.delete(key); - if (hadKey) { - trigger(target, "delete", key, void 0, oldValue); - } - return result; - }, - clear() { - const target = toRaw(this); - const hadItems = target.size !== 0; - const oldTarget = isMap(target) ? 
new Map(target) : new Set(target) ; - const result = target.clear(); - if (hadItems) { - trigger( - target, - "clear", - void 0, - void 0, - oldTarget - ); - } - return result; - } - } - ); - const iteratorMethods = [ - "keys", - "values", - "entries", - Symbol.iterator - ]; - iteratorMethods.forEach((method) => { - instrumentations[method] = createIterableMethod(method, readonly, shallow); - }); - return instrumentations; -} -function createInstrumentationGetter(isReadonly2, shallow) { - const instrumentations = createInstrumentations(isReadonly2, shallow); - return (target, key, receiver) => { - if (key === "__v_isReactive") { - return !isReadonly2; - } else if (key === "__v_isReadonly") { - return isReadonly2; - } else if (key === "__v_raw") { - return target; - } - return Reflect.get( - hasOwn(instrumentations, key) && key in target ? instrumentations : target, - key, - receiver - ); - }; -} -const mutableCollectionHandlers = { - get: /* @__PURE__ */ createInstrumentationGetter(false, false) -}; -const shallowCollectionHandlers = { - get: /* @__PURE__ */ createInstrumentationGetter(false, true) -}; -const readonlyCollectionHandlers = { - get: /* @__PURE__ */ createInstrumentationGetter(true, false) -}; -const shallowReadonlyCollectionHandlers = { - get: /* @__PURE__ */ createInstrumentationGetter(true, true) -}; -function checkIdentityKeys(target, has, key) { - const rawKey = toRaw(key); - if (rawKey !== key && has.call(target, rawKey)) { - const type = toRawType(target); - warn$2( - `Reactive ${type} contains both the raw and reactive versions of the same object${type === `Map` ? ` as keys` : ``}, which can lead to inconsistencies. Avoid differentiating between the raw and reactive versions of an object and only use the reactive version if possible.` - ); - } -} - -const reactiveMap = /* @__PURE__ */ new WeakMap(); -const shallowReactiveMap = /* @__PURE__ */ new WeakMap(); -const readonlyMap = /* @__PURE__ */ new WeakMap(); -const shallowReadonlyMap = /* @__PURE__ */ new WeakMap(); -function targetTypeMap(rawType) { - switch (rawType) { - case "Object": - case "Array": - return 1 /* COMMON */; - case "Map": - case "Set": - case "WeakMap": - case "WeakSet": - return 2 /* COLLECTION */; - default: - return 0 /* INVALID */; - } -} -function getTargetType(value) { - return value["__v_skip"] || !Object.isExtensible(value) ? 0 /* INVALID */ : targetTypeMap(toRawType(value)); -} -function reactive(target) { - if (isReadonly(target)) { - return target; - } - return createReactiveObject( - target, - false, - mutableHandlers, - mutableCollectionHandlers, - reactiveMap - ); -} -function shallowReactive(target) { - return createReactiveObject( - target, - false, - shallowReactiveHandlers, - shallowCollectionHandlers, - shallowReactiveMap - ); -} -function readonly(target) { - return createReactiveObject( - target, - true, - readonlyHandlers, - readonlyCollectionHandlers, - readonlyMap - ); -} -function shallowReadonly(target) { - return createReactiveObject( - target, - true, - shallowReadonlyHandlers, - shallowReadonlyCollectionHandlers, - shallowReadonlyMap - ); -} -function createReactiveObject(target, isReadonly2, baseHandlers, collectionHandlers, proxyMap) { - if (!isObject(target)) { - { - warn$2( - `value cannot be made ${isReadonly2 ? 
"readonly" : "reactive"}: ${String( - target - )}` - ); - } - return target; - } - if (target["__v_raw"] && !(isReadonly2 && target["__v_isReactive"])) { - return target; - } - const existingProxy = proxyMap.get(target); - if (existingProxy) { - return existingProxy; - } - const targetType = getTargetType(target); - if (targetType === 0 /* INVALID */) { - return target; - } - const proxy = new Proxy( - target, - targetType === 2 /* COLLECTION */ ? collectionHandlers : baseHandlers - ); - proxyMap.set(target, proxy); - return proxy; -} -function isReactive(value) { - if (isReadonly(value)) { - return isReactive(value["__v_raw"]); - } - return !!(value && value["__v_isReactive"]); -} -function isReadonly(value) { - return !!(value && value["__v_isReadonly"]); -} -function isShallow(value) { - return !!(value && value["__v_isShallow"]); -} -function isProxy(value) { - return value ? !!value["__v_raw"] : false; -} -function toRaw(observed) { - const raw = observed && observed["__v_raw"]; - return raw ? toRaw(raw) : observed; -} -function markRaw(value) { - if (!hasOwn(value, "__v_skip") && Object.isExtensible(value)) { - def(value, "__v_skip", true); - } - return value; -} -const toReactive = (value) => isObject(value) ? reactive(value) : value; -const toReadonly = (value) => isObject(value) ? readonly(value) : value; - -function isRef(r) { - return r ? r["__v_isRef"] === true : false; -} -function ref(value) { - return createRef(value, false); -} -function shallowRef(value) { - return createRef(value, true); -} -function createRef(rawValue, shallow) { - if (isRef(rawValue)) { - return rawValue; - } - return new RefImpl(rawValue, shallow); -} -class RefImpl { - constructor(value, isShallow2) { - this.dep = new Dep(); - this["__v_isRef"] = true; - this["__v_isShallow"] = false; - this._rawValue = isShallow2 ? value : toRaw(value); - this._value = isShallow2 ? value : toReactive(value); - this["__v_isShallow"] = isShallow2; - } - get value() { - { - this.dep.track({ - target: this, - type: "get", - key: "value" - }); - } - return this._value; - } - set value(newValue) { - const oldValue = this._rawValue; - const useDirectValue = this["__v_isShallow"] || isShallow(newValue) || isReadonly(newValue); - newValue = useDirectValue ? newValue : toRaw(newValue); - if (hasChanged(newValue, oldValue)) { - this._rawValue = newValue; - this._value = useDirectValue ? newValue : toReactive(newValue); - { - this.dep.trigger({ - target: this, - type: "set", - key: "value", - newValue, - oldValue - }); - } - } - } -} -function triggerRef(ref2) { - if (ref2.dep) { - { - ref2.dep.trigger({ - target: ref2, - type: "set", - key: "value", - newValue: ref2._value - }); - } - } -} -function unref(ref2) { - return isRef(ref2) ? ref2.value : ref2; -} -function toValue(source) { - return isFunction(source) ? source() : unref(source); -} -const shallowUnwrapHandlers = { - get: (target, key, receiver) => key === "__v_raw" ? target : unref(Reflect.get(target, key, receiver)), - set: (target, key, value, receiver) => { - const oldValue = target[key]; - if (isRef(oldValue) && !isRef(value)) { - oldValue.value = value; - return true; - } else { - return Reflect.set(target, key, value, receiver); - } - } -}; -function proxyRefs(objectWithRefs) { - return isReactive(objectWithRefs) ? 
objectWithRefs : new Proxy(objectWithRefs, shallowUnwrapHandlers); -} -class CustomRefImpl { - constructor(factory) { - this["__v_isRef"] = true; - this._value = void 0; - const dep = this.dep = new Dep(); - const { get, set } = factory(dep.track.bind(dep), dep.trigger.bind(dep)); - this._get = get; - this._set = set; - } - get value() { - return this._value = this._get(); - } - set value(newVal) { - this._set(newVal); - } -} -function customRef(factory) { - return new CustomRefImpl(factory); -} -function toRefs(object) { - if (!isProxy(object)) { - warn$2(`toRefs() expects a reactive object but received a plain one.`); - } - const ret = isArray(object) ? new Array(object.length) : {}; - for (const key in object) { - ret[key] = propertyToRef(object, key); - } - return ret; -} -class ObjectRefImpl { - constructor(_object, _key, _defaultValue) { - this._object = _object; - this._key = _key; - this._defaultValue = _defaultValue; - this["__v_isRef"] = true; - this._value = void 0; - } - get value() { - const val = this._object[this._key]; - return this._value = val === void 0 ? this._defaultValue : val; - } - set value(newVal) { - this._object[this._key] = newVal; - } - get dep() { - return getDepFromReactive(toRaw(this._object), this._key); - } -} -class GetterRefImpl { - constructor(_getter) { - this._getter = _getter; - this["__v_isRef"] = true; - this["__v_isReadonly"] = true; - this._value = void 0; - } - get value() { - return this._value = this._getter(); - } -} -function toRef(source, key, defaultValue) { - if (isRef(source)) { - return source; - } else if (isFunction(source)) { - return new GetterRefImpl(source); - } else if (isObject(source) && arguments.length > 1) { - return propertyToRef(source, key, defaultValue); - } else { - return ref(source); - } -} -function propertyToRef(source, key, defaultValue) { - const val = source[key]; - return isRef(val) ? 
val : new ObjectRefImpl(source, key, defaultValue); -} - -class ComputedRefImpl { - constructor(fn, setter, isSSR) { - this.fn = fn; - this.setter = setter; - /** - * @internal - */ - this._value = void 0; - /** - * @internal - */ - this.dep = new Dep(this); - /** - * @internal - */ - this.__v_isRef = true; - // TODO isolatedDeclarations "__v_isReadonly" - // A computed is also a subscriber that tracks other deps - /** - * @internal - */ - this.deps = void 0; - /** - * @internal - */ - this.depsTail = void 0; - /** - * @internal - */ - this.flags = 16; - /** - * @internal - */ - this.globalVersion = globalVersion - 1; - /** - * @internal - */ - this.next = void 0; - // for backwards compat - this.effect = this; - this["__v_isReadonly"] = !setter; - this.isSSR = isSSR; - } - /** - * @internal - */ - notify() { - this.flags |= 16; - if (!(this.flags & 8) && // avoid infinite self recursion - activeSub !== this) { - batch(this, true); - return true; - } - } - get value() { - const link = this.dep.track({ - target: this, - type: "get", - key: "value" - }) ; - refreshComputed(this); - if (link) { - link.version = this.dep.version; - } - return this._value; - } - set value(newValue) { - if (this.setter) { - this.setter(newValue); - } else { - warn$2("Write operation failed: computed value is readonly"); - } - } -} -function computed$1(getterOrOptions, debugOptions, isSSR = false) { - let getter; - let setter; - if (isFunction(getterOrOptions)) { - getter = getterOrOptions; - } else { - getter = getterOrOptions.get; - setter = getterOrOptions.set; - } - const cRef = new ComputedRefImpl(getter, setter, isSSR); - if (debugOptions && !isSSR) { - cRef.onTrack = debugOptions.onTrack; - cRef.onTrigger = debugOptions.onTrigger; - } - return cRef; -} - -const TrackOpTypes = { - "GET": "get", - "HAS": "has", - "ITERATE": "iterate" -}; -const TriggerOpTypes = { - "SET": "set", - "ADD": "add", - "DELETE": "delete", - "CLEAR": "clear" -}; - -const INITIAL_WATCHER_VALUE = {}; -const cleanupMap = /* @__PURE__ */ new WeakMap(); -let activeWatcher = void 0; -function getCurrentWatcher() { - return activeWatcher; -} -function onWatcherCleanup(cleanupFn, failSilently = false, owner = activeWatcher) { - if (owner) { - let cleanups = cleanupMap.get(owner); - if (!cleanups) cleanupMap.set(owner, cleanups = []); - cleanups.push(cleanupFn); - } else if (!failSilently) { - warn$2( - `onWatcherCleanup() was called when there was no active watcher to associate with.` - ); - } -} -function watch$1(source, cb, options = EMPTY_OBJ) { - const { immediate, deep, once, scheduler, augmentJob, call } = options; - const warnInvalidSource = (s) => { - (options.onWarn || warn$2)( - `Invalid watch source: `, - s, - `A watch source can only be a getter/effect function, a ref, a reactive object, or an array of these types.` - ); - }; - const reactiveGetter = (source2) => { - if (deep) return source2; - if (isShallow(source2) || deep === false || deep === 0) - return traverse(source2, 1); - return traverse(source2); - }; - let effect; - let getter; - let cleanup; - let boundCleanup; - let forceTrigger = false; - let isMultiSource = false; - if (isRef(source)) { - getter = () => source.value; - forceTrigger = isShallow(source); - } else if (isReactive(source)) { - getter = () => reactiveGetter(source); - forceTrigger = true; - } else if (isArray(source)) { - isMultiSource = true; - forceTrigger = source.some((s) => isReactive(s) || isShallow(s)); - getter = () => source.map((s) => { - if (isRef(s)) { - return s.value; - } else if 
(isReactive(s)) { - return reactiveGetter(s); - } else if (isFunction(s)) { - return call ? call(s, 2) : s(); - } else { - warnInvalidSource(s); - } - }); - } else if (isFunction(source)) { - if (cb) { - getter = call ? () => call(source, 2) : source; - } else { - getter = () => { - if (cleanup) { - pauseTracking(); - try { - cleanup(); - } finally { - resetTracking(); - } - } - const currentEffect = activeWatcher; - activeWatcher = effect; - try { - return call ? call(source, 3, [boundCleanup]) : source(boundCleanup); - } finally { - activeWatcher = currentEffect; - } - }; - } - } else { - getter = NOOP; - warnInvalidSource(source); - } - if (cb && deep) { - const baseGetter = getter; - const depth = deep === true ? Infinity : deep; - getter = () => traverse(baseGetter(), depth); - } - const scope = getCurrentScope(); - const watchHandle = () => { - effect.stop(); - if (scope) { - remove(scope.effects, effect); - } - }; - if (once && cb) { - const _cb = cb; - cb = (...args) => { - _cb(...args); - watchHandle(); - }; - } - let oldValue = isMultiSource ? new Array(source.length).fill(INITIAL_WATCHER_VALUE) : INITIAL_WATCHER_VALUE; - const job = (immediateFirstRun) => { - if (!(effect.flags & 1) || !effect.dirty && !immediateFirstRun) { - return; - } - if (cb) { - const newValue = effect.run(); - if (deep || forceTrigger || (isMultiSource ? newValue.some((v, i) => hasChanged(v, oldValue[i])) : hasChanged(newValue, oldValue))) { - if (cleanup) { - cleanup(); - } - const currentWatcher = activeWatcher; - activeWatcher = effect; - try { - const args = [ - newValue, - // pass undefined as the old value when it's changed for the first time - oldValue === INITIAL_WATCHER_VALUE ? void 0 : isMultiSource && oldValue[0] === INITIAL_WATCHER_VALUE ? [] : oldValue, - boundCleanup - ]; - call ? call(cb, 3, args) : ( - // @ts-expect-error - cb(...args) - ); - oldValue = newValue; - } finally { - activeWatcher = currentWatcher; - } - } - } else { - effect.run(); - } - }; - if (augmentJob) { - augmentJob(job); - } - effect = new ReactiveEffect(getter); - effect.scheduler = scheduler ? 
() => scheduler(job, false) : job; - boundCleanup = (fn) => onWatcherCleanup(fn, false, effect); - cleanup = effect.onStop = () => { - const cleanups = cleanupMap.get(effect); - if (cleanups) { - if (call) { - call(cleanups, 4); - } else { - for (const cleanup2 of cleanups) cleanup2(); - } - cleanupMap.delete(effect); - } - }; - { - effect.onTrack = options.onTrack; - effect.onTrigger = options.onTrigger; - } - if (cb) { - if (immediate) { - job(true); - } else { - oldValue = effect.run(); - } - } else if (scheduler) { - scheduler(job.bind(null, true), true); - } else { - effect.run(); - } - watchHandle.pause = effect.pause.bind(effect); - watchHandle.resume = effect.resume.bind(effect); - watchHandle.stop = watchHandle; - return watchHandle; -} -function traverse(value, depth = Infinity, seen) { - if (depth <= 0 || !isObject(value) || value["__v_skip"]) { - return value; - } - seen = seen || /* @__PURE__ */ new Set(); - if (seen.has(value)) { - return value; - } - seen.add(value); - depth--; - if (isRef(value)) { - traverse(value.value, depth, seen); - } else if (isArray(value)) { - for (let i = 0; i < value.length; i++) { - traverse(value[i], depth, seen); - } - } else if (isSet(value) || isMap(value)) { - value.forEach((v) => { - traverse(v, depth, seen); - }); - } else if (isPlainObject(value)) { - for (const key in value) { - traverse(value[key], depth, seen); - } - for (const key of Object.getOwnPropertySymbols(value)) { - if (Object.prototype.propertyIsEnumerable.call(value, key)) { - traverse(value[key], depth, seen); - } - } - } - return value; -} - -const stack$1 = []; -function pushWarningContext(vnode) { - stack$1.push(vnode); -} -function popWarningContext() { - stack$1.pop(); -} -let isWarning = false; -function warn$1(msg, ...args) { - if (isWarning) return; - isWarning = true; - pauseTracking(); - const instance = stack$1.length ? stack$1[stack$1.length - 1].component : null; - const appWarnHandler = instance && instance.appContext.config.warnHandler; - const trace = getComponentTrace(); - if (appWarnHandler) { - callWithErrorHandling( - appWarnHandler, - instance, - 11, - [ - // eslint-disable-next-line no-restricted-syntax - msg + args.map((a) => { - var _a, _b; - return (_b = (_a = a.toString) == null ? void 0 : _a.call(a)) != null ? _b : JSON.stringify(a); - }).join(""), - instance && instance.proxy, - trace.map( - ({ vnode }) => `at <${formatComponentName(instance, vnode.type)}>` - ).join("\n"), - trace - ] - ); - } else { - const warnArgs = [`[Vue warn]: ${msg}`, ...args]; - if (trace.length && // avoid spamming console during tests - true) { - warnArgs.push(` -`, ...formatTrace(trace)); - } - console.warn(...warnArgs); - } - resetTracking(); - isWarning = false; -} -function getComponentTrace() { - let currentVNode = stack$1[stack$1.length - 1]; - if (!currentVNode) { - return []; - } - const normalizedStack = []; - while (currentVNode) { - const last = normalizedStack[0]; - if (last && last.vnode === currentVNode) { - last.recurseCount++; - } else { - normalizedStack.push({ - vnode: currentVNode, - recurseCount: 0 - }); - } - const parentInstance = currentVNode.component && currentVNode.component.parent; - currentVNode = parentInstance && parentInstance.vnode; - } - return normalizedStack; -} -function formatTrace(trace) { - const logs = []; - trace.forEach((entry, i) => { - logs.push(...i === 0 ? [] : [` -`], ...formatTraceEntry(entry)); - }); - return logs; -} -function formatTraceEntry({ vnode, recurseCount }) { - const postfix = recurseCount > 0 ? `... 
(${recurseCount} recursive calls)` : ``; - const isRoot = vnode.component ? vnode.component.parent == null : false; - const open = ` at <${formatComponentName( - vnode.component, - vnode.type, - isRoot - )}`; - const close = `>` + postfix; - return vnode.props ? [open, ...formatProps(vnode.props), close] : [open + close]; -} -function formatProps(props) { - const res = []; - const keys = Object.keys(props); - keys.slice(0, 3).forEach((key) => { - res.push(...formatProp(key, props[key])); - }); - if (keys.length > 3) { - res.push(` ...`); - } - return res; -} -function formatProp(key, value, raw) { - if (isString(value)) { - value = JSON.stringify(value); - return raw ? value : [`${key}=${value}`]; - } else if (typeof value === "number" || typeof value === "boolean" || value == null) { - return raw ? value : [`${key}=${value}`]; - } else if (isRef(value)) { - value = formatProp(key, toRaw(value.value), true); - return raw ? value : [`${key}=Ref<`, value, `>`]; - } else if (isFunction(value)) { - return [`${key}=fn${value.name ? `<${value.name}>` : ``}`]; - } else { - value = toRaw(value); - return raw ? value : [`${key}=`, value]; - } -} -function assertNumber(val, type) { - if (val === void 0) { - return; - } else if (typeof val !== "number") { - warn$1(`${type} is not a valid number - got ${JSON.stringify(val)}.`); - } else if (isNaN(val)) { - warn$1(`${type} is NaN - the duration expression might be incorrect.`); - } -} - -const ErrorCodes = { - "SETUP_FUNCTION": 0, - "0": "SETUP_FUNCTION", - "RENDER_FUNCTION": 1, - "1": "RENDER_FUNCTION", - "NATIVE_EVENT_HANDLER": 5, - "5": "NATIVE_EVENT_HANDLER", - "COMPONENT_EVENT_HANDLER": 6, - "6": "COMPONENT_EVENT_HANDLER", - "VNODE_HOOK": 7, - "7": "VNODE_HOOK", - "DIRECTIVE_HOOK": 8, - "8": "DIRECTIVE_HOOK", - "TRANSITION_HOOK": 9, - "9": "TRANSITION_HOOK", - "APP_ERROR_HANDLER": 10, - "10": "APP_ERROR_HANDLER", - "APP_WARN_HANDLER": 11, - "11": "APP_WARN_HANDLER", - "FUNCTION_REF": 12, - "12": "FUNCTION_REF", - "ASYNC_COMPONENT_LOADER": 13, - "13": "ASYNC_COMPONENT_LOADER", - "SCHEDULER": 14, - "14": "SCHEDULER", - "COMPONENT_UPDATE": 15, - "15": "COMPONENT_UPDATE", - "APP_UNMOUNT_CLEANUP": 16, - "16": "APP_UNMOUNT_CLEANUP" -}; -const ErrorTypeStrings$1 = { - ["sp"]: "serverPrefetch hook", - ["bc"]: "beforeCreate hook", - ["c"]: "created hook", - ["bm"]: "beforeMount hook", - ["m"]: "mounted hook", - ["bu"]: "beforeUpdate hook", - ["u"]: "updated", - ["bum"]: "beforeUnmount hook", - ["um"]: "unmounted hook", - ["a"]: "activated hook", - ["da"]: "deactivated hook", - ["ec"]: "errorCaptured hook", - ["rtc"]: "renderTracked hook", - ["rtg"]: "renderTriggered hook", - [0]: "setup function", - [1]: "render function", - [2]: "watcher getter", - [3]: "watcher callback", - [4]: "watcher cleanup function", - [5]: "native event handler", - [6]: "component event handler", - [7]: "vnode hook", - [8]: "directive hook", - [9]: "transition hook", - [10]: "app errorHandler", - [11]: "app warnHandler", - [12]: "ref function", - [13]: "async component loader", - [14]: "scheduler flush", - [15]: "component update", - [16]: "app unmount cleanup function" -}; -function callWithErrorHandling(fn, instance, type, args) { - try { - return args ? 
fn(...args) : fn(); - } catch (err) { - handleError(err, instance, type); - } -} -function callWithAsyncErrorHandling(fn, instance, type, args) { - if (isFunction(fn)) { - const res = callWithErrorHandling(fn, instance, type, args); - if (res && isPromise(res)) { - res.catch((err) => { - handleError(err, instance, type); - }); - } - return res; - } - if (isArray(fn)) { - const values = []; - for (let i = 0; i < fn.length; i++) { - values.push(callWithAsyncErrorHandling(fn[i], instance, type, args)); - } - return values; - } else { - warn$1( - `Invalid value type passed to callWithAsyncErrorHandling(): ${typeof fn}` - ); - } -} -function handleError(err, instance, type, throwInDev = true) { - const contextVNode = instance ? instance.vnode : null; - const { errorHandler, throwUnhandledErrorInProduction } = instance && instance.appContext.config || EMPTY_OBJ; - if (instance) { - let cur = instance.parent; - const exposedInstance = instance.proxy; - const errorInfo = ErrorTypeStrings$1[type] ; - while (cur) { - const errorCapturedHooks = cur.ec; - if (errorCapturedHooks) { - for (let i = 0; i < errorCapturedHooks.length; i++) { - if (errorCapturedHooks[i](err, exposedInstance, errorInfo) === false) { - return; - } - } - } - cur = cur.parent; - } - if (errorHandler) { - pauseTracking(); - callWithErrorHandling(errorHandler, null, 10, [ - err, - exposedInstance, - errorInfo - ]); - resetTracking(); - return; - } - } - logError(err, type, contextVNode, throwInDev, throwUnhandledErrorInProduction); -} -function logError(err, type, contextVNode, throwInDev = true, throwInProd = false) { - { - const info = ErrorTypeStrings$1[type]; - if (contextVNode) { - pushWarningContext(contextVNode); - } - warn$1(`Unhandled error${info ? ` during execution of ${info}` : ``}`); - if (contextVNode) { - popWarningContext(); - } - if (throwInDev) { - throw err; - } else { - console.error(err); - } - } -} - -const queue = []; -let flushIndex = -1; -const pendingPostFlushCbs = []; -let activePostFlushCbs = null; -let postFlushIndex = 0; -const resolvedPromise = /* @__PURE__ */ Promise.resolve(); -let currentFlushPromise = null; -const RECURSION_LIMIT = 100; -function nextTick(fn) { - const p = currentFlushPromise || resolvedPromise; - return fn ? p.then(this ? 
fn.bind(this) : fn) : p; -} -function findInsertionIndex(id) { - let start = flushIndex + 1; - let end = queue.length; - while (start < end) { - const middle = start + end >>> 1; - const middleJob = queue[middle]; - const middleJobId = getId(middleJob); - if (middleJobId < id || middleJobId === id && middleJob.flags & 2) { - start = middle + 1; - } else { - end = middle; - } - } - return start; -} -function queueJob(job) { - if (!(job.flags & 1)) { - const jobId = getId(job); - const lastJob = queue[queue.length - 1]; - if (!lastJob || // fast path when the job id is larger than the tail - !(job.flags & 2) && jobId >= getId(lastJob)) { - queue.push(job); - } else { - queue.splice(findInsertionIndex(jobId), 0, job); - } - job.flags |= 1; - queueFlush(); - } -} -function queueFlush() { - if (!currentFlushPromise) { - currentFlushPromise = resolvedPromise.then(flushJobs); - } -} -function queuePostFlushCb(cb) { - if (!isArray(cb)) { - if (activePostFlushCbs && cb.id === -1) { - activePostFlushCbs.splice(postFlushIndex + 1, 0, cb); - } else if (!(cb.flags & 1)) { - pendingPostFlushCbs.push(cb); - cb.flags |= 1; - } - } else { - pendingPostFlushCbs.push(...cb); - } - queueFlush(); -} -function flushPreFlushCbs(instance, seen, i = flushIndex + 1) { - { - seen = seen || /* @__PURE__ */ new Map(); - } - for (; i < queue.length; i++) { - const cb = queue[i]; - if (cb && cb.flags & 2) { - if (instance && cb.id !== instance.uid) { - continue; - } - if (checkRecursiveUpdates(seen, cb)) { - continue; - } - queue.splice(i, 1); - i--; - if (cb.flags & 4) { - cb.flags &= ~1; - } - cb(); - if (!(cb.flags & 4)) { - cb.flags &= ~1; - } - } - } -} -function flushPostFlushCbs(seen) { - if (pendingPostFlushCbs.length) { - const deduped = [...new Set(pendingPostFlushCbs)].sort( - (a, b) => getId(a) - getId(b) - ); - pendingPostFlushCbs.length = 0; - if (activePostFlushCbs) { - activePostFlushCbs.push(...deduped); - return; - } - activePostFlushCbs = deduped; - { - seen = seen || /* @__PURE__ */ new Map(); - } - for (postFlushIndex = 0; postFlushIndex < activePostFlushCbs.length; postFlushIndex++) { - const cb = activePostFlushCbs[postFlushIndex]; - if (checkRecursiveUpdates(seen, cb)) { - continue; - } - if (cb.flags & 4) { - cb.flags &= ~1; - } - if (!(cb.flags & 8)) cb(); - cb.flags &= ~1; - } - activePostFlushCbs = null; - postFlushIndex = 0; - } -} -const getId = (job) => job.id == null ? job.flags & 2 ? -1 : Infinity : job.id; -function flushJobs(seen) { - { - seen = seen || /* @__PURE__ */ new Map(); - } - const check = (job) => checkRecursiveUpdates(seen, job) ; - try { - for (flushIndex = 0; flushIndex < queue.length; flushIndex++) { - const job = queue[flushIndex]; - if (job && !(job.flags & 8)) { - if (check(job)) { - continue; - } - if (job.flags & 4) { - job.flags &= ~1; - } - callWithErrorHandling( - job, - job.i, - job.i ? 15 : 14 - ); - if (!(job.flags & 4)) { - job.flags &= ~1; - } - } - } - } finally { - for (; flushIndex < queue.length; flushIndex++) { - const job = queue[flushIndex]; - if (job) { - job.flags &= ~1; - } - } - flushIndex = -1; - queue.length = 0; - flushPostFlushCbs(seen); - currentFlushPromise = null; - if (queue.length || pendingPostFlushCbs.length) { - flushJobs(seen); - } - } -} -function checkRecursiveUpdates(seen, fn) { - const count = seen.get(fn) || 0; - if (count > RECURSION_LIMIT) { - const instance = fn.i; - const componentName = instance && getComponentName(instance.type); - handleError( - `Maximum recursive updates exceeded${componentName ? 
` in component <${componentName}>` : ``}. This means you have a reactive effect that is mutating its own dependencies and thus recursively triggering itself. Possible sources include component template, render function, updated hook or watcher source function.`, - null, - 10 - ); - return true; - } - seen.set(fn, count + 1); - return false; -} - -let isHmrUpdating = false; -const hmrDirtyComponents = /* @__PURE__ */ new Map(); -{ - getGlobalThis().__VUE_HMR_RUNTIME__ = { - createRecord: tryWrap(createRecord), - rerender: tryWrap(rerender), - reload: tryWrap(reload) - }; -} -const map = /* @__PURE__ */ new Map(); -function registerHMR(instance) { - const id = instance.type.__hmrId; - let record = map.get(id); - if (!record) { - createRecord(id, instance.type); - record = map.get(id); - } - record.instances.add(instance); -} -function unregisterHMR(instance) { - map.get(instance.type.__hmrId).instances.delete(instance); -} -function createRecord(id, initialDef) { - if (map.has(id)) { - return false; - } - map.set(id, { - initialDef: normalizeClassComponent(initialDef), - instances: /* @__PURE__ */ new Set() - }); - return true; -} -function normalizeClassComponent(component) { - return isClassComponent(component) ? component.__vccOpts : component; -} -function rerender(id, newRender) { - const record = map.get(id); - if (!record) { - return; - } - record.initialDef.render = newRender; - [...record.instances].forEach((instance) => { - if (newRender) { - instance.render = newRender; - normalizeClassComponent(instance.type).render = newRender; - } - instance.renderCache = []; - isHmrUpdating = true; - instance.update(); - isHmrUpdating = false; - }); -} -function reload(id, newComp) { - const record = map.get(id); - if (!record) return; - newComp = normalizeClassComponent(newComp); - updateComponentDef(record.initialDef, newComp); - const instances = [...record.instances]; - for (let i = 0; i < instances.length; i++) { - const instance = instances[i]; - const oldComp = normalizeClassComponent(instance.type); - let dirtyInstances = hmrDirtyComponents.get(oldComp); - if (!dirtyInstances) { - if (oldComp !== record.initialDef) { - updateComponentDef(oldComp, newComp); - } - hmrDirtyComponents.set(oldComp, dirtyInstances = /* @__PURE__ */ new Set()); - } - dirtyInstances.add(instance); - instance.appContext.propsCache.delete(instance.type); - instance.appContext.emitsCache.delete(instance.type); - instance.appContext.optionsCache.delete(instance.type); - if (instance.ceReload) { - dirtyInstances.add(instance); - instance.ceReload(newComp.styles); - dirtyInstances.delete(instance); - } else if (instance.parent) { - queueJob(() => { - isHmrUpdating = true; - instance.parent.update(); - isHmrUpdating = false; - dirtyInstances.delete(instance); - }); - } else if (instance.appContext.reload) { - instance.appContext.reload(); - } else if (typeof window !== "undefined") { - window.location.reload(); - } else { - console.warn( - "[HMR] Root or manually mounted instance modified. Full reload required." 
- ); - } - if (instance.root.ce && instance !== instance.root) { - instance.root.ce._removeChildStyle(oldComp); - } - } - queuePostFlushCb(() => { - hmrDirtyComponents.clear(); - }); -} -function updateComponentDef(oldComp, newComp) { - extend(oldComp, newComp); - for (const key in oldComp) { - if (key !== "__file" && !(key in newComp)) { - delete oldComp[key]; - } - } -} -function tryWrap(fn) { - return (id, arg) => { - try { - return fn(id, arg); - } catch (e) { - console.error(e); - console.warn( - `[HMR] Something went wrong during Vue component hot-reload. Full reload required.` - ); - } - }; -} - -let devtools$1; -let buffer = []; -let devtoolsNotInstalled = false; -function emit$1(event, ...args) { - if (devtools$1) { - devtools$1.emit(event, ...args); - } else if (!devtoolsNotInstalled) { - buffer.push({ event, args }); - } -} -function setDevtoolsHook$1(hook, target) { - var _a, _b; - devtools$1 = hook; - if (devtools$1) { - devtools$1.enabled = true; - buffer.forEach(({ event, args }) => devtools$1.emit(event, ...args)); - buffer = []; - } else if ( - // handle late devtools injection - only do this if we are in an actual - // browser environment to avoid the timer handle stalling test runner exit - // (#4815) - typeof window !== "undefined" && // some envs mock window but not fully - window.HTMLElement && // also exclude jsdom - // eslint-disable-next-line no-restricted-syntax - !((_b = (_a = window.navigator) == null ? void 0 : _a.userAgent) == null ? void 0 : _b.includes("jsdom")) - ) { - const replay = target.__VUE_DEVTOOLS_HOOK_REPLAY__ = target.__VUE_DEVTOOLS_HOOK_REPLAY__ || []; - replay.push((newHook) => { - setDevtoolsHook$1(newHook, target); - }); - setTimeout(() => { - if (!devtools$1) { - target.__VUE_DEVTOOLS_HOOK_REPLAY__ = null; - devtoolsNotInstalled = true; - buffer = []; - } - }, 3e3); - } else { - devtoolsNotInstalled = true; - buffer = []; - } -} -function devtoolsInitApp(app, version) { - emit$1("app:init" /* APP_INIT */, app, version, { - Fragment, - Text, - Comment, - Static - }); -} -function devtoolsUnmountApp(app) { - emit$1("app:unmount" /* APP_UNMOUNT */, app); -} -const devtoolsComponentAdded = /* @__PURE__ */ createDevtoolsComponentHook("component:added" /* COMPONENT_ADDED */); -const devtoolsComponentUpdated = /* @__PURE__ */ createDevtoolsComponentHook("component:updated" /* COMPONENT_UPDATED */); -const _devtoolsComponentRemoved = /* @__PURE__ */ createDevtoolsComponentHook( - "component:removed" /* COMPONENT_REMOVED */ -); -const devtoolsComponentRemoved = (component) => { - if (devtools$1 && typeof devtools$1.cleanupBuffer === "function" && // remove the component if it wasn't buffered - !devtools$1.cleanupBuffer(component)) { - _devtoolsComponentRemoved(component); - } -}; -/*! #__NO_SIDE_EFFECTS__ */ -// @__NO_SIDE_EFFECTS__ -function createDevtoolsComponentHook(hook) { - return (component) => { - emit$1( - hook, - component.appContext.app, - component.uid, - component.parent ? 
component.parent.uid : void 0, - component - ); - }; -} -const devtoolsPerfStart = /* @__PURE__ */ createDevtoolsPerformanceHook("perf:start" /* PERFORMANCE_START */); -const devtoolsPerfEnd = /* @__PURE__ */ createDevtoolsPerformanceHook("perf:end" /* PERFORMANCE_END */); -function createDevtoolsPerformanceHook(hook) { - return (component, type, time) => { - emit$1(hook, component.appContext.app, component.uid, component, type, time); - }; -} -function devtoolsComponentEmit(component, event, params) { - emit$1( - "component:emit" /* COMPONENT_EMIT */, - component.appContext.app, - component, - event, - params - ); -} - -let currentRenderingInstance = null; -let currentScopeId = null; -function setCurrentRenderingInstance(instance) { - const prev = currentRenderingInstance; - currentRenderingInstance = instance; - currentScopeId = instance && instance.type.__scopeId || null; - return prev; -} -function pushScopeId(id) { - currentScopeId = id; -} -function popScopeId() { - currentScopeId = null; -} -const withScopeId = (_id) => withCtx; -function withCtx(fn, ctx = currentRenderingInstance, isNonScopedSlot) { - if (!ctx) return fn; - if (fn._n) { - return fn; - } - const renderFnWithContext = (...args) => { - if (renderFnWithContext._d) { - setBlockTracking(-1); - } - const prevInstance = setCurrentRenderingInstance(ctx); - let res; - try { - res = fn(...args); - } finally { - setCurrentRenderingInstance(prevInstance); - if (renderFnWithContext._d) { - setBlockTracking(1); - } - } - { - devtoolsComponentUpdated(ctx); - } - return res; - }; - renderFnWithContext._n = true; - renderFnWithContext._c = true; - renderFnWithContext._d = true; - return renderFnWithContext; -} - -function validateDirectiveName(name) { - if (isBuiltInDirective(name)) { - warn$1("Do not use built-in directive ids as custom directive id: " + name); - } -} -function withDirectives(vnode, directives) { - if (currentRenderingInstance === null) { - warn$1(`withDirectives can only be used inside render functions.`); - return vnode; - } - const instance = getComponentPublicInstance(currentRenderingInstance); - const bindings = vnode.dirs || (vnode.dirs = []); - for (let i = 0; i < directives.length; i++) { - let [dir, value, arg, modifiers = EMPTY_OBJ] = directives[i]; - if (dir) { - if (isFunction(dir)) { - dir = { - mounted: dir, - updated: dir - }; - } - if (dir.deep) { - traverse(value); - } - bindings.push({ - dir, - instance, - value, - oldValue: void 0, - arg, - modifiers - }); - } - } - return vnode; -} -function invokeDirectiveHook(vnode, prevVNode, instance, name) { - const bindings = vnode.dirs; - const oldBindings = prevVNode && prevVNode.dirs; - for (let i = 0; i < bindings.length; i++) { - const binding = bindings[i]; - if (oldBindings) { - binding.oldValue = oldBindings[i].value; - } - let hook = binding.dir[name]; - if (hook) { - pauseTracking(); - callWithAsyncErrorHandling(hook, instance, 8, [ - vnode.el, - binding, - vnode, - prevVNode - ]); - resetTracking(); - } - } -} - -const TeleportEndKey = Symbol("_vte"); -const isTeleport = (type) => type.__isTeleport; -const isTeleportDisabled = (props) => props && (props.disabled || props.disabled === ""); -const isTeleportDeferred = (props) => props && (props.defer || props.defer === ""); -const isTargetSVG = (target) => typeof SVGElement !== "undefined" && target instanceof SVGElement; -const isTargetMathML = (target) => typeof MathMLElement === "function" && target instanceof MathMLElement; -const resolveTarget = (props, select) => { - const targetSelector = 
props && props.to; - if (isString(targetSelector)) { - if (!select) { - warn$1( - `Current renderer does not support string target for Teleports. (missing querySelector renderer option)` - ); - return null; - } else { - const target = select(targetSelector); - if (!target && !isTeleportDisabled(props)) { - warn$1( - `Failed to locate Teleport target with selector "${targetSelector}". Note the target element must exist before the component is mounted - i.e. the target cannot be rendered by the component itself, and ideally should be outside of the entire Vue component tree.` - ); - } - return target; - } - } else { - if (!targetSelector && !isTeleportDisabled(props)) { - warn$1(`Invalid Teleport target: ${targetSelector}`); - } - return targetSelector; - } -}; -const TeleportImpl = { - name: "Teleport", - __isTeleport: true, - process(n1, n2, container, anchor, parentComponent, parentSuspense, namespace, slotScopeIds, optimized, internals) { - const { - mc: mountChildren, - pc: patchChildren, - pbc: patchBlockChildren, - o: { insert, querySelector, createText, createComment } - } = internals; - const disabled = isTeleportDisabled(n2.props); - let { shapeFlag, children, dynamicChildren } = n2; - if (isHmrUpdating) { - optimized = false; - dynamicChildren = null; - } - if (n1 == null) { - const placeholder = n2.el = createComment("teleport start") ; - const mainAnchor = n2.anchor = createComment("teleport end") ; - insert(placeholder, container, anchor); - insert(mainAnchor, container, anchor); - const mount = (container2, anchor2) => { - if (shapeFlag & 16) { - if (parentComponent && parentComponent.isCE) { - parentComponent.ce._teleportTarget = container2; - } - mountChildren( - children, - container2, - anchor2, - parentComponent, - parentSuspense, - namespace, - slotScopeIds, - optimized - ); - } - }; - const mountToTarget = () => { - const target = n2.target = resolveTarget(n2.props, querySelector); - const targetAnchor = prepareAnchor(target, n2, createText, insert); - if (target) { - if (namespace !== "svg" && isTargetSVG(target)) { - namespace = "svg"; - } else if (namespace !== "mathml" && isTargetMathML(target)) { - namespace = "mathml"; - } - if (!disabled) { - mount(target, targetAnchor); - updateCssVars(n2, false); - } - } else if (!disabled) { - warn$1( - "Invalid Teleport target on mount:", - target, - `(${typeof target})` - ); - } - }; - if (disabled) { - mount(container, mainAnchor); - updateCssVars(n2, true); - } - if (isTeleportDeferred(n2.props)) { - queuePostRenderEffect(mountToTarget, parentSuspense); - } else { - mountToTarget(); - } - } else { - n2.el = n1.el; - n2.targetStart = n1.targetStart; - const mainAnchor = n2.anchor = n1.anchor; - const target = n2.target = n1.target; - const targetAnchor = n2.targetAnchor = n1.targetAnchor; - const wasDisabled = isTeleportDisabled(n1.props); - const currentContainer = wasDisabled ? container : target; - const currentAnchor = wasDisabled ? 
mainAnchor : targetAnchor; - if (namespace === "svg" || isTargetSVG(target)) { - namespace = "svg"; - } else if (namespace === "mathml" || isTargetMathML(target)) { - namespace = "mathml"; - } - if (dynamicChildren) { - patchBlockChildren( - n1.dynamicChildren, - dynamicChildren, - currentContainer, - parentComponent, - parentSuspense, - namespace, - slotScopeIds - ); - traverseStaticChildren(n1, n2, true); - } else if (!optimized) { - patchChildren( - n1, - n2, - currentContainer, - currentAnchor, - parentComponent, - parentSuspense, - namespace, - slotScopeIds, - false - ); - } - if (disabled) { - if (!wasDisabled) { - moveTeleport( - n2, - container, - mainAnchor, - internals, - 1 - ); - } else { - if (n2.props && n1.props && n2.props.to !== n1.props.to) { - n2.props.to = n1.props.to; - } - } - } else { - if ((n2.props && n2.props.to) !== (n1.props && n1.props.to)) { - const nextTarget = n2.target = resolveTarget( - n2.props, - querySelector - ); - if (nextTarget) { - moveTeleport( - n2, - nextTarget, - null, - internals, - 0 - ); - } else { - warn$1( - "Invalid Teleport target on update:", - target, - `(${typeof target})` - ); - } - } else if (wasDisabled) { - moveTeleport( - n2, - target, - targetAnchor, - internals, - 1 - ); - } - } - updateCssVars(n2, disabled); - } - }, - remove(vnode, parentComponent, parentSuspense, { um: unmount, o: { remove: hostRemove } }, doRemove) { - const { - shapeFlag, - children, - anchor, - targetStart, - targetAnchor, - target, - props - } = vnode; - if (target) { - hostRemove(targetStart); - hostRemove(targetAnchor); - } - doRemove && hostRemove(anchor); - if (shapeFlag & 16) { - const shouldRemove = doRemove || !isTeleportDisabled(props); - for (let i = 0; i < children.length; i++) { - const child = children[i]; - unmount( - child, - parentComponent, - parentSuspense, - shouldRemove, - !!child.dynamicChildren - ); - } - } - }, - move: moveTeleport, - hydrate: hydrateTeleport -}; -function moveTeleport(vnode, container, parentAnchor, { o: { insert }, m: move }, moveType = 2) { - if (moveType === 0) { - insert(vnode.targetAnchor, container, parentAnchor); - } - const { el, anchor, shapeFlag, children, props } = vnode; - const isReorder = moveType === 2; - if (isReorder) { - insert(el, container, parentAnchor); - } - if (!isReorder || isTeleportDisabled(props)) { - if (shapeFlag & 16) { - for (let i = 0; i < children.length; i++) { - move( - children[i], - container, - parentAnchor, - 2 - ); - } - } - } - if (isReorder) { - insert(anchor, container, parentAnchor); - } -} -function hydrateTeleport(node, vnode, parentComponent, parentSuspense, slotScopeIds, optimized, { - o: { nextSibling, parentNode, querySelector, insert, createText } -}, hydrateChildren) { - const target = vnode.target = resolveTarget( - vnode.props, - querySelector - ); - if (target) { - const disabled = isTeleportDisabled(vnode.props); - const targetNode = target._lpa || target.firstChild; - if (vnode.shapeFlag & 16) { - if (disabled) { - vnode.anchor = hydrateChildren( - nextSibling(node), - vnode, - parentNode(node), - parentComponent, - parentSuspense, - slotScopeIds, - optimized - ); - vnode.targetStart = targetNode; - vnode.targetAnchor = targetNode && nextSibling(targetNode); - } else { - vnode.anchor = nextSibling(node); - let targetAnchor = targetNode; - while (targetAnchor) { - if (targetAnchor && targetAnchor.nodeType === 8) { - if (targetAnchor.data === "teleport start anchor") { - vnode.targetStart = targetAnchor; - } else if (targetAnchor.data === "teleport anchor") { - 
vnode.targetAnchor = targetAnchor; - target._lpa = vnode.targetAnchor && nextSibling(vnode.targetAnchor); - break; - } - } - targetAnchor = nextSibling(targetAnchor); - } - if (!vnode.targetAnchor) { - prepareAnchor(target, vnode, createText, insert); - } - hydrateChildren( - targetNode && nextSibling(targetNode), - vnode, - target, - parentComponent, - parentSuspense, - slotScopeIds, - optimized - ); - } - } - updateCssVars(vnode, disabled); - } - return vnode.anchor && nextSibling(vnode.anchor); -} -const Teleport = TeleportImpl; -function updateCssVars(vnode, isDisabled) { - const ctx = vnode.ctx; - if (ctx && ctx.ut) { - let node, anchor; - if (isDisabled) { - node = vnode.el; - anchor = vnode.anchor; - } else { - node = vnode.targetStart; - anchor = vnode.targetAnchor; - } - while (node && node !== anchor) { - if (node.nodeType === 1) node.setAttribute("data-v-owner", ctx.uid); - node = node.nextSibling; - } - ctx.ut(); - } -} -function prepareAnchor(target, vnode, createText, insert) { - const targetStart = vnode.targetStart = createText(""); - const targetAnchor = vnode.targetAnchor = createText(""); - targetStart[TeleportEndKey] = targetAnchor; - if (target) { - insert(targetStart, target); - insert(targetAnchor, target); - } - return targetAnchor; -} - -const leaveCbKey = Symbol("_leaveCb"); -const enterCbKey$1 = Symbol("_enterCb"); -function useTransitionState() { - const state = { - isMounted: false, - isLeaving: false, - isUnmounting: false, - leavingVNodes: /* @__PURE__ */ new Map() - }; - onMounted(() => { - state.isMounted = true; - }); - onBeforeUnmount(() => { - state.isUnmounting = true; - }); - return state; -} -const TransitionHookValidator = [Function, Array]; -const BaseTransitionPropsValidators = { - mode: String, - appear: Boolean, - persisted: Boolean, - // enter - onBeforeEnter: TransitionHookValidator, - onEnter: TransitionHookValidator, - onAfterEnter: TransitionHookValidator, - onEnterCancelled: TransitionHookValidator, - // leave - onBeforeLeave: TransitionHookValidator, - onLeave: TransitionHookValidator, - onAfterLeave: TransitionHookValidator, - onLeaveCancelled: TransitionHookValidator, - // appear - onBeforeAppear: TransitionHookValidator, - onAppear: TransitionHookValidator, - onAfterAppear: TransitionHookValidator, - onAppearCancelled: TransitionHookValidator -}; -const recursiveGetSubtree = (instance) => { - const subTree = instance.subTree; - return subTree.component ? 
recursiveGetSubtree(subTree.component) : subTree; -}; -const BaseTransitionImpl = { - name: `BaseTransition`, - props: BaseTransitionPropsValidators, - setup(props, { slots }) { - const instance = getCurrentInstance(); - const state = useTransitionState(); - return () => { - const children = slots.default && getTransitionRawChildren(slots.default(), true); - if (!children || !children.length) { - return; - } - const child = findNonCommentChild(children); - const rawProps = toRaw(props); - const { mode } = rawProps; - if (mode && mode !== "in-out" && mode !== "out-in" && mode !== "default") { - warn$1(`invalid mode: ${mode}`); - } - if (state.isLeaving) { - return emptyPlaceholder(child); - } - const innerChild = getInnerChild$1(child); - if (!innerChild) { - return emptyPlaceholder(child); - } - let enterHooks = resolveTransitionHooks( - innerChild, - rawProps, - state, - instance, - // #11061, ensure enterHooks is fresh after clone - (hooks) => enterHooks = hooks - ); - if (innerChild.type !== Comment) { - setTransitionHooks(innerChild, enterHooks); - } - const oldChild = instance.subTree; - const oldInnerChild = oldChild && getInnerChild$1(oldChild); - if (oldInnerChild && oldInnerChild.type !== Comment && !isSameVNodeType(innerChild, oldInnerChild) && recursiveGetSubtree(instance).type !== Comment) { - const leavingHooks = resolveTransitionHooks( - oldInnerChild, - rawProps, - state, - instance - ); - setTransitionHooks(oldInnerChild, leavingHooks); - if (mode === "out-in" && innerChild.type !== Comment) { - state.isLeaving = true; - leavingHooks.afterLeave = () => { - state.isLeaving = false; - if (!(instance.job.flags & 8)) { - instance.update(); - } - delete leavingHooks.afterLeave; - }; - return emptyPlaceholder(child); - } else if (mode === "in-out" && innerChild.type !== Comment) { - leavingHooks.delayLeave = (el, earlyRemove, delayedLeave) => { - const leavingVNodesCache = getLeavingNodesForType( - state, - oldInnerChild - ); - leavingVNodesCache[String(oldInnerChild.key)] = oldInnerChild; - el[leaveCbKey] = () => { - earlyRemove(); - el[leaveCbKey] = void 0; - delete enterHooks.delayedLeave; - }; - enterHooks.delayedLeave = delayedLeave; - }; - } - } - return child; - }; - } -}; -function findNonCommentChild(children) { - let child = children[0]; - if (children.length > 1) { - let hasFound = false; - for (const c of children) { - if (c.type !== Comment) { - if (hasFound) { - warn$1( - "<transition> can only be used on a single element or component. Use <transition-group> for lists." 
- ); - break; - } - child = c; - hasFound = true; - } - } - } - return child; -} -const BaseTransition = BaseTransitionImpl; -function getLeavingNodesForType(state, vnode) { - const { leavingVNodes } = state; - let leavingVNodesCache = leavingVNodes.get(vnode.type); - if (!leavingVNodesCache) { - leavingVNodesCache = /* @__PURE__ */ Object.create(null); - leavingVNodes.set(vnode.type, leavingVNodesCache); - } - return leavingVNodesCache; -} -function resolveTransitionHooks(vnode, props, state, instance, postClone) { - const { - appear, - mode, - persisted = false, - onBeforeEnter, - onEnter, - onAfterEnter, - onEnterCancelled, - onBeforeLeave, - onLeave, - onAfterLeave, - onLeaveCancelled, - onBeforeAppear, - onAppear, - onAfterAppear, - onAppearCancelled - } = props; - const key = String(vnode.key); - const leavingVNodesCache = getLeavingNodesForType(state, vnode); - const callHook = (hook, args) => { - hook && callWithAsyncErrorHandling( - hook, - instance, - 9, - args - ); - }; - const callAsyncHook = (hook, args) => { - const done = args[1]; - callHook(hook, args); - if (isArray(hook)) { - if (hook.every((hook2) => hook2.length <= 1)) done(); - } else if (hook.length <= 1) { - done(); - } - }; - const hooks = { - mode, - persisted, - beforeEnter(el) { - let hook = onBeforeEnter; - if (!state.isMounted) { - if (appear) { - hook = onBeforeAppear || onBeforeEnter; - } else { - return; - } - } - if (el[leaveCbKey]) { - el[leaveCbKey]( - true - /* cancelled */ - ); - } - const leavingVNode = leavingVNodesCache[key]; - if (leavingVNode && isSameVNodeType(vnode, leavingVNode) && leavingVNode.el[leaveCbKey]) { - leavingVNode.el[leaveCbKey](); - } - callHook(hook, [el]); - }, - enter(el) { - let hook = onEnter; - let afterHook = onAfterEnter; - let cancelHook = onEnterCancelled; - if (!state.isMounted) { - if (appear) { - hook = onAppear || onEnter; - afterHook = onAfterAppear || onAfterEnter; - cancelHook = onAppearCancelled || onEnterCancelled; - } else { - return; - } - } - let called = false; - const done = el[enterCbKey$1] = (cancelled) => { - if (called) return; - called = true; - if (cancelled) { - callHook(cancelHook, [el]); - } else { - callHook(afterHook, [el]); - } - if (hooks.delayedLeave) { - hooks.delayedLeave(); - } - el[enterCbKey$1] = void 0; - }; - if (hook) { - callAsyncHook(hook, [el, done]); - } else { - done(); - } - }, - leave(el, remove) { - const key2 = String(vnode.key); - if (el[enterCbKey$1]) { - el[enterCbKey$1]( - true - /* cancelled */ - ); - } - if (state.isUnmounting) { - return remove(); - } - callHook(onBeforeLeave, [el]); - let called = false; - const done = el[leaveCbKey] = (cancelled) => { - if (called) return; - called = true; - remove(); - if (cancelled) { - callHook(onLeaveCancelled, [el]); - } else { - callHook(onAfterLeave, [el]); - } - el[leaveCbKey] = void 0; - if (leavingVNodesCache[key2] === vnode) { - delete leavingVNodesCache[key2]; - } - }; - leavingVNodesCache[key2] = vnode; - if (onLeave) { - callAsyncHook(onLeave, [el, done]); - } else { - done(); - } - }, - clone(vnode2) { - const hooks2 = resolveTransitionHooks( - vnode2, - props, - state, - instance, - postClone - ); - if (postClone) postClone(hooks2); - return hooks2; - } - }; - return hooks; -} -function emptyPlaceholder(vnode) { - if (isKeepAlive(vnode)) { - vnode = cloneVNode(vnode); - vnode.children = null; - return vnode; - } -} -function getInnerChild$1(vnode) { - if (!isKeepAlive(vnode)) { - if (isTeleport(vnode.type) && vnode.children) { - return 
findNonCommentChild(vnode.children); - } - return vnode; - } - if (vnode.component) { - return vnode.component.subTree; - } - const { shapeFlag, children } = vnode; - if (children) { - if (shapeFlag & 16) { - return children[0]; - } - if (shapeFlag & 32 && isFunction(children.default)) { - return children.default(); - } - } -} -function setTransitionHooks(vnode, hooks) { - if (vnode.shapeFlag & 6 && vnode.component) { - vnode.transition = hooks; - setTransitionHooks(vnode.component.subTree, hooks); - } else if (vnode.shapeFlag & 128) { - vnode.ssContent.transition = hooks.clone(vnode.ssContent); - vnode.ssFallback.transition = hooks.clone(vnode.ssFallback); - } else { - vnode.transition = hooks; - } -} -function getTransitionRawChildren(children, keepComment = false, parentKey) { - let ret = []; - let keyedFragmentCount = 0; - for (let i = 0; i < children.length; i++) { - let child = children[i]; - const key = parentKey == null ? child.key : String(parentKey) + String(child.key != null ? child.key : i); - if (child.type === Fragment) { - if (child.patchFlag & 128) keyedFragmentCount++; - ret = ret.concat( - getTransitionRawChildren(child.children, keepComment, key) - ); - } else if (keepComment || child.type !== Comment) { - ret.push(key != null ? cloneVNode(child, { key }) : child); - } - } - if (keyedFragmentCount > 1) { - for (let i = 0; i < ret.length; i++) { - ret[i].patchFlag = -2; - } - } - return ret; -} - -/*! #__NO_SIDE_EFFECTS__ */ -// @__NO_SIDE_EFFECTS__ -function defineComponent(options, extraOptions) { - return isFunction(options) ? ( - // #8236: extend call and options.name access are considered side-effects - // by Rollup, so we have to wrap it in a pure-annotated IIFE. - /* @__PURE__ */ (() => extend({ name: options.name }, extraOptions, { setup: options }))() - ) : options; -} - -function useId() { - const i = getCurrentInstance(); - if (i) { - return (i.appContext.config.idPrefix || "v") + "-" + i.ids[0] + i.ids[1]++; - } else { - warn$1( - `useId() is called when there is no active component instance to be associated with.` - ); - } - return ""; -} -function markAsyncBoundary(instance) { - instance.ids = [instance.ids[0] + instance.ids[2]++ + "-", 0, 0]; -} - -const knownTemplateRefs = /* @__PURE__ */ new WeakSet(); -function useTemplateRef(key) { - const i = getCurrentInstance(); - const r = shallowRef(null); - if (i) { - const refs = i.refs === EMPTY_OBJ ? i.refs = {} : i.refs; - let desc; - if ((desc = Object.getOwnPropertyDescriptor(refs, key)) && !desc.configurable) { - warn$1(`useTemplateRef('${key}') already exists.`); - } else { - Object.defineProperty(refs, key, { - enumerable: true, - get: () => r.value, - set: (val) => r.value = val - }); - } - } else { - warn$1( - `useTemplateRef() is called when there is no active component instance to be associated with.` - ); - } - const ret = readonly(r) ; - { - knownTemplateRefs.add(ret); - } - return ret; -} - -function setRef(rawRef, oldRawRef, parentSuspense, vnode, isUnmount = false) { - if (isArray(rawRef)) { - rawRef.forEach( - (r, i) => setRef( - r, - oldRawRef && (isArray(oldRawRef) ? oldRawRef[i] : oldRawRef), - parentSuspense, - vnode, - isUnmount - ) - ); - return; - } - if (isAsyncWrapper(vnode) && !isUnmount) { - return; - } - const refValue = vnode.shapeFlag & 4 ? getComponentPublicInstance(vnode.component) : vnode.el; - const value = isUnmount ? null : refValue; - const { i: owner, r: ref } = rawRef; - if (!owner) { - warn$1( - `Missing ref owner context. ref cannot be used on hoisted vnodes. 
A vnode with ref must be created inside the render function.` - ); - return; - } - const oldRef = oldRawRef && oldRawRef.r; - const refs = owner.refs === EMPTY_OBJ ? owner.refs = {} : owner.refs; - const setupState = owner.setupState; - const rawSetupState = toRaw(setupState); - const canSetSetupRef = setupState === EMPTY_OBJ ? () => false : (key) => { - { - if (hasOwn(rawSetupState, key) && !isRef(rawSetupState[key])) { - warn$1( - `Template ref "${key}" used on a non-ref value. It will not work in the production build.` - ); - } - if (knownTemplateRefs.has(rawSetupState[key])) { - return false; - } - } - return hasOwn(rawSetupState, key); - }; - if (oldRef != null && oldRef !== ref) { - if (isString(oldRef)) { - refs[oldRef] = null; - if (canSetSetupRef(oldRef)) { - setupState[oldRef] = null; - } - } else if (isRef(oldRef)) { - oldRef.value = null; - } - } - if (isFunction(ref)) { - callWithErrorHandling(ref, owner, 12, [value, refs]); - } else { - const _isString = isString(ref); - const _isRef = isRef(ref); - if (_isString || _isRef) { - const doSet = () => { - if (rawRef.f) { - const existing = _isString ? canSetSetupRef(ref) ? setupState[ref] : refs[ref] : ref.value; - if (isUnmount) { - isArray(existing) && remove(existing, refValue); - } else { - if (!isArray(existing)) { - if (_isString) { - refs[ref] = [refValue]; - if (canSetSetupRef(ref)) { - setupState[ref] = refs[ref]; - } - } else { - ref.value = [refValue]; - if (rawRef.k) refs[rawRef.k] = ref.value; - } - } else if (!existing.includes(refValue)) { - existing.push(refValue); - } - } - } else if (_isString) { - refs[ref] = value; - if (canSetSetupRef(ref)) { - setupState[ref] = value; - } - } else if (_isRef) { - ref.value = value; - if (rawRef.k) refs[rawRef.k] = value; - } else { - warn$1("Invalid template ref type:", ref, `(${typeof ref})`); - } - }; - if (value) { - doSet.id = -1; - queuePostRenderEffect(doSet, parentSuspense); - } else { - doSet(); - } - } else { - warn$1("Invalid template ref type:", ref, `(${typeof ref})`); - } - } -} - -let hasLoggedMismatchError = false; -const logMismatchError = () => { - if (hasLoggedMismatchError) { - return; - } - console.error("Hydration completed but contains mismatches."); - hasLoggedMismatchError = true; -}; -const isSVGContainer = (container) => container.namespaceURI.includes("svg") && container.tagName !== "foreignObject"; -const isMathMLContainer = (container) => container.namespaceURI.includes("MathML"); -const getContainerType = (container) => { - if (container.nodeType !== 1) return void 0; - if (isSVGContainer(container)) return "svg"; - if (isMathMLContainer(container)) return "mathml"; - return void 0; -}; -const isComment = (node) => node.nodeType === 8; -function createHydrationFunctions(rendererInternals) { - const { - mt: mountComponent, - p: patch, - o: { - patchProp, - createText, - nextSibling, - parentNode, - remove, - insert, - createComment - } - } = rendererInternals; - const hydrate = (vnode, container) => { - if (!container.hasChildNodes()) { - warn$1( - `Attempting to hydrate existing markup but container is empty. 
Performing full mount instead.` - ); - patch(null, vnode, container); - flushPostFlushCbs(); - container._vnode = vnode; - return; - } - hydrateNode(container.firstChild, vnode, null, null, null); - flushPostFlushCbs(); - container._vnode = vnode; - }; - const hydrateNode = (node, vnode, parentComponent, parentSuspense, slotScopeIds, optimized = false) => { - optimized = optimized || !!vnode.dynamicChildren; - const isFragmentStart = isComment(node) && node.data === "["; - const onMismatch = () => handleMismatch( - node, - vnode, - parentComponent, - parentSuspense, - slotScopeIds, - isFragmentStart - ); - const { type, ref, shapeFlag, patchFlag } = vnode; - let domType = node.nodeType; - vnode.el = node; - { - def(node, "__vnode", vnode, true); - def(node, "__vueParentComponent", parentComponent, true); - } - if (patchFlag === -2) { - optimized = false; - vnode.dynamicChildren = null; - } - let nextNode = null; - switch (type) { - case Text: - if (domType !== 3) { - if (vnode.children === "") { - insert(vnode.el = createText(""), parentNode(node), node); - nextNode = node; - } else { - nextNode = onMismatch(); - } - } else { - if (node.data !== vnode.children) { - warn$1( - `Hydration text mismatch in`, - node.parentNode, - ` - - rendered on server: ${JSON.stringify( - node.data - )} - - expected on client: ${JSON.stringify(vnode.children)}` - ); - logMismatchError(); - node.data = vnode.children; - } - nextNode = nextSibling(node); - } - break; - case Comment: - if (isTemplateNode(node)) { - nextNode = nextSibling(node); - replaceNode( - vnode.el = node.content.firstChild, - node, - parentComponent - ); - } else if (domType !== 8 || isFragmentStart) { - nextNode = onMismatch(); - } else { - nextNode = nextSibling(node); - } - break; - case Static: - if (isFragmentStart) { - node = nextSibling(node); - domType = node.nodeType; - } - if (domType === 1 || domType === 3) { - nextNode = node; - const needToAdoptContent = !vnode.children.length; - for (let i = 0; i < vnode.staticCount; i++) { - if (needToAdoptContent) - vnode.children += nextNode.nodeType === 1 ? nextNode.outerHTML : nextNode.data; - if (i === vnode.staticCount - 1) { - vnode.anchor = nextNode; - } - nextNode = nextSibling(nextNode); - } - return isFragmentStart ? nextSibling(nextNode) : nextNode; - } else { - onMismatch(); - } - break; - case Fragment: - if (!isFragmentStart) { - nextNode = onMismatch(); - } else { - nextNode = hydrateFragment( - node, - vnode, - parentComponent, - parentSuspense, - slotScopeIds, - optimized - ); - } - break; - default: - if (shapeFlag & 1) { - if ((domType !== 1 || vnode.type.toLowerCase() !== node.tagName.toLowerCase()) && !isTemplateNode(node)) { - nextNode = onMismatch(); - } else { - nextNode = hydrateElement( - node, - vnode, - parentComponent, - parentSuspense, - slotScopeIds, - optimized - ); - } - } else if (shapeFlag & 6) { - vnode.slotScopeIds = slotScopeIds; - const container = parentNode(node); - if (isFragmentStart) { - nextNode = locateClosingAnchor(node); - } else if (isComment(node) && node.data === "teleport start") { - nextNode = locateClosingAnchor(node, node.data, "teleport end"); - } else { - nextNode = nextSibling(node); - } - mountComponent( - vnode, - container, - null, - parentComponent, - parentSuspense, - getContainerType(container), - optimized - ); - if (isAsyncWrapper(vnode)) { - let subTree; - if (isFragmentStart) { - subTree = createVNode(Fragment); - subTree.anchor = nextNode ? 
nextNode.previousSibling : container.lastChild; - } else { - subTree = node.nodeType === 3 ? createTextVNode("") : createVNode("div"); - } - subTree.el = node; - vnode.component.subTree = subTree; - } - } else if (shapeFlag & 64) { - if (domType !== 8) { - nextNode = onMismatch(); - } else { - nextNode = vnode.type.hydrate( - node, - vnode, - parentComponent, - parentSuspense, - slotScopeIds, - optimized, - rendererInternals, - hydrateChildren - ); - } - } else if (shapeFlag & 128) { - nextNode = vnode.type.hydrate( - node, - vnode, - parentComponent, - parentSuspense, - getContainerType(parentNode(node)), - slotScopeIds, - optimized, - rendererInternals, - hydrateNode - ); - } else { - warn$1("Invalid HostVNode type:", type, `(${typeof type})`); - } - } - if (ref != null) { - setRef(ref, null, parentSuspense, vnode); - } - return nextNode; - }; - const hydrateElement = (el, vnode, parentComponent, parentSuspense, slotScopeIds, optimized) => { - optimized = optimized || !!vnode.dynamicChildren; - const { type, props, patchFlag, shapeFlag, dirs, transition } = vnode; - const forcePatch = type === "input" || type === "option"; - { - if (dirs) { - invokeDirectiveHook(vnode, null, parentComponent, "created"); - } - let needCallTransitionHooks = false; - if (isTemplateNode(el)) { - needCallTransitionHooks = needTransition( - null, - // no need check parentSuspense in hydration - transition - ) && parentComponent && parentComponent.vnode.props && parentComponent.vnode.props.appear; - const content = el.content.firstChild; - if (needCallTransitionHooks) { - transition.beforeEnter(content); - } - replaceNode(content, el, parentComponent); - vnode.el = el = content; - } - if (shapeFlag & 16 && // skip if element has innerHTML / textContent - !(props && (props.innerHTML || props.textContent))) { - let next = hydrateChildren( - el.firstChild, - vnode, - el, - parentComponent, - parentSuspense, - slotScopeIds, - optimized - ); - let hasWarned = false; - while (next) { - if (!isMismatchAllowed(el, 1 /* CHILDREN */)) { - if (!hasWarned) { - warn$1( - `Hydration children mismatch on`, - el, - ` -Server rendered element contains more child nodes than client vdom.` - ); - hasWarned = true; - } - logMismatchError(); - } - const cur = next; - next = next.nextSibling; - remove(cur); - } - } else if (shapeFlag & 8) { - let clientText = vnode.children; - if (clientText[0] === "\n" && (el.tagName === "PRE" || el.tagName === "TEXTAREA")) { - clientText = clientText.slice(1); - } - if (el.textContent !== clientText) { - if (!isMismatchAllowed(el, 0 /* TEXT */)) { - warn$1( - `Hydration text content mismatch on`, - el, - ` - - rendered on server: ${el.textContent} - - expected on client: ${vnode.children}` - ); - logMismatchError(); - } - el.textContent = vnode.children; - } - } - if (props) { - { - const isCustomElement = el.tagName.includes("-"); - for (const key in props) { - if (// #11189 skip if this node has directives that have created hooks - // as it could have mutated the DOM in any possible way - !(dirs && dirs.some((d) => d.dir.created)) && propHasMismatch(el, key, props[key], vnode, parentComponent)) { - logMismatchError(); - } - if (forcePatch && (key.endsWith("value") || key === "indeterminate") || isOn(key) && !isReservedProp(key) || // force hydrate v-bind with .prop modifiers - key[0] === "." 
|| isCustomElement) { - patchProp(el, key, null, props[key], void 0, parentComponent); - } - } - } - } - let vnodeHooks; - if (vnodeHooks = props && props.onVnodeBeforeMount) { - invokeVNodeHook(vnodeHooks, parentComponent, vnode); - } - if (dirs) { - invokeDirectiveHook(vnode, null, parentComponent, "beforeMount"); - } - if ((vnodeHooks = props && props.onVnodeMounted) || dirs || needCallTransitionHooks) { - queueEffectWithSuspense(() => { - vnodeHooks && invokeVNodeHook(vnodeHooks, parentComponent, vnode); - needCallTransitionHooks && transition.enter(el); - dirs && invokeDirectiveHook(vnode, null, parentComponent, "mounted"); - }, parentSuspense); - } - } - return el.nextSibling; - }; - const hydrateChildren = (node, parentVNode, container, parentComponent, parentSuspense, slotScopeIds, optimized) => { - optimized = optimized || !!parentVNode.dynamicChildren; - const children = parentVNode.children; - const l = children.length; - let hasWarned = false; - for (let i = 0; i < l; i++) { - const vnode = optimized ? children[i] : children[i] = normalizeVNode(children[i]); - const isText = vnode.type === Text; - if (node) { - if (isText && !optimized) { - if (i + 1 < l && normalizeVNode(children[i + 1]).type === Text) { - insert( - createText( - node.data.slice(vnode.children.length) - ), - container, - nextSibling(node) - ); - node.data = vnode.children; - } - } - node = hydrateNode( - node, - vnode, - parentComponent, - parentSuspense, - slotScopeIds, - optimized - ); - } else if (isText && !vnode.children) { - insert(vnode.el = createText(""), container); - } else { - if (!isMismatchAllowed(container, 1 /* CHILDREN */)) { - if (!hasWarned) { - warn$1( - `Hydration children mismatch on`, - container, - ` -Server rendered element contains fewer child nodes than client vdom.` - ); - hasWarned = true; - } - logMismatchError(); - } - patch( - null, - vnode, - container, - null, - parentComponent, - parentSuspense, - getContainerType(container), - slotScopeIds - ); - } - } - return node; - }; - const hydrateFragment = (node, vnode, parentComponent, parentSuspense, slotScopeIds, optimized) => { - const { slotScopeIds: fragmentSlotScopeIds } = vnode; - if (fragmentSlotScopeIds) { - slotScopeIds = slotScopeIds ? slotScopeIds.concat(fragmentSlotScopeIds) : fragmentSlotScopeIds; - } - const container = parentNode(node); - const next = hydrateChildren( - nextSibling(node), - vnode, - container, - parentComponent, - parentSuspense, - slotScopeIds, - optimized - ); - if (next && isComment(next) && next.data === "]") { - return nextSibling(vnode.anchor = next); - } else { - logMismatchError(); - insert(vnode.anchor = createComment(`]`), container, next); - return next; - } - }; - const handleMismatch = (node, vnode, parentComponent, parentSuspense, slotScopeIds, isFragment) => { - if (!isMismatchAllowed(node.parentElement, 1 /* CHILDREN */)) { - warn$1( - `Hydration node mismatch: -- rendered on server:`, - node, - node.nodeType === 3 ? `(text)` : isComment(node) && node.data === "[" ? 
`(start of fragment)` : ``, - ` -- expected on client:`, - vnode.type - ); - logMismatchError(); - } - vnode.el = null; - if (isFragment) { - const end = locateClosingAnchor(node); - while (true) { - const next2 = nextSibling(node); - if (next2 && next2 !== end) { - remove(next2); - } else { - break; - } - } - } - const next = nextSibling(node); - const container = parentNode(node); - remove(node); - patch( - null, - vnode, - container, - next, - parentComponent, - parentSuspense, - getContainerType(container), - slotScopeIds - ); - return next; - }; - const locateClosingAnchor = (node, open = "[", close = "]") => { - let match = 0; - while (node) { - node = nextSibling(node); - if (node && isComment(node)) { - if (node.data === open) match++; - if (node.data === close) { - if (match === 0) { - return nextSibling(node); - } else { - match--; - } - } - } - } - return node; - }; - const replaceNode = (newNode, oldNode, parentComponent) => { - const parentNode2 = oldNode.parentNode; - if (parentNode2) { - parentNode2.replaceChild(newNode, oldNode); - } - let parent = parentComponent; - while (parent) { - if (parent.vnode.el === oldNode) { - parent.vnode.el = parent.subTree.el = newNode; - } - parent = parent.parent; - } - }; - const isTemplateNode = (node) => { - return node.nodeType === 1 && node.tagName === "TEMPLATE"; - }; - return [hydrate, hydrateNode]; -} -function propHasMismatch(el, key, clientValue, vnode, instance) { - let mismatchType; - let mismatchKey; - let actual; - let expected; - if (key === "class") { - actual = el.getAttribute("class"); - expected = normalizeClass(clientValue); - if (!isSetEqual(toClassSet(actual || ""), toClassSet(expected))) { - mismatchType = 2 /* CLASS */; - mismatchKey = `class`; - } - } else if (key === "style") { - actual = el.getAttribute("style") || ""; - expected = isString(clientValue) ? clientValue : stringifyStyle(normalizeStyle(clientValue)); - const actualMap = toStyleMap(actual); - const expectedMap = toStyleMap(expected); - if (vnode.dirs) { - for (const { dir, value } of vnode.dirs) { - if (dir.name === "show" && !value) { - expectedMap.set("display", "none"); - } - } - } - if (instance) { - resolveCssVars(instance, vnode, expectedMap); - } - if (!isMapEqual(actualMap, expectedMap)) { - mismatchType = 3 /* STYLE */; - mismatchKey = "style"; - } - } else if (el instanceof SVGElement && isKnownSvgAttr(key) || el instanceof HTMLElement && (isBooleanAttr(key) || isKnownHtmlAttr(key))) { - if (isBooleanAttr(key)) { - actual = el.hasAttribute(key); - expected = includeBooleanAttr(clientValue); - } else if (clientValue == null) { - actual = el.hasAttribute(key); - expected = false; - } else { - if (el.hasAttribute(key)) { - actual = el.getAttribute(key); - } else if (key === "value" && el.tagName === "TEXTAREA") { - actual = el.value; - } else { - actual = false; - } - expected = isRenderableAttrValue(clientValue) ? String(clientValue) : false; - } - if (actual !== expected) { - mismatchType = 4 /* ATTRIBUTE */; - mismatchKey = key; - } - } - if (mismatchType != null && !isMismatchAllowed(el, mismatchType)) { - const format = (v) => v === false ? `(not rendered)` : `${mismatchKey}="${v}"`; - const preSegment = `Hydration ${MismatchTypeString[mismatchType]} mismatch on`; - const postSegment = ` - - rendered on server: ${format(actual)} - - expected on client: ${format(expected)} - Note: this mismatch is check-only. The DOM will not be rectified in production due to performance overhead. 
- You should fix the source of the mismatch.`; - { - warn$1(preSegment, el, postSegment); - } - return true; - } - return false; -} -function toClassSet(str) { - return new Set(str.trim().split(/\s+/)); -} -function isSetEqual(a, b) { - if (a.size !== b.size) { - return false; - } - for (const s of a) { - if (!b.has(s)) { - return false; - } - } - return true; -} -function toStyleMap(str) { - const styleMap = /* @__PURE__ */ new Map(); - for (const item of str.split(";")) { - let [key, value] = item.split(":"); - key = key.trim(); - value = value && value.trim(); - if (key && value) { - styleMap.set(key, value); - } - } - return styleMap; -} -function isMapEqual(a, b) { - if (a.size !== b.size) { - return false; - } - for (const [key, value] of a) { - if (value !== b.get(key)) { - return false; - } - } - return true; -} -function resolveCssVars(instance, vnode, expectedMap) { - const root = instance.subTree; - if (instance.getCssVars && (vnode === root || root && root.type === Fragment && root.children.includes(vnode))) { - const cssVars = instance.getCssVars(); - for (const key in cssVars) { - expectedMap.set( - `--${getEscapedCssVarName(key)}`, - String(cssVars[key]) - ); - } - } - if (vnode === root && instance.parent) { - resolveCssVars(instance.parent, instance.vnode, expectedMap); - } -} -const allowMismatchAttr = "data-allow-mismatch"; -const MismatchTypeString = { - [0 /* TEXT */]: "text", - [1 /* CHILDREN */]: "children", - [2 /* CLASS */]: "class", - [3 /* STYLE */]: "style", - [4 /* ATTRIBUTE */]: "attribute" -}; -function isMismatchAllowed(el, allowedType) { - if (allowedType === 0 /* TEXT */ || allowedType === 1 /* CHILDREN */) { - while (el && !el.hasAttribute(allowMismatchAttr)) { - el = el.parentElement; - } - } - const allowedAttr = el && el.getAttribute(allowMismatchAttr); - if (allowedAttr == null) { - return false; - } else if (allowedAttr === "") { - return true; - } else { - const list = allowedAttr.split(","); - if (allowedType === 0 /* TEXT */ && list.includes("children")) { - return true; - } - return allowedAttr.split(",").includes(MismatchTypeString[allowedType]); - } -} - -const requestIdleCallback = getGlobalThis().requestIdleCallback || ((cb) => setTimeout(cb, 1)); -const cancelIdleCallback = getGlobalThis().cancelIdleCallback || ((id) => clearTimeout(id)); -const hydrateOnIdle = (timeout = 1e4) => (hydrate) => { - const id = requestIdleCallback(hydrate, { timeout }); - return () => cancelIdleCallback(id); -}; -function elementIsVisibleInViewport(el) { - const { top, left, bottom, right } = el.getBoundingClientRect(); - const { innerHeight, innerWidth } = window; - return (top > 0 && top < innerHeight || bottom > 0 && bottom < innerHeight) && (left > 0 && left < innerWidth || right > 0 && right < innerWidth); -} -const hydrateOnVisible = (opts) => (hydrate, forEach) => { - const ob = new IntersectionObserver((entries) => { - for (const e of entries) { - if (!e.isIntersecting) continue; - ob.disconnect(); - hydrate(); - break; - } - }, opts); - forEach((el) => { - if (!(el instanceof Element)) return; - if (elementIsVisibleInViewport(el)) { - hydrate(); - ob.disconnect(); - return false; - } - ob.observe(el); - }); - return () => ob.disconnect(); -}; -const hydrateOnMediaQuery = (query) => (hydrate) => { - if (query) { - const mql = matchMedia(query); - if (mql.matches) { - hydrate(); - } else { - mql.addEventListener("change", hydrate, { once: true }); - return () => mql.removeEventListener("change", hydrate); - } - } -}; -const hydrateOnInteraction = 
(interactions = []) => (hydrate, forEach) => { - if (isString(interactions)) interactions = [interactions]; - let hasHydrated = false; - const doHydrate = (e) => { - if (!hasHydrated) { - hasHydrated = true; - teardown(); - hydrate(); - e.target.dispatchEvent(new e.constructor(e.type, e)); - } - }; - const teardown = () => { - forEach((el) => { - for (const i of interactions) { - el.removeEventListener(i, doHydrate); - } - }); - }; - forEach((el) => { - for (const i of interactions) { - el.addEventListener(i, doHydrate, { once: true }); - } - }); - return teardown; -}; -function forEachElement(node, cb) { - if (isComment(node) && node.data === "[") { - let depth = 1; - let next = node.nextSibling; - while (next) { - if (next.nodeType === 1) { - const result = cb(next); - if (result === false) { - break; - } - } else if (isComment(next)) { - if (next.data === "]") { - if (--depth === 0) break; - } else if (next.data === "[") { - depth++; - } - } - next = next.nextSibling; - } - } else { - cb(node); - } -} - -const isAsyncWrapper = (i) => !!i.type.__asyncLoader; -/*! #__NO_SIDE_EFFECTS__ */ -// @__NO_SIDE_EFFECTS__ -function defineAsyncComponent(source) { - if (isFunction(source)) { - source = { loader: source }; - } - const { - loader, - loadingComponent, - errorComponent, - delay = 200, - hydrate: hydrateStrategy, - timeout, - // undefined = never times out - suspensible = true, - onError: userOnError - } = source; - let pendingRequest = null; - let resolvedComp; - let retries = 0; - const retry = () => { - retries++; - pendingRequest = null; - return load(); - }; - const load = () => { - let thisRequest; - return pendingRequest || (thisRequest = pendingRequest = loader().catch((err) => { - err = err instanceof Error ? err : new Error(String(err)); - if (userOnError) { - return new Promise((resolve, reject) => { - const userRetry = () => resolve(retry()); - const userFail = () => reject(err); - userOnError(err, userRetry, userFail, retries + 1); - }); - } else { - throw err; - } - }).then((comp) => { - if (thisRequest !== pendingRequest && pendingRequest) { - return pendingRequest; - } - if (!comp) { - warn$1( - `Async component loader resolved to undefined. If you are using retry(), make sure to return its return value.` - ); - } - if (comp && (comp.__esModule || comp[Symbol.toStringTag] === "Module")) { - comp = comp.default; - } - if (comp && !isObject(comp) && !isFunction(comp)) { - throw new Error(`Invalid async component load result: ${comp}`); - } - resolvedComp = comp; - return comp; - })); - }; - return defineComponent({ - name: "AsyncComponentWrapper", - __asyncLoader: load, - __asyncHydrate(el, instance, hydrate) { - const doHydrate = hydrateStrategy ? 
() => { - const teardown = hydrateStrategy( - hydrate, - (cb) => forEachElement(el, cb) - ); - if (teardown) { - (instance.bum || (instance.bum = [])).push(teardown); - } - } : hydrate; - if (resolvedComp) { - doHydrate(); - } else { - load().then(() => !instance.isUnmounted && doHydrate()); - } - }, - get __asyncResolved() { - return resolvedComp; - }, - setup() { - const instance = currentInstance; - markAsyncBoundary(instance); - if (resolvedComp) { - return () => createInnerComp(resolvedComp, instance); - } - const onError = (err) => { - pendingRequest = null; - handleError( - err, - instance, - 13, - !errorComponent - ); - }; - if (suspensible && instance.suspense || isInSSRComponentSetup) { - return load().then((comp) => { - return () => createInnerComp(comp, instance); - }).catch((err) => { - onError(err); - return () => errorComponent ? createVNode(errorComponent, { - error: err - }) : null; - }); - } - const loaded = ref(false); - const error = ref(); - const delayed = ref(!!delay); - if (delay) { - setTimeout(() => { - delayed.value = false; - }, delay); - } - if (timeout != null) { - setTimeout(() => { - if (!loaded.value && !error.value) { - const err = new Error( - `Async component timed out after ${timeout}ms.` - ); - onError(err); - error.value = err; - } - }, timeout); - } - load().then(() => { - loaded.value = true; - if (instance.parent && isKeepAlive(instance.parent.vnode)) { - instance.parent.update(); - } - }).catch((err) => { - onError(err); - error.value = err; - }); - return () => { - if (loaded.value && resolvedComp) { - return createInnerComp(resolvedComp, instance); - } else if (error.value && errorComponent) { - return createVNode(errorComponent, { - error: error.value - }); - } else if (loadingComponent && !delayed.value) { - return createVNode(loadingComponent); - } - }; - } - }); -} -function createInnerComp(comp, parent) { - const { ref: ref2, props, children, ce } = parent.vnode; - const vnode = createVNode(comp, props, children); - vnode.ref = ref2; - vnode.ce = ce; - delete parent.vnode.ce; - return vnode; -} - -const isKeepAlive = (vnode) => vnode.type.__isKeepAlive; -const KeepAliveImpl = { - name: `KeepAlive`, - // Marker for special handling inside the renderer. We are not using a === - // check directly on KeepAlive in the renderer, because importing it directly - // would prevent it from being tree-shaken. - __isKeepAlive: true, - props: { - include: [String, RegExp, Array], - exclude: [String, RegExp, Array], - max: [String, Number] - }, - setup(props, { slots }) { - const instance = getCurrentInstance(); - const sharedContext = instance.ctx; - if (!sharedContext.renderer) { - return () => { - const children = slots.default && slots.default(); - return children && children.length === 1 ? 
children[0] : children; - }; - } - const cache = /* @__PURE__ */ new Map(); - const keys = /* @__PURE__ */ new Set(); - let current = null; - { - instance.__v_cache = cache; - } - const parentSuspense = instance.suspense; - const { - renderer: { - p: patch, - m: move, - um: _unmount, - o: { createElement } - } - } = sharedContext; - const storageContainer = createElement("div"); - sharedContext.activate = (vnode, container, anchor, namespace, optimized) => { - const instance2 = vnode.component; - move(vnode, container, anchor, 0, parentSuspense); - patch( - instance2.vnode, - vnode, - container, - anchor, - instance2, - parentSuspense, - namespace, - vnode.slotScopeIds, - optimized - ); - queuePostRenderEffect(() => { - instance2.isDeactivated = false; - if (instance2.a) { - invokeArrayFns(instance2.a); - } - const vnodeHook = vnode.props && vnode.props.onVnodeMounted; - if (vnodeHook) { - invokeVNodeHook(vnodeHook, instance2.parent, vnode); - } - }, parentSuspense); - { - devtoolsComponentAdded(instance2); - } - }; - sharedContext.deactivate = (vnode) => { - const instance2 = vnode.component; - invalidateMount(instance2.m); - invalidateMount(instance2.a); - move(vnode, storageContainer, null, 1, parentSuspense); - queuePostRenderEffect(() => { - if (instance2.da) { - invokeArrayFns(instance2.da); - } - const vnodeHook = vnode.props && vnode.props.onVnodeUnmounted; - if (vnodeHook) { - invokeVNodeHook(vnodeHook, instance2.parent, vnode); - } - instance2.isDeactivated = true; - }, parentSuspense); - { - devtoolsComponentAdded(instance2); - } - }; - function unmount(vnode) { - resetShapeFlag(vnode); - _unmount(vnode, instance, parentSuspense, true); - } - function pruneCache(filter) { - cache.forEach((vnode, key) => { - const name = getComponentName(vnode.type); - if (name && !filter(name)) { - pruneCacheEntry(key); - } - }); - } - function pruneCacheEntry(key) { - const cached = cache.get(key); - if (cached && (!current || !isSameVNodeType(cached, current))) { - unmount(cached); - } else if (current) { - resetShapeFlag(current); - } - cache.delete(key); - keys.delete(key); - } - watch( - () => [props.include, props.exclude], - ([include, exclude]) => { - include && pruneCache((name) => matches(include, name)); - exclude && pruneCache((name) => !matches(exclude, name)); - }, - // prune post-render after `current` has been updated - { flush: "post", deep: true } - ); - let pendingCacheKey = null; - const cacheSubtree = () => { - if (pendingCacheKey != null) { - if (isSuspense(instance.subTree.type)) { - queuePostRenderEffect(() => { - cache.set(pendingCacheKey, getInnerChild(instance.subTree)); - }, instance.subTree.suspense); - } else { - cache.set(pendingCacheKey, getInnerChild(instance.subTree)); - } - } - }; - onMounted(cacheSubtree); - onUpdated(cacheSubtree); - onBeforeUnmount(() => { - cache.forEach((cached) => { - const { subTree, suspense } = instance; - const vnode = getInnerChild(subTree); - if (cached.type === vnode.type && cached.key === vnode.key) { - resetShapeFlag(vnode); - const da = vnode.component.da; - da && queuePostRenderEffect(da, suspense); - return; - } - unmount(cached); - }); - }); - return () => { - pendingCacheKey = null; - if (!slots.default) { - return current = null; - } - const children = slots.default(); - const rawVNode = children[0]; - if (children.length > 1) { - { - warn$1(`KeepAlive should contain exactly one component child.`); - } - current = null; - return children; - } else if (!isVNode(rawVNode) || !(rawVNode.shapeFlag & 4) && !(rawVNode.shapeFlag & 
128)) { - current = null; - return rawVNode; - } - let vnode = getInnerChild(rawVNode); - if (vnode.type === Comment) { - current = null; - return vnode; - } - const comp = vnode.type; - const name = getComponentName( - isAsyncWrapper(vnode) ? vnode.type.__asyncResolved || {} : comp - ); - const { include, exclude, max } = props; - if (include && (!name || !matches(include, name)) || exclude && name && matches(exclude, name)) { - vnode.shapeFlag &= ~256; - current = vnode; - return rawVNode; - } - const key = vnode.key == null ? comp : vnode.key; - const cachedVNode = cache.get(key); - if (vnode.el) { - vnode = cloneVNode(vnode); - if (rawVNode.shapeFlag & 128) { - rawVNode.ssContent = vnode; - } - } - pendingCacheKey = key; - if (cachedVNode) { - vnode.el = cachedVNode.el; - vnode.component = cachedVNode.component; - if (vnode.transition) { - setTransitionHooks(vnode, vnode.transition); - } - vnode.shapeFlag |= 512; - keys.delete(key); - keys.add(key); - } else { - keys.add(key); - if (max && keys.size > parseInt(max, 10)) { - pruneCacheEntry(keys.values().next().value); - } - } - vnode.shapeFlag |= 256; - current = vnode; - return isSuspense(rawVNode.type) ? rawVNode : vnode; - }; - } -}; -const KeepAlive = KeepAliveImpl; -function matches(pattern, name) { - if (isArray(pattern)) { - return pattern.some((p) => matches(p, name)); - } else if (isString(pattern)) { - return pattern.split(",").includes(name); - } else if (isRegExp(pattern)) { - pattern.lastIndex = 0; - return pattern.test(name); - } - return false; -} -function onActivated(hook, target) { - registerKeepAliveHook(hook, "a", target); -} -function onDeactivated(hook, target) { - registerKeepAliveHook(hook, "da", target); -} -function registerKeepAliveHook(hook, type, target = currentInstance) { - const wrappedHook = hook.__wdc || (hook.__wdc = () => { - let current = target; - while (current) { - if (current.isDeactivated) { - return; - } - current = current.parent; - } - return hook(); - }); - injectHook(type, wrappedHook, target); - if (target) { - let current = target.parent; - while (current && current.parent) { - if (isKeepAlive(current.parent.vnode)) { - injectToKeepAliveRoot(wrappedHook, type, target, current); - } - current = current.parent; - } - } -} -function injectToKeepAliveRoot(hook, type, target, keepAliveRoot) { - const injected = injectHook( - type, - hook, - keepAliveRoot, - true - /* prepend */ - ); - onUnmounted(() => { - remove(keepAliveRoot[type], injected); - }, target); -} -function resetShapeFlag(vnode) { - vnode.shapeFlag &= ~256; - vnode.shapeFlag &= ~512; -} -function getInnerChild(vnode) { - return vnode.shapeFlag & 128 ? vnode.ssContent : vnode; -} - -function injectHook(type, hook, target = currentInstance, prepend = false) { - if (target) { - const hooks = target[type] || (target[type] = []); - const wrappedHook = hook.__weh || (hook.__weh = (...args) => { - pauseTracking(); - const reset = setCurrentInstance(target); - const res = callWithAsyncErrorHandling(hook, target, type, args); - reset(); - resetTracking(); - return res; - }); - if (prepend) { - hooks.unshift(wrappedHook); - } else { - hooks.push(wrappedHook); - } - return wrappedHook; - } else { - const apiName = toHandlerKey(ErrorTypeStrings$1[type].replace(/ hook$/, "")); - warn$1( - `${apiName} is called when there is no active component instance to be associated with. 
Lifecycle injection APIs can only be used during execution of setup().` + (` If you are using async setup(), make sure to register lifecycle hooks before the first await statement.` ) - ); - } -} -const createHook = (lifecycle) => (hook, target = currentInstance) => { - if (!isInSSRComponentSetup || lifecycle === "sp") { - injectHook(lifecycle, (...args) => hook(...args), target); - } -}; -const onBeforeMount = createHook("bm"); -const onMounted = createHook("m"); -const onBeforeUpdate = createHook( - "bu" -); -const onUpdated = createHook("u"); -const onBeforeUnmount = createHook( - "bum" -); -const onUnmounted = createHook("um"); -const onServerPrefetch = createHook( - "sp" -); -const onRenderTriggered = createHook("rtg"); -const onRenderTracked = createHook("rtc"); -function onErrorCaptured(hook, target = currentInstance) { - injectHook("ec", hook, target); -} - -const COMPONENTS = "components"; -const DIRECTIVES = "directives"; -function resolveComponent(name, maybeSelfReference) { - return resolveAsset(COMPONENTS, name, true, maybeSelfReference) || name; -} -const NULL_DYNAMIC_COMPONENT = Symbol.for("v-ndc"); -function resolveDynamicComponent(component) { - if (isString(component)) { - return resolveAsset(COMPONENTS, component, false) || component; - } else { - return component || NULL_DYNAMIC_COMPONENT; - } -} -function resolveDirective(name) { - return resolveAsset(DIRECTIVES, name); -} -function resolveAsset(type, name, warnMissing = true, maybeSelfReference = false) { - const instance = currentRenderingInstance || currentInstance; - if (instance) { - const Component = instance.type; - if (type === COMPONENTS) { - const selfName = getComponentName( - Component, - false - ); - if (selfName && (selfName === name || selfName === camelize(name) || selfName === capitalize(camelize(name)))) { - return Component; - } - } - const res = ( - // local registration - // check instance[type] first which is resolved for options API - resolve(instance[type] || Component[type], name) || // global registration - resolve(instance.appContext[type], name) - ); - if (!res && maybeSelfReference) { - return Component; - } - if (warnMissing && !res) { - const extra = type === COMPONENTS ? ` -If this is a native custom element, make sure to exclude it from component resolution via compilerOptions.isCustomElement.` : ``; - warn$1(`Failed to resolve ${type.slice(0, -1)}: ${name}${extra}`); - } - return res; - } else { - warn$1( - `resolve${capitalize(type.slice(0, -1))} can only be used in render() or setup().` - ); - } -} -function resolve(registry, name) { - return registry && (registry[name] || registry[camelize(name)] || registry[capitalize(camelize(name))]); -} - -function renderList(source, renderItem, cache, index) { - let ret; - const cached = cache && cache[index]; - const sourceIsArray = isArray(source); - if (sourceIsArray || isString(source)) { - const sourceIsReactiveArray = sourceIsArray && isReactive(source); - let needsWrap = false; - if (sourceIsReactiveArray) { - needsWrap = !isShallow(source); - source = shallowReadArray(source); - } - ret = new Array(source.length); - for (let i = 0, l = source.length; i < l; i++) { - ret[i] = renderItem( - needsWrap ? 
toReactive(source[i]) : source[i], - i, - void 0, - cached && cached[i] - ); - } - } else if (typeof source === "number") { - if (!Number.isInteger(source)) { - warn$1(`The v-for range expect an integer value but got ${source}.`); - } - ret = new Array(source); - for (let i = 0; i < source; i++) { - ret[i] = renderItem(i + 1, i, void 0, cached && cached[i]); - } - } else if (isObject(source)) { - if (source[Symbol.iterator]) { - ret = Array.from( - source, - (item, i) => renderItem(item, i, void 0, cached && cached[i]) - ); - } else { - const keys = Object.keys(source); - ret = new Array(keys.length); - for (let i = 0, l = keys.length; i < l; i++) { - const key = keys[i]; - ret[i] = renderItem(source[key], key, i, cached && cached[i]); - } - } - } else { - ret = []; - } - if (cache) { - cache[index] = ret; - } - return ret; -} - -function createSlots(slots, dynamicSlots) { - for (let i = 0; i < dynamicSlots.length; i++) { - const slot = dynamicSlots[i]; - if (isArray(slot)) { - for (let j = 0; j < slot.length; j++) { - slots[slot[j].name] = slot[j].fn; - } - } else if (slot) { - slots[slot.name] = slot.key ? (...args) => { - const res = slot.fn(...args); - if (res) res.key = slot.key; - return res; - } : slot.fn; - } - } - return slots; -} - -function renderSlot(slots, name, props = {}, fallback, noSlotted) { - if (currentRenderingInstance.ce || currentRenderingInstance.parent && isAsyncWrapper(currentRenderingInstance.parent) && currentRenderingInstance.parent.ce) { - if (name !== "default") props.name = name; - return openBlock(), createBlock( - Fragment, - null, - [createVNode("slot", props, fallback && fallback())], - 64 - ); - } - let slot = slots[name]; - if (slot && slot.length > 1) { - warn$1( - `SSR-optimized slot function detected in a non-SSR-optimized render function. You need to mark this component with $dynamic-slots in the parent template.` - ); - slot = () => []; - } - if (slot && slot._c) { - slot._d = false; - } - openBlock(); - const validSlotContent = slot && ensureValidVNode(slot(props)); - const slotKey = props.key || // slot content array of a dynamic conditional slot may have a branch - // key attached in the `createSlots` helper, respect that - validSlotContent && validSlotContent.key; - const rendered = createBlock( - Fragment, - { - key: (slotKey && !isSymbol(slotKey) ? slotKey : `_${name}`) + // #7256 force differentiate fallback content from actual content - (!validSlotContent && fallback ? "_fb" : "") - }, - validSlotContent || (fallback ? fallback() : []), - validSlotContent && slots._ === 1 ? 64 : -2 - ); - if (!noSlotted && rendered.scopeId) { - rendered.slotScopeIds = [rendered.scopeId + "-s"]; - } - if (slot && slot._c) { - slot._d = true; - } - return rendered; -} -function ensureValidVNode(vnodes) { - return vnodes.some((child) => { - if (!isVNode(child)) return true; - if (child.type === Comment) return false; - if (child.type === Fragment && !ensureValidVNode(child.children)) - return false; - return true; - }) ? vnodes : null; -} - -function toHandlers(obj, preserveCaseIfNecessary) { - const ret = {}; - if (!isObject(obj)) { - warn$1(`v-on with no argument expects an object value.`); - return ret; - } - for (const key in obj) { - ret[preserveCaseIfNecessary && /[A-Z]/.test(key) ? 
`on:${key}` : toHandlerKey(key)] = obj[key]; - } - return ret; -} - -const getPublicInstance = (i) => { - if (!i) return null; - if (isStatefulComponent(i)) return getComponentPublicInstance(i); - return getPublicInstance(i.parent); -}; -const publicPropertiesMap = ( - // Move PURE marker to new line to workaround compiler discarding it - // due to type annotation - /* @__PURE__ */ extend(/* @__PURE__ */ Object.create(null), { - $: (i) => i, - $el: (i) => i.vnode.el, - $data: (i) => i.data, - $props: (i) => shallowReadonly(i.props) , - $attrs: (i) => shallowReadonly(i.attrs) , - $slots: (i) => shallowReadonly(i.slots) , - $refs: (i) => shallowReadonly(i.refs) , - $parent: (i) => getPublicInstance(i.parent), - $root: (i) => getPublicInstance(i.root), - $host: (i) => i.ce, - $emit: (i) => i.emit, - $options: (i) => resolveMergedOptions(i) , - $forceUpdate: (i) => i.f || (i.f = () => { - queueJob(i.update); - }), - $nextTick: (i) => i.n || (i.n = nextTick.bind(i.proxy)), - $watch: (i) => instanceWatch.bind(i) - }) -); -const isReservedPrefix = (key) => key === "_" || key === "$"; -const hasSetupBinding = (state, key) => state !== EMPTY_OBJ && !state.__isScriptSetup && hasOwn(state, key); -const PublicInstanceProxyHandlers = { - get({ _: instance }, key) { - if (key === "__v_skip") { - return true; - } - const { ctx, setupState, data, props, accessCache, type, appContext } = instance; - if (key === "__isVue") { - return true; - } - let normalizedProps; - if (key[0] !== "$") { - const n = accessCache[key]; - if (n !== void 0) { - switch (n) { - case 1 /* SETUP */: - return setupState[key]; - case 2 /* DATA */: - return data[key]; - case 4 /* CONTEXT */: - return ctx[key]; - case 3 /* PROPS */: - return props[key]; - } - } else if (hasSetupBinding(setupState, key)) { - accessCache[key] = 1 /* SETUP */; - return setupState[key]; - } else if (data !== EMPTY_OBJ && hasOwn(data, key)) { - accessCache[key] = 2 /* DATA */; - return data[key]; - } else if ( - // only cache other properties when instance has declared (thus stable) - // props - (normalizedProps = instance.propsOptions[0]) && hasOwn(normalizedProps, key) - ) { - accessCache[key] = 3 /* PROPS */; - return props[key]; - } else if (ctx !== EMPTY_OBJ && hasOwn(ctx, key)) { - accessCache[key] = 4 /* CONTEXT */; - return ctx[key]; - } else if (shouldCacheAccess) { - accessCache[key] = 0 /* OTHER */; - } - } - const publicGetter = publicPropertiesMap[key]; - let cssModule, globalProperties; - if (publicGetter) { - if (key === "$attrs") { - track(instance.attrs, "get", ""); - markAttrsAccessed(); - } else if (key === "$slots") { - track(instance, "get", key); - } - return publicGetter(instance); - } else if ( - // css module (injected by vue-loader) - (cssModule = type.__cssModules) && (cssModule = cssModule[key]) - ) { - return cssModule; - } else if (ctx !== EMPTY_OBJ && hasOwn(ctx, key)) { - accessCache[key] = 4 /* CONTEXT */; - return ctx[key]; - } else if ( - // global properties - globalProperties = appContext.config.globalProperties, hasOwn(globalProperties, key) - ) { - { - return globalProperties[key]; - } - } else if (currentRenderingInstance && (!isString(key) || // #1091 avoid internal isRef/isVNode checks on component instance leading - // to infinite warning loop - key.indexOf("__v") !== 0)) { - if (data !== EMPTY_OBJ && isReservedPrefix(key[0]) && hasOwn(data, key)) { - warn$1( - `Property ${JSON.stringify( - key - )} must be accessed via $data because it starts with a reserved character ("$" or "_") and is not proxied on the 
render context.` - ); - } else if (instance === currentRenderingInstance) { - warn$1( - `Property ${JSON.stringify(key)} was accessed during render but is not defined on instance.` - ); - } - } - }, - set({ _: instance }, key, value) { - const { data, setupState, ctx } = instance; - if (hasSetupBinding(setupState, key)) { - setupState[key] = value; - return true; - } else if (setupState.__isScriptSetup && hasOwn(setupState, key)) { - warn$1(`Cannot mutate - + + -

[diff for the server's web UI HTML page; only text nodes and hunk headers survive the extraction:
 "Conversations", "New conversation", "{{ conv.messages[0].content }}",
 "Conversations are saved to browser's localStorage", "🦙 llama.cpp - chat",
 "{{ messages.length === 0 ? 'Send a message to start' : '' }}",
 "Settings", "Penalties settings";
 hunk headers: @@ -231,7 +303,7 @@, @@ -252,10 +324,11 @@, @@ -273,458 +346,6 @@]

- - diff --git a/examples/server/public_simplechat/simplechat.js b/examples/server/public_simplechat/simplechat.js index 8e0df3b61df2b..2fcd24a860bd4 100644 --- a/examples/server/public_simplechat/simplechat.js +++ b/examples/server/public_simplechat/simplechat.js @@ -407,6 +407,9 @@ class SimpleChat { if (curLine.startsWith("data:")) { curLine = curLine.substring(5); } + if (curLine.trim() === "[DONE]") { + break; + } let curJson = JSON.parse(curLine); console.debug("DBUG:SC:PART:Json:", curJson); this.append_response(this.response_extract_stream(curJson, apiEP)); diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 9c86407c28eba..8cb992470a302 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -16,12 +16,7 @@ // auto generated files (update with ./deps.sh) #include "index.html.hpp" -#include "completion.js.hpp" #include "loading.html.hpp" -#include "deps_daisyui.min.css.hpp" -#include "deps_markdown-it.js.hpp" -#include "deps_tailwindcss.js.hpp" -#include "deps_vue.esm-browser.js.hpp" #include #include @@ -38,8 +33,10 @@ using json = nlohmann::ordered_json; enum stop_type { - STOP_TYPE_FULL, - STOP_TYPE_PARTIAL, + STOP_TYPE_NONE, + STOP_TYPE_EOS, + STOP_TYPE_WORD, + STOP_TYPE_LIMIT, }; // state diagram: https://github.com/ggerganov/llama.cpp/pull/9283 @@ -57,7 +54,10 @@ enum server_state { }; enum server_task_type { - SERVER_TASK_TYPE_INFERENCE, + SERVER_TASK_TYPE_COMPLETION, + SERVER_TASK_TYPE_EMBEDDING, + SERVER_TASK_TYPE_RERANK, + SERVER_TASK_TYPE_INFILL, SERVER_TASK_TYPE_CANCEL, SERVER_TASK_TYPE_NEXT_RESPONSE, SERVER_TASK_TYPE_METRICS, @@ -67,22 +67,286 @@ enum server_task_type { SERVER_TASK_TYPE_SET_LORA, }; -enum server_task_inf_type { - SERVER_TASK_INF_TYPE_COMPLETION, - SERVER_TASK_INF_TYPE_EMBEDDING, - SERVER_TASK_INF_TYPE_RERANK, - SERVER_TASK_INF_TYPE_INFILL, +// https://community.openai.com/t/openai-chat-list-of-error-codes-and-types/357791/11 +enum error_type { + ERROR_TYPE_INVALID_REQUEST, + ERROR_TYPE_AUTHENTICATION, + ERROR_TYPE_SERVER, + ERROR_TYPE_NOT_FOUND, + ERROR_TYPE_PERMISSION, + ERROR_TYPE_UNAVAILABLE, // custom error + ERROR_TYPE_NOT_SUPPORTED, // custom error +}; + +struct slot_params { + bool stream = true; + bool cache_prompt = true; // remember the prompt to avoid reprocessing all prompt + + int32_t n_keep = 0; // number of tokens to keep from initial prompt + int32_t n_discard = 0; // number of tokens after n_keep that may be discarded when shifting context, 0 defaults to half + int32_t n_predict = -1; // new tokens to predict + int32_t n_indent = 0; // mininum line indentation for the generated text in number of whitespace characters + + int64_t t_max_prompt_ms = -1; // TODO: implement + int64_t t_max_predict_ms = -1; // if positive, limit the generation phase to this time limit + + std::vector antiprompt; + bool timings_per_token = false; + bool ignore_eos = false; + + struct common_params_sampling sampling; + struct common_params_speculative speculative; + + // OAI-compat fields + bool verbose = false; + bool oaicompat = false; + bool oaicompat_chat = true; + std::string oaicompat_model; + std::string oaicompat_cmpl_id; + + json to_json() const { + std::vector samplers; + samplers.reserve(sampling.samplers.size()); + for (const auto & sampler : sampling.samplers) { + samplers.emplace_back(common_sampler_type_to_str(sampler)); + } + + return json { + {"n_predict", n_predict}, // Server configured n_predict + {"seed", sampling.seed}, + {"temperature", sampling.temp}, + {"dynatemp_range", 
sampling.dynatemp_range}, + {"dynatemp_exponent", sampling.dynatemp_exponent}, + {"top_k", sampling.top_k}, + {"top_p", sampling.top_p}, + {"min_p", sampling.min_p}, + {"xtc_probability", sampling.xtc_probability}, + {"xtc_threshold", sampling.xtc_threshold}, + {"typical_p", sampling.typ_p}, + {"repeat_last_n", sampling.penalty_last_n}, + {"repeat_penalty", sampling.penalty_repeat}, + {"presence_penalty", sampling.penalty_present}, + {"frequency_penalty", sampling.penalty_freq}, + {"dry_multiplier", sampling.dry_multiplier}, + {"dry_base", sampling.dry_base}, + {"dry_allowed_length", sampling.dry_allowed_length}, + {"dry_penalty_last_n", sampling.dry_penalty_last_n}, + {"dry_sequence_breakers", sampling.dry_sequence_breakers}, + {"mirostat", sampling.mirostat}, + {"mirostat_tau", sampling.mirostat_tau}, + {"mirostat_eta", sampling.mirostat_eta}, + {"penalize_nl", sampling.penalize_nl}, + {"stop", antiprompt}, + {"max_tokens", n_predict}, // User configured n_predict + {"n_keep", n_keep}, + {"n_discard", n_discard}, + {"ignore_eos", sampling.ignore_eos}, + {"stream", stream}, + {"logit_bias", format_logit_bias(sampling.logit_bias)}, + {"n_probs", sampling.n_probs}, + {"min_keep", sampling.min_keep}, + {"grammar", sampling.grammar}, + {"samplers", samplers}, + {"speculative.n_max", speculative.n_max}, + {"speculative.n_min", speculative.n_min}, + {"speculative.p_min", speculative.p_min}, + {"timings_per_token", timings_per_token}, + }; + } }; struct server_task { - int id = -1; // to be filled by server_queue - int id_target = -1; // used by SERVER_TASK_TYPE_CANCEL + int id = -1; // to be filled by server_queue + int index = -1; // used when there are multiple prompts (batch request) - llama_tokens prompt_tokens; server_task_type type; - json data; - server_task_inf_type inf_type = SERVER_TASK_INF_TYPE_COMPLETION; + // used by SERVER_TASK_TYPE_CANCEL + int id_target = -1; + + // used by SERVER_TASK_TYPE_INFERENCE + slot_params params; + llama_tokens prompt_tokens; + int id_selected_slot = -1; + + // used by SERVER_TASK_TYPE_SLOT_SAVE, SERVER_TASK_TYPE_SLOT_RESTORE, SERVER_TASK_TYPE_SLOT_ERASE + struct slot_action { + int slot_id; + std::string filename; + std::string filepath; + }; + slot_action slot_action; + + // used by SERVER_TASK_TYPE_METRICS + bool metrics_reset_bucket = false; + + server_task(server_task_type type) : type(type) {} + + static slot_params params_from_json_cmpl( + const llama_model * model, + const common_params & params_base, + const json & data) { + slot_params params; + + // Sampling parameter defaults are loaded from the global server context (but individual requests can still override them) + slot_params defaults; + defaults.sampling = params_base.sampling; + defaults.speculative = params_base.speculative; + + // enabling this will output extra debug information in the HTTP responses from the server + params.verbose = params_base.verbosity > 9; + params.timings_per_token = json_value(data, "timings_per_token", false); + + params.stream = json_value(data, "stream", false); + params.cache_prompt = json_value(data, "cache_prompt", true); + params.n_predict = json_value(data, "n_predict", json_value(data, "max_tokens", defaults.n_predict)); + params.n_indent = json_value(data, "n_indent", defaults.n_indent); + params.n_keep = json_value(data, "n_keep", defaults.n_keep); + params.n_discard = json_value(data, "n_discard", defaults.n_discard); + //params.t_max_prompt_ms = json_value(data, "t_max_prompt_ms", defaults.t_max_prompt_ms); // TODO: implement + 
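
// Illustrative sketch (not part of the patch). The json_value(data, key, default)
// helper used throughout params_from_json_cmpl() is defined elsewhere in the server
// sources; this simplified stand-in (assumed behaviour, not the real implementation)
// only shows why json_value(data, "n_predict", json_value(data, "max_tokens", defaults.n_predict))
// lets an explicit "n_predict" win over "max_tokens", which in turn wins over the
// server default. Assumes the file's `using json = nlohmann::ordered_json;`.
template <typename T>
static T json_value_sketch(const json & data, const std::string & key, const T & def) {
    if (data.contains(key) && !data.at(key).is_null()) {
        try {
            return data.at(key).get<T>();
        } catch (const json::exception &) {
            return def; // wrong type -> fall back to the default
        }
    }
    return def; // missing or null -> fall back to the default
}
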
params.t_max_predict_ms = json_value(data, "t_max_predict_ms", defaults.t_max_predict_ms); + + params.sampling.top_k = json_value(data, "top_k", defaults.sampling.top_k); + params.sampling.top_p = json_value(data, "top_p", defaults.sampling.top_p); + params.sampling.min_p = json_value(data, "min_p", defaults.sampling.min_p); + params.sampling.xtc_probability = json_value(data, "xtc_probability", defaults.sampling.xtc_probability); + params.sampling.xtc_threshold = json_value(data, "xtc_threshold", defaults.sampling.xtc_threshold); + params.sampling.typ_p = json_value(data, "typical_p", defaults.sampling.typ_p); + params.sampling.temp = json_value(data, "temperature", defaults.sampling.temp); + params.sampling.dynatemp_range = json_value(data, "dynatemp_range", defaults.sampling.dynatemp_range); + params.sampling.dynatemp_exponent = json_value(data, "dynatemp_exponent", defaults.sampling.dynatemp_exponent); + params.sampling.penalty_last_n = json_value(data, "repeat_last_n", defaults.sampling.penalty_last_n); + params.sampling.penalty_repeat = json_value(data, "repeat_penalty", defaults.sampling.penalty_repeat); + params.sampling.penalty_freq = json_value(data, "frequency_penalty", defaults.sampling.penalty_freq); + params.sampling.penalty_present = json_value(data, "presence_penalty", defaults.sampling.penalty_present); + params.sampling.dry_multiplier = json_value(data, "dry_multiplier", defaults.sampling.dry_multiplier); + params.sampling.dry_base = json_value(data, "dry_base", defaults.sampling.dry_base); + params.sampling.dry_allowed_length = json_value(data, "dry_allowed_length", defaults.sampling.dry_allowed_length); + params.sampling.dry_penalty_last_n = json_value(data, "dry_penalty_last_n", defaults.sampling.dry_penalty_last_n); + params.sampling.mirostat = json_value(data, "mirostat", defaults.sampling.mirostat); + params.sampling.mirostat_tau = json_value(data, "mirostat_tau", defaults.sampling.mirostat_tau); + params.sampling.mirostat_eta = json_value(data, "mirostat_eta", defaults.sampling.mirostat_eta); + params.sampling.penalize_nl = json_value(data, "penalize_nl", defaults.sampling.penalize_nl); + params.sampling.seed = json_value(data, "seed", defaults.sampling.seed); + params.sampling.n_probs = json_value(data, "n_probs", defaults.sampling.n_probs); + params.sampling.min_keep = json_value(data, "min_keep", defaults.sampling.min_keep); + + params.speculative.n_min = json_value(data, "speculative.n_min", defaults.speculative.n_min); + params.speculative.n_max = json_value(data, "speculative.n_max", defaults.speculative.n_max); + params.speculative.p_min = json_value(data, "speculative.p_min", defaults.speculative.p_min); + + params.speculative.n_min = std::min(params.speculative.n_max, params.speculative.n_min); + params.speculative.n_min = std::max(params.speculative.n_min, 2); + params.speculative.n_max = std::max(params.speculative.n_max, 0); + + if (params.sampling.dry_base < 1.0f) { + params.sampling.dry_base = defaults.sampling.dry_base; + } + + // sequence breakers for DRY + { + // Currently, this is not compatible with TextGen WebUI, Koboldcpp and SillyTavern format + // Ref: https://github.com/oobabooga/text-generation-webui/blob/d1af7a41ade7bd3c3a463bfa640725edb818ebaf/extensions/openai/typing.py#L39 + + if (data.contains("dry_sequence_breakers")) { + params.sampling.dry_sequence_breakers = json_value(data, "dry_sequence_breakers", std::vector()); + if (params.sampling.dry_sequence_breakers.empty()) { + throw std::runtime_error("Error: dry_sequence_breakers must 
be a non-empty array of strings"); + } + } + } + + // process "json_schema" and "grammar" + if (data.contains("json_schema") && !data.at("json_schema").is_null() && data.contains("grammar") && !data.at("grammar").is_null()) { + throw std::runtime_error("Either \"json_schema\" or \"grammar\" can be specified, but not both"); + } + if (data.contains("json_schema") && !data.contains("grammar")) { + try { + auto schema = json_value(data, "json_schema", json::object()); + params.sampling.grammar = json_schema_to_grammar(schema); + } catch (const std::exception & e) { + throw std::runtime_error(std::string("\"json_schema\": ") + e.what()); + } + } else { + params.sampling.grammar = json_value(data, "grammar", defaults.sampling.grammar); + } + + { + params.sampling.logit_bias.clear(); + params.ignore_eos = json_value(data, "ignore_eos", false); + + const auto & logit_bias = data.find("logit_bias"); + if (logit_bias != data.end() && logit_bias->is_array()) { + const int n_vocab = llama_n_vocab(model); + for (const auto & el : *logit_bias) { + // TODO: we may want to throw errors here, in case "el" is incorrect + if (el.is_array() && el.size() == 2) { + float bias; + if (el[1].is_number()) { + bias = el[1].get(); + } else if (el[1].is_boolean() && !el[1].get()) { + bias = -INFINITY; + } else { + continue; + } + + if (el[0].is_number_integer()) { + llama_token tok = el[0].get(); + if (tok >= 0 && tok < n_vocab) { + params.sampling.logit_bias.push_back({tok, bias}); + } + } else if (el[0].is_string()) { + auto toks = common_tokenize(model, el[0].get(), false); + for (auto tok : toks) { + params.sampling.logit_bias.push_back({tok, bias}); + } + } + } + } + } + } + + { + params.antiprompt.clear(); + + const auto & stop = data.find("stop"); + if (stop != data.end() && stop->is_array()) { + for (const auto & word : *stop) { + if (!word.empty()) { + params.antiprompt.push_back(word); + } + } + } + } + + { + const auto & samplers = data.find("samplers"); + if (samplers != data.end()) { + if (samplers->is_array()) { + std::vector sampler_names; + for (const auto & name : *samplers) { + if (name.is_string()) { + sampler_names.emplace_back(name); + } + } + params.sampling.samplers = common_sampler_types_from_names(sampler_names, false); + } else if (samplers->is_string()){ + std::string sampler_string; + for (const auto & name : *samplers) { + sampler_string += name; + } + params.sampling.samplers = common_sampler_types_from_chars(sampler_string); + } + } else { + params.sampling.samplers = defaults.sampling.samplers; + } + } + + std::string model_name = params_base.model_alias.empty() ? 
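
// Illustrative sketch (not part of the patch): a completion request body that
// exercises the parsing above. All values, token ids and the schema are
// placeholders; assumes the file's `using json = nlohmann::ordered_json;`.
static json example_cmpl_request() {
    return json {
        {"prompt",     "Hello"},
        {"max_tokens", 64},                                     // read via the "n_predict" fallback
        {"stop",       json::array({"\n\n", "User:"})},         // copied into params.antiprompt
        {"samplers",   json::array({"top_k", "top_p", "temperature"})}, // array of names (a single string is also accepted)
        {"logit_bias", json::array({
            json::array({15043, 1.5}),                          // [token_id, bias]
            json::array({"llama", false})                       // [text, false] -> bias of -INFINITY for its tokens
        })},
        {"json_schema", json {{"type", "object"}}}              // mutually exclusive with "grammar"
    };
}
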
DEFAULT_OAICOMPAT_MODEL : params_base.model_alias; + params.oaicompat_model = json_value(data, "model", model_name); + + return params; + } // utility function static std::unordered_set get_list_id(const std::vector & tasks) { @@ -94,45 +358,548 @@ struct server_task { } }; +struct result_timings { + int32_t prompt_n = -1; + double prompt_ms; + double prompt_per_token_ms; + double prompt_per_second; + + int32_t predicted_n = -1; + double predicted_ms; + double predicted_per_token_ms; + double predicted_per_second; + + json to_json() const { + return { + {"prompt_n", prompt_n}, + {"prompt_ms", prompt_ms}, + {"prompt_per_token_ms", prompt_per_token_ms}, + {"prompt_per_second", prompt_per_second}, + + {"predicted_n", predicted_n}, + {"predicted_ms", predicted_ms}, + {"predicted_per_token_ms", predicted_per_token_ms}, + {"predicted_per_second", predicted_per_second}, + }; + } +}; + struct server_task_result { - int id = -1; + int id = -1; + int id_slot = -1; + virtual bool is_error() { + // only used by server_task_result_error + return false; + } + virtual bool is_stop() { + // only used by server_task_result_cmpl_* + return false; + } + virtual int get_index() { + return -1; + } + virtual json to_json() = 0; + virtual ~server_task_result() = default; +}; + +// using shared_ptr for polymorphism of server_task_result +using server_task_result_ptr = std::unique_ptr; + +inline std::string stop_type_to_str(stop_type type) { + switch (type) { + case STOP_TYPE_EOS: return "eos"; + case STOP_TYPE_WORD: return "word"; + case STOP_TYPE_LIMIT: return "limit"; + default: return "none"; + } +} - json data; +struct completion_token_output { + llama_token tok; + std::string text_to_send; + struct token_prob { + llama_token tok; + std::string tok_str; + float prob; + }; + std::vector probs; + + json to_json() const { + json probs_for_token = json::array(); + for (const auto & p : probs) { + probs_for_token.push_back(json { + {"tok_str", p.tok_str}, + {"prob", p.prob}, + }); + } + return probs_for_token; + } - bool stop; - bool error; + static json probs_vector_to_json(const std::vector & probs) { + json out = json::array(); + for (const auto & prob : probs) { + const std::string tok_str = prob.text_to_send; + out.push_back(json { + {"content", tok_str}, + {"probs", prob.to_json()}, + }); + } + return out; + } }; -struct server_static_file { - const unsigned char * data; - unsigned int size; - const char * mime_type; +struct server_task_result_cmpl_final : server_task_result { + int index = 0; + std::string content; + bool stream; + result_timings timings; + std::string prompt; + + bool truncated; + int32_t n_decoded; + int32_t n_prompt_tokens; + int32_t n_tokens_cached; + int32_t has_new_line; + std::string stopping_word; + stop_type stop = STOP_TYPE_NONE; + + std::vector probs_output; + + slot_params generation_params; + + // OAI-compat fields + bool verbose = false; + bool oaicompat = false; + bool oaicompat_chat = true; // TODO: support oaicompat for non-chat + std::string oaicompat_model; + std::string oaicompat_cmpl_id; + + virtual int get_index() override { + return index; + } + + virtual bool is_stop() override { + return true; // in stream mode, final responses are considered stop + } + + virtual json to_json() override { + return oaicompat + ? (stream ? to_json_oaicompat_chat_stream() : to_json_oaicompat_chat()) + : to_json_non_oaicompat(); + } + + json to_json_non_oaicompat() { + json res = json { + {"index", index}, + {"content", stream ? 
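
// Illustrative sketch (not part of the patch): the "completion_probabilities"
// array built by completion_token_output::probs_vector_to_json() above, with
// placeholder tokens and probabilities.
static const char * example_completion_probabilities = R"json(
[
  { "content": "Hello",
    "probs": [ { "tok_str": "Hello", "prob": 0.91 },
               { "tok_str": "Hi",    "prob": 0.05 } ] }
]
)json";
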
"" : content}, // in stream mode, content is already in last partial chunk + {"id_slot", id_slot}, + {"stop", true}, + {"model", oaicompat_model}, + {"tokens_predicted", n_decoded}, + {"tokens_evaluated", n_prompt_tokens}, + {"generation_settings", generation_params.to_json()}, + {"prompt", prompt}, + {"has_new_line", has_new_line}, + {"truncated", truncated}, + {"stop_type", stop_type_to_str(stop)}, + {"stopping_word", stopping_word}, + {"tokens_cached", n_tokens_cached}, + {"timings", timings.to_json()}, + }; + if (!probs_output.empty()) { + res["completion_probabilities"] = completion_token_output::probs_vector_to_json(probs_output); + } + return res; + } + + json to_json_oaicompat_chat() { + std::string finish_reason = "length"; + if (stop == STOP_TYPE_WORD || stop == STOP_TYPE_EOS) { + finish_reason = "stop"; + } + + json choices = json::array({json{ + {"finish_reason", finish_reason}, + {"index", 0}, + {"message", json{ + {"content", content}, + {"role", "assistant"} + } + }}}); + + std::time_t t = std::time(0); + + json res = json { + {"choices", choices}, + {"created", t}, + {"model", oaicompat_model}, + {"object", "chat.completion"}, + {"usage", json { + {"completion_tokens", n_decoded}, + {"prompt_tokens", n_prompt_tokens}, + {"total_tokens", n_decoded + n_prompt_tokens} + }}, + {"id", oaicompat_cmpl_id} + }; + + // extra fields for debugging purposes + if (verbose) { + res["__verbose"] = to_json_non_oaicompat(); + } + if (timings.prompt_n >= 0) { + res.push_back({"timings", timings.to_json()}); + } + + return res; + } + + json to_json_oaicompat_chat_stream() { + std::time_t t = std::time(0); + std::string finish_reason = "length"; + if (stop == STOP_TYPE_WORD || stop == STOP_TYPE_EOS) { + finish_reason = "stop"; + } + + json choices = json::array({json{{"finish_reason", finish_reason}, + {"index", 0}, + {"delta", json::object()}}}); + + json ret = json { + {"choices", choices}, + {"created", t}, + {"id", oaicompat_cmpl_id}, + {"model", oaicompat_model}, + {"object", "chat.completion.chunk"}, + {"usage", json { + {"completion_tokens", n_decoded}, + {"prompt_tokens", n_prompt_tokens}, + {"total_tokens", n_decoded + n_prompt_tokens}, + }}, + }; + + if (timings.prompt_n >= 0) { + ret.push_back({"timings", timings.to_json()}); + } + + return ret; + } }; -struct slot_params { - bool stream = true; - bool cache_prompt = true; // remember the prompt to avoid reprocessing all prompt +struct server_task_result_cmpl_partial : server_task_result { + int index = 0; + std::string content; - int32_t n_keep = 0; // number of tokens to keep from initial prompt - int32_t n_discard = 0; // number of tokens after n_keep that may be discarded when shifting context, 0 defaults to half - int32_t n_predict = -1; // new tokens to predict - int32_t n_indent = 0; // mininum line indentation for the generated text in number of whitespace characters + int32_t n_decoded; + int32_t n_prompt_tokens; - int64_t t_max_prompt_ms = -1; // TODO: implement - int64_t t_max_predict_ms = -1; // if positive, limit the generation phase to this time limit + std::vector probs_output; + result_timings timings; - std::vector antiprompt; + // OAI-compat fields + bool verbose = false; + bool oaicompat = false; + bool oaicompat_chat = true; // TODO: support oaicompat for non-chat + std::string oaicompat_model; + std::string oaicompat_cmpl_id; - struct common_params_sampling sampling; - struct common_params_speculative speculative; + virtual int get_index() override { + return index; + } + + virtual bool is_stop() override { + 
return false; // in stream mode, partial responses are not considered stop + } + + virtual json to_json() override { + return oaicompat ? to_json_oaicompat() : to_json_non_oaicompat(); + } + + json to_json_non_oaicompat() { + // non-OAI-compat JSON + json res = json { + {"index", index}, + {"content", content}, + {"stop", false}, + {"id_slot", id_slot}, + {"tokens_predicted", n_decoded}, + {"tokens_evaluated", n_prompt_tokens}, + }; + // populate the timings object when needed (usually for the last response or with timings_per_token enabled) + if (timings.prompt_n > 0) { + res.push_back({"timings", timings.to_json()}); + } + if (!probs_output.empty()) { + res["completion_probabilities"] = completion_token_output::probs_vector_to_json(probs_output); + } + return res; + } + + json to_json_oaicompat() { + bool first = n_decoded == 0; + std::time_t t = std::time(0); + json choices; + + if (first) { + if (content.empty()) { + choices = json::array({json{{"finish_reason", nullptr}, + {"index", 0}, + {"delta", json{{"role", "assistant"}}}}}); + } else { + // We have to send this as two updates to conform to openai behavior + json initial_ret = json{{"choices", json::array({json{ + {"finish_reason", nullptr}, + {"index", 0}, + {"delta", json{ + {"role", "assistant"} + }}}})}, + {"created", t}, + {"id", oaicompat_cmpl_id}, + {"model", oaicompat_model}, + {"object", "chat.completion.chunk"}}; + + json second_ret = json{ + {"choices", json::array({json{{"finish_reason", nullptr}, + {"index", 0}, + {"delta", json{ + {"content", content}}} + }})}, + {"created", t}, + {"id", oaicompat_cmpl_id}, + {"model", oaicompat_model}, + {"object", "chat.completion.chunk"}}; + + return std::vector({initial_ret, second_ret}); + } + } else { + choices = json::array({json{ + {"finish_reason", nullptr}, + {"index", 0}, + {"delta", + json{ + {"content", content}, + }}, + }}); + } + + json ret = json { + {"choices", choices}, + {"created", t}, + {"id", oaicompat_cmpl_id}, + {"model", oaicompat_model}, + {"object", "chat.completion.chunk"} + }; + + if (timings.prompt_n >= 0) { + ret.push_back({"timings", timings.to_json()}); + } + + return std::vector({ret}); + } +}; + +struct server_task_result_embd : server_task_result { + int index = 0; + std::vector embedding; + + virtual int get_index() override { + return index; + } + + virtual json to_json() override { + return json { + {"index", index}, + {"embedding", embedding}, + }; + } +}; + +struct server_task_result_rerank : server_task_result { + int index = 0; + float score = -1e6; + + virtual int get_index() override { + return index; + } + + virtual json to_json() override { + return json { + {"index", index}, + {"score", score}, + }; + } +}; + +// this function maybe used outside of server_task_result_error +static json format_error_response(const std::string & message, const enum error_type type) { + std::string type_str; + int code = 500; + switch (type) { + case ERROR_TYPE_INVALID_REQUEST: + type_str = "invalid_request_error"; + code = 400; + break; + case ERROR_TYPE_AUTHENTICATION: + type_str = "authentication_error"; + code = 401; + break; + case ERROR_TYPE_NOT_FOUND: + type_str = "not_found_error"; + code = 404; + break; + case ERROR_TYPE_SERVER: + type_str = "server_error"; + code = 500; + break; + case ERROR_TYPE_PERMISSION: + type_str = "permission_error"; + code = 403; + break; + case ERROR_TYPE_NOT_SUPPORTED: + type_str = "not_supported_error"; + code = 501; + break; + case ERROR_TYPE_UNAVAILABLE: + type_str = "unavailable_error"; + code = 503; + break; + } + 
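
// Illustrative sketch (not part of the patch): the body assembled just below for a
// malformed request (ERROR_TYPE_INVALID_REQUEST maps to HTTP 400 and the type
// string "invalid_request_error"). The message text is a placeholder.
static const char * example_error_body = R"json(
{ "code": 400, "message": "missing required field: prompt", "type": "invalid_request_error" }
)json";
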
return json { + {"code", code}, + {"message", message}, + {"type", type_str}, + }; +} + +struct server_task_result_error : server_task_result { + int index = 0; + error_type err_type = ERROR_TYPE_SERVER; + std::string err_msg; + + virtual bool is_error() override { + return true; + } + + virtual json to_json() override { + return format_error_response(err_msg, err_type); + } +}; + +struct server_task_result_metrics : server_task_result { + int n_idle_slots; + int n_processing_slots; + int n_tasks_deferred; + int64_t t_start; + + int32_t kv_cache_tokens_count; + int32_t kv_cache_used_cells; + + // TODO: somehow reuse server_metrics in the future, instead of duplicating the fields + uint64_t n_prompt_tokens_processed_total = 0; + uint64_t t_prompt_processing_total = 0; + uint64_t n_tokens_predicted_total = 0; + uint64_t t_tokens_generation_total = 0; + + uint64_t n_prompt_tokens_processed = 0; + uint64_t t_prompt_processing = 0; + + uint64_t n_tokens_predicted = 0; + uint64_t t_tokens_generation = 0; + + uint64_t n_decode_total = 0; + uint64_t n_busy_slots_total = 0; + + // while we can also use std::vector this requires copying the slot object which can be quite messy + // therefore, we use json to temporarily store the slot.to_json() result + json slots_data = json::array(); + + virtual json to_json() override { + return json { + { "idle", n_idle_slots }, + { "processing", n_processing_slots }, + { "deferred", n_tasks_deferred }, + { "t_start", t_start }, + + { "n_prompt_tokens_processed_total", n_prompt_tokens_processed_total }, + { "t_tokens_generation_total", t_tokens_generation_total }, + { "n_tokens_predicted_total", n_tokens_predicted_total }, + { "t_prompt_processing_total", t_prompt_processing_total }, + + { "n_prompt_tokens_processed", n_prompt_tokens_processed }, + { "t_prompt_processing", t_prompt_processing }, + { "n_tokens_predicted", n_tokens_predicted }, + { "t_tokens_generation", t_tokens_generation }, + + { "n_decode_total", n_decode_total }, + { "n_busy_slots_total", n_busy_slots_total }, + + { "kv_cache_tokens_count", kv_cache_tokens_count }, + { "kv_cache_used_cells", kv_cache_used_cells }, + + { "slots", slots_data }, + }; + } +}; + +struct server_task_result_slot_save_load : server_task_result { + std::string filename; + bool is_save; // true = save, false = load + + size_t n_tokens; + size_t n_bytes; + double t_ms; + + virtual json to_json() override { + if (is_save) { + return json { + { "id_slot", id_slot }, + { "filename", filename }, + { "n_saved", n_tokens }, + { "n_written", n_bytes }, + { "timings", { + { "save_ms", t_ms } + }}, + }; + } else { + return json { + { "id_slot", id_slot }, + { "filename", filename }, + { "n_restored", n_tokens }, + { "n_read", n_bytes }, + { "timings", { + { "restore_ms", t_ms } + }}, + }; + } + } +}; + +struct server_task_result_slot_erase : server_task_result { + size_t n_erased; + + virtual json to_json() override { + return json { + { "id_slot", id_slot }, + { "n_erased", n_erased }, + }; + } +}; + +struct server_task_result_apply_lora : server_task_result { + virtual json to_json() override { + return json {{ "success", true }}; + } }; struct server_slot { int id; int id_task = -1; - llama_batch batch_spec; + // only used for completion/embedding/infill/rerank + server_task_type task_type = SERVER_TASK_TYPE_COMPLETION; + + llama_batch batch_spec = {}; + llama_context * ctx = nullptr; llama_context * ctx_dft = nullptr; common_speculative * spec = nullptr; @@ -168,18 +935,11 @@ struct server_slot { llama_tokens cache_tokens; 
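Illustrative aside (not part of the patch): a condensed, self-contained sketch of the polymorphic result pattern introduced above, where concrete result structs override to_json() and travel through one queue as unique_ptrs (compare server_task_result_ptr and queue_results later in this diff). The sketch assumes nlohmann::json and uses hypothetical sketch_* names rather than the real server types.

// --- illustrative sketch (not part of the patch) -------------------------------
#include <iostream>
#include <memory>
#include <string>
#include <vector>
#include <nlohmann/json.hpp>

using json = nlohmann::ordered_json;

struct sketch_result {
    int id      = -1;
    int id_slot = -1;
    virtual ~sketch_result() = default;
    virtual bool is_error() { return false; }
    virtual json to_json() = 0;
};

using sketch_result_ptr = std::unique_ptr<sketch_result>;

struct sketch_result_slot_erase : sketch_result {
    size_t n_erased = 0;
    json to_json() override {
        return json { {"id_slot", id_slot}, {"n_erased", n_erased} };
    }
};

struct sketch_result_error : sketch_result {
    std::string msg;
    bool is_error() override { return true; }
    json to_json() override { return json { {"message", msg} }; }
};

int main() {
    // heterogeneous results share one queue; the consumer only needs the base interface
    std::vector<sketch_result_ptr> queue;

    auto ok = std::make_unique<sketch_result_slot_erase>();
    ok->id_slot  = 0;
    ok->n_erased = 128;
    queue.emplace_back(std::move(ok));

    auto err = std::make_unique<sketch_result_error>();
    err->msg = "invalid slot id";
    queue.emplace_back(std::move(err));

    for (const auto & r : queue) {
        std::cout << (r->is_error() ? "error: " : "ok: ") << r->to_json().dump() << "\n";
    }
    return 0;
}
// --- end of illustrative sketch -------------------------------------------------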
std::vector generated_token_probs; - server_task_inf_type inf_type = SERVER_TASK_INF_TYPE_COMPLETION; - bool has_next_token = true; bool has_new_line = false; bool truncated = false; - bool stopped_eos = false; - bool stopped_word = false; - bool stopped_limit = false; + stop_type stop; - bool oaicompat = false; - - std::string oaicompat_model; std::string stopping_word; // sampling @@ -209,18 +969,20 @@ struct server_slot { generated_text = ""; has_new_line = false; truncated = false; - stopped_eos = false; - stopped_word = false; - stopped_limit = false; + stop = STOP_TYPE_NONE; stopping_word = ""; n_past = 0; n_sent_text = 0; n_sent_token_probs = 0; - inf_type = SERVER_TASK_INF_TYPE_COMPLETION; + task_type = SERVER_TASK_TYPE_COMPLETION; generated_token_probs.clear(); } + bool is_non_causal() const { + return task_type == SERVER_TASK_TYPE_EMBEDDING || task_type == SERVER_TASK_TYPE_RERANK; + } + bool has_budget(const common_params & global_params) { if (params.n_predict == -1 && global_params.n_predict == -1) { return true; // limitless @@ -264,38 +1026,40 @@ struct server_slot { } } - json get_formated_timings() const { - return json { - {"prompt_n", n_prompt_tokens_processed}, - {"prompt_ms", t_prompt_processing}, - {"prompt_per_token_ms", t_prompt_processing / n_prompt_tokens_processed}, - {"prompt_per_second", 1e3 / t_prompt_processing * n_prompt_tokens_processed}, - - {"predicted_n", n_decoded}, - {"predicted_ms", t_token_generation}, - {"predicted_per_token_ms", t_token_generation / n_decoded}, - {"predicted_per_second", 1e3 / t_token_generation * n_decoded}, - }; + result_timings get_timings() const { + result_timings timings; + timings.prompt_n = n_prompt_tokens_processed; + timings.prompt_ms = t_prompt_processing; + timings.prompt_per_token_ms = t_prompt_processing / n_prompt_tokens_processed; + timings.prompt_per_second = 1e3 / t_prompt_processing * n_prompt_tokens_processed; + + timings.predicted_n = n_decoded; + timings.predicted_ms = t_token_generation; + timings.predicted_per_token_ms = t_token_generation / n_decoded; + timings.predicted_per_second = 1e3 / t_token_generation * n_decoded; + + return timings; } - size_t find_stopping_strings(const std::string & text, const size_t last_token_size, const stop_type type) { + size_t find_stopping_strings(const std::string & text, const size_t last_token_size, bool is_full_stop) { size_t stop_pos = std::string::npos; for (const std::string & word : params.antiprompt) { size_t pos; - if (type == STOP_TYPE_FULL) { + if (is_full_stop) { const size_t tmp = word.size() + last_token_size; const size_t from_pos = text.size() > tmp ? 
text.size() - tmp : 0; pos = text.find(word, from_pos); } else { + // otherwise, partial stop pos = find_partial_stop_string(word, text); } if (pos != std::string::npos && (stop_pos == std::string::npos || pos < stop_pos)) { - if (type == STOP_TYPE_FULL) { - stopped_word = true; + if (is_full_stop) { + stop = STOP_TYPE_WORD; stopping_word = word; has_next_token = false; } @@ -322,6 +1086,28 @@ struct server_slot { t_token_generation, n_decoded, t_gen, n_gen_second, t_prompt_processing + t_token_generation, n_prompt_tokens_processed + n_decoded); } + + json to_json() const { + return json { + {"id", id}, + {"id_task", id_task}, + {"n_ctx", n_ctx}, + {"speculative", can_speculate()}, + {"is_processing", is_processing()}, + {"non_causal", is_non_causal()}, + {"params", params.to_json()}, + {"prompt", common_detokenize(ctx, prompt_tokens)}, + {"next_token", + { + {"has_next_token", has_next_token}, + {"has_new_line", has_new_line}, + {"n_remain", n_remaining}, + {"n_decoded", n_decoded}, + {"stopping_word", stopping_word}, + } + }, + }; + } }; struct server_metrics { @@ -394,9 +1180,7 @@ struct server_queue { // Add a new task to the end of the queue int post(server_task task, bool front = false) { std::unique_lock lock(mutex_tasks); - if (task.id == -1) { - task.id = id++; - } + GGML_ASSERT(task.id != -1); QUE_DBG("new task, id = %d, front = %d\n", task.id, front); if (front) { queue_tasks.push_front(std::move(task)); @@ -520,8 +1304,8 @@ struct server_response { // for keeping track of all tasks waiting for the result std::unordered_set waiting_task_ids; - // the main result queue - std::vector queue_results; + // the main result queue (using ptr for polymorphism) + std::vector queue_results; std::mutex mutex_results; std::condition_variable condition_results; @@ -561,7 +1345,7 @@ struct server_response { } // This function blocks the thread until there is a response for one of the id_tasks - server_task_result recv(const std::unordered_set & id_tasks) { + server_task_result_ptr recv(const std::unordered_set & id_tasks) { while (true) { std::unique_lock lock(mutex_results); condition_results.wait(lock, [&]{ @@ -569,8 +1353,8 @@ struct server_response { }); for (int i = 0; i < (int) queue_results.size(); i++) { - if (id_tasks.find(queue_results[i].id) != id_tasks.end()) { - server_task_result res = queue_results[i]; + if (id_tasks.find(queue_results[i]->id) != id_tasks.end()) { + server_task_result_ptr res = std::move(queue_results[i]); queue_results.erase(queue_results.begin() + i); return res; } @@ -581,21 +1365,21 @@ struct server_response { } // single-task version of recv() - server_task_result recv(int id_task) { + server_task_result_ptr recv(int id_task) { std::unordered_set id_tasks = {id_task}; return recv(id_tasks); } // Send a new result to a waiting id_task - void send(server_task_result & result) { - SRV_DBG("sending result for task id = %d\n", result.id); + void send(server_task_result_ptr && result) { + SRV_DBG("sending result for task id = %d\n", result->id); std::unique_lock lock(mutex_results); for (const auto & id_task : waiting_task_ids) { - if (result.id == id_task) { - SRV_DBG("task id = %d moved to result queue\n", result.id); + if (result->id == id_task) { + SRV_DBG("task id = %d pushed to result queue\n", result->id); - queue_results.push_back(std::move(result)); + queue_results.emplace_back(std::move(result)); condition_results.notify_all(); return; } @@ -694,8 +1478,9 @@ struct server_context { params_dft.devices = params_base.speculative.devices; params_dft.model = 
params_base.speculative.model; - params_dft.n_ctx = params_base.speculative.n_ctx; + params_dft.n_ctx = params_base.speculative.n_ctx == 0 ? params_base.n_ctx / params_base.n_parallel : params_base.speculative.n_ctx; params_dft.n_gpu_layers = params_base.speculative.n_gpu_layers; + params_dft.n_parallel = 1; common_init_result llama_init_dft = common_init_from_params(params_dft); @@ -715,8 +1500,14 @@ struct server_context { return false; } - cparams_dft = common_context_params_to_llama(params_base); - cparams_dft.n_batch = llama_n_ctx(llama_init_dft.context); + const int n_ctx_dft = llama_n_ctx(llama_init_dft.context); + + cparams_dft = common_context_params_to_llama(params_dft); + cparams_dft.n_batch = n_ctx_dft; + + // force F16 KV cache for the draft model for extra performance + cparams_dft.type_k = GGML_TYPE_F16; + cparams_dft.type_v = GGML_TYPE_F16; // the context is not needed - we will create one for each slot llama_free(llama_init_dft.context); @@ -747,6 +1538,7 @@ struct server_context { server_slot slot; slot.id = i; + slot.ctx = ctx; slot.n_ctx = n_ctx_slot; slot.n_predict = params_base.n_predict; @@ -779,8 +1571,7 @@ struct server_context { slots.push_back(slot); } - default_generation_settings_for_props = get_formated_generation(slots.front()); - default_generation_settings_for_props["seed"] = -1; + default_generation_settings_for_props = slots[0].to_json(); // the update_slots() logic will always submit a maximum of n_batch or n_parallel tokens // note that n_batch can be > n_ctx (e.g. for non-causal attention models such as BERT where the KV cache is not used) @@ -823,220 +1614,69 @@ struct server_context { continue; } - // length of the Longest Common Subsequence between the current slot's prompt and the input prompt - int cur_lcs_len = common_lcs(slot.cache_tokens, task.prompt_tokens); - - // fraction of the common subsequence length compared to the current slot's prompt length - float cur_similarity = static_cast(cur_lcs_len) / static_cast(slot.cache_tokens.size()); - - // select the current slot if the criteria match - if (cur_lcs_len > lcs_len && cur_similarity > slot_prompt_similarity) { - lcs_len = cur_lcs_len; - similarity = cur_similarity; - ret = &slot; - } - } - - if (ret != nullptr) { - SLT_DBG(*ret, "selected slot by lcs similarity, lcs_len = %d, similarity = %f\n", lcs_len, similarity); - } - } - - // find the slot that has been least recently used - if (ret == nullptr) { - int64_t t_last = ggml_time_us(); - for (server_slot & slot : slots) { - // skip the slot if it is not available - if (slot.is_processing()) { - continue; - } - - // select the current slot if the criteria match - if (slot.t_last_used < t_last) { - t_last = slot.t_last_used; - ret = &slot; - } - } - - if (ret != nullptr) { - SLT_DBG(*ret, "selected slot by lru, t_last = %" PRId64 "\n", t_last); - } - } - - return ret; - } - - bool launch_slot_with_task(server_slot & slot, const server_task & task) { - // Sampling parameter defaults are loaded from the global server context (but individual requests can still override them) - slot_params defaults; - defaults.sampling = params_base.sampling; - defaults.speculative = params_base.speculative; - - const auto & data = task.data; - - if (data.count("__oaicompat") != 0) { - slot.oaicompat = true; - slot.oaicompat_model = json_value(data, "model", std::string(DEFAULT_OAICOMPAT_MODEL)); - } else { - slot.oaicompat = false; - slot.oaicompat_model = ""; - } - - slot.params.stream = json_value(data, "stream", false); - slot.params.cache_prompt = 
json_value(data, "cache_prompt", true); - slot.params.n_predict = json_value(data, "n_predict", json_value(data, "max_tokens", defaults.n_predict)); - slot.params.n_indent = json_value(data, "n_indent", defaults.n_indent); - slot.params.n_keep = json_value(data, "n_keep", defaults.n_keep); - slot.params.n_discard = json_value(data, "n_discard", defaults.n_discard); - //slot.params.t_max_prompt_ms = json_value(data, "t_max_prompt_ms", defaults.t_max_prompt_ms); // TODO: implement - slot.params.t_max_predict_ms = json_value(data, "t_max_predict_ms", defaults.t_max_predict_ms); - - slot.params.sampling.top_k = json_value(data, "top_k", defaults.sampling.top_k); - slot.params.sampling.top_p = json_value(data, "top_p", defaults.sampling.top_p); - slot.params.sampling.min_p = json_value(data, "min_p", defaults.sampling.min_p); - slot.params.sampling.xtc_probability = json_value(data, "xtc_probability", defaults.sampling.xtc_probability); - slot.params.sampling.xtc_threshold = json_value(data, "xtc_threshold", defaults.sampling.xtc_threshold); - slot.params.sampling.typ_p = json_value(data, "typical_p", defaults.sampling.typ_p); - slot.params.sampling.temp = json_value(data, "temperature", defaults.sampling.temp); - slot.params.sampling.dynatemp_range = json_value(data, "dynatemp_range", defaults.sampling.dynatemp_range); - slot.params.sampling.dynatemp_exponent = json_value(data, "dynatemp_exponent", defaults.sampling.dynatemp_exponent); - slot.params.sampling.penalty_last_n = json_value(data, "repeat_last_n", defaults.sampling.penalty_last_n); - slot.params.sampling.penalty_repeat = json_value(data, "repeat_penalty", defaults.sampling.penalty_repeat); - slot.params.sampling.penalty_freq = json_value(data, "frequency_penalty", defaults.sampling.penalty_freq); - slot.params.sampling.penalty_present = json_value(data, "presence_penalty", defaults.sampling.penalty_present); - slot.params.sampling.dry_multiplier = json_value(data, "dry_multiplier", defaults.sampling.dry_multiplier); - slot.params.sampling.dry_base = json_value(data, "dry_base", defaults.sampling.dry_base); - slot.params.sampling.dry_allowed_length = json_value(data, "dry_allowed_length", defaults.sampling.dry_allowed_length); - slot.params.sampling.dry_penalty_last_n = json_value(data, "dry_penalty_last_n", defaults.sampling.dry_penalty_last_n); - slot.params.sampling.mirostat = json_value(data, "mirostat", defaults.sampling.mirostat); - slot.params.sampling.mirostat_tau = json_value(data, "mirostat_tau", defaults.sampling.mirostat_tau); - slot.params.sampling.mirostat_eta = json_value(data, "mirostat_eta", defaults.sampling.mirostat_eta); - slot.params.sampling.penalize_nl = json_value(data, "penalize_nl", defaults.sampling.penalize_nl); - slot.params.sampling.seed = json_value(data, "seed", defaults.sampling.seed); - slot.params.sampling.n_probs = json_value(data, "n_probs", defaults.sampling.n_probs); - slot.params.sampling.min_keep = json_value(data, "min_keep", defaults.sampling.min_keep); - - slot.params.speculative.n_min = json_value(data, "speculative.n_min", defaults.speculative.n_min); - slot.params.speculative.n_max = json_value(data, "speculative.n_max", defaults.speculative.n_max); - slot.params.speculative.p_min = json_value(data, "speculative.p_min", defaults.speculative.p_min); - - slot.params.speculative.n_min = std::min(slot.params.speculative.n_max, slot.params.speculative.n_min); - - if (slot.params.sampling.dry_base < 1.0f) { - slot.params.sampling.dry_base = defaults.sampling.dry_base; - } - - // sequence 
breakers for DRY - { - // Currently, this is not compatible with TextGen WebUI, Koboldcpp and SillyTavern format - // Ref: https://github.com/oobabooga/text-generation-webui/blob/d1af7a41ade7bd3c3a463bfa640725edb818ebaf/extensions/openai/typing.py#L39 - - if (data.contains("dry_sequence_breakers")) { - slot.params.sampling.dry_sequence_breakers = json_value(data, "dry_sequence_breakers", std::vector()); - if (slot.params.sampling.dry_sequence_breakers.empty()) { - send_error(task, "Error: dry_sequence_breakers must be a non-empty array of strings", ERROR_TYPE_INVALID_REQUEST); - return false; - } - } - } - - // process "json_schema" and "grammar" - if (data.contains("json_schema") && !data.at("json_schema").is_null() && data.contains("grammar") && !data.at("grammar").is_null()) { - send_error(task, "Either \"json_schema\" or \"grammar\" can be specified, but not both", ERROR_TYPE_INVALID_REQUEST); - return false; - } - if (data.contains("json_schema") && !data.contains("grammar")) { - try { - auto schema = json_value(data, "json_schema", json::object()); - slot.params.sampling.grammar = json_schema_to_grammar(schema); - } catch (const std::exception & e) { - send_error(task, std::string("\"json_schema\": ") + e.what(), ERROR_TYPE_INVALID_REQUEST); - return false; - } - } else { - slot.params.sampling.grammar = json_value(data, "grammar", defaults.sampling.grammar); - } - - if (slot.n_predict > 0 && slot.params.n_predict > slot.n_predict) { - // Might be better to reject the request with a 400 ? - slot.params.n_predict = slot.n_predict; - SLT_WRN(slot, "n_predict = %d exceeds server configuration, setting to %d", slot.n_predict, slot.n_predict); - } - - { - slot.params.sampling.logit_bias.clear(); - - if (json_value(data, "ignore_eos", false) && has_eos_token) { - slot.params.sampling.logit_bias.push_back({llama_token_eos(model), -INFINITY}); - } - - const auto & logit_bias = data.find("logit_bias"); - if (logit_bias != data.end() && logit_bias->is_array()) { - const int n_vocab = llama_n_vocab(model); - for (const auto & el : *logit_bias) { - // TODO: we may want to throw errors here, in case "el" is incorrect - if (el.is_array() && el.size() == 2) { - float bias; - if (el[1].is_number()) { - bias = el[1].get(); - } else if (el[1].is_boolean() && !el[1].get()) { - bias = -INFINITY; - } else { - continue; - } - - if (el[0].is_number_integer()) { - llama_token tok = el[0].get(); - if (tok >= 0 && tok < n_vocab) { - slot.params.sampling.logit_bias.push_back({tok, bias}); - } - } else if (el[0].is_string()) { - auto toks = common_tokenize(model, el[0].get(), false); - for (auto tok : toks) { - slot.params.sampling.logit_bias.push_back({tok, bias}); - } - } - } - } - } - } - - { - slot.params.antiprompt.clear(); + // length of the Longest Common Subsequence between the current slot's prompt and the input prompt + int cur_lcs_len = common_lcs(slot.cache_tokens, task.prompt_tokens); - const auto & stop = data.find("stop"); - if (stop != data.end() && stop->is_array()) { - for (const auto & word : *stop) { - if (!word.empty()) { - slot.params.antiprompt.push_back(word); - } + // fraction of the common subsequence length compared to the current slot's prompt length + float cur_similarity = static_cast(cur_lcs_len) / static_cast(slot.cache_tokens.size()); + + // select the current slot if the criteria match + if (cur_lcs_len > lcs_len && cur_similarity > slot_prompt_similarity) { + lcs_len = cur_lcs_len; + similarity = cur_similarity; + ret = &slot; } } + + if (ret != nullptr) { + SLT_DBG(*ret, 
"selected slot by lcs similarity, lcs_len = %d, similarity = %f\n", lcs_len, similarity); + } } - { - const auto & samplers = data.find("samplers"); - if (samplers != data.end()) { - if (samplers->is_array()) { - std::vector sampler_names; - for (const auto & name : *samplers) { - if (name.is_string()) { - sampler_names.emplace_back(name); - } - } - slot.params.sampling.samplers = common_sampler_types_from_names(sampler_names, false); - } else if (samplers->is_string()){ - std::string sampler_string; - for (const auto & name : *samplers) { - sampler_string += name; - } - slot.params.sampling.samplers = common_sampler_types_from_chars(sampler_string); + // find the slot that has been least recently used + if (ret == nullptr) { + int64_t t_last = ggml_time_us(); + for (server_slot & slot : slots) { + // skip the slot if it is not available + if (slot.is_processing()) { + continue; } - } else { - slot.params.sampling.samplers = defaults.sampling.samplers; + + // select the current slot if the criteria match + if (slot.t_last_used < t_last) { + t_last = slot.t_last_used; + ret = &slot; + } + } + + if (ret != nullptr) { + SLT_DBG(*ret, "selected slot by lru, t_last = %" PRId64 "\n", t_last); } } + return ret; + } + + bool launch_slot_with_task(server_slot & slot, const server_task & task) { + slot.reset(); + slot.id_task = task.id; + slot.index = task.index; + slot.task_type = task.type; + slot.params = std::move(task.params); + slot.prompt_tokens = std::move(task.prompt_tokens); + + SLT_DBG(slot, "launching slot : %s\n", safe_json_to_str(slot.to_json()).c_str()); + + if (slot.n_predict > 0 && slot.params.n_predict > slot.n_predict) { + // Might be better to reject the request with a 400 ? + slot.params.n_predict = slot.n_predict; + SLT_WRN(slot, "n_predict = %d exceeds server configuration, setting to %d", slot.n_predict, slot.n_predict); + } + + if (slot.params.ignore_eos && has_eos_token) { + slot.params.sampling.logit_bias.push_back({llama_token_eos(model), -INFINITY}); + } + { if (slot.smpl != nullptr) { common_sampler_free(slot.smpl); @@ -1108,14 +1748,14 @@ struct server_context { const std::string str_test = slot.generated_text.substr(pos); bool send_text = true; - size_t stop_pos = slot.find_stopping_strings(str_test, token_str.size(), STOP_TYPE_FULL); + size_t stop_pos = slot.find_stopping_strings(str_test, token_str.size(), true); if (stop_pos != std::string::npos) { slot.generated_text.erase( slot.generated_text.begin() + pos + stop_pos, slot.generated_text.end()); pos = std::min(slot.n_sent_text, slot.generated_text.size()); } else if (slot.has_next_token) { - stop_pos = slot.find_stopping_strings(str_test, token_str.size(), STOP_TYPE_PARTIAL); + stop_pos = slot.find_stopping_strings(str_test, token_str.size(), false); send_text = stop_pos == std::string::npos; } @@ -1139,7 +1779,7 @@ struct server_context { // check the limits if (slot.n_decoded > 0 && slot.has_next_token && !slot.has_budget(params_base)) { - slot.stopped_limit = true; + slot.stop = STOP_TYPE_LIMIT; slot.has_next_token = false; SLT_DBG(slot, "stopped by limit, n_decoded = %d, n_predict = %d\n", slot.n_decoded, slot.params.n_predict); @@ -1148,7 +1788,7 @@ struct server_context { if (slot.has_new_line) { // if we have already seen a new line, we stop after a certain time limit if (slot.params.t_max_predict_ms > 0 && (ggml_time_us() - slot.t_start_generation > 1000.0f*slot.params.t_max_predict_ms)) { - slot.stopped_limit = true; + slot.stop = STOP_TYPE_LIMIT; slot.has_next_token = false; SLT_DBG(slot, "stopped by 
time limit, n_decoded = %d, t_max_predict_ms = %d ms\n", slot.n_decoded, (int) slot.params.t_max_predict_ms); @@ -1168,7 +1808,7 @@ struct server_context { } if (pos < slot.generated_text.size() && n_indent < slot.params.n_indent) { - slot.stopped_limit = true; + slot.stop = STOP_TYPE_LIMIT; slot.has_next_token = false; // cut the last line @@ -1197,7 +1837,7 @@ struct server_context { // if context shift is disabled, we stop when it reaches the context limit if (slot.n_past >= slot.n_ctx) { slot.truncated = true; - slot.stopped_limit = true; + slot.stop = STOP_TYPE_LIMIT; slot.has_next_token = false; SLT_DBG(slot, "stopped due to running out of context capacity, n_past = %d, n_prompt_tokens = %d, n_decoded = %d, n_ctx = %d\n", @@ -1205,7 +1845,7 @@ struct server_context { } if (llama_token_is_eog(model, result.tok)) { - slot.stopped_eos = true; + slot.stop = STOP_TYPE_EOS; slot.has_next_token = false; SLT_DBG(slot, "%s", "stopped by EOS\n"); @@ -1215,7 +1855,7 @@ struct server_context { if (slot.params.n_predict < 1 && slot.n_predict < 1 && slot.n_prompt_tokens + slot.n_decoded >= n_ctx_train) { slot.truncated = true; - slot.stopped_limit = true; + slot.stop = STOP_TYPE_LIMIT; slot.has_next_token = false; // stop prediction SLT_WRN(slot, @@ -1229,59 +1869,6 @@ struct server_context { return slot.has_next_token; // continue } - json get_formated_generation(const server_slot & slot) const { - std::vector samplers; - samplers.reserve(slot.params.sampling.samplers.size()); - for (const auto & sampler : slot.params.sampling.samplers) { - samplers.emplace_back(common_sampler_type_to_str(sampler)); - } - - return json { - {"n_ctx", slot.n_ctx}, - {"n_predict", slot.n_predict}, // Server configured n_predict - {"model", params_base.model_alias}, - {"seed", slot.params.sampling.seed}, - {"seed_cur", slot.smpl ? 
common_sampler_get_seed(slot.smpl) : 0}, - {"temperature", slot.params.sampling.temp}, - {"dynatemp_range", slot.params.sampling.dynatemp_range}, - {"dynatemp_exponent", slot.params.sampling.dynatemp_exponent}, - {"top_k", slot.params.sampling.top_k}, - {"top_p", slot.params.sampling.top_p}, - {"min_p", slot.params.sampling.min_p}, - {"xtc_probability", slot.params.sampling.xtc_probability}, - {"xtc_threshold", slot.params.sampling.xtc_threshold}, - {"typical_p", slot.params.sampling.typ_p}, - {"repeat_last_n", slot.params.sampling.penalty_last_n}, - {"repeat_penalty", slot.params.sampling.penalty_repeat}, - {"presence_penalty", slot.params.sampling.penalty_present}, - {"frequency_penalty", slot.params.sampling.penalty_freq}, - {"dry_multiplier", slot.params.sampling.dry_multiplier}, - {"dry_base", slot.params.sampling.dry_base}, - {"dry_allowed_length", slot.params.sampling.dry_allowed_length}, - {"dry_penalty_last_n", slot.params.sampling.dry_penalty_last_n}, - {"dry_sequence_breakers", slot.params.sampling.dry_sequence_breakers}, - {"mirostat", slot.params.sampling.mirostat}, - {"mirostat_tau", slot.params.sampling.mirostat_tau}, - {"mirostat_eta", slot.params.sampling.mirostat_eta}, - {"penalize_nl", slot.params.sampling.penalize_nl}, - {"stop", slot.params.antiprompt}, - {"max_tokens", slot.params.n_predict}, // User configured n_predict - {"n_keep", slot.params.n_keep}, - {"n_discard", slot.params.n_discard}, - {"ignore_eos", slot.params.sampling.ignore_eos}, - {"stream", slot.params.stream}, - //{"logit_bias", slot.params.sampling.logit_bias}, - {"n_probs", slot.params.sampling.n_probs}, - {"min_keep", slot.params.sampling.min_keep}, - {"grammar", slot.params.sampling.grammar}, - {"samplers", samplers}, - {"speculative", slot.can_speculate()}, - {"speculative.n_max", slot.params.speculative.n_max}, - {"speculative.n_min", slot.params.speculative.n_min}, - {"speculative.p_min", slot.params.speculative.p_min}, - }; - } - void send_error(const server_task & task, const std::string & error, const enum error_type type = ERROR_TYPE_SERVER) { send_error(task.id, error, type); } @@ -1293,108 +1880,103 @@ struct server_context { void send_error(const int id_task, const std::string & error, const enum error_type type = ERROR_TYPE_SERVER) { SRV_ERR("task id = %d, error: %s\n", id_task, error.c_str()); - server_task_result res; - res.id = id_task; - res.stop = false; - res.error = true; - res.data = format_error_response(error, type); - - queue_results.send(res); - } - - void send_partial_response(server_slot & slot, completion_token_output tkn) { - server_task_result res; - res.id = slot.id_task; - res.error = false; - res.stop = false; - res.data = json { - {"content", tkn.text_to_send}, - {"stop", false}, - {"id_slot", slot.id}, - {"multimodal", false}, - {"index", slot.index}, - }; + auto res = std::make_unique(); + res->id = id_task; + res->err_type = type; + res->err_msg = error; + + queue_results.send(std::move(res)); + } + + void send_partial_response(server_slot & slot, const completion_token_output & tkn) { + auto res = std::make_unique(); + + res->id = slot.id_task; + res->index = slot.index; + res->content = tkn.text_to_send; + + res->n_decoded = slot.n_decoded; + res->n_prompt_tokens = slot.n_prompt_tokens; + res->verbose = slot.params.verbose; + res->oaicompat = slot.params.oaicompat; + res->oaicompat_chat = slot.params.oaicompat_chat; + res->oaicompat_model = slot.params.oaicompat_model; + res->oaicompat_cmpl_id = slot.params.oaicompat_cmpl_id; + + // populate res.probs_output if 
(slot.params.sampling.n_probs > 0) { const llama_tokens to_send_toks = common_tokenize(ctx, tkn.text_to_send, false); + const size_t probs_pos = std::min(slot.n_sent_token_probs, slot.generated_token_probs.size()); const size_t probs_stop_pos = std::min(slot.n_sent_token_probs + to_send_toks.size(), slot.generated_token_probs.size()); std::vector probs_output; if (probs_pos < probs_stop_pos) { - probs_output = std::vector( + res->probs_output = std::vector( slot.generated_token_probs.begin() + probs_pos, slot.generated_token_probs.begin() + probs_stop_pos); } - slot.n_sent_token_probs = probs_stop_pos; - - res.data["completion_probabilities"] = probs_vector_to_json(ctx, probs_output); } - if (slot.oaicompat) { - res.data["oaicompat_token_ctr"] = slot.n_decoded; - res.data["model"] = slot.oaicompat_model; + // populate timings if this is final response or timings_per_token is enabled + if (slot.stop != STOP_TYPE_NONE || slot.params.timings_per_token) { + res->timings = slot.get_timings(); } - queue_results.send(res); + queue_results.send(std::move(res)); } - void send_final_response(const server_slot & slot) { - server_task_result res; - res.id = slot.id_task; - res.error = false; - res.stop = true; - res.data = json { - {"content", !slot.params.stream ? slot.generated_text : ""}, - {"id_slot", slot.id}, - {"stop", true}, - {"model", params_base.model_alias}, - {"tokens_predicted", slot.n_decoded}, - {"tokens_evaluated", slot.n_prompt_tokens}, - {"generation_settings", get_formated_generation(slot)}, - {"prompt", common_detokenize(ctx, slot.prompt_tokens)}, - {"has_new_line", slot.has_new_line}, - {"truncated", slot.truncated}, - {"stopped_eos", slot.stopped_eos}, - {"stopped_word", slot.stopped_word}, - {"stopped_limit", slot.stopped_limit}, - {"stopping_word", slot.stopping_word}, - {"tokens_cached", slot.n_past}, - {"timings", slot.get_formated_timings()}, - {"index", slot.index}, - }; + void send_final_response(server_slot & slot) { + auto res = std::make_unique(); + res->id = slot.id_task; + res->id_slot = slot.id; + + res->index = slot.index; + res->content = slot.generated_text; + res->timings = slot.get_timings(); + res->prompt = common_detokenize(ctx, slot.prompt_tokens, true); + + res->truncated = slot.truncated; + res->n_decoded = slot.n_decoded; + res->n_prompt_tokens = slot.n_prompt_tokens; + res->n_tokens_cached = slot.n_past; + res->has_new_line = slot.has_new_line; + res->stopping_word = slot.stopping_word; + res->stop = slot.stop; + + res->verbose = slot.params.verbose; + res->stream = slot.params.stream; + res->oaicompat = slot.params.oaicompat; + res->oaicompat_chat = slot.params.oaicompat_chat; + res->oaicompat_model = slot.params.oaicompat_model; + res->oaicompat_cmpl_id = slot.params.oaicompat_cmpl_id; + // populate res.probs_output if (slot.params.sampling.n_probs > 0) { - std::vector probs; - if (!slot.params.stream && slot.stopped_word) { + if (!slot.params.stream && slot.stop == STOP_TYPE_WORD) { const llama_tokens stop_word_toks = common_tokenize(ctx, slot.stopping_word, false); size_t safe_offset = std::min(slot.generated_token_probs.size(), stop_word_toks.size()); - probs = std::vector( + res->probs_output = std::vector( slot.generated_token_probs.begin(), slot.generated_token_probs.end() - safe_offset); } else { - probs = std::vector( + res->probs_output = std::vector( slot.generated_token_probs.begin(), slot.generated_token_probs.end()); } - - res.data["completion_probabilities"] = probs_vector_to_json(ctx, probs); } - if (slot.oaicompat) { - 
res.data["oaicompat_token_ctr"] = slot.n_decoded; - res.data["model"] = slot.oaicompat_model; - } + res->generation_params = slot.params; // copy the parameters - queue_results.send(res); + queue_results.send(std::move(res)); } void send_embedding(const server_slot & slot, const llama_batch & batch) { - server_task_result res; - res.id = slot.id_task; - res.error = false; - res.stop = true; + auto res = std::make_unique(); + res->id = slot.id_task; + res->index = slot.index; const int n_embd = llama_n_embd(model); @@ -1413,32 +1995,23 @@ struct server_context { if (embd == NULL) { SLT_ERR(slot, "failed to get embeddings, token = %d, seq_id = %d\n", batch.token[i], batch.seq_id[i][0]); - res.data = json { - {"embedding", std::vector(n_embd, 0.0f)}, - {"index", slot.index}, - }; - + res->embedding = std::vector(n_embd, 0.0f); continue; } common_embd_normalize(embd, embd_res.data(), n_embd); - - res.data = json { - {"embedding", embd_res}, - {"index", slot.index}, - }; + res->embedding = embd_res; } SLT_DBG(slot, "%s", "sending embeddings\n"); - queue_results.send(res); + queue_results.send(std::move(res)); } void send_rerank(const server_slot & slot, const llama_batch & batch) { - server_task_result res; - res.id = slot.id_task; - res.error = false; - res.stop = true; + auto res = std::make_unique(); + res->id = slot.id_task; + res->index = slot.index; for (int i = 0; i < batch.n_tokens; ++i) { if (!batch.logits[i] || batch.seq_id[i][0] != slot.id) { @@ -1453,104 +2026,29 @@ struct server_context { if (embd == NULL) { SLT_ERR(slot, "failed to get embeddings, token = %d, seq_id = %d\n", batch.token[i], batch.seq_id[i][0]); - res.data = json { - {"index", slot.index}, - {"score", -1e6}, - }; - + res->score = -1e6; continue; } - res.data = json { - {"index", slot.index}, - {"score", embd[0]}, - }; + res->score = embd[0]; } - SLT_DBG(slot, "sending rerank result, res = '%s'\n", res.data.dump().c_str()); + SLT_DBG(slot, "sending rerank result, res.score = %f\n", res->score); - queue_results.send(res); + queue_results.send(std::move(res)); } // // Functions to create new task(s) and receive result(s) // - // break the input "prompt" into multiple tasks if needed, then format and tokenize the input prompt(s) - std::vector create_tasks_inference(json data, server_task_inf_type inf_type) { - std::vector tasks; - auto create_task = [&](json & task_data, llama_tokens & prompt_tokens) { - SRV_DBG("create task, n_tokens = %d\n", (int) prompt_tokens.size()); - server_task task; - task.id = queue_tasks.get_new_id(); - task.inf_type = inf_type; - task.type = SERVER_TASK_TYPE_INFERENCE; - task.data = task_data; - task.prompt_tokens = std::move(prompt_tokens); - tasks.push_back(std::move(task)); - }; - - static constexpr const char * error_msg = "\"prompt\" must be a string, an array of token ids or an array of prompts"; - if (!data.contains("prompt")) { - throw std::runtime_error(error_msg); - } - - // because llama_tokenize api is thread-safe, we can tokenize the prompt from HTTP thread - bool add_special = inf_type != SERVER_TASK_INF_TYPE_RERANK && inf_type != SERVER_TASK_INF_TYPE_INFILL; - std::vector tokenized_prompts = tokenize_input_prompts(ctx, data.at("prompt"), add_special, true); - switch (inf_type) { - case SERVER_TASK_INF_TYPE_RERANK: - { - // prompts[0] is the question - // the rest are the answers/documents - GGML_ASSERT(tokenized_prompts.size() > 1); - SRV_DBG("creating rerank tasks, n_prompts = %d\n", (int) tokenized_prompts.size() - 1); - for (size_t i = 1; i < tokenized_prompts.size(); i++) 
{ - data["index"] = i - 1; - auto tokens = format_rerank(model, tokenized_prompts[0], tokenized_prompts[i]); - create_task(data, tokens); - } - } break; - case SERVER_TASK_INF_TYPE_INFILL: - { - SRV_DBG("creating infill tasks, n_prompts = %d\n", (int) tokenized_prompts.size()); - for (size_t i = 0; i < tokenized_prompts.size(); i++) { - data["index"] = i; - auto tokens = format_infill( - ctx, - data.at("input_prefix"), - data.at("input_suffix"), - data.at("input_extra"), - params_base.n_batch, - params_base.n_predict, - slots[0].n_ctx, // TODO: there should be a better way - params_base.spm_infill, - tokenized_prompts[i] - ); - create_task(data, tokens); - } - } break; - default: - { - SRV_DBG("creating multi-prompt tasks, n_prompts = %d\n", (int) tokenized_prompts.size()); - for (size_t i = 0; i < tokenized_prompts.size(); i++) { - data["index"] = i; - create_task(data, tokenized_prompts[i]); - } - } - } - - return tasks; - } - void cancel_tasks(const std::unordered_set & id_tasks) { std::vector cancel_tasks; cancel_tasks.reserve(id_tasks.size()); for (const auto & id_task : id_tasks) { SRV_WRN("cancel task, id_task = %d\n", id_task); - server_task task; - task.type = SERVER_TASK_TYPE_CANCEL; + server_task task(SERVER_TASK_TYPE_CANCEL); task.id_target = id_task; cancel_tasks.push_back(task); queue_results.remove_waiting_task_id(id_task); @@ -1559,50 +2057,58 @@ struct server_context { queue_tasks.post(cancel_tasks, true); } - // receive the results from task(s) created by create_tasks_inference - void receive_cmpl_results( + // receive the results from task(s) + void receive_multi_results( const std::unordered_set & id_tasks, - const std::function&)> & result_handler, + const std::function&)> & result_handler, const std::function & error_handler) { - // TODO: currently, there is no way to detect the client has cancelled the request - std::vector results(id_tasks.size()); + std::vector results(id_tasks.size()); for (size_t i = 0; i < id_tasks.size(); i++) { - server_task_result result = queue_results.recv(id_tasks); + server_task_result_ptr result = queue_results.recv(id_tasks); - if (result.error) { - error_handler(result.data); + if (result->is_error()) { + error_handler(result->to_json()); cancel_tasks(id_tasks); return; } - const size_t idx = result.data["index"]; + GGML_ASSERT( + dynamic_cast(result.get()) != nullptr + || dynamic_cast(result.get()) != nullptr + || dynamic_cast(result.get()) != nullptr + ); + const size_t idx = result->get_index(); GGML_ASSERT(idx < results.size() && "index out of range"); - - results[idx] = result; + results[idx] = std::move(result); } result_handler(results); } - // receive the results from task(s) created by create_tasks_inference, in stream mode + // receive the results from task(s), in stream mode void receive_cmpl_results_stream( - const std::unordered_set & id_tasks, const - std::function & result_handler, const - std::function & error_handler) { + const std::unordered_set & id_tasks, + const std::function & result_handler, + const std::function & error_handler) { size_t n_finished = 0; while (true) { - server_task_result result = queue_results.recv(id_tasks); - if (!result_handler(result)) { + server_task_result_ptr result = queue_results.recv(id_tasks); + + if (result->is_error()) { + error_handler(result->to_json()); cancel_tasks(id_tasks); - break; + return; } - if (result.error) { - error_handler(result.data); + GGML_ASSERT( + dynamic_cast(result.get()) != nullptr + || dynamic_cast(result.get()) != nullptr + ); + if 
(!result_handler(result)) { cancel_tasks(id_tasks); break; } - if (result.stop) { + if (result->is_stop()) { if (++n_finished == id_tasks.size()) { break; } @@ -1616,9 +2122,12 @@ struct server_context { void process_single_task(server_task task) { switch (task.type) { - case SERVER_TASK_TYPE_INFERENCE: + case SERVER_TASK_TYPE_COMPLETION: + case SERVER_TASK_TYPE_INFILL: + case SERVER_TASK_TYPE_EMBEDDING: + case SERVER_TASK_TYPE_RERANK: { - const int id_slot = json_value(task.data, "id_slot", -1); + const int id_slot = task.id_selected_slot; server_slot * slot = id_slot != -1 ? get_slot_by_id(id_slot) : get_available_slot(task); @@ -1635,13 +2144,6 @@ struct server_context { break; } - slot->reset(); - - slot->id_task = task.id; - slot->inf_type = task.inf_type; - slot->index = json_value(task.data, "index", 0); - slot->prompt_tokens = std::move(task.prompt_tokens); - if (!launch_slot_with_task(*slot, task)) { SRV_ERR("failed to launch slot with task, id_task = %d\n", task.id); break; @@ -1669,21 +2171,7 @@ struct server_context { int n_processing_slots = 0; for (server_slot & slot : slots) { - json slot_data = get_formated_generation(slot); - slot_data["id"] = slot.id; - slot_data["id_task"] = slot.id_task; - slot_data["is_processing"] = slot.is_processing(); - slot_data["prompt"] = common_detokenize(ctx, slot.prompt_tokens); - slot_data["next_token"] = { - {"has_next_token", slot.has_next_token}, - {"has_new_line", slot.has_new_line}, - {"n_remain", slot.n_remaining}, - {"n_decoded", slot.n_decoded}, - {"stopped_eos", slot.stopped_eos}, - {"stopped_word", slot.stopped_word}, - {"stopped_limit", slot.stopped_limit}, - {"stopping_word", slot.stopping_word}, - }; + json slot_data = slot.to_json(); if (slot.is_processing()) { n_processing_slots++; @@ -1695,43 +2183,38 @@ struct server_context { } SRV_DBG("n_idle_slots = %d, n_processing_slots = %d\n", n_idle_slots, n_processing_slots); - server_task_result res; - res.id = task.id; - res.stop = true; - res.error = false; - res.data = { - { "idle", n_idle_slots }, - { "processing", n_processing_slots }, - { "deferred", queue_tasks.queue_tasks_deferred.size() }, - { "t_start", metrics.t_start}, - - { "n_prompt_tokens_processed_total", metrics.n_prompt_tokens_processed_total}, - { "t_tokens_generation_total", metrics.t_tokens_generation_total}, - { "n_tokens_predicted_total", metrics.n_tokens_predicted_total}, - { "t_prompt_processing_total", metrics.t_prompt_processing_total}, + auto res = std::make_unique(); + res->id = task.id; + res->slots_data = std::move(slots_data); + res->n_idle_slots = n_idle_slots; + res->n_processing_slots = n_processing_slots; + res->n_tasks_deferred = queue_tasks.queue_tasks_deferred.size(); + res->t_start = metrics.t_start; - { "n_prompt_tokens_processed", metrics.n_prompt_tokens_processed}, - { "t_prompt_processing", metrics.t_prompt_processing}, - { "n_tokens_predicted", metrics.n_tokens_predicted}, - { "t_tokens_generation", metrics.t_tokens_generation}, + res->kv_cache_tokens_count = llama_get_kv_cache_token_count(ctx); + res->kv_cache_used_cells = llama_get_kv_cache_used_cells(ctx); - { "n_decode_total", metrics.n_decode_total}, - { "n_busy_slots_total", metrics.n_busy_slots_total}, + res->n_prompt_tokens_processed_total = metrics.n_prompt_tokens_processed_total; + res->t_prompt_processing_total = metrics.t_prompt_processing_total; + res->n_tokens_predicted_total = metrics.n_tokens_predicted_total; + res->t_tokens_generation_total = metrics.t_tokens_generation_total; - { "kv_cache_tokens_count", 
llama_get_kv_cache_token_count(ctx)}, - { "kv_cache_used_cells", llama_get_kv_cache_used_cells(ctx)}, + res->n_prompt_tokens_processed = metrics.n_prompt_tokens_processed; + res->t_prompt_processing = metrics.t_prompt_processing; + res->n_tokens_predicted = metrics.n_tokens_predicted; + res->t_tokens_generation = metrics.t_tokens_generation; - { "slots", slots_data }, - }; + res->n_decode_total = metrics.n_decode_total; + res->n_busy_slots_total = metrics.n_busy_slots_total; - if (json_value(task.data, "reset_bucket", false)) { + if (task.metrics_reset_bucket) { metrics.reset_bucket(); } - queue_results.send(res); + queue_results.send(std::move(res)); } break; case SERVER_TASK_TYPE_SLOT_SAVE: { - int id_slot = task.data.at("id_slot"); + int id_slot = task.slot_action.slot_id; server_slot * slot = get_slot_by_id(id_slot); if (slot == nullptr) { send_error(task, "Invalid slot ID", ERROR_TYPE_INVALID_REQUEST); @@ -1747,32 +2230,27 @@ struct server_context { const size_t token_count = slot->cache_tokens.size(); const int64_t t_start = ggml_time_us(); - std::string filename = task.data.at("filename"); - std::string filepath = task.data.at("filepath"); + std::string filename = task.slot_action.filename; + std::string filepath = task.slot_action.filepath; const size_t nwrite = llama_state_seq_save_file(ctx, filepath.c_str(), slot->id, slot->cache_tokens.data(), token_count); const int64_t t_end = ggml_time_us(); const double t_save_ms = (t_end - t_start) / 1000.0; - server_task_result result; - result.id = task.id; - result.stop = true; - result.error = false; - result.data = json { - { "id_slot", id_slot }, - { "filename", filename }, - { "n_saved", token_count }, // tokens saved - { "n_written", nwrite }, // bytes written - { "timings", { - { "save_ms", t_save_ms } - } } - }; - queue_results.send(result); + auto res = std::make_unique(); + res->id = task.id; + res->id_slot = id_slot; + res->filename = filename; + res->is_save = true; + res->n_tokens = token_count; + res->n_bytes = nwrite; + res->t_ms = t_save_ms; + queue_results.send(std::move(res)); } break; case SERVER_TASK_TYPE_SLOT_RESTORE: { - int id_slot = task.data.at("id_slot"); + int id_slot = task.slot_action.slot_id; server_slot * slot = get_slot_by_id(id_slot); if (slot == nullptr) { send_error(task, "Invalid slot ID", ERROR_TYPE_INVALID_REQUEST); @@ -1787,8 +2265,8 @@ struct server_context { const int64_t t_start = ggml_time_us(); - std::string filename = task.data.at("filename"); - std::string filepath = task.data.at("filepath"); + std::string filename = task.slot_action.filename; + std::string filepath = task.slot_action.filepath; slot->cache_tokens.resize(slot->n_ctx); size_t token_count = 0; @@ -1803,24 +2281,19 @@ struct server_context { const int64_t t_end = ggml_time_us(); const double t_restore_ms = (t_end - t_start) / 1000.0; - server_task_result result; - result.id = task.id; - result.stop = true; - result.error = false; - result.data = json { - { "id_slot", id_slot }, - { "filename", filename }, - { "n_restored", token_count }, // tokens restored - { "n_read", nread }, // bytes read - { "timings", { - { "restore_ms", t_restore_ms } - } } - }; - queue_results.send(result); + auto res = std::make_unique(); + res->id = task.id; + res->id_slot = id_slot; + res->filename = filename; + res->is_save = false; + res->n_tokens = token_count; + res->n_bytes = nread; + res->t_ms = t_restore_ms; + queue_results.send(std::move(res)); } break; case SERVER_TASK_TYPE_SLOT_ERASE: { - int id_slot = task.data.at("id_slot"); + int id_slot = 
task.slot_action.slot_id; server_slot * slot = get_slot_by_id(id_slot); if (slot == nullptr) { send_error(task, "Invalid slot ID", ERROR_TYPE_INVALID_REQUEST); @@ -1838,25 +2311,18 @@ struct server_context { llama_kv_cache_seq_rm(ctx, slot->id, -1, -1); slot->cache_tokens.clear(); - server_task_result result; - result.id = task.id; - result.stop = true; - result.error = false; - result.data = json { - { "id_slot", id_slot }, - { "n_erased", n_erased } - }; - queue_results.send(result); + auto res = std::make_unique(); + res->id = task.id; + res->id_slot = id_slot; + res->n_erased = n_erased; + queue_results.send(std::move(res)); } break; case SERVER_TASK_TYPE_SET_LORA: { common_lora_adapters_apply(ctx, loras); - server_task_result result; - result.id = task.id; - result.stop = true; - result.error = false; - result.data = json{{ "success", true }}; - queue_results.send(result); + auto res = std::make_unique(); + res->id = task.id; + queue_results.send(std::move(res)); } break; } } @@ -1886,10 +2352,8 @@ struct server_context { { SRV_DBG("%s", "posting NEXT_RESPONSE\n"); - server_task task; - task.type = SERVER_TASK_TYPE_NEXT_RESPONSE; - task.id_target = -1; - + server_task task(SERVER_TASK_TYPE_NEXT_RESPONSE); + task.id = queue_tasks.get_new_id(); queue_tasks.post(task); } @@ -2003,7 +2467,7 @@ struct server_context { continue; } - if (slot.inf_type == SERVER_TASK_INF_TYPE_EMBEDDING || slot.inf_type == SERVER_TASK_INF_TYPE_RERANK) { + if (slot.is_non_causal()) { if (slot.n_prompt_tokens > n_ubatch) { slot.release(); send_error(slot, "input is too large to process. increase the physical batch size", ERROR_TYPE_SERVER); @@ -2118,7 +2582,7 @@ struct server_context { } // non-causal tasks require to fit the entire prompt in the physical batch - if (slot.inf_type == SERVER_TASK_INF_TYPE_EMBEDDING || slot.inf_type == SERVER_TASK_INF_TYPE_RERANK) { + if (slot.is_non_causal()) { // cannot fit the prompt in the current batch - will try next iter if (batch.n_tokens + slot.n_prompt_tokens > n_batch) { continue; @@ -2126,10 +2590,7 @@ struct server_context { } // check that we are in the right batch_type, if not defer the slot - const bool slot_type = - slot.inf_type == SERVER_TASK_INF_TYPE_EMBEDDING || - slot.inf_type == SERVER_TASK_INF_TYPE_RERANK ? 
1 : 0; - + int slot_type = slot.is_non_causal(); if (batch_type == -1) { batch_type = slot_type; } else if (batch_type != slot_type) { @@ -2246,7 +2707,7 @@ struct server_context { } if (slot.state == SLOT_STATE_DONE_PROMPT) { - if (slot.inf_type == SERVER_TASK_INF_TYPE_EMBEDDING) { + if (slot.task_type == SERVER_TASK_TYPE_EMBEDDING) { // prompt evaluated for embedding send_embedding(slot, batch_view); slot.release(); @@ -2254,7 +2715,7 @@ struct server_context { continue; // continue loop of slots } - if (slot.inf_type == SERVER_TASK_INF_TYPE_RERANK) { + if (slot.task_type == SERVER_TASK_TYPE_RERANK) { send_rerank(slot, batch_view); slot.release(); slot.i_batch = -1; @@ -2274,21 +2735,28 @@ struct server_context { common_sampler_accept(slot.smpl, id, true); slot.n_decoded += 1; + + const int64_t t_current = ggml_time_us(); + if (slot.n_decoded == 1) { - slot.t_start_generation = ggml_time_us(); + slot.t_start_generation = t_current; slot.t_prompt_processing = (slot.t_start_generation - slot.t_start_process_prompt) / 1e3; metrics.on_prompt_eval(slot); } + slot.t_token_generation = (t_current - slot.t_start_generation) / 1e3; + completion_token_output result; result.tok = id; const auto * cur_p = common_sampler_get_candidates(slot.smpl); for (size_t i = 0; i < (size_t) slot.params.sampling.n_probs; ++i) { + auto tok_id = cur_p->data[i].id; result.probs.push_back({ - cur_p->data[i].id, - i >= cur_p->size ? 0.0f : cur_p->data[i].p, + tok_id, + tokens_to_output_formatted_string(ctx, tok_id), + i >= cur_p->size ? 0.0f : cur_p->data[i].p, }); } @@ -2308,10 +2776,33 @@ struct server_context { continue; } + if (slot.state != SLOT_STATE_GENERATING) { + continue; + } + + // determine the max draft that fits the current slot state + int n_draft_max = slot.params.speculative.n_max; + + // note: n_past is not yet increased for the `id` token sampled above + // also, need to leave space for 1 extra token to allow context shifts + n_draft_max = std::min(n_draft_max, slot.n_ctx - slot.n_past - 2); + + if (slot.n_remaining > 0) { + n_draft_max = std::min(n_draft_max, slot.n_remaining - 1); + } + + SLT_DBG(slot, "max possible draft: %d\n", n_draft_max); + + if (n_draft_max < slot.params.speculative.n_min) { + SLT_DBG(slot, "the max possible draft is too small: %d < %d - skipping speculative decoding\n", n_draft_max, slot.params.speculative.n_min); + + continue; + } + llama_token id = slot.sampled; struct common_speculative_params params_spec; - params_spec.n_draft = slot.params.speculative.n_max; + params_spec.n_draft = n_draft_max; params_spec.n_reuse = llama_n_ctx(slot.ctx_dft) - slot.params.speculative.n_max; params_spec.p_min = slot.params.speculative.p_min; @@ -2319,6 +2810,8 @@ struct server_context { // ignore small drafts if (slot.params.speculative.n_min > (int) draft.size()) { + SLT_DBG(slot, "ignoring small draft: %d < %d\n", (int) draft.size(), slot.params.speculative.n_min); + continue; } @@ -2330,6 +2823,8 @@ struct server_context { common_batch_add(slot.batch_spec, draft[i], slot.n_past + 1 + i, { slot.id }, true); } + SLT_DBG(slot, "decoding speculative batch, size = %d\n", slot.batch_spec.n_tokens); + llama_decode(ctx, slot.batch_spec); // the accepted tokens from the speculation @@ -2358,7 +2853,7 @@ struct server_context { } } - SRV_DBG("accepted %d/%d draft tokens\n", (int) ids.size() - 1, (int) draft.size()); + SLT_DBG(slot, "accepted %d/%d draft tokens, new n_past = %d\n", (int) ids.size() - 1, (int) draft.size(), slot.n_past); } } @@ -2413,17 +2908,9 @@ int main(int argc, char ** 
argv) { common_init(); - // enabling this will output extra debug information in the HTTP responses from the server - // see format_final_response_oaicompat() - const bool verbose = params.verbosity > 9; - // struct that contains llama context and inference server_context ctx_server; - if (params.model_alias == "unknown") { - params.model_alias = params.model; - } - llama_backend_init(); llama_numa_init(params.numa); @@ -2432,16 +2919,6 @@ int main(int argc, char ** argv) { LOG_INF("%s\n", common_params_get_system_info(params).c_str()); LOG_INF("\n"); - // static files - std::map static_files = { - { "/", { index_html, index_html_len, "text/html; charset=utf-8" }}, - { "/completion.js", { completion_js, completion_js_len, "text/javascript; charset=utf-8" }}, - { "/deps_daisyui.min.css", { deps_daisyui_min_css, deps_daisyui_min_css_len, "text/css; charset=utf-8" }}, - { "/deps_markdown-it.js", { deps_markdown_it_js, deps_markdown_it_js_len, "text/javascript; charset=utf-8" }}, - { "/deps_tailwindcss.js", { deps_tailwindcss_js, deps_tailwindcss_js_len, "text/javascript; charset=utf-8" }}, - { "/deps_vue.esm-browser.js", { deps_vue_esm_browser_js, deps_vue_esm_browser_js_len, "text/javascript; charset=utf-8" }}, - }; - std::unique_ptr svr; #ifdef CPPHTTPLIB_OPENSSL_SUPPORT if (params.ssl_file_key != "" && params.ssl_file_cert != "") { @@ -2468,20 +2945,20 @@ int main(int argc, char ** argv) { auto res_error = [](httplib::Response & res, const json & error_data) { json final_response {{"error", error_data}}; - res.set_content(final_response.dump(-1, ' ', false, json::error_handler_t::replace), MIMETYPE_JSON); + res.set_content(safe_json_to_str(final_response), MIMETYPE_JSON); res.status = json_value(error_data, "code", 500); }; auto res_ok = [](httplib::Response & res, const json & data) { - res.set_content(data.dump(-1, ' ', false, json::error_handler_t::replace), MIMETYPE_JSON); + res.set_content(safe_json_to_str(data), MIMETYPE_JSON); res.status = 200; }; - svr->set_exception_handler([&res_error](const httplib::Request &, httplib::Response & res, std::exception_ptr ep) { + svr->set_exception_handler([&res_error](const httplib::Request &, httplib::Response & res, const std::exception_ptr & ep) { std::string message; try { std::rethrow_exception(ep); - } catch (std::exception & e) { + } catch (const std::exception & e) { message = e.what(); } catch (...) 
{ message = "Unknown Exception"; @@ -2522,7 +2999,7 @@ int main(int argc, char ** argv) { // Middlewares // - auto middleware_validate_api_key = [¶ms, &res_error, &static_files](const httplib::Request & req, httplib::Response & res) { + auto middleware_validate_api_key = [¶ms, &res_error](const httplib::Request & req, httplib::Response & res) { static const std::unordered_set public_endpoints = { "/health", "/models", @@ -2535,7 +3012,7 @@ int main(int argc, char ** argv) { } // If path is public or is static file, skip validation - if (public_endpoints.find(req.path) != public_endpoints.end() || static_files.find(req.path) != static_files.end()) { + if (public_endpoints.find(req.path) != public_endpoints.end() || req.path == "/") { return true; } @@ -2610,27 +3087,33 @@ int main(int argc, char ** argv) { } // request slots data using task queue - server_task task; + server_task task(SERVER_TASK_TYPE_METRICS); task.id = ctx_server.queue_tasks.get_new_id(); - task.type = SERVER_TASK_TYPE_METRICS; - ctx_server.queue_results.add_waiting_task_id(task.id); ctx_server.queue_tasks.post(task, true); // high-priority task // get the result - server_task_result result = ctx_server.queue_results.recv(task.id); + server_task_result_ptr result = ctx_server.queue_results.recv(task.id); ctx_server.queue_results.remove_waiting_task_id(task.id); + if (result->is_error()) { + res_error(res, result->to_json()); + return; + } + + // TODO: get rid of this dynamic_cast + auto res_metrics = dynamic_cast(result.get()); + GGML_ASSERT(res_metrics != nullptr); + // optionally return "fail_on_no_slot" error - const int n_idle_slots = result.data.at("idle"); if (req.has_param("fail_on_no_slot")) { - if (n_idle_slots == 0) { + if (res_metrics->n_idle_slots == 0) { res_error(res, format_error_response("no slot available", ERROR_TYPE_UNAVAILABLE)); return; } } - res_ok(res, result.data.at("slots")); + res_ok(res, res_metrics->slots_data); }; const auto handle_metrics = [&](const httplib::Request &, httplib::Response & res) { @@ -2640,83 +3123,77 @@ int main(int argc, char ** argv) { } // request slots data using task queue - server_task task; + server_task task(SERVER_TASK_TYPE_METRICS); task.id = ctx_server.queue_tasks.get_new_id(); - task.id_target = -1; - task.type = SERVER_TASK_TYPE_METRICS; - task.data.push_back({{"reset_bucket", true}}); + task.metrics_reset_bucket = true; ctx_server.queue_results.add_waiting_task_id(task.id); ctx_server.queue_tasks.post(task, true); // high-priority task // get the result - server_task_result result = ctx_server.queue_results.recv(task.id); + server_task_result_ptr result = ctx_server.queue_results.recv(task.id); ctx_server.queue_results.remove_waiting_task_id(task.id); - json data = result.data; - - const uint64_t n_prompt_tokens_processed = data.at("n_prompt_tokens_processed"); - const uint64_t t_prompt_processing = data.at("t_prompt_processing"); - - const uint64_t n_tokens_predicted = data.at("n_tokens_predicted"); - const uint64_t t_tokens_generation = data.at("t_tokens_generation"); - - const uint64_t n_decode_total = data.at("n_decode_total"); - const uint64_t n_busy_slots_total = data.at("n_busy_slots_total"); + if (result->is_error()) { + res_error(res, result->to_json()); + return; + } - const int32_t kv_cache_used_cells = data.at("kv_cache_used_cells"); + // TODO: get rid of this dynamic_cast + auto res_metrics = dynamic_cast(result.get()); + GGML_ASSERT(res_metrics != nullptr); // metrics definition: https://prometheus.io/docs/practices/naming/#metric-names json 
all_metrics_def = json { {"counter", {{ {"name", "prompt_tokens_total"}, {"help", "Number of prompt tokens processed."}, - {"value", (uint64_t) data.at("n_prompt_tokens_processed_total")} + {"value", (uint64_t) res_metrics->n_prompt_tokens_processed_total} }, { {"name", "prompt_seconds_total"}, {"help", "Prompt process time"}, - {"value", (uint64_t) data.at("t_prompt_processing_total") / 1.e3} + {"value", (uint64_t) res_metrics->t_prompt_processing_total / 1.e3} }, { {"name", "tokens_predicted_total"}, {"help", "Number of generation tokens processed."}, - {"value", (uint64_t) data.at("n_tokens_predicted_total")} + {"value", (uint64_t) res_metrics->n_tokens_predicted_total} }, { {"name", "tokens_predicted_seconds_total"}, {"help", "Predict process time"}, - {"value", (uint64_t) data.at("t_tokens_generation_total") / 1.e3} + {"value", (uint64_t) res_metrics->t_tokens_generation_total / 1.e3} }, { {"name", "n_decode_total"}, {"help", "Total number of llama_decode() calls"}, - {"value", n_decode_total} + {"value", res_metrics->n_decode_total} }, { {"name", "n_busy_slots_per_decode"}, {"help", "Average number of busy slots per llama_decode() call"}, - {"value", (float) n_busy_slots_total / (float) n_decode_total} + {"value", (float) res_metrics->n_busy_slots_total / (float) res_metrics->n_decode_total} }}}, {"gauge", {{ {"name", "prompt_tokens_seconds"}, {"help", "Average prompt throughput in tokens/s."}, - {"value", n_prompt_tokens_processed ? 1.e3 / t_prompt_processing * n_prompt_tokens_processed : 0.} + {"value", res_metrics->n_prompt_tokens_processed ? 1.e3 / res_metrics->t_prompt_processing * res_metrics->n_prompt_tokens_processed : 0.} },{ {"name", "predicted_tokens_seconds"}, {"help", "Average generation throughput in tokens/s."}, - {"value", n_tokens_predicted ? 1.e3 / t_tokens_generation * n_tokens_predicted : 0.} + {"value", res_metrics->n_tokens_predicted ? 1.e3 / res_metrics->t_tokens_generation * res_metrics->n_tokens_predicted : 0.} },{ {"name", "kv_cache_usage_ratio"}, {"help", "KV-cache usage. 1 means 100 percent usage."}, - {"value", 1. * kv_cache_used_cells / params.n_ctx} + {"value", 1. 
* res_metrics->kv_cache_used_cells / params.n_ctx} },{ {"name", "kv_cache_tokens"}, {"help", "KV-cache tokens."}, - {"value", (uint64_t) data.at("kv_cache_tokens_count")} + {"value", (uint64_t) res_metrics->kv_cache_tokens_count} },{ {"name", "requests_processing"}, {"help", "Number of request processing."}, - {"value", (uint64_t) data.at("processing")} + {"value", (uint64_t) res_metrics->n_processing_slots} },{ {"name", "requests_deferred"}, {"help", "Number of request deferred."}, - {"value", (uint64_t) data.at("deferred")} + {"value", (uint64_t) res_metrics->n_tasks_deferred} }}} }; @@ -2737,8 +3214,7 @@ int main(int argc, char ** argv) { } } - const int64_t t_start = data.at("t_start"); - res.set_header("Process-Start-Time-Unix", std::to_string(t_start)); + res.set_header("Process-Start-Time-Unix", std::to_string(res_metrics->t_start)); res.set_content(prometheus.str(), "text/plain; version=0.0.4"); res.status = 200; // HTTP OK @@ -2753,25 +3229,24 @@ int main(int argc, char ** argv) { } std::string filepath = params.slot_save_path + filename; - server_task task; - task.type = SERVER_TASK_TYPE_SLOT_SAVE; - task.data = { - { "id_slot", id_slot }, - { "filename", filename }, - { "filepath", filepath }, - }; + server_task task(SERVER_TASK_TYPE_SLOT_SAVE); + task.id = ctx_server.queue_tasks.get_new_id(); + task.slot_action.slot_id = id_slot; + task.slot_action.filename = filename; + task.slot_action.filepath = filepath; - const int id_task = ctx_server.queue_tasks.post(task); - ctx_server.queue_results.add_waiting_task_id(id_task); + ctx_server.queue_results.add_waiting_task_id(task.id); + ctx_server.queue_tasks.post(task); - server_task_result result = ctx_server.queue_results.recv(id_task); - ctx_server.queue_results.remove_waiting_task_id(id_task); + server_task_result_ptr result = ctx_server.queue_results.recv(task.id); + ctx_server.queue_results.remove_waiting_task_id(task.id); - if (result.error) { - res_error(res, result.data); - } else { - res_ok(res, result.data); + if (result->is_error()) { + res_error(res, result->to_json()); + return; } + + res_ok(res, result->to_json()); }; const auto handle_slots_restore = [&ctx_server, &res_error, &res_ok, ¶ms](const httplib::Request & req, httplib::Response & res, int id_slot) { @@ -2783,45 +3258,45 @@ int main(int argc, char ** argv) { } std::string filepath = params.slot_save_path + filename; - server_task task; - task.type = SERVER_TASK_TYPE_SLOT_RESTORE; - task.data = { - { "id_slot", id_slot }, - { "filename", filename }, - { "filepath", filepath }, - }; + server_task task(SERVER_TASK_TYPE_SLOT_RESTORE); + task.id = ctx_server.queue_tasks.get_new_id(); + task.slot_action.slot_id = id_slot; + task.slot_action.filename = filename; + task.slot_action.filepath = filepath; - const int id_task = ctx_server.queue_tasks.post(task); - ctx_server.queue_results.add_waiting_task_id(id_task); + ctx_server.queue_results.add_waiting_task_id(task.id); + ctx_server.queue_tasks.post(task); - server_task_result result = ctx_server.queue_results.recv(id_task); - ctx_server.queue_results.remove_waiting_task_id(id_task); + server_task_result_ptr result = ctx_server.queue_results.recv(task.id); + ctx_server.queue_results.remove_waiting_task_id(task.id); - if (result.error) { - res_error(res, result.data); - } else { - res_ok(res, result.data); + if (result->is_error()) { + res_error(res, result->to_json()); + return; } + + GGML_ASSERT(dynamic_cast(result.get()) != nullptr); + res_ok(res, result->to_json()); }; const auto handle_slots_erase = [&ctx_server, 
&res_error, &res_ok](const httplib::Request & /* req */, httplib::Response & res, int id_slot) { - server_task task; - task.type = SERVER_TASK_TYPE_SLOT_ERASE; - task.data = { - { "id_slot", id_slot }, - }; + server_task task(SERVER_TASK_TYPE_SLOT_ERASE); + task.id = ctx_server.queue_tasks.get_new_id(); + task.slot_action.slot_id = id_slot; - const int id_task = ctx_server.queue_tasks.post(task); - ctx_server.queue_results.add_waiting_task_id(id_task); + ctx_server.queue_results.add_waiting_task_id(task.id); + ctx_server.queue_tasks.post(task); - server_task_result result = ctx_server.queue_results.recv(id_task); - ctx_server.queue_results.remove_waiting_task_id(id_task); + server_task_result_ptr result = ctx_server.queue_results.recv(task.id); + ctx_server.queue_results.remove_waiting_task_id(task.id); - if (result.error) { - res_error(res, result.data); - } else { - res_ok(res, result.data); + if (result->is_error()) { + res_error(res, result->to_json()); + return; } + + GGML_ASSERT(dynamic_cast(result.get()) != nullptr); + res_ok(res, result->to_json()); }; const auto handle_slots_action = [¶ms, &res_error, &handle_slots_save, &handle_slots_restore, &handle_slots_erase](const httplib::Request & req, httplib::Response & res) { @@ -2854,9 +3329,11 @@ int main(int argc, char ** argv) { }; const auto handle_props = [&ctx_server, &res_ok](const httplib::Request &, httplib::Response & res) { + // this endpoint is publicly available, please only return what is safe to be exposed json data = { { "default_generation_settings", ctx_server.default_generation_settings_for_props }, { "total_slots", ctx_server.params_base.n_parallel }, + { "model_path", ctx_server.params_base.model }, { "chat_template", llama_get_chat_template(ctx_server.model) }, }; @@ -2876,13 +3353,50 @@ int main(int argc, char ** argv) { res_ok(res, {{ "success", true }}); }; - const auto handle_completions_generic = [&ctx_server, &res_error, &res_ok](server_task_inf_type inf_type, json & data, httplib::Response & res) { + // handle completion-like requests (completion, chat, infill) + // we can optionally provide a custom format for partial results and final results + const auto handle_completions_generic = [&ctx_server, &res_error, &res_ok]( + server_task_type type, + json & data, + httplib::Response & res, + bool oaicompat = false, + bool oaicompat_chat = false) { + GGML_ASSERT(type == SERVER_TASK_TYPE_COMPLETION || type == SERVER_TASK_TYPE_INFILL); + if (ctx_server.params_base.embedding) { res_error(res, format_error_response("This server does not support completions. 
Start it without `--embeddings`", ERROR_TYPE_NOT_SUPPORTED)); return; } - std::vector tasks = ctx_server.create_tasks_inference(data, inf_type); + auto completion_id = gen_chatcmplid(); + std::vector tasks; + + try { + std::vector tokenized_prompts = tokenize_input_prompts(ctx_server.ctx, data.at("prompt"), true, true); + tasks.reserve(tokenized_prompts.size()); + for (size_t i = 0; i < tokenized_prompts.size(); i++) { + server_task task = server_task(type); + + task.id = ctx_server.queue_tasks.get_new_id(); + task.index = i; + + task.prompt_tokens = std::move(tokenized_prompts[i]); + task.params = server_task::params_from_json_cmpl(ctx_server.model, ctx_server.params_base, data); + task.id_selected_slot = json_value(data, "id_slot", -1); + + // OAI-compat + task.params.oaicompat = oaicompat; + task.params.oaicompat_chat = oaicompat_chat; + task.params.oaicompat_cmpl_id = completion_id; + // oaicompat_model is already populated by params_from_json_cmpl + + tasks.push_back(task); + } + } catch (const std::exception & e) { + res_error(res, format_error_response(e.what(), ERROR_TYPE_INVALID_REQUEST)); + return; + } + ctx_server.queue_results.add_waiting_tasks(tasks); ctx_server.queue_tasks.post(tasks); @@ -2890,15 +3404,15 @@ int main(int argc, char ** argv) { const auto task_ids = server_task::get_list_id(tasks); if (!stream) { - ctx_server.receive_cmpl_results(task_ids, [&](std::vector & results) { + ctx_server.receive_multi_results(task_ids, [&](std::vector & results) { if (results.size() == 1) { // single result - res_ok(res, results[0].data); + res_ok(res, results[0]->to_json()); } else { // multiple results (multitask) json arr = json::array(); - for (const auto & res : results) { - arr.push_back(res.data); + for (auto & res : results) { + arr.push_back(res->to_json()); } res_ok(res, arr); } @@ -2908,12 +3422,26 @@ int main(int argc, char ** argv) { ctx_server.queue_results.remove_waiting_task_ids(task_ids); } else { - const auto chunked_content_provider = [task_ids, &ctx_server](size_t, httplib::DataSink & sink) { - ctx_server.receive_cmpl_results_stream(task_ids, [&](const server_task_result & result) -> bool { - return server_sent_event(sink, "data", result.data); + const auto chunked_content_provider = [task_ids, &ctx_server, oaicompat](size_t, httplib::DataSink & sink) { + ctx_server.receive_cmpl_results_stream(task_ids, [&](server_task_result_ptr & result) -> bool { + json res_json = result->to_json(); + if (res_json.is_array()) { + for (const auto & res : res_json) { + if (!server_sent_event(sink, "data", res)) { + return false; + } + } + return true; + } else { + return server_sent_event(sink, "data", res_json); + } }, [&](const json & error_data) { server_sent_event(sink, "error", error_data); }); + if (oaicompat) { + static const std::string ev_done = "data: [DONE]\n\n"; + sink.write(ev_done.data(), ev_done.size()); + } sink.done(); return false; }; @@ -2928,7 +3456,12 @@ int main(int argc, char ** argv) { const auto handle_completions = [&handle_completions_generic](const httplib::Request & req, httplib::Response & res) { json data = json::parse(req.body); - return handle_completions_generic(SERVER_TASK_INF_TYPE_COMPLETION, data, res); + return handle_completions_generic( + SERVER_TASK_TYPE_COMPLETION, + data, + res, + /* oaicompat */ false, + /* oaicompat_chat */ false); }; const auto handle_infill = [&ctx_server, &res_error, &handle_completions_generic](const httplib::Request & req, httplib::Response & res) { @@ -2951,6 +3484,11 @@ int main(int argc, char ** argv) { json 
data = json::parse(req.body); // validate input + if (data.contains("prompt") && !data.at("prompt").is_string()) { + // prompt is optional + res_error(res, format_error_response("\"prompt\" must be a string", ERROR_TYPE_INVALID_REQUEST)); + } + if (!data.contains("input_prefix")) { res_error(res, format_error_response("\"input_prefix\" is required", ERROR_TYPE_INVALID_REQUEST)); } @@ -2960,9 +3498,11 @@ int main(int argc, char ** argv) { } if (data.contains("input_extra") && !data.at("input_extra").is_array()) { + // input_extra is optional res_error(res, format_error_response("\"input_extra\" must be an array of {\"filename\": string, \"text\": string}", ERROR_TYPE_INVALID_REQUEST)); return; } + json input_extra = json_value(data, "input_extra", json::array()); for (const auto & chunk : input_extra) { // { "text": string, "filename": string } @@ -2978,67 +3518,40 @@ int main(int argc, char ** argv) { } data["input_extra"] = input_extra; // default to empty array if it's not exist - return handle_completions_generic(SERVER_TASK_INF_TYPE_INFILL, data, res); + std::string prompt = json_value(data, "prompt", std::string()); + std::vector tokenized_prompts = tokenize_input_prompts(ctx_server.ctx, prompt, true, true); + SRV_DBG("creating infill tasks, n_prompts = %d\n", (int) tokenized_prompts.size()); + data["prompt"] = format_infill( + ctx_server.ctx, + data.at("input_prefix"), + data.at("input_suffix"), + data.at("input_extra"), + ctx_server.params_base.n_batch, + ctx_server.params_base.n_predict, + ctx_server.slots[0].n_ctx, // TODO: there should be a better way + ctx_server.params_base.spm_infill, + tokenized_prompts[0] + ); + + return handle_completions_generic(SERVER_TASK_TYPE_INFILL, data, res); }; - // TODO: maybe merge this function with "handle_completions_generic" - const auto handle_chat_completions = [&ctx_server, ¶ms, &res_error, &res_ok, verbose](const httplib::Request & req, httplib::Response & res) { + const auto handle_chat_completions = [&ctx_server, ¶ms, &res_error, &handle_completions_generic](const httplib::Request & req, httplib::Response & res) { if (ctx_server.params_base.embedding) { res_error(res, format_error_response("This server does not support completions. 
Start it without `--embeddings`", ERROR_TYPE_NOT_SUPPORTED)); return; } json data = oaicompat_completion_params_parse(ctx_server.model, json::parse(req.body), params.chat_template); - - std::vector tasks = ctx_server.create_tasks_inference(data, SERVER_TASK_INF_TYPE_COMPLETION); - ctx_server.queue_results.add_waiting_tasks(tasks); - ctx_server.queue_tasks.post(tasks); - - bool stream = json_value(data, "stream", false); - const auto task_ids = server_task::get_list_id(tasks); - const auto completion_id = gen_chatcmplid(); - - if (!stream) { - ctx_server.receive_cmpl_results(task_ids, [&](const std::vector & results) { - // multitask is never support in chat completion, there is only one result - json result_oai = format_final_response_oaicompat(data, results[0].data, completion_id, /*.streaming =*/ false, verbose); - res_ok(res, result_oai); - }, [&](const json & error_data) { - res_error(res, error_data); - }); - - ctx_server.queue_results.remove_waiting_task_ids(task_ids); - } else { - const auto chunked_content_provider = [task_ids, &ctx_server, completion_id](size_t, httplib::DataSink & sink) { - ctx_server.receive_cmpl_results_stream(task_ids, [&](const server_task_result & result) -> bool { - std::vector result_array = format_partial_response_oaicompat(result.data, completion_id); - for (auto & event_data : result_array) { - if (event_data.empty()) { - continue; // skip the stop token - } - if (!server_sent_event(sink, "data", event_data)) { - return false; // connection is closed - } - } - return true; // ok - }, [&](const json & error_data) { - server_sent_event(sink, "error", error_data); - }); - static const std::string ev_done = "data: [DONE]\n\n"; - sink.write(ev_done.data(), ev_done.size()); - sink.done(); - return true; - }; - - auto on_complete = [task_ids, &ctx_server] (bool) { - ctx_server.queue_results.remove_waiting_task_ids(task_ids); - }; - - res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); - } + return handle_completions_generic( + SERVER_TASK_TYPE_COMPLETION, + data, + res, + /* oaicompat */ true, + /* oaicompat_chat */ true); }; - const auto handle_models = [¶ms, &ctx_server](const httplib::Request &, httplib::Response & res) { + const auto handle_models = [¶ms, &ctx_server, &res_ok](const httplib::Request &, httplib::Response & res) { json models = { {"object", "list"}, {"data", { @@ -3052,7 +3565,7 @@ int main(int argc, char ** argv) { }} }; - res.set_content(models.dump(), MIMETYPE_JSON); + res_ok(res, models); }; const auto handle_tokenize = [&ctx_server, &res_ok](const httplib::Request & req, httplib::Response & res) { @@ -3110,12 +3623,12 @@ int main(int argc, char ** argv) { const auto handle_embeddings = [&ctx_server, &res_error, &res_ok](const httplib::Request & req, httplib::Response & res) { const json body = json::parse(req.body); - bool is_openai = false; + bool oaicompat = false; // an input prompt can be a string or a list of tokens (integer) json prompt; if (body.count("input") != 0) { - is_openai = true; + oaicompat = true; prompt = body.at("input"); } else if (body.count("content") != 0) { // with "content", we only support single prompt @@ -3129,16 +3642,26 @@ int main(int argc, char ** argv) { json responses = json::array(); bool error = false; { - std::vector tasks = ctx_server.create_tasks_inference({{"prompt", prompt}}, SERVER_TASK_INF_TYPE_EMBEDDING); + std::vector tasks; + std::vector tokenized_prompts = tokenize_input_prompts(ctx_server.ctx, prompt, /* add_special */ false, true); + for (size_t i 
= 0; i < tokenized_prompts.size(); i++) { + server_task task = server_task(SERVER_TASK_TYPE_EMBEDDING); + task.id = ctx_server.queue_tasks.get_new_id(); + task.index = i; + task.prompt_tokens = std::move(tokenized_prompts[i]); + tasks.push_back(task); + } + ctx_server.queue_results.add_waiting_tasks(tasks); ctx_server.queue_tasks.post(tasks); // get the result std::unordered_set task_ids = server_task::get_list_id(tasks); - ctx_server.receive_cmpl_results(task_ids, [&](std::vector & results) { - for (const auto & res : results) { - responses.push_back(res.data); + ctx_server.receive_multi_results(task_ids, [&](std::vector & results) { + for (auto & res : results) { + GGML_ASSERT(dynamic_cast(res.get()) != nullptr); + responses.push_back(res->to_json()); } }, [&](const json & error_data) { res_error(res, error_data); @@ -3153,9 +3676,9 @@ int main(int argc, char ** argv) { } // write JSON response - json root = is_openai + json root = oaicompat ? format_embeddings_response_oaicompat(body, responses) - : responses[0]; + : responses.size() == 1 ? responses[0] : json(responses); res_ok(res, root); }; @@ -3194,29 +3717,33 @@ int main(int argc, char ** argv) { return; } - // construct prompt object: array of ["query", "doc0", "doc1", ...] - json prompt; - prompt.push_back(query); - for (const auto & doc : documents) { - prompt.push_back(doc); - } - - LOG_DBG("rerank prompt: %s\n", prompt.dump().c_str()); + llama_tokens tokenized_query = tokenize_input_prompts(ctx_server.ctx, query, /* add_special */ false, true)[0]; // create and queue the task json responses = json::array(); bool error = false; { - std::vector tasks = ctx_server.create_tasks_inference({{"prompt", prompt}}, SERVER_TASK_INF_TYPE_RERANK); + std::vector tasks; + std::vector tokenized_docs = tokenize_input_prompts(ctx_server.ctx, documents, /* add_special */ false, true); + tasks.reserve(tokenized_docs.size()); + for (size_t i = 0; i < tokenized_docs.size(); i++) { + server_task task = server_task(SERVER_TASK_TYPE_RERANK); + task.id = ctx_server.queue_tasks.get_new_id(); + task.index = i; + task.prompt_tokens = format_rerank(ctx_server.model, tokenized_query, tokenized_docs[i]); + tasks.push_back(task); + } + ctx_server.queue_results.add_waiting_tasks(tasks); ctx_server.queue_tasks.post(tasks); // get the result std::unordered_set task_ids = server_task::get_list_id(tasks); - ctx_server.receive_cmpl_results(task_ids, [&](std::vector & results) { - for (const auto & res : results) { - responses.push_back(res.data); + ctx_server.receive_multi_results(task_ids, [&](std::vector & results) { + for (auto & res : results) { + GGML_ASSERT(dynamic_cast(res.get()) != nullptr); + responses.push_back(res->to_json()); } }, [&](const json & error_data) { res_error(res, error_data); @@ -3267,36 +3794,42 @@ int main(int argc, char ** argv) { } } - server_task task; - task.type = SERVER_TASK_TYPE_SET_LORA; - const int id_task = ctx_server.queue_tasks.post(task); - ctx_server.queue_results.add_waiting_task_id(id_task); + server_task task(SERVER_TASK_TYPE_SET_LORA); + task.id = ctx_server.queue_tasks.get_new_id(); + ctx_server.queue_results.add_waiting_task_id(task.id); + ctx_server.queue_tasks.post(task); + + server_task_result_ptr result = ctx_server.queue_results.recv(task.id); + ctx_server.queue_results.remove_waiting_task_id(task.id); - server_task_result result = ctx_server.queue_results.recv(id_task); - ctx_server.queue_results.remove_waiting_task_id(id_task); + if (result->is_error()) { + res_error(res, result->to_json()); + return; + } - 
res_ok(res, result.data); - res.status = 200; // HTTP OK + GGML_ASSERT(dynamic_cast(result.get()) != nullptr); + res_ok(res, result->to_json()); }; // // Router // - // register static assets routes - if (!params.public_path.empty()) { - // Set the base directory for serving static files - bool is_found = svr->set_mount_point("/", params.public_path); - if (!is_found) { - LOG_ERR("%s: static assets path not found: %s\n", __func__, params.public_path.c_str()); - return 1; - } + if (!params.webui) { + LOG_INF("Web UI is disabled\n"); } else { - // using embedded static files - for (const auto & it : static_files) { - const server_static_file & static_file = it.second; - svr->Get(it.first.c_str(), [&static_file](const httplib::Request &, httplib::Response & res) { - res.set_content(reinterpret_cast(static_file.data), static_file.size, static_file.mime_type); + // register static assets routes + if (!params.public_path.empty()) { + // Set the base directory for serving static files + bool is_found = svr->set_mount_point("/", params.public_path); + if (!is_found) { + LOG_ERR("%s: static assets path not found: %s\n", __func__, params.public_path.c_str()); + return 1; + } + } else { + // using embedded static index.html + svr->Get("/", [](const httplib::Request &, httplib::Response & res) { + res.set_content(reinterpret_cast(index_html), index_html_len, "text/html; charset=utf-8"); return false; }); } @@ -3347,8 +3880,18 @@ int main(int argc, char ** argv) { llama_backend_free(); }; - // bind HTTP listen port, run the HTTP server in a thread - if (!svr->bind_to_port(params.hostname, params.port)) { + // bind HTTP listen port + bool was_bound = false; + if (params.port == 0) { + int bound_port = svr->bind_to_any_port(params.hostname); + if ((was_bound = (bound_port >= 0))) { + params.port = bound_port; + } + } else { + was_bound = svr->bind_to_port(params.hostname, params.port); + } + + if (!was_bound) { //LOG_ERROR("couldn't bind HTTP server socket", { // {"hostname", params.hostname}, // {"port", params.port}, @@ -3357,6 +3900,8 @@ int main(int argc, char ** argv) { clean_up(); return 1; } + + // run the HTTP server in a thread std::thread t([&]() { svr->listen_after_bind(); }); svr->wait_until_ready(); diff --git a/examples/server/tests/README.md b/examples/server/tests/README.md index 2930a2e0dea0f..fa3d0a2f5ff66 100644 --- a/examples/server/tests/README.md +++ b/examples/server/tests/README.md @@ -44,4 +44,10 @@ To run with stdout/stderr display in real time (verbose output, but useful for d DEBUG=1 ./tests.sh -s -v -x ``` +Hint: You can compile and run test in single command, useful for local developement: + +```shell +cmake --build build -j --target llama-server && ./examples/server/tests/tests.sh +``` + To see all available arguments, please refer to [pytest documentation](https://docs.pytest.org/en/stable/how-to/usage.html) diff --git a/examples/server/tests/tests.sh b/examples/server/tests/tests.sh index 1e285dcdac14b..1e0777de367fc 100755 --- a/examples/server/tests/tests.sh +++ b/examples/server/tests/tests.sh @@ -1,5 +1,9 @@ #!/bin/bash +# make sure we are in the right directory +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) +cd $SCRIPT_DIR + set -eu if [ $# -lt 1 ] diff --git a/examples/server/tests/unit/test_basic.py b/examples/server/tests/unit/test_basic.py index 84db5ca1ca192..1485de8ceb3fc 100644 --- a/examples/server/tests/unit/test_basic.py +++ b/examples/server/tests/unit/test_basic.py @@ -1,4 +1,5 @@ import pytest +import requests from utils 
import * server = ServerPreset.tinyllama2() @@ -22,7 +23,12 @@ def test_server_props(): server.start() res = server.make_request("GET", "/props") assert res.status_code == 200 + assert ".gguf" in res.body["model_path"] assert res.body["total_slots"] == server.n_slots + default_val = res.body["default_generation_settings"] + assert server.n_ctx is not None and server.n_slots is not None + assert default_val["n_ctx"] == server.n_ctx / server.n_slots + assert default_val["params"]["seed"] == server.seed def test_server_models(): @@ -32,3 +38,59 @@ def test_server_models(): assert res.status_code == 200 assert len(res.body["data"]) == 1 assert res.body["data"][0]["id"] == server.model_alias + + +def test_server_slots(): + global server + + # without slots endpoint enabled, this should return error + server.server_slots = False + server.start() + res = server.make_request("GET", "/slots") + assert res.status_code == 501 # ERROR_TYPE_NOT_SUPPORTED + assert "error" in res.body + server.stop() + + # with slots endpoint enabled, this should return slots info + server.server_slots = True + server.n_slots = 2 + server.start() + res = server.make_request("GET", "/slots") + assert res.status_code == 200 + assert len(res.body) == server.n_slots + assert server.n_ctx is not None and server.n_slots is not None + assert res.body[0]["n_ctx"] == server.n_ctx / server.n_slots + assert "params" in res.body[0] + assert res.body[0]["params"]["seed"] == server.seed + + +def test_load_split_model(): + global server + server.model_hf_repo = "ggml-org/models" + server.model_hf_file = "tinyllamas/split/stories15M-q8_0-00001-of-00003.gguf" + server.model_alias = "tinyllama-split" + server.start() + res = server.make_request("POST", "/completion", data={ + "n_predict": 16, + "prompt": "Hello", + "temperature": 0.0, + }) + assert res.status_code == 200 + assert match_regex("(little|girl)+", res.body["content"]) + + +def test_no_webui(): + global server + # default: webui enabled + server.start() + url = f"http://{server.server_host}:{server.server_port}" + res = requests.get(url) + assert res.status_code == 200 + assert "" in res.text + server.stop() + + # with --no-webui + server.no_webui = True + server.start() + res = requests.get(url) + assert res.status_code == 404 diff --git a/examples/server/tests/unit/test_chat_completion.py b/examples/server/tests/unit/test_chat_completion.py index d7aeb288d45cc..6573cc17f7b87 100644 --- a/examples/server/tests/unit/test_chat_completion.py +++ b/examples/server/tests/unit/test_chat_completion.py @@ -12,13 +12,13 @@ def create_server(): @pytest.mark.parametrize( - "model,system_prompt,user_prompt,max_tokens,re_content,n_prompt,n_predicted,truncated", + "model,system_prompt,user_prompt,max_tokens,re_content,n_prompt,n_predicted,finish_reason", [ - ("llama-2", "Book", "What is the best book", 8, "(Suddenly)+", 77, 8, False), - ("codellama70b", "You are a coding assistant.", "Write the fibonacci function in c++.", 128, "(Aside|she|felter|alonger)+", 104, 64, False), + (None, "Book", "What is the best book", 8, "(Suddenly)+", 77, 8, "length"), + ("codellama70b", "You are a coding assistant.", "Write the fibonacci function in c++.", 128, "(Aside|she|felter|alonger)+", 104, 64, "length"), ] ) -def test_chat_completion(model, system_prompt, user_prompt, max_tokens, re_content, n_prompt, n_predicted, truncated): +def test_chat_completion(model, system_prompt, user_prompt, max_tokens, re_content, n_prompt, n_predicted, finish_reason): global server server.start() res = 
server.make_request("POST", "/chat/completions", data={ @@ -30,29 +30,28 @@ def test_chat_completion(model, system_prompt, user_prompt, max_tokens, re_conte ], }) assert res.status_code == 200 + assert "cmpl" in res.body["id"] # make sure the completion id has the expected format + assert res.body["model"] == model if model is not None else server.model_alias assert res.body["usage"]["prompt_tokens"] == n_prompt assert res.body["usage"]["completion_tokens"] == n_predicted choice = res.body["choices"][0] assert "assistant" == choice["message"]["role"] assert match_regex(re_content, choice["message"]["content"]) - if truncated: - assert choice["finish_reason"] == "length" - else: - assert choice["finish_reason"] == "stop" + assert choice["finish_reason"] == finish_reason @pytest.mark.parametrize( - "model,system_prompt,user_prompt,max_tokens,re_content,n_prompt,n_predicted,truncated", + "system_prompt,user_prompt,max_tokens,re_content,n_prompt,n_predicted,finish_reason", [ - ("llama-2", "Book", "What is the best book", 8, "(Suddenly)+", 77, 8, False), - ("codellama70b", "You are a coding assistant.", "Write the fibonacci function in c++.", 128, "(Aside|she|felter|alonger)+", 104, 64, False), + ("Book", "What is the best book", 8, "(Suddenly)+", 77, 8, "length"), + ("You are a coding assistant.", "Write the fibonacci function in c++.", 128, "(Aside|she|felter|alonger)+", 104, 64, "length"), ] ) -def test_chat_completion_stream(model, system_prompt, user_prompt, max_tokens, re_content, n_prompt, n_predicted, truncated): +def test_chat_completion_stream(system_prompt, user_prompt, max_tokens, re_content, n_prompt, n_predicted, finish_reason): global server + server.model_alias = None # try using DEFAULT_OAICOMPAT_MODEL server.start() res = server.make_stream_request("POST", "/chat/completions", data={ - "model": model, "max_tokens": max_tokens, "messages": [ {"role": "system", "content": system_prompt}, @@ -61,18 +60,19 @@ def test_chat_completion_stream(model, system_prompt, user_prompt, max_tokens, r "stream": True, }) content = "" + last_cmpl_id = None for data in res: choice = data["choices"][0] + assert "gpt-3.5" in data["model"] # DEFAULT_OAICOMPAT_MODEL, maybe changed in the future + if last_cmpl_id is None: + last_cmpl_id = data["id"] + assert last_cmpl_id == data["id"] # make sure the completion id is the same for all events in the stream if choice["finish_reason"] in ["stop", "length"]: assert data["usage"]["prompt_tokens"] == n_prompt assert data["usage"]["completion_tokens"] == n_predicted assert "content" not in choice["delta"] assert match_regex(re_content, content) - # FIXME: not sure why this is incorrect in stream mode - # if truncated: - # assert choice["finish_reason"] == "length" - # else: - # assert choice["finish_reason"] == "stop" + assert choice["finish_reason"] == finish_reason else: assert choice["finish_reason"] is None content += choice["delta"]["content"] @@ -93,7 +93,7 @@ def test_chat_completion_with_openai_library(): temperature=0.8, ) print(res) - assert res.choices[0].finish_reason == "stop" + assert res.choices[0].finish_reason == "length" assert res.choices[0].message.content is not None assert match_regex("(Suddenly)+", res.choices[0].message.content) @@ -127,3 +127,39 @@ def test_completion_with_response_format(response_format: dict, n_predicted: int assert res.status_code != 200 assert "error" in res.body + +@pytest.mark.parametrize("messages", [ + None, + "string", + [123], + [{}], + [{"role": 123}], + [{"role": "system", "content": 123}], + # 
[{"content": "hello"}], # TODO: should not be a valid case + [{"role": "system", "content": "test"}, {}], +]) +def test_invalid_chat_completion_req(messages): + global server + server.start() + res = server.make_request("POST", "/chat/completions", data={ + "messages": messages, + }) + assert res.status_code == 400 or res.status_code == 500 + assert "error" in res.body + + +def test_chat_completion_with_timings_per_token(): + global server + server.start() + res = server.make_stream_request("POST", "/chat/completions", data={ + "max_tokens": 10, + "messages": [{"role": "user", "content": "test"}], + "stream": True, + "timings_per_token": True, + }) + for data in res: + assert "timings" in data + assert "prompt_per_second" in data["timings"] + assert "predicted_per_second" in data["timings"] + assert "predicted_n" in data["timings"] + assert data["timings"]["predicted_n"] <= 10 diff --git a/examples/server/tests/unit/test_completion.py b/examples/server/tests/unit/test_completion.py index 2fa30dd033431..7f4f9cd038be4 100644 --- a/examples/server/tests/unit/test_completion.py +++ b/examples/server/tests/unit/test_completion.py @@ -42,15 +42,39 @@ def test_completion_stream(prompt: str, n_predict: int, re_content: str, n_promp }) content = "" for data in res: + assert "stop" in data and type(data["stop"]) == bool if data["stop"]: assert data["timings"]["prompt_n"] == n_prompt assert data["timings"]["predicted_n"] == n_predicted assert data["truncated"] == truncated + assert data["stop_type"] == "limit" + assert "generation_settings" in data + assert server.n_predict is not None + assert data["generation_settings"]["n_predict"] == min(n_predict, server.n_predict) + assert data["generation_settings"]["seed"] == server.seed assert match_regex(re_content, content) else: content += data["content"] +def test_completion_stream_vs_non_stream(): + global server + server.start() + res_stream = server.make_stream_request("POST", "/completion", data={ + "n_predict": 8, + "prompt": "I believe the meaning of life is", + "stream": True, + }) + res_non_stream = server.make_request("POST", "/completion", data={ + "n_predict": 8, + "prompt": "I believe the meaning of life is", + }) + content_stream = "" + for data in res_stream: + content_stream += data["content"] + assert content_stream == res_non_stream.body["content"] + + @pytest.mark.parametrize("n_slots", [1, 2]) def test_consistent_result_same_seed(n_slots: int): global server @@ -221,3 +245,24 @@ def check_slots_status(): assert len(res.body["content"]) > 10 # FIXME: the result is not deterministic when using other slot than slot 0 # assert match_regex(re_content, res.body["content"]) + + +def test_n_probs(): + global server + server.start() + res = server.make_request("POST", "/completion", data={ + "prompt": "I believe the meaning of life is", + "n_probs": 10, + "temperature": 0.0, + "n_predict": 5, + }) + assert res.status_code == 200 + assert "completion_probabilities" in res.body + assert len(res.body["completion_probabilities"]) == 5 + for tok in res.body["completion_probabilities"]: + assert "probs" in tok + assert len(tok["probs"]) == 10 + for prob in tok["probs"]: + assert "prob" in prob + assert "tok_str" in prob + assert 0.0 <= prob["prob"] <= 1.0 diff --git a/examples/server/tests/unit/test_infill.py b/examples/server/tests/unit/test_infill.py index 38ce6c42954ed..ad4b8192a7875 100644 --- a/examples/server/tests/unit/test_infill.py +++ b/examples/server/tests/unit/test_infill.py @@ -8,28 +8,70 @@ def create_server(): global server server = 
ServerPreset.tinyllama_infill() + def test_infill_without_input_extra(): global server server.start() res = server.make_request("POST", "/infill", data={ - "prompt": "Complete this", - "input_prefix": "#include \n#include \"llama.h\"\n\nint main() {\n int n_threads = llama_", + "input_prefix": "#include \n#include \"llama.h\"\n\nint main() {\n", + "prompt": " int n_threads = llama_", "input_suffix": "}\n", }) assert res.status_code == 200 - assert match_regex("(One|day|she|saw|big|scary|bird)+", res.body["content"]) + assert match_regex("(Ann|small|shiny)+", res.body["content"]) + def test_infill_with_input_extra(): global server server.start() res = server.make_request("POST", "/infill", data={ - "prompt": "Complete this", "input_extra": [{ "filename": "llama.h", "text": "LLAMA_API int32_t llama_n_threads();\n" }], - "input_prefix": "#include \n#include \"llama.h\"\n\nint main() {\n int n_threads = llama_", + "input_prefix": "#include \n#include \"llama.h\"\n\nint main() {\n", + "prompt": " int n_threads = llama_", + "input_suffix": "}\n", + }) + assert res.status_code == 200 + assert match_regex("(Dad|excited|park)+", res.body["content"]) + + +@pytest.mark.parametrize("input_extra", [ + {}, + {"filename": "ok"}, + {"filename": 123}, + {"filename": 123, "text": "abc"}, + {"filename": 123, "text": 456}, +]) +def test_invalid_input_extra_req(input_extra): + global server + server.start() + res = server.make_request("POST", "/infill", data={ + "input_extra": [input_extra], + "input_prefix": "#include \n#include \"llama.h\"\n\nint main() {\n", + "prompt": " int n_threads = llama_", + "input_suffix": "}\n", + }) + assert res.status_code == 400 + assert "error" in res.body + + +@pytest.mark.skipif(not is_slow_test_allowed(), reason="skipping slow test") +def test_with_qwen_model(): + global server + server.model_file = None + server.model_hf_repo = "ggml-org/Qwen2.5-Coder-1.5B-IQ3_XXS-GGUF" + server.model_hf_file = "qwen2.5-coder-1.5b-iq3_xxs-imat.gguf" + server.start(timeout_seconds=600) + res = server.make_request("POST", "/infill", data={ + "input_extra": [{ + "filename": "llama.h", + "text": "LLAMA_API int32_t llama_n_threads();\n" + }], + "input_prefix": "#include \n#include \"llama.h\"\n\nint main() {\n", + "prompt": " int n_threads = llama_", "input_suffix": "}\n", }) assert res.status_code == 200 - assert match_regex("(cuts|Jimmy|mom|came|into|the|room)+", res.body["content"]) + assert res.body["content"] == "n_threads();\n printf(\"Number of threads: %d\\n\", n_threads);\n return 0;\n" diff --git a/examples/server/tests/unit/test_rerank.py b/examples/server/tests/unit/test_rerank.py index 3a49fd3ac6bdf..189bc4c962329 100644 --- a/examples/server/tests/unit/test_rerank.py +++ b/examples/server/tests/unit/test_rerank.py @@ -36,3 +36,20 @@ def test_rerank(): assert most_relevant["relevance_score"] > least_relevant["relevance_score"] assert most_relevant["index"] == 2 assert least_relevant["index"] == 3 + + +@pytest.mark.parametrize("documents", [ + [], + None, + 123, + [1, 2, 3], +]) +def test_invalid_rerank_req(documents): + global server + server.start() + res = server.make_request("POST", "/rerank", data={ + "query": "Machine learning is", + "documents": documents, + }) + assert res.status_code == 400 + assert "error" in res.body diff --git a/examples/server/tests/unit/test_speculative.py b/examples/server/tests/unit/test_speculative.py new file mode 100644 index 0000000000000..3bb5733cbdf48 --- /dev/null +++ b/examples/server/tests/unit/test_speculative.py @@ -0,0 +1,134 @@ +import 
pytest +from utils import * + +# We use a F16 MOE gguf as main model, and q4_0 as draft model + +server = ServerPreset.stories15m_moe() + +MODEL_DRAFT_FILE_URL = "https://huggingface.co/ggml-org/models/resolve/main/tinyllamas/stories15M-q4_0.gguf" + +def create_server(): + global server + server = ServerPreset.stories15m_moe() + # download draft model file if needed + file_name = MODEL_DRAFT_FILE_URL.split('/').pop() + model_draft_file = f'../../../{file_name}' + if not os.path.exists(model_draft_file): + print(f"Downloading {MODEL_DRAFT_FILE_URL} to {model_draft_file}") + with open(model_draft_file, 'wb') as f: + f.write(requests.get(MODEL_DRAFT_FILE_URL).content) + print(f"Done downloading draft model file") + # set default values + server.model_draft = model_draft_file + server.draft_min = 4 + server.draft_max = 8 + + +@pytest.fixture(scope="module", autouse=True) +def fixture_create_server(): + return create_server() + + +def test_with_and_without_draft(): + global server + server.model_draft = None # disable draft model + server.start() + res = server.make_request("POST", "/completion", data={ + "prompt": "I believe the meaning of life is", + "temperature": 0.0, + "top_k": 1, + }) + assert res.status_code == 200 + content_no_draft = res.body["content"] + server.stop() + + # create new server with draft model + create_server() + server.start() + res = server.make_request("POST", "/completion", data={ + "prompt": "I believe the meaning of life is", + "temperature": 0.0, + "top_k": 1, + }) + assert res.status_code == 200 + content_draft = res.body["content"] + + assert content_no_draft == content_draft + + +def test_different_draft_min_draft_max(): + global server + test_values = [ + (1, 2), + (1, 4), + (4, 8), + (4, 12), + (8, 16), + ] + last_content = None + for draft_min, draft_max in test_values: + server.stop() + server.draft_min = draft_min + server.draft_max = draft_max + server.start() + res = server.make_request("POST", "/completion", data={ + "prompt": "I believe the meaning of life is", + "temperature": 0.0, + "top_k": 1, + }) + assert res.status_code == 200 + if last_content is not None: + assert last_content == res.body["content"] + last_content = res.body["content"] + + +def test_slot_ctx_not_exceeded(): + global server + server.n_ctx = 64 + server.start() + res = server.make_request("POST", "/completion", data={ + "prompt": "Hello " * 56, + "temperature": 0.0, + "top_k": 1, + "speculative.p_min": 0.0, + }) + assert res.status_code == 200 + assert len(res.body["content"]) > 0 + + +def test_with_ctx_shift(): + global server + server.n_ctx = 64 + server.start() + res = server.make_request("POST", "/completion", data={ + "prompt": "Hello " * 56, + "temperature": 0.0, + "top_k": 1, + "n_predict": 64, + "speculative.p_min": 0.0, + }) + assert res.status_code == 200 + assert len(res.body["content"]) > 0 + assert res.body["tokens_predicted"] == 64 + assert res.body["truncated"] == True + + +@pytest.mark.parametrize("n_slots,n_requests", [ + (1, 2), + (2, 2), +]) +def test_multi_requests_parallel(n_slots: int, n_requests: int): + global server + server.n_slots = n_slots + server.start() + tasks = [] + for _ in range(n_requests): + tasks.append((server.make_request, ("POST", "/completion", { + "prompt": "I believe the meaning of life is", + "temperature": 0.0, + "top_k": 1, + }))) + results = parallel_function_calls(tasks) + for res in results: + assert res.status_code == 200 + assert match_regex("(wise|kind|owl|answer)+", res.body["content"]) diff --git 
a/examples/server/tests/utils.py b/examples/server/tests/utils.py index a831f113f4161..d988ccf5e3061 100644 --- a/examples/server/tests/utils.py +++ b/examples/server/tests/utils.py @@ -46,6 +46,7 @@ class ServerProcess: model_alias: str | None = None model_url: str | None = None model_file: str | None = None + model_draft: str | None = None n_threads: int | None = None n_gpu_layer: int | None = None n_batch: int | None = None @@ -63,11 +64,15 @@ class ServerProcess: server_embeddings: bool | None = False server_reranking: bool | None = False server_metrics: bool | None = False + server_slots: bool | None = False draft: int | None = None api_key: str | None = None response_format: str | None = None lora_files: List[str] | None = None disable_ctx_shift: int | None = False + draft_min: int | None = None + draft_max: int | None = None + no_webui: bool | None = None # session variables process: subprocess.Popen | None = None @@ -88,7 +93,6 @@ def start(self, timeout_seconds: int = 10) -> None: else: server_path = "../../../build/bin/llama-server" server_args = [ - "--slots", # requires to get slot status via /slots endpoint "--host", self.server_host, "--port", @@ -102,6 +106,8 @@ def start(self, timeout_seconds: int = 10) -> None: server_args.extend(["--model", self.model_file]) if self.model_url: server_args.extend(["--model-url", self.model_url]) + if self.model_draft: + server_args.extend(["--model-draft", self.model_draft]) if self.model_hf_repo: server_args.extend(["--hf-repo", self.model_hf_repo]) if self.model_hf_file: @@ -124,6 +130,8 @@ def start(self, timeout_seconds: int = 10) -> None: server_args.append("--reranking") if self.server_metrics: server_args.append("--metrics") + if self.server_slots: + server_args.append("--slots") if self.model_alias: server_args.extend(["--alias", self.model_alias]) if self.n_ctx: @@ -147,6 +155,12 @@ def start(self, timeout_seconds: int = 10) -> None: server_args.extend(["--no-context-shift"]) if self.api_key: server_args.extend(["--api-key", self.api_key]) + if self.draft_max: + server_args.extend(["--draft-max", self.draft_max]) + if self.draft_min: + server_args.extend(["--draft-min", self.draft_min]) + if self.no_webui: + server_args.append("--no-webui") args = [str(arg) for arg in [server_path, *server_args]] print(f"bench: starting server with: {' '.join(args)}") @@ -172,7 +186,7 @@ def start(self, timeout_seconds: int = 10) -> None: start_time = time.time() while time.time() - start_time < timeout_seconds: try: - response = self.make_request("GET", "/slots", headers={ + response = self.make_request("GET", "/health", headers={ "Authorization": f"Bearer {self.api_key}" if self.api_key else None }) if response.status_code == 200: @@ -185,7 +199,8 @@ def start(self, timeout_seconds: int = 10) -> None: raise TimeoutError(f"Server did not start within {timeout_seconds} seconds") def stop(self) -> None: - server_instances.remove(self) + if self in server_instances: + server_instances.remove(self) if self.process: print(f"Stopping server with pid={self.process.pid}") self.process.kill() @@ -214,7 +229,7 @@ def make_request( result.headers = dict(response.headers) result.status_code = response.status_code result.body = response.json() if parse_body else None - print("Response from server", result.body) + print("Response from server", json.dumps(result.body, indent=2)) return result def make_stream_request( @@ -235,7 +250,7 @@ def make_stream_request( break elif line.startswith('data: '): data = json.loads(line[6:]) - print("Partial response from 
server", data) + print("Partial response from server", json.dumps(data, indent=2)) yield data @@ -359,3 +374,6 @@ def match_regex(regex: str, text: str) -> bool: ).search(text) is not None ) + +def is_slow_test_allowed(): + return os.environ.get("SLOW_TESTS") == "1" or os.environ.get("SLOW_TESTS") == "ON" diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index 1665e9dc37db6..8f545aea52dc4 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -20,6 +20,7 @@ #include #include #include +#include #define DEFAULT_OAICOMPAT_MODEL "gpt-3.5-turbo-0613" @@ -40,17 +41,6 @@ using json = nlohmann::ordered_json; #define QUE_ERR(fmt, ...) LOG_ERR("que %12.*s: " fmt, 12, __func__, __VA_ARGS__) #define QUE_DBG(fmt, ...) LOG_DBG("que %12.*s: " fmt, 12, __func__, __VA_ARGS__) -// https://community.openai.com/t/openai-chat-list-of-error-codes-and-types/357791/11 -enum error_type { - ERROR_TYPE_INVALID_REQUEST, - ERROR_TYPE_AUTHENTICATION, - ERROR_TYPE_SERVER, - ERROR_TYPE_NOT_FOUND, - ERROR_TYPE_PERMISSION, - ERROR_TYPE_UNAVAILABLE, // custom error - ERROR_TYPE_NOT_SUPPORTED, // custom error -}; - template static T json_value(const json & body, const std::string & key, const T & default_value) { // Fallback null to default value @@ -174,6 +164,9 @@ static std::vector tokenize_input_prompts(llama_context * ctx, con } else { throw std::runtime_error("\"prompt\" must be a string, an list of tokens, a list of mixed strings & tokens, or a list of prompts"); } + if (result.empty()) { + throw std::runtime_error("\"prompt\" must not be empty"); + } return result; } @@ -337,12 +330,12 @@ static std::string llama_get_chat_template(const struct llama_model * model) { std::string template_key = "tokenizer.chat_template"; // call with NULL buffer to get the total size of the string int32_t res = llama_model_meta_val_str(model, template_key.c_str(), NULL, 0); - if (res < 0) { + if (res < 2) { return ""; } else { std::vector model_template(res, 0); llama_model_meta_val_str(model, template_key.c_str(), model_template.data(), model_template.size()); - return std::string(model_template.data(), model_template.size()); + return std::string(model_template.data(), model_template.size() - 1); } } @@ -485,48 +478,11 @@ static std::string tokens_to_output_formatted_string(const llama_context * ctx, return out; } -struct completion_token_output { - llama_token tok; - std::string text_to_send; - - struct token_prob { - llama_token tok; - float prob; - }; - - std::vector probs; -}; - -// convert a vector of completion_token_output to json -static json probs_vector_to_json(const llama_context * ctx, const std::vector & probs) { - json out = json::array(); - - for (const auto & prob : probs) { - json probs_for_token = json::array(); - - for (const auto & p : prob.probs) { - const std::string tok_str = tokens_to_output_formatted_string(ctx, p.tok); - probs_for_token.push_back(json { - {"tok_str", tok_str}, - {"prob", p.prob}, - }); - } - - const std::string tok_str = tokens_to_output_formatted_string(ctx, prob.tok); - out.push_back(json { - {"content", tok_str}, - {"probs", probs_for_token}, - }); - } - - return out; -} - static bool server_sent_event(httplib::DataSink & sink, const char * event, const json & data) { const std::string str = std::string(event) + ": " + data.dump(-1, ' ', false, json::error_handler_t::replace) + - "\n\n"; // note: these newlines are important (not sure why though, if you know, add a comment to explain) + "\n\n"; // required by RFC 8895 - A message is terminated by a 
blank line (two line terminators in a row). LOG_DBG("data stream, to_send: %s", str.c_str()); @@ -543,8 +499,6 @@ static json oaicompat_completion_params_parse( const std::string & chat_template) { json llama_params; - llama_params["__oaicompat"] = true; - // Apply chat template to the list of messages llama_params["prompt"] = format_chat(model, chat_template, body.at("messages")); @@ -604,155 +558,6 @@ static json oaicompat_completion_params_parse( return llama_params; } -static json format_final_response_oaicompat(const json & request, const json & result, const std::string & completion_id, bool streaming = false, bool verbose = false) { - bool stopped_word = result.count("stopped_word") != 0; - bool stopped_eos = json_value(result, "stopped_eos", false); - int num_tokens_predicted = json_value(result, "tokens_predicted", 0); - int num_prompt_tokens = json_value(result, "tokens_evaluated", 0); - std::string content = json_value(result, "content", std::string("")); - - std::string finish_reason = "length"; - if (stopped_word || stopped_eos) { - finish_reason = "stop"; - } - - json choices = - streaming ? json::array({json{{"finish_reason", finish_reason}, - {"index", 0}, - {"delta", json::object()}}}) - : json::array({json{{"finish_reason", finish_reason}, - {"index", 0}, - {"message", json{{"content", content}, - {"role", "assistant"}}}}}); - - std::time_t t = std::time(0); - - json res = json { - {"choices", choices}, - {"created", t}, - {"model", - json_value(request, "model", std::string(DEFAULT_OAICOMPAT_MODEL))}, - {"object", streaming ? "chat.completion.chunk" : "chat.completion"}, - {"usage", json { - {"completion_tokens", num_tokens_predicted}, - {"prompt_tokens", num_prompt_tokens}, - {"total_tokens", num_tokens_predicted + num_prompt_tokens} - }}, - {"id", completion_id} - }; - - // extra fields for debugging purposes - if (verbose) { - res["__verbose"] = result; - } - - if (result.contains("completion_probabilities")) { - res["completion_probabilities"] = json_value(result, "completion_probabilities", json::array()); - } - - return res; -} - -// return value is vector as there is one case where we might need to generate two responses -static std::vector format_partial_response_oaicompat(const json & result, const std::string & completion_id) { - if (!result.contains("model") || !result.contains("oaicompat_token_ctr")) { - return std::vector({result}); - } - - bool first = json_value(result, "oaicompat_token_ctr", 0) == 0; - std::string modelname = json_value(result, "model", std::string(DEFAULT_OAICOMPAT_MODEL)); - - bool stopped_word = json_value(result, "stopped_word", false); - bool stopped_eos = json_value(result, "stopped_eos", false); - bool stopped_limit = json_value(result, "stopped_limit", false); - std::string content = json_value(result, "content", std::string("")); - - std::string finish_reason; - if (stopped_word || stopped_eos) { - finish_reason = "stop"; - } - if (stopped_limit) { - finish_reason = "length"; - } - - std::time_t t = std::time(0); - - json choices; - - if (!finish_reason.empty()) { - choices = json::array({json{{"finish_reason", finish_reason}, - {"index", 0}, - {"delta", json::object()}}}); - } else { - if (first) { - if (content.empty()) { - choices = json::array({json{{"finish_reason", nullptr}, - {"index", 0}, - {"delta", json{{"role", "assistant"}}}}}); - } else { - // We have to send this as two updates to conform to openai behavior - json initial_ret = json{{"choices", json::array({json{ - {"finish_reason", nullptr}, - {"index", 0}, - 
{"delta", json{ - {"role", "assistant"} - }}}})}, - {"created", t}, - {"id", completion_id}, - {"model", modelname}, - {"object", "chat.completion.chunk"}}; - - json second_ret = json{ - {"choices", json::array({json{{"finish_reason", nullptr}, - {"index", 0}, - {"delta", json{ - {"content", content}}} - }})}, - {"created", t}, - {"id", completion_id}, - {"model", modelname}, - {"object", "chat.completion.chunk"}}; - - return std::vector({initial_ret, second_ret}); - } - } else { - // Some idiosyncrasy in task processing logic makes several trailing calls - // with empty content, we ignore these at the calee site. - if (content.empty()) { - return std::vector({json::object()}); - } - - choices = json::array({json{ - {"finish_reason", nullptr}, - {"index", 0}, - {"delta", - json{ - {"content", content}, - }}, - }}); - } - } - - json ret = json { - {"choices", choices}, - {"created", t}, - {"id", completion_id}, - {"model", modelname}, - {"object", "chat.completion.chunk"} - }; - if (!finish_reason.empty()) { - int num_tokens_predicted = json_value(result, "tokens_predicted", 0); - int num_prompt_tokens = json_value(result, "tokens_evaluated", 0); - ret.push_back({"usage", json { - {"completion_tokens", num_tokens_predicted}, - {"prompt_tokens", num_prompt_tokens}, - {"total_tokens", num_tokens_predicted + num_prompt_tokens} - }}); - } - - return std::vector({ret}); -} - static json format_embeddings_response_oaicompat(const json & request, const json & embeddings) { json data = json::array(); int i = 0; @@ -845,42 +650,17 @@ static json format_detokenized_response(const std::string & content) { }; } -static json format_error_response(const std::string & message, const enum error_type type) { - std::string type_str; - int code = 500; - switch (type) { - case ERROR_TYPE_INVALID_REQUEST: - type_str = "invalid_request_error"; - code = 400; - break; - case ERROR_TYPE_AUTHENTICATION: - type_str = "authentication_error"; - code = 401; - break; - case ERROR_TYPE_NOT_FOUND: - type_str = "not_found_error"; - code = 404; - break; - case ERROR_TYPE_SERVER: - type_str = "server_error"; - code = 500; - break; - case ERROR_TYPE_PERMISSION: - type_str = "permission_error"; - code = 403; - break; - case ERROR_TYPE_NOT_SUPPORTED: - type_str = "not_supported_error"; - code = 501; - break; - case ERROR_TYPE_UNAVAILABLE: - type_str = "unavailable_error"; - code = 503; - break; +static json format_logit_bias(const std::vector & logit_bias) { + json data = json::array(); + for (const auto & lb : logit_bias) { + data.push_back(json{ + {"bias", lb.bias}, + {"token", lb.token}, + }); } - return json { - {"code", code}, - {"message", message}, - {"type", type_str}, - }; + return data; +} + +static std::string safe_json_to_str(json data) { + return data.dump(-1, ' ', false, json::error_handler_t::replace); } diff --git a/examples/server/webui/index.html b/examples/server/webui/index.html new file mode 100644 index 0000000000000..c7e18b45e1a51 --- /dev/null +++ b/examples/server/webui/index.html @@ -0,0 +1,268 @@ + + + + + + + 🦙 llama.cpp - chat + + + +
[The body of the new examples/server/webui/index.html (268 lines) is not recoverable from this extract: the HTML markup was stripped, leaving only the visible UI strings. These include a "Conversations" sidebar with a "New conversation" button and per-conversation entries ({{ conv.messages[0].content }}), the note "Conversations are saved to browser's localStorage", a "llama.cpp" header, and the chat placeholder "Send a message to start".]
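The web UI above is embedded into the server and served at `/` only when the UI is enabled; starting the server with `--no-webui` leaves that route unregistered, which is what `test_no_webui` in `test_basic.py` above exercises. A minimal client-side sketch of that behaviour, assuming a `llama-server` instance is already listening on `localhost:8080` (the host and port here are assumptions for illustration, not part of the diff):

```python
import requests

BASE_URL = "http://localhost:8080"  # assumed address of an already-running llama-server

# With the web UI enabled (the default), the root route returns the embedded index.html.
res = requests.get(f"{BASE_URL}/")
assert res.status_code == 200
assert "<html" in res.text  # the response body is the single-file HTML page

# If the same server were started with --no-webui, the route would not be registered
# and this request would be expected to return 404 instead (see test_no_webui above).
```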
+ + + + + + + + diff --git a/examples/server/webui/package-lock.json b/examples/server/webui/package-lock.json new file mode 100644 index 0000000000000..6b93090f06274 --- /dev/null +++ b/examples/server/webui/package-lock.json @@ -0,0 +1,2783 @@ +{ + "name": "webui", + "version": "0.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "webui", + "version": "0.0.0", + "dependencies": { + "autoprefixer": "^10.4.20", + "daisyui": "^4.12.14", + "markdown-it": "^14.1.0", + "postcss": "^8.4.49", + "tailwindcss": "^3.4.15", + "vite-plugin-singlefile": "^2.0.3", + "vue": "^3.5.13" + }, + "devDependencies": { + "vite": "^5.4.10" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.28.0.tgz", + "integrity": "sha512-wLJuPLT6grGZsy34g4N1yRfYeouklTgPhH1gWXCYspenKYD0s3cR99ZevOGw5BexMNywkbV3UkjADisozBmpPQ==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.28.0.tgz", + "integrity": "sha512-eiNkznlo0dLmVG/6wf+Ifi/v78G4d4QxRhuUl+s8EWZpDewgk7PX3ZyECUXU0Zq/Ca+8nU8cQpNC4Xgn2gFNDA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + 
"node_modules/@rollup/rollup-darwin-x64": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.28.0.tgz", + "integrity": "sha512-8hxgfReVs7k9Js1uAIhS6zq3I+wKQETInnWQtgzt8JfGx51R1N6DRVy3F4o0lQwumbErRz52YqwjfvuwRxGv1w==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.28.0.tgz", + "integrity": "sha512-lA1zZB3bFx5oxu9fYud4+g1mt+lYXCoch0M0V/xhqLoGatbzVse0wlSQ1UYOWKpuSu3gyN4qEc0Dxf/DII1bhQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.28.0.tgz", + "integrity": "sha512-aI2plavbUDjCQB/sRbeUZWX9qp12GfYkYSJOrdYTL/C5D53bsE2/nBPuoiJKoWp5SN78v2Vr8ZPnB+/VbQ2pFA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.28.0.tgz", + "integrity": "sha512-WXveUPKtfqtaNvpf0iOb0M6xC64GzUX/OowbqfiCSXTdi/jLlOmH0Ba94/OkiY2yTGTwteo4/dsHRfh5bDCZ+w==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.28.0.tgz", + "integrity": "sha512-yLc3O2NtOQR67lI79zsSc7lk31xjwcaocvdD1twL64PK1yNaIqCeWI9L5B4MFPAVGEVjH5k1oWSGuYX1Wutxpg==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.28.0.tgz", + "integrity": "sha512-+P9G9hjEpHucHRXqesY+3X9hD2wh0iNnJXX/QhS/J5vTdG6VhNYMxJ2rJkQOxRUd17u5mbMLHM7yWGZdAASfcg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.28.0.tgz", + "integrity": "sha512-1xsm2rCKSTpKzi5/ypT5wfc+4bOGa/9yI/eaOLW0oMs7qpC542APWhl4A37AENGZ6St6GBMWhCCMM6tXgTIplw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.28.0.tgz", + "integrity": "sha512-zgWxMq8neVQeXL+ouSf6S7DoNeo6EPgi1eeqHXVKQxqPy1B2NvTbaOUWPn/7CfMKL7xvhV0/+fq/Z/J69g1WAQ==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.28.0.tgz", + "integrity": "sha512-VEdVYacLniRxbRJLNtzwGt5vwS0ycYshofI7cWAfj7Vg5asqj+pt+Q6x4n+AONSZW/kVm+5nklde0qs2EUwU2g==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" 
+ ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.28.0.tgz", + "integrity": "sha512-LQlP5t2hcDJh8HV8RELD9/xlYtEzJkm/aWGsauvdO2ulfl3QYRjqrKW+mGAIWP5kdNCBheqqqYIGElSRCaXfpw==", + "cpu": [ + "s390x" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.28.0.tgz", + "integrity": "sha512-Nl4KIzteVEKE9BdAvYoTkW19pa7LR/RBrT6F1dJCV/3pbjwDcaOq+edkP0LXuJ9kflW/xOK414X78r+K84+msw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.28.0.tgz", + "integrity": "sha512-eKpJr4vBDOi4goT75MvW+0dXcNUqisK4jvibY9vDdlgLx+yekxSm55StsHbxUsRxSTt3JEQvlr3cGDkzcSP8bw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.28.0.tgz", + "integrity": "sha512-Vi+WR62xWGsE/Oj+mD0FNAPY2MEox3cfyG0zLpotZdehPFXwz6lypkGs5y38Jd/NVSbOD02aVad6q6QYF7i8Bg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.28.0.tgz", + "integrity": "sha512-kN/Vpip8emMLn/eOza+4JwqDZBL6MPNpkdaEsgUtW1NYN3DZvZqSQrbKzJcTL6hd8YNmFTn7XGWMwccOcJBL0A==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.28.0.tgz", + "integrity": "sha512-Bvno2/aZT6usSa7lRDL2+hMjVAGjuqaymF1ApZm31JXzniR/hvr14jpU+/z4X6Gt5BPlzosscyJZGUvguXIqeQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@vue/compiler-dom": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.13.tgz", + "integrity": "sha512-ZOJ46sMOKUjO3e94wPdCzQ6P1Lx/vhp2RSvfaab88Ajexs0AHeV0uasYhi99WPaogmBlRHNRuly8xV75cNTMDA==", + "license": "MIT", + "dependencies": { + "@vue/compiler-core": "3.5.13", + "@vue/shared": "3.5.13" + } + }, + "node_modules/@vue/compiler-dom/node_modules/@babel/helper-string-parser": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@vue/compiler-dom/node_modules/@babel/helper-validator-identifier": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/@vue/compiler-dom/node_modules/@babel/parser": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.2.tgz", + "integrity": "sha512-DWMCZH9WA4Maitz2q21SRKHo9QXZxkDsbNZoVD62gusNtNBBqDg9i7uOhASfTfIGNzW+O+r7+jAlM8dwphcJKQ==", + "license": "MIT", + "dependencies": { + "@babel/types": "^7.26.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@vue/compiler-dom/node_modules/@babel/types": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.0.tgz", + "integrity": "sha512-Z/yiTPj+lDVnF7lWeKCIJzaIkI0vYO87dMpZ4bg4TDrFe4XXLFWL1TbXU27gBP3QccxV9mZICCrnjnYlJjXHOA==", + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@vue/compiler-dom/node_modules/@vue/compiler-core": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.13.tgz", + "integrity": "sha512-oOdAkwqUfW1WqpwSYJce06wvt6HljgY3fGeM9NcVA1HaYOij3mZG9Rkysn0OHuyUAGMbEbARIpsG+LPVlBJ5/Q==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.25.3", + "@vue/shared": "3.5.13", + "entities": "^4.5.0", + "estree-walker": "^2.0.2", + "source-map-js": "^1.2.0" + } + }, + "node_modules/@vue/compiler-dom/node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "license": "MIT" + }, + "node_modules/@vue/compiler-dom/node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@vue/compiler-sfc": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.13.tgz", + "integrity": "sha512-6VdaljMpD82w6c2749Zhf5T9u5uLBWKnVue6XWxprDobftnletJ8+oel7sexFfM3qIxNmVE7LSFGTpv6obNyaQ==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.25.3", + "@vue/compiler-core": "3.5.13", + "@vue/compiler-dom": "3.5.13", + "@vue/compiler-ssr": "3.5.13", + "@vue/shared": "3.5.13", + "estree-walker": "^2.0.2", + "magic-string": "^0.30.11", + "postcss": "^8.4.48", + "source-map-js": "^1.2.0" + } + }, + "node_modules/@vue/compiler-sfc/node_modules/@babel/helper-string-parser": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@vue/compiler-sfc/node_modules/@babel/helper-validator-identifier": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@vue/compiler-sfc/node_modules/@babel/parser": { + "version": "7.26.2", + "resolved": 
"https://registry.npmjs.org/@babel/parser/-/parser-7.26.2.tgz", + "integrity": "sha512-DWMCZH9WA4Maitz2q21SRKHo9QXZxkDsbNZoVD62gusNtNBBqDg9i7uOhASfTfIGNzW+O+r7+jAlM8dwphcJKQ==", + "license": "MIT", + "dependencies": { + "@babel/types": "^7.26.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@vue/compiler-sfc/node_modules/@babel/types": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.0.tgz", + "integrity": "sha512-Z/yiTPj+lDVnF7lWeKCIJzaIkI0vYO87dMpZ4bg4TDrFe4XXLFWL1TbXU27gBP3QccxV9mZICCrnjnYlJjXHOA==", + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@vue/compiler-sfc/node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "license": "MIT" + }, + "node_modules/@vue/compiler-sfc/node_modules/@vue/compiler-core": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.13.tgz", + "integrity": "sha512-oOdAkwqUfW1WqpwSYJce06wvt6HljgY3fGeM9NcVA1HaYOij3mZG9Rkysn0OHuyUAGMbEbARIpsG+LPVlBJ5/Q==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.25.3", + "@vue/shared": "3.5.13", + "entities": "^4.5.0", + "estree-walker": "^2.0.2", + "source-map-js": "^1.2.0" + } + }, + "node_modules/@vue/compiler-sfc/node_modules/@vue/compiler-ssr": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.13.tgz", + "integrity": "sha512-wMH6vrYHxQl/IybKJagqbquvxpWCuVYpoUJfCqFZwa/JY1GdATAQ+TgVtgrwwMZ0D07QhA99rs/EAAWfvG6KpA==", + "license": "MIT", + "dependencies": { + "@vue/compiler-dom": "3.5.13", + "@vue/shared": "3.5.13" + } + }, + "node_modules/@vue/compiler-sfc/node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "license": "MIT" + }, + "node_modules/@vue/compiler-sfc/node_modules/magic-string": { + "version": "0.30.14", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.14.tgz", + "integrity": "sha512-5c99P1WKTed11ZC0HMJOj6CDIue6F8ySu+bJL+85q1zBEIY8IklrJ1eiKC2NDRh3Ct3FcvmJPyQHb9erXMTJNw==", + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0" + } + }, + "node_modules/@vue/compiler-sfc/node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@vue/runtime-dom": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.13.tgz", + "integrity": "sha512-dLaj94s93NYLqjLiyFzVs9X6dWhTdAlEAciC3Moq7gzAc13VJUdCnjjRurNM6uTLFATRHexHCTu/Xp3eW6yoog==", + "license": "MIT", + "dependencies": { + "@vue/reactivity": "3.5.13", + "@vue/runtime-core": "3.5.13", + "@vue/shared": "3.5.13", + "csstype": "^3.1.3" + } + }, + 
"node_modules/@vue/runtime-dom/node_modules/@vue/reactivity": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.13.tgz", + "integrity": "sha512-NaCwtw8o48B9I6L1zl2p41OHo/2Z4wqYGGIK1Khu5T7yxrn+ATOixn/Udn2m+6kZKB/J7cuT9DbWWhRxqixACg==", + "license": "MIT", + "dependencies": { + "@vue/shared": "3.5.13" + } + }, + "node_modules/@vue/runtime-dom/node_modules/@vue/runtime-core": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.13.tgz", + "integrity": "sha512-Fj4YRQ3Az0WTZw1sFe+QDb0aXCerigEpw418pw1HBUKFtnQHWzwojaukAs2X/c9DQz4MQ4bsXTGlcpGxU/RCIw==", + "license": "MIT", + "dependencies": { + "@vue/reactivity": "3.5.13", + "@vue/shared": "3.5.13" + } + }, + "node_modules/@vue/runtime-dom/node_modules/csstype": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", + "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "license": "MIT" + }, + "node_modules/@vue/server-renderer": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.13.tgz", + "integrity": "sha512-wAi4IRJV/2SAW3htkTlB+dHeRmpTiVIK1OGLWV1yeStVSebSQQOwGwIq0D3ZIoBj2C2qpgz5+vX9iEBkTdk5YA==", + "license": "MIT", + "dependencies": { + "@vue/compiler-ssr": "3.5.13", + "@vue/shared": "3.5.13" + }, + "peerDependencies": { + "vue": "3.5.13" + } + }, + "node_modules/@vue/server-renderer/node_modules/@vue/compiler-ssr": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.13.tgz", + "integrity": "sha512-wMH6vrYHxQl/IybKJagqbquvxpWCuVYpoUJfCqFZwa/JY1GdATAQ+TgVtgrwwMZ0D07QhA99rs/EAAWfvG6KpA==", + "license": "MIT", + "dependencies": { + "@vue/compiler-dom": "3.5.13", + "@vue/shared": "3.5.13" + } + }, + "node_modules/@vue/shared": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.13.tgz", + "integrity": "sha512-/hnE/qP5ZoGpol0a5mDi45bOd7t3tjYJBjsgCsivow7D48cJeV5l05RD82lPqi7gRiphZM37rnhW1l6ZoCNNnQ==", + "license": "MIT" + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "license": "MIT" + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "license": "Python-2.0" + }, + "node_modules/autoprefixer": { + "version": "10.4.20", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.20.tgz", + "integrity": "sha512-XY25y5xSv/wEoqzDyXXME4AFfkZI0P23z6Fs3YgymDnKJkCGOnkL0iTxCa85UTqaSgfcqyf3UA6+c7wUvx/16g==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "browserslist": "^4.23.3", + "caniuse-lite": "^1.0.30001646", + "fraction.js": "^4.3.7", + "normalize-range": "^0.1.2", + "picocolors": "^1.0.1", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + 
"postcss": "^8.1.0" + } + }, + "node_modules/browserslist": { + "version": "4.24.2", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.2.tgz", + "integrity": "sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "caniuse-lite": "^1.0.30001669", + "electron-to-chromium": "^1.5.41", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.1" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/browserslist/node_modules/electron-to-chromium": { + "version": "1.5.67", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.67.tgz", + "integrity": "sha512-nz88NNBsD7kQSAGGJyp8hS6xSPtWwqNogA0mjtc2nUYeEf3nURK9qpV18TuBdDmEDgVWotS8Wkzf+V52dSQ/LQ==", + "license": "ISC" + }, + "node_modules/browserslist/node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/browserslist/node_modules/node-releases": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", + "license": "MIT" + }, + "node_modules/browserslist/node_modules/update-browserslist-db": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.0" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001684", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001684.tgz", + "integrity": "sha512-G1LRwLIQjBQoyq0ZJGqGIJUXzJ8irpbjHLpVRXDvBEScFJ9b17sgK6vlx0GAJFE21okD7zXl08rRRUfq6HdoEQ==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + 
"is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/chokidar/node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chokidar/node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/chokidar/node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/chokidar/node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/chokidar/node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/chokidar/node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/chokidar/node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": 
"sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/chokidar/node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/css-selector-tokenizer": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/css-selector-tokenizer/-/css-selector-tokenizer-0.8.0.tgz", + "integrity": "sha512-Jd6Ig3/pe62/qe5SBPTN8h8LeUg/pT4lLgtavPf7updwwHpvFzxvOQBHYj2LZDMjUnBzgvIUSjRcf6oT5HzHFg==", + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "fastparse": "^1.1.2" + } + }, + "node_modules/css-selector-tokenizer/node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/css-selector-tokenizer/node_modules/fastparse": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/fastparse/-/fastparse-1.1.2.tgz", + "integrity": "sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==", + "license": "MIT" + }, + "node_modules/culori": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/culori/-/culori-3.3.0.tgz", + "integrity": "sha512-pHJg+jbuFsCjz9iclQBqyL3B2HLCBF71BwVNujUYEvCeQMvV97R59MNK3R2+jgJ3a1fcZgI9B3vYgz8lzr/BFQ==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/daisyui": { + "version": "4.12.14", + "resolved": "https://registry.npmjs.org/daisyui/-/daisyui-4.12.14.tgz", + "integrity": "sha512-hA27cdBasdwd4/iEjn+aidoCrRroDuo3G5W9NDKaVCJI437Mm/3eSL/2u7MkZ0pt8a+TrYF3aT2pFVemTS3how==", + "license": "MIT", + "dependencies": { + "css-selector-tokenizer": "^0.8", + "culori": "^3", + "picocolors": "^1", + "postcss-js": "^4" + }, + "engines": { + "node": ">=16.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/daisyui" + } + }, + "node_modules/didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "license": "Apache-2.0" + }, + "node_modules/dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "license": "MIT" + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": 
"sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/esbuild/node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fast-glob/node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/fast-glob/node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fast-glob/node_modules/fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + 
"version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-glob/node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/fast-glob/node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/fast-glob/node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/fast-glob/node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/fraction.js": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", + "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "patreon", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "license": "MIT", + "dependencies": { + 
"is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob/node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jiti": { + "version": "1.21.6", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.6.tgz", + "integrity": "sha512-2yTgeWTWzMWkHu6Jp9NKgePDaYHbntiwvYuuJLbbN9vl7DC9DvXKOB2BC3ZZ92D3cvV/aflH0osDfwpHepQ53w==", + "license": "MIT", + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/lilconfig": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", + "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "license": "MIT", + "dependencies": { + "uc.micro": "^2.0.0" + } + }, + "node_modules/markdown-it": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" + }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + } + }, + "node_modules/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", + "license": "MIT" + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/micromatch/node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/micromatch/node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/micromatch/node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + 
"node_modules/micromatch/node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/micromatch/node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-range": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", + "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "license": "ISC" + }, + "node_modules/postcss": { + "version": "8.4.49", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.49.tgz", + "integrity": "sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.7", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-import": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "license": "MIT", + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-import/node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postcss-import/node_modules/read-cache": 
{ + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "license": "MIT", + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/postcss-js": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz", + "integrity": "sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==", + "license": "MIT", + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": "^8.4.21" + } + }, + "node_modules/postcss-js/node_modules/camelcase-css": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/postcss-load-config": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-4.0.2.tgz", + "integrity": "sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "lilconfig": "^3.0.0", + "yaml": "^2.3.4" + }, + "engines": { + "node": ">= 14" + }, + "peerDependencies": { + "postcss": ">=8.0.9", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "postcss": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/postcss-load-config/node_modules/lilconfig": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.2.tgz", + "integrity": "sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow==", + "license": "MIT", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/postcss-load-config/node_modules/yaml": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.6.1.tgz", + "integrity": "sha512-7r0XPzioN/Q9kXBro/XPnA6kznR73DHq+GXh5ON7ZozRO6aMjbmiBuKste2wslTFkC5d1dw0GooOCepZXJ2SAg==", + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/postcss-nested": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", + "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "postcss-selector-parser": "^6.1.1" + }, + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.2.14" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "integrity": 
"sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-selector-parser/node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-selector-parser/node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "license": "MIT" + }, + "node_modules/postcss/node_modules/nanoid": { + "version": "3.3.8", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", + "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/postcss/node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "license": "MIT", + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve/node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve/node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, 
+ "engines": { + "node": ">= 0.4" + } + }, + "node_modules/resolve/node_modules/is-core-module": { + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve/node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "license": "MIT" + }, + "node_modules/resolve/node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/rollup": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.28.0.tgz", + "integrity": "sha512-G9GOrmgWHBma4YfCcX8PjH0qhXSdH8B4HDE2o4/jaxj93S4DPCIDoLcXz99eWMji4hB29UFCEd7B2gwGJDR9cQ==", + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.6" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.28.0", + "@rollup/rollup-android-arm64": "4.28.0", + "@rollup/rollup-darwin-arm64": "4.28.0", + "@rollup/rollup-darwin-x64": "4.28.0", + "@rollup/rollup-freebsd-arm64": "4.28.0", + "@rollup/rollup-freebsd-x64": "4.28.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.28.0", + "@rollup/rollup-linux-arm-musleabihf": "4.28.0", + "@rollup/rollup-linux-arm64-gnu": "4.28.0", + "@rollup/rollup-linux-arm64-musl": "4.28.0", + "@rollup/rollup-linux-powerpc64le-gnu": "4.28.0", + "@rollup/rollup-linux-riscv64-gnu": "4.28.0", + "@rollup/rollup-linux-s390x-gnu": "4.28.0", + "@rollup/rollup-linux-x64-gnu": "4.28.0", + "@rollup/rollup-linux-x64-musl": "4.28.0", + "@rollup/rollup-win32-arm64-msvc": "4.28.0", + "@rollup/rollup-win32-ia32-msvc": "4.28.0", + "@rollup/rollup-win32-x64-msvc": "4.28.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/rollup/node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.28.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.28.0.tgz", + "integrity": "sha512-lmKx9yHsppblnLQZOGxdO66gT77bvdBtr/0P+TPOseowE7D9AJoBw8ZDULRasXRWf1Z86/gcOdpBrV6VDUY36Q==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/rollup/node_modules/@types/estree": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", + "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", + "license": "MIT" + }, + "node_modules/sucrase": { + "version": "3.35.0", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.0.tgz", + "integrity": "sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==", + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": 
"^0.3.2", + "commander": "^4.0.0", + "glob": "^10.3.10", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "ts-interface-checker": "^0.1.9" + }, + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/sucrase/node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/sucrase/node_modules/@jridgewell/gen-mapping": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", + "license": "MIT", + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/sucrase/node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/sucrase/node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/sucrase/node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "license": "MIT" + }, + "node_modules/sucrase/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/sucrase/node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/sucrase/node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": 
"https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/sucrase/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/sucrase/node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "license": "MIT" + }, + "node_modules/sucrase/node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, + "node_modules/sucrase/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/sucrase/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/sucrase/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "license": "MIT" + }, + "node_modules/sucrase/node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/sucrase/node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/sucrase/node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "license": "MIT" + }, + "node_modules/sucrase/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "license": "MIT" + }, + "node_modules/sucrase/node_modules/foreground-child": { + "version": "3.3.0", + "resolved": 
"https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", + "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sucrase/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sucrase/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/sucrase/node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/sucrase/node_modules/jackspeak": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/sucrase/node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "license": "MIT" + }, + "node_modules/sucrase/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "license": "ISC" + }, + "node_modules/sucrase/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sucrase/node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/sucrase/node_modules/mz": { + "version": 
"2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/sucrase/node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sucrase/node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "license": "BlueOak-1.0.0" + }, + "node_modules/sucrase/node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/sucrase/node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sucrase/node_modules/pirates": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", + "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/sucrase/node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/sucrase/node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/sucrase/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sucrase/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + 
"license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/sucrase/node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/sucrase/node_modules/string-width-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/sucrase/node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "license": "MIT" + }, + "node_modules/sucrase/node_modules/string-width-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/sucrase/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/sucrase/node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/sucrase/node_modules/strip-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/sucrase/node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/sucrase/node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": 
"sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "license": "MIT", + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/sucrase/node_modules/ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "license": "Apache-2.0" + }, + "node_modules/sucrase/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/sucrase/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/sucrase/node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/sucrase/node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/sucrase/node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/sucrase/node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "license": "MIT" + }, + "node_modules/sucrase/node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + 
"strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/sucrase/node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tailwindcss": { + "version": "3.4.15", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.15.tgz", + "integrity": "sha512-r4MeXnfBmSOuKUWmXe6h2CcyfzJCEk4F0pptO5jlnYSIViUkVmsawj80N5h2lO3gwcmSb4n3PuN+e+GC1Guylw==", + "license": "MIT", + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.6.0", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.3.2", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.21.6", + "lilconfig": "^2.1.0", + "micromatch": "^4.0.8", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": "^1.1.1", + "postcss": "^8.4.47", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.2", + "postcss-nested": "^6.2.0", + "postcss-selector-parser": "^6.1.2", + "resolve": "^1.22.8", + "sucrase": "^3.35.0" + }, + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", + "license": "MIT" + }, + "node_modules/vite": { + "version": "5.4.11", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.11.tgz", + "integrity": "sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==", + "license": "MIT", + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/vite-plugin-singlefile": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/vite-plugin-singlefile/-/vite-plugin-singlefile-2.0.3.tgz", + "integrity": "sha512-OEBEwdX8nCGPSdtaB1D7rryYnT+YfPTS8ojL1TDyeUF+bWDCTfRriQqw6T0vl9EbKI/KMg7szN3awst6cLrKkA==", + "license": "MIT", + "dependencies": { + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">18.0.0" + }, + "peerDependencies": { + "rollup": "^4.24.3", + "vite": "^5.4.10" + } + }, + "node_modules/vue": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.13.tgz", + "integrity": "sha512-wmeiSMxkZCSc+PM2w2VRsOYAZC8GdipNFRTsLSfodVqI9mbejKeXEGr8SckuLnrQPGe3oJN5c3K0vpoU9q/wCQ==", + 
"license": "MIT", + "dependencies": { + "@vue/compiler-dom": "3.5.13", + "@vue/compiler-sfc": "3.5.13", + "@vue/runtime-dom": "3.5.13", + "@vue/server-renderer": "3.5.13", + "@vue/shared": "3.5.13" + }, + "peerDependencies": { + "typescript": "*" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + } + } +} diff --git a/examples/server/webui/package.json b/examples/server/webui/package.json new file mode 100644 index 0000000000000..2a45ece14be85 --- /dev/null +++ b/examples/server/webui/package.json @@ -0,0 +1,23 @@ +{ + "name": "webui", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview" + }, + "devDependencies": { + "vite": "^5.4.10" + }, + "dependencies": { + "autoprefixer": "^10.4.20", + "daisyui": "^4.12.14", + "markdown-it": "^14.1.0", + "postcss": "^8.4.49", + "tailwindcss": "^3.4.15", + "vite-plugin-singlefile": "^2.0.3", + "vue": "^3.5.13" + } +} diff --git a/examples/server/webui/postcss.config.js b/examples/server/webui/postcss.config.js new file mode 100644 index 0000000000000..2e7af2b7f1a6f --- /dev/null +++ b/examples/server/webui/postcss.config.js @@ -0,0 +1,6 @@ +export default { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +} diff --git a/examples/server/public/completion.js b/examples/server/webui/src/completion.js similarity index 100% rename from examples/server/public/completion.js rename to examples/server/webui/src/completion.js diff --git a/examples/server/webui/src/main.js b/examples/server/webui/src/main.js new file mode 100644 index 0000000000000..9b5b123293362 --- /dev/null +++ b/examples/server/webui/src/main.js @@ -0,0 +1,456 @@ +import './styles.css'; +import { createApp, defineComponent, shallowRef, computed, h } from 'vue/dist/vue.esm-bundler.js'; +import { llama } from './completion.js'; +import MarkdownIt from 'markdown-it'; + +// utility functions +const isString = (x) => !!x.toLowerCase; +const isNumeric = (n) => !isString(n) && !isNaN(n); +const escapeAttr = (str) => str.replace(/>/g, '>').replace(/"/g, '"'); +const copyStr = (str) => navigator.clipboard.writeText(str); + +// constants +const BASE_URL = localStorage.getItem('base') // for debugging + || (new URL('.', document.baseURI).href).toString(); // for production +const CONFIG_DEFAULT = { + // Note: in order not to introduce breaking changes, please keep the same data type (number, string, etc) if you want to change the default value. Do not use null or undefined for default value. + apiKey: '', + systemMessage: 'You are a helpful assistant.', + // make sure these default values are in sync with `common.h` + samplers: 'dkypmxt', + temperature: 0.8, + dynatemp_range: 0.0, + dynatemp_exponent: 1.0, + top_k: 40, + top_p: 0.95, + min_p: 0.05, + xtc_probability: 0.0, + xtc_threshold: 0.1, + typical_p: 1.0, + repeat_last_n: 64, + repeat_penalty: 1.0, + presence_penalty: 0.0, + frequency_penalty: 0.0, + dry_multiplier: 0.0, + dry_base: 1.75, + dry_allowed_length: 2, + dry_penalty_last_n: -1, + max_tokens: -1, + custom: '', // custom json-stringified object +}; +const CONFIG_INFO = { + apiKey: 'Set the API Key if you are using --api-key option for the server.', + systemMessage: 'The starting message that defines how model should behave.', + samplers: 'The order at which samplers are applied, in simplified way. 
Default is "dkypmxt": dry->top_k->typ_p->top_p->min_p->xtc->temperature', + temperature: 'Controls the randomness of the generated text by affecting the probability distribution of the output tokens. Higher = more random, lower = more focused.', + dynatemp_range: 'Addon for the temperature sampler. The added value to the range of dynamic temperature, which adjusts probabilities by entropy of tokens.', + dynatemp_exponent: 'Addon for the temperature sampler. Smoothes out the probability redistribution based on the most probable token.', + top_k: 'Keeps only k top tokens.', + top_p: 'Limits tokens to those that together have a cumulative probability of at least p', + min_p: 'Limits tokens based on the minimum probability for a token to be considered, relative to the probability of the most likely token.', + xtc_probability: 'XTC sampler cuts out top tokens; this parameter controls the chance of cutting tokens at all. 0 disables XTC.', + xtc_threshold: 'XTC sampler cuts out top tokens; this parameter controls the token probability that is required to cut that token.', + typical_p: 'Sorts and limits tokens based on the difference between log-probability and entropy.', + repeat_last_n: 'Last n tokens to consider for penalizing repetition', + repeat_penalty: 'Controls the repetition of token sequences in the generated text', + presence_penalty: 'Limits tokens based on whether they appear in the output or not.', + frequency_penalty: 'Limits tokens based on how often they appear in the output.', + dry_multiplier: 'DRY sampling reduces repetition in generated text even across long contexts. This parameter sets the DRY sampling multiplier.', + dry_base: 'DRY sampling reduces repetition in generated text even across long contexts. This parameter sets the DRY sampling base value.', + dry_allowed_length: 'DRY sampling reduces repetition in generated text even across long contexts. This parameter sets the allowed length for DRY sampling.', + dry_penalty_last_n: 'DRY sampling reduces repetition in generated text even across long contexts. This parameter sets DRY penalty for the last n tokens.', + max_tokens: 'The maximum number of token per output.', + custom: '', // custom json-stringified object +}; +// config keys having numeric value (i.e. temperature, top_k, top_p, etc) +const CONFIG_NUMERIC_KEYS = Object.entries(CONFIG_DEFAULT).filter(e => isNumeric(e[1])).map(e => e[0]); +// list of themes supported by daisyui +const THEMES = ['light', 'dark', 'cupcake', 'bumblebee', 'emerald', 'corporate', 'synthwave', 'retro', 'cyberpunk', 'valentine', 'halloween', 'garden', 'forest', 'aqua', 'lofi', 'pastel', 'fantasy', 'wireframe', 'black', 'luxury', 'dracula', 'cmyk', 'autumn', 'business', 'acid', 'lemonade', 'night', 'coffee', 'winter', 'dim', 'nord', 'sunset']; + +// markdown support +const VueMarkdown = defineComponent( + (props) => { + const md = shallowRef(new MarkdownIt({ breaks: true })); + const origFenchRenderer = md.value.renderer.rules.fence; + md.value.renderer.rules.fence = (tokens, idx, ...args) => { + const content = tokens[idx].content; + const origRendered = origFenchRenderer(tokens, idx, ...args); + return `
+ + ${origRendered} +
`; + }; + window.copyStr = copyStr; + const content = computed(() => md.value.render(props.source)); + return () => h("div", { innerHTML: content.value }); + }, + { props: ["source"] } +); + +// input field to be used by settings modal +const SettingsModalShortInput = defineComponent({ + template: document.getElementById('settings-modal-short-input').innerHTML, + props: { + label: { type: String, required: false }, + configKey: String, + configDefault: Object, + configInfo: Object, + modelValue: [Object, String, Number], + }, +}); + +// coversations is stored in localStorage +// format: { [convId]: { id: string, lastModified: number, messages: [...] } } +// convId is a string prefixed with 'conv-' +const StorageUtils = { + // manage conversations + getAllConversations() { + const res = []; + for (const key in localStorage) { + if (key.startsWith('conv-')) { + res.push(JSON.parse(localStorage.getItem(key))); + } + } + res.sort((a, b) => b.lastModified - a.lastModified); + return res; + }, + // can return null if convId does not exist + getOneConversation(convId) { + return JSON.parse(localStorage.getItem(convId) || 'null'); + }, + // if convId does not exist, create one + appendMsg(convId, msg) { + if (msg.content === null) return; + const conv = StorageUtils.getOneConversation(convId) || { + id: convId, + lastModified: Date.now(), + messages: [], + }; + conv.messages.push(msg); + conv.lastModified = Date.now(); + localStorage.setItem(convId, JSON.stringify(conv)); + }, + getNewConvId() { + return `conv-${Date.now()}`; + }, + remove(convId) { + localStorage.removeItem(convId); + }, + filterAndKeepMsgs(convId, predicate) { + const conv = StorageUtils.getOneConversation(convId); + if (!conv) return; + conv.messages = conv.messages.filter(predicate); + conv.lastModified = Date.now(); + localStorage.setItem(convId, JSON.stringify(conv)); + }, + popMsg(convId) { + const conv = StorageUtils.getOneConversation(convId); + if (!conv) return; + const msg = conv.messages.pop(); + conv.lastModified = Date.now(); + if (conv.messages.length === 0) { + StorageUtils.remove(convId); + } else { + localStorage.setItem(convId, JSON.stringify(conv)); + } + return msg; + }, + + // manage config + getConfig() { + const savedVal = JSON.parse(localStorage.getItem('config') || '{}'); + // to prevent breaking changes in the future, we always provide default value for missing keys + return { + ...CONFIG_DEFAULT, + ...savedVal, + }; + }, + setConfig(config) { + localStorage.setItem('config', JSON.stringify(config)); + }, + getTheme() { + return localStorage.getItem('theme') || 'auto'; + }, + setTheme(theme) { + if (theme === 'auto') { + localStorage.removeItem('theme'); + } else { + localStorage.setItem('theme', theme); + } + }, +}; + +// scroll to bottom of chat messages +// if requiresNearBottom is true, only auto-scroll if user is near bottom +const chatScrollToBottom = (requiresNearBottom) => { + const msgListElem = document.getElementById('messages-list'); + const spaceToBottom = msgListElem.scrollHeight - msgListElem.scrollTop - msgListElem.clientHeight; + if (!requiresNearBottom || (spaceToBottom < 100)) { + setTimeout(() => msgListElem.scrollTo({ top: msgListElem.scrollHeight }), 1); + } +}; + +const mainApp = createApp({ + components: { + VueMarkdown, + SettingsModalShortInput, + }, + data() { + return { + conversations: StorageUtils.getAllConversations(), + messages: [], // { id: number, role: 'user' | 'assistant', content: string } + viewingConvId: StorageUtils.getNewConvId(), + inputMsg: '', + isGenerating: 
false, + pendingMsg: null, // the on-going message from assistant + stopGeneration: () => {}, + selectedTheme: StorageUtils.getTheme(), + config: StorageUtils.getConfig(), + showConfigDialog: false, + editingMsg: null, + // const + themes: THEMES, + configDefault: {...CONFIG_DEFAULT}, + configInfo: {...CONFIG_INFO}, + } + }, + computed: {}, + mounted() { + document.getElementById('app').classList.remove('opacity-0'); // show app + // scroll to the bottom when the pending message height is updated + const pendingMsgElem = document.getElementById('pending-msg'); + const resizeObserver = new ResizeObserver(() => { + if (this.isGenerating) chatScrollToBottom(true); + }); + resizeObserver.observe(pendingMsgElem); + }, + methods: { + hideSidebar() { + document.getElementById('toggle-drawer').checked = false; + }, + setSelectedTheme(theme) { + this.selectedTheme = theme; + StorageUtils.setTheme(theme); + }, + newConversation() { + if (this.isGenerating) return; + this.viewingConvId = StorageUtils.getNewConvId(); + this.editingMsg = null; + this.fetchMessages(); + chatScrollToBottom(); + this.hideSidebar(); + }, + setViewingConv(convId) { + if (this.isGenerating) return; + this.viewingConvId = convId; + this.editingMsg = null; + this.fetchMessages(); + chatScrollToBottom(); + this.hideSidebar(); + }, + deleteConv(convId) { + if (this.isGenerating) return; + if (window.confirm('Are you sure to delete this conversation?')) { + StorageUtils.remove(convId); + if (this.viewingConvId === convId) { + this.viewingConvId = StorageUtils.getNewConvId(); + this.editingMsg = null; + } + this.fetchConversation(); + this.fetchMessages(); + } + }, + downloadConv(convId) { + const conversation = StorageUtils.getOneConversation(convId); + if (!conversation) { + alert('Conversation not found.'); + return; + } + const conversationJson = JSON.stringify(conversation, null, 2); + const blob = new Blob([conversationJson], { type: 'application/json' }); + const url = URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = `conversation_${convId}.json`; + document.body.appendChild(a); + a.click(); + document.body.removeChild(a); + URL.revokeObjectURL(url); + }, + async sendMessage() { + if (!this.inputMsg) return; + const currConvId = this.viewingConvId; + + StorageUtils.appendMsg(currConvId, { + id: Date.now(), + role: 'user', + content: this.inputMsg, + }); + this.fetchConversation(); + this.fetchMessages(); + this.inputMsg = ''; + this.editingMsg = null; + this.generateMessage(currConvId); + chatScrollToBottom(); + }, + async generateMessage(currConvId) { + if (this.isGenerating) return; + this.pendingMsg = { id: Date.now()+1, role: 'assistant', content: null }; + this.isGenerating = true; + this.editingMsg = null; + + try { + const abortController = new AbortController(); + this.stopGeneration = () => abortController.abort(); + const params = { + messages: [ + { role: 'system', content: this.config.systemMessage }, + ...this.messages, + ], + stream: true, + cache_prompt: true, + samplers: this.config.samplers, + temperature: this.config.temperature, + dynatemp_range: this.config.dynatemp_range, + dynatemp_exponent: this.config.dynatemp_exponent, + top_k: this.config.top_k, + top_p: this.config.top_p, + min_p: this.config.min_p, + typical_p: this.config.typical_p, + xtc_probability: this.config.xtc_probability, + xtc_threshold: this.config.xtc_threshold, + repeat_last_n: this.config.repeat_last_n, + repeat_penalty: this.config.repeat_penalty, + presence_penalty: 
this.config.presence_penalty, + frequency_penalty: this.config.frequency_penalty, + dry_multiplier: this.config.dry_multiplier, + dry_base: this.config.dry_base, + dry_allowed_length: this.config.dry_allowed_length, + dry_penalty_last_n: this.config.dry_penalty_last_n, + max_tokens: this.config.max_tokens, + ...(this.config.custom.length ? JSON.parse(this.config.custom) : {}), + ...(this.config.apiKey ? { api_key: this.config.apiKey } : {}), + }; + const config = { + controller: abortController, + api_url: BASE_URL, + endpoint: '/chat/completions', + }; + for await (const chunk of llama(prompt, params, config)) { + const stop = chunk.data.stop; + const addedContent = chunk.data.choices[0].delta.content; + const lastContent = this.pendingMsg.content || ''; + if (addedContent) { + this.pendingMsg = { + id: this.pendingMsg.id, + role: 'assistant', + content: lastContent + addedContent, + }; + } + } + + StorageUtils.appendMsg(currConvId, this.pendingMsg); + this.fetchConversation(); + this.fetchMessages(); + setTimeout(() => document.getElementById('msg-input').focus(), 1); + } catch (error) { + if (error.name === 'AbortError') { + // user stopped the generation via stopGeneration() function + StorageUtils.appendMsg(currConvId, this.pendingMsg); + this.fetchConversation(); + this.fetchMessages(); + } else { + console.error(error); + alert(error); + // pop last user message + const lastUserMsg = StorageUtils.popMsg(currConvId); + this.inputMsg = lastUserMsg ? lastUserMsg.content : ''; + } + } + + this.pendingMsg = null; + this.isGenerating = false; + this.stopGeneration = () => {}; + this.fetchMessages(); + chatScrollToBottom(); + }, + + // message actions + regenerateMsg(msg) { + if (this.isGenerating) return; + // TODO: somehow keep old history (like how ChatGPT has different "tree"). This can be done by adding "sub-conversations" with "subconv-" prefix, and new message will have a list of subconvIds + const currConvId = this.viewingConvId; + StorageUtils.filterAndKeepMsgs(currConvId, (m) => m.id < msg.id); + this.fetchConversation(); + this.fetchMessages(); + this.generateMessage(currConvId); + }, + copyMsg(msg) { + copyStr(msg.content); + }, + editUserMsgAndRegenerate(msg) { + if (this.isGenerating) return; + const currConvId = this.viewingConvId; + const newContent = msg.content; + this.editingMsg = null; + StorageUtils.filterAndKeepMsgs(currConvId, (m) => m.id < msg.id); + StorageUtils.appendMsg(currConvId, { + id: Date.now(), + role: 'user', + content: newContent, + }); + this.fetchConversation(); + this.fetchMessages(); + this.generateMessage(currConvId); + }, + + // settings dialog methods + closeAndSaveConfigDialog() { + try { + if (this.config.custom.length) JSON.parse(this.config.custom); + } catch (error) { + alert('Invalid JSON for custom config. 
Please either fix it or leave it empty.'); + return; + } + for (const key of CONFIG_NUMERIC_KEYS) { + if (isNaN(this.config[key]) || this.config[key].toString().trim().length === 0) { + alert(`Invalid number for ${key} (expected an integer or a float)`); + return; + } + this.config[key] = parseFloat(this.config[key]); + } + this.showConfigDialog = false; + StorageUtils.setConfig(this.config); + }, + closeAndDiscardConfigDialog() { + this.showConfigDialog = false; + this.config = StorageUtils.getConfig(); + }, + resetConfigDialog() { + if (window.confirm('Are you sure to reset all settings?')) { + this.config = {...CONFIG_DEFAULT}; + } + }, + + // sync state functions + fetchConversation() { + this.conversations = StorageUtils.getAllConversations(); + }, + fetchMessages() { + this.messages = StorageUtils.getOneConversation(this.viewingConvId)?.messages ?? []; + }, + }, +}); +mainApp.config.errorHandler = alert; +try { + mainApp.mount('#app'); +} catch (err) { + console.error(err); + document.getElementById('app').innerHTML = `
+ Failed to start app. Please try clearing localStorage and try again.
+
+ +
`; +} diff --git a/examples/server/webui/src/styles.css b/examples/server/webui/src/styles.css new file mode 100644 index 0000000000000..67d35b99e49fc --- /dev/null +++ b/examples/server/webui/src/styles.css @@ -0,0 +1,26 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +.markdown { + h1, h2, h3, h4, h5, h6, ul, ol, li { all: revert; } + pre { + @apply whitespace-pre-wrap rounded-lg p-2; + border: 1px solid currentColor; + } + /* TODO: fix markdown table */ +} + +.show-on-hover { + @apply md:opacity-0 md:group-hover:opacity-100; +} +.btn-mini { + @apply cursor-pointer hover:shadow-md; +} +.chat-screen { max-width: 900px; } + +.chat-bubble-base-300 { + --tw-bg-opacity: 1; + --tw-text-opacity: 1; + @apply bg-base-300 text-base-content; +} diff --git a/examples/server/webui/tailwind.config.js b/examples/server/webui/tailwind.config.js new file mode 100644 index 0000000000000..c43066a19e7ad --- /dev/null +++ b/examples/server/webui/tailwind.config.js @@ -0,0 +1,16 @@ +/** @type {import('tailwindcss').Config} */ +export default { + content: [ + "./index.html", + "./src/**/*.{js,ts,jsx,tsx}", + ], + theme: { + extend: {}, + }, + plugins: [ + require('daisyui'), + ], + daisyui: { + themes: ['light', 'dark', 'cupcake', 'bumblebee', 'emerald', 'corporate', 'synthwave', 'retro', 'cyberpunk', 'valentine', 'halloween', 'garden', 'forest', 'aqua', 'lofi', 'pastel', 'fantasy', 'wireframe', 'black', 'luxury', 'dracula', 'cmyk', 'autumn', 'business', 'acid', 'lemonade', 'night', 'coffee', 'winter', 'dim', 'nord', 'sunset'], + } +} diff --git a/examples/server/webui/vite.config.js b/examples/server/webui/vite.config.js new file mode 100644 index 0000000000000..789bf9cbbf197 --- /dev/null +++ b/examples/server/webui/vite.config.js @@ -0,0 +1,36 @@ + +import { viteSingleFile } from 'vite-plugin-singlefile'; +import path from 'path'; +import fs from 'fs'; + +const GUIDE_FOR_FRONTEND = ` + +`.trim(); + +export default { + plugins: [ + viteSingleFile(), + (function llamaCppPlugin() { + let config; + return { + name: 'llamacpp:build', + apply: 'build', + async configResolved(_config) { + config = _config; + }, + writeBundle() { + const outputIndexHtml = path.join(config.build.outDir, 'index.html'); + const content = fs.readFileSync(outputIndexHtml, 'utf-8'); + + const targetOutputFile = path.join(config.build.outDir, '../../public/index.html'); + fs.writeFileSync(targetOutputFile, GUIDE_FOR_FRONTEND + '\n' + content); + } + } + })(), + ], +}; diff --git a/examples/simple-chat/CMakeLists.txt b/examples/simple-chat/CMakeLists.txt index 87723533b3226..567f7fbbbf43a 100644 --- a/examples/simple-chat/CMakeLists.txt +++ b/examples/simple-chat/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-simple-chat) add_executable(${TARGET} simple-chat.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/simple/CMakeLists.txt b/examples/simple/CMakeLists.txt index b63afbb8b3081..104ecabfd7236 100644 --- a/examples/simple/CMakeLists.txt +++ b/examples/simple/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-simple) add_executable(${TARGET} simple.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/speculative-simple/CMakeLists.txt 
b/examples/speculative-simple/CMakeLists.txt index 7a3a141c27994..aeaea74fcd1f1 100644 --- a/examples/speculative-simple/CMakeLists.txt +++ b/examples/speculative-simple/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-speculative-simple) add_executable(${TARGET} speculative-simple.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/speculative/CMakeLists.txt b/examples/speculative/CMakeLists.txt index aa208e7aaeeb0..c84196bd95b1e 100644 --- a/examples/speculative/CMakeLists.txt +++ b/examples/speculative/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-speculative) add_executable(${TARGET} speculative.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/examples/tokenize/CMakeLists.txt b/examples/tokenize/CMakeLists.txt index b704dcae18c52..1690b53e5d52b 100644 --- a/examples/tokenize/CMakeLists.txt +++ b/examples/tokenize/CMakeLists.txt @@ -2,4 +2,4 @@ set(TARGET llama-tokenize) add_executable(${TARGET} tokenize.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/ggml/CMakeLists.txt b/ggml/CMakeLists.txt index 70b5cfdf7fbb4..1b3d9896781f1 100644 --- a/ggml/CMakeLists.txt +++ b/ggml/CMakeLists.txt @@ -92,29 +92,33 @@ else() set(INS_ENB ON) endif() -option(GGML_CPU_HBM "ggml: use memkind for CPU HBM" OFF) -option(GGML_CPU_AARCH64 "ggml: use runtime weight conversion of Q4_0 to Q4_X_X" ON) - -option(GGML_AVX "ggml: enable AVX" ${INS_ENB}) -option(GGML_AVX2 "ggml: enable AVX2" ${INS_ENB}) -option(GGML_AVX512 "ggml: enable AVX512" OFF) -option(GGML_AVX512_VBMI "ggml: enable AVX512-VBMI" OFF) -option(GGML_AVX512_VNNI "ggml: enable AVX512-VNNI" OFF) -option(GGML_AVX512_BF16 "ggml: enable AVX512-BF16" OFF) -option(GGML_AMX_TILE "ggml: enable AMX-TILE" OFF) -option(GGML_AMX_INT8 "ggml: enable AMX-INT8" OFF) -option(GGML_AMX_BF16 "ggml: enable AMX-BF16" OFF) -option(GGML_FMA "ggml: enable FMA" ${INS_ENB}) +option(GGML_CPU_HBM "ggml: use memkind for CPU HBM" OFF) +option(GGML_CPU_AARCH64 "ggml: use runtime weight conversion of Q4_0 to Q4_X_X" ON) +option(GGML_AVX "ggml: enable AVX" ${INS_ENB}) +option(GGML_AVX_VNNI "ggml: enable AVX-VNNI" OFF) +option(GGML_AVX2 "ggml: enable AVX2" ${INS_ENB}) +option(GGML_AVX512 "ggml: enable AVX512F" OFF) +option(GGML_AVX512_VBMI "ggml: enable AVX512-VBMI" OFF) +option(GGML_AVX512_VNNI "ggml: enable AVX512-VNNI" OFF) +option(GGML_AVX512_BF16 "ggml: enable AVX512-BF16" OFF) if (NOT MSVC) - option(GGML_F16C "ggml: enable F16C" ${INS_ENB}) # in MSVC F16C is implied with AVX2/AVX512 + # in MSVC F16C and FMA is implied with AVX2/AVX512 + option(GGML_FMA "ggml: enable FMA" ${INS_ENB}) + option(GGML_F16C "ggml: enable F16C" ${INS_ENB}) + # MSVC does not seem to support AMX + option(GGML_AMX_TILE "ggml: enable AMX-TILE" OFF) + option(GGML_AMX_INT8 "ggml: enable AMX-INT8" OFF) + option(GGML_AMX_BF16 "ggml: enable AMX-BF16" OFF) endif() -option(GGML_LASX "ggml: enable lasx" ON) -option(GGML_LSX "ggml: enable lsx" ON) -option(GGML_RVV "ggml: enable rvv" ON) -option(GGML_SVE "ggml: enable SVE" OFF) 
+option(GGML_LASX "ggml: enable lasx" ON) +option(GGML_LSX "ggml: enable lsx" ON) +option(GGML_RVV "ggml: enable rvv" ON) +option(GGML_SVE "ggml: enable SVE" OFF) +option(GGML_CPU_ALL_VARIANTS "ggml: build all variants of the CPU backend (requires GGML_BACKEND_DL)" OFF) + if (WIN32) - set(GGML_WIN_VER "0x602" CACHE STRING "ggml: Windows Version") + set(GGML_WIN_VER "0x602" CACHE STRING "ggml: Windows version") endif() # ggml core @@ -161,7 +165,6 @@ set (GGML_METAL_MACOSX_VERSION_MIN "" CACHE STRING set (GGML_METAL_STD "" CACHE STRING "ggml: metal standard version (-std flag)") option(GGML_OPENMP "ggml: use OpenMP" ON) option(GGML_RPC "ggml: use RPC" OFF) -option(GGML_AMX "ggml: use AMX" OFF) option(GGML_SYCL "ggml: use SYCL" OFF) option(GGML_SYCL_F16 "ggml: use 16 bit floats for sycl calculations" OFF) set (GGML_SYCL_TARGET "INTEL" CACHE STRING @@ -180,11 +183,7 @@ option(GGML_BUILD_EXAMPLES "ggml: build examples" ${GGML_STANDALONE}) set(CMAKE_C_STANDARD 11) set(CMAKE_C_STANDARD_REQUIRED true) -if (GGML_SYCL) - set(CMAKE_CXX_STANDARD 17) -else() - set(CMAKE_CXX_STANDARD 11) -endif() +set(CMAKE_CXX_STANDARD 17) set(CMAKE_CXX_STANDARD_REQUIRED true) set(THREADS_PREFER_PTHREAD_FLAG ON) diff --git a/ggml/include/ggml-amx.h b/ggml/include/ggml-amx.h deleted file mode 100644 index 042d6d91998f7..0000000000000 --- a/ggml/include/ggml-amx.h +++ /dev/null @@ -1,25 +0,0 @@ -#pragma once - -#include "ggml.h" -#include "ggml-backend.h" - - -#ifdef __cplusplus -extern "C" { -#endif - -// buffer_type API -GGML_BACKEND_API ggml_backend_buffer_type_t ggml_backend_amx_buffer_type(void); - -GGML_BACKEND_API bool ggml_backend_is_amx(ggml_backend_t backend); - -// backend API -GGML_BACKEND_API ggml_backend_t ggml_backend_amx_init(void); - -GGML_BACKEND_API void ggml_backend_amx_set_n_threads(ggml_backend_t backend_amx, int n_threads); - -GGML_BACKEND_API ggml_backend_reg_t ggml_backend_amx_reg(void); - -#ifdef __cplusplus -} -#endif diff --git a/ggml/include/ggml-backend.h b/ggml/include/ggml-backend.h index 19881a5059f17..7221a08309274 100644 --- a/ggml/include/ggml-backend.h +++ b/ggml/include/ggml-backend.h @@ -228,6 +228,7 @@ extern "C" { GGML_API void ggml_backend_unload(ggml_backend_reg_t reg); // Load all known backends from dynamic libraries GGML_API void ggml_backend_load_all(void); + GGML_API void ggml_backend_load_all_from_path(const char * dir_path); // // Backend scheduler diff --git a/ggml/include/ggml-cpu.h b/ggml/include/ggml-cpu.h index e14ea9ea5301f..3aa71badb5fb0 100644 --- a/ggml/include/ggml-cpu.h +++ b/ggml/include/ggml-cpu.h @@ -103,24 +103,14 @@ extern "C" { // Internal types and functions exposed for tests and benchmarks - typedef void (*ggml_from_float_to_mat_t) - (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t nr, int64_t k, int64_t bs); typedef void (*ggml_vec_dot_t) (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT x, size_t bx, const void * GGML_RESTRICT y, size_t by, int nrc); - typedef void (*ggml_gemv_t) (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT x, - const void * GGML_RESTRICT y, int nr, int nc); - typedef void (*ggml_gemm_t) (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT x, - const void * GGML_RESTRICT y, int nr, int nc); struct ggml_type_traits_cpu { ggml_from_float_t from_float; - ggml_from_float_to_mat_t from_float_to_mat; ggml_vec_dot_t vec_dot; enum ggml_type vec_dot_type; int64_t nrows; // number of rows to process simultaneously - int64_t ncols; // number of columns to process 
simultaneously - ggml_gemv_t gemv; - ggml_gemm_t gemm; }; GGML_BACKEND_API const struct ggml_type_traits_cpu * ggml_get_type_traits_cpu(enum ggml_type type); @@ -140,13 +130,6 @@ extern "C" { GGML_BACKEND_API ggml_backend_reg_t ggml_backend_cpu_reg(void); -#ifdef GGML_USE_CPU_HBM - GGML_BACKEND_API ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type(void); -#endif - - GGML_BACKEND_API ggml_backend_buffer_type_t ggml_backend_cpu_aarch64_buffer_type(void); - GGML_BACKEND_API bool ggml_backend_cpu_buft_is_aarch64(ggml_backend_buffer_type_t buft); - #ifdef __cplusplus } #endif diff --git a/ggml/include/ggml.h b/ggml/include/ggml.h index 3525a920f4f51..b0c1ac9ce2b89 100644 --- a/ggml/include/ggml.h +++ b/ggml/include/ggml.h @@ -386,15 +386,15 @@ extern "C" { GGML_TYPE_F64 = 28, GGML_TYPE_IQ1_M = 29, GGML_TYPE_BF16 = 30, - GGML_TYPE_Q4_0_4_4 = 31, - GGML_TYPE_Q4_0_4_8 = 32, - GGML_TYPE_Q4_0_8_8 = 33, + // GGML_TYPE_Q4_0_4_4 = 31, support has been removed from gguf files + // GGML_TYPE_Q4_0_4_8 = 32, + // GGML_TYPE_Q4_0_8_8 = 33, GGML_TYPE_TQ1_0 = 34, GGML_TYPE_TQ2_0 = 35, - GGML_TYPE_IQ4_NL_4_4 = 36, + // GGML_TYPE_IQ4_NL_4_4 = 36, // GGML_TYPE_IQ4_NL_4_8 = 37, // GGML_TYPE_IQ4_NL_8_8 = 38, - GGML_TYPE_COUNT, + GGML_TYPE_COUNT = 39, }; // precision @@ -435,9 +435,6 @@ extern "C" { GGML_FTYPE_MOSTLY_IQ4_XS = 22, // except 1d tensors GGML_FTYPE_MOSTLY_IQ1_M = 23, // except 1d tensors GGML_FTYPE_MOSTLY_BF16 = 24, // except 1d tensors - GGML_FTYPE_MOSTLY_Q4_0_4_4 = 25, // except 1d tensors - GGML_FTYPE_MOSTLY_Q4_0_4_8 = 26, // except 1d tensors - GGML_FTYPE_MOSTLY_Q4_0_8_8 = 27, // except 1d tensors }; // available tensor operations: @@ -501,6 +498,7 @@ extern "C" { GGML_OP_POOL_2D_BACK, GGML_OP_UPSCALE, // nearest interpolate GGML_OP_PAD, + GGML_OP_PAD_REFLECT_1D, GGML_OP_ARANGE, GGML_OP_TIMESTEP_EMBEDDING, GGML_OP_ARGSORT, @@ -1713,6 +1711,13 @@ extern "C" { int p2, int p3); + // pad each dimension with reflection: [a, b, c, d] -> [b, a, b, c, d, c] + GGML_API struct ggml_tensor * ggml_pad_reflect_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int p0, + int p1); + // Ref: https://github.com/CompVis/stable-diffusion/blob/main/ldm/modules/diffusionmodules/util.py#L151 // timesteps: [N,] // return: [N, dim] @@ -2215,11 +2220,19 @@ extern "C" { GGML_API size_t gguf_get_meta_size(const struct gguf_context * ctx); GGML_API void gguf_get_meta_data(const struct gguf_context * ctx, void * data); -#ifdef __cplusplus -// restrict not standard in C++ -#define GGML_RESTRICT +#ifdef __cplusplus + // restrict not standard in C++ +# if defined(__GNUC__) +# define GGML_RESTRICT __restrict__ +# elif defined(__clang__) +# define GGML_RESTRICT __restrict +# elif defined(_MSC_VER) +# define GGML_RESTRICT __restrict +# else +# define GGML_RESTRICT +# endif #else -#define GGML_RESTRICT restrict +# define GGML_RESTRICT restrict #endif typedef void (*ggml_to_float_t) (const void * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); typedef void (*ggml_from_float_t)(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); diff --git a/ggml/src/CMakeLists.txt b/ggml/src/CMakeLists.txt index 9022aa3ae197d..a267a8b596bd1 100644 --- a/ggml/src/CMakeLists.txt +++ b/ggml/src/CMakeLists.txt @@ -220,9 +220,7 @@ add_library(ggml-base ggml-threading.cpp ggml-threading.h ggml-quants.c - ggml-quants.h - ggml-aarch64.c - ggml-aarch64.h) + ggml-quants.h) target_include_directories(ggml-base PRIVATE .) 
@@ -261,21 +259,50 @@ function(ggml_add_backend backend) if (${backend_id}) string(TOLOWER "ggml-${backend}" backend_target) add_subdirectory(${backend_target}) - # check again in case the backend disabled itself - # note that this should NOT be the normal behavior, in case of errors the backend should fail the build - # however, currently it is necessary for AMX, since it is enabled by default on llama.cpp - if (${backend_id}) - message(STATUS "Including ${backend} backend") - if (NOT GGML_BACKEND_DL) - string(TOUPPER "GGML_USE_${backend}" backend_use) - target_compile_definitions(ggml PUBLIC ${backend_use}) - endif() + message(STATUS "Including ${backend} backend") + if (NOT GGML_BACKEND_DL) + string(TOUPPER "GGML_USE_${backend}" backend_use) + target_compile_definitions(ggml PUBLIC ${backend_use}) endif() endif() endfunction() +function(ggml_add_cpu_backend_variant tag_name) + set(GGML_CPU_TAG_NAME ${tag_name}) + # other: OPENMP LLAMAFILE CPU_HBM + foreach (feat NATIVE + AVX AVX2 AVX_VNNI FMA F16C + AVX512 AVX512_VBMI AVX512_VNNI AVX512_BF16 + AMX_TILE AMX_INT8 AMX_BF16) + set(GGML_${feat} OFF) + endforeach() + + foreach (feat ${ARGN}) + set(GGML_${feat} ON) + endforeach() + + ggml_add_cpu_backend_variant_impl(${tag_name}) +endfunction() + ggml_add_backend(CPU) -ggml_add_backend(AMX) + +if (GGML_CPU_ALL_VARIANTS) + if (NOT GGML_BACKEND_DL) + message(FATAL_ERROR "GGML_CPU_ALL_VARIANTS requires GGML_BACKEND_DL") + endif() + ggml_add_cpu_backend_variant(sandybridge AVX) + ggml_add_cpu_backend_variant(haswell AVX F16C AVX2 FMA) + ggml_add_cpu_backend_variant(skylakex AVX F16C AVX2 FMA AVX512) + ggml_add_cpu_backend_variant(icelake AVX F16C AVX2 FMA AVX512 AVX512_VBMI AVX512_VNNI) + if (NOT MSVC) + # MSVC doesn't support AVX-VNNI or AMX + ggml_add_cpu_backend_variant(alderlake AVX F16C AVX2 FMA AVX_VNNI) + ggml_add_cpu_backend_variant(sapphirerapids AVX F16C AVX2 FMA AVX512 AVX512_VBMI AVX512_VNNI AVX512_BF16 AMX_TILE AMX_INT8) + endif() +else () + ggml_add_cpu_backend_variant_impl("") +endif() + ggml_add_backend(BLAS) ggml_add_backend(CANN) ggml_add_backend(CUDA) @@ -289,7 +316,7 @@ ggml_add_backend(Vulkan) foreach (target ggml-base ggml) target_include_directories(${target} PUBLIC $ $) - target_compile_features (${target} PRIVATE c_std_11) # don't bump + target_compile_features (${target} PRIVATE c_std_11 cxx_std_17) # don't bump endforeach() target_link_libraries(ggml-base PRIVATE Threads::Threads) diff --git a/ggml/src/ggml-aarch64.c b/ggml/src/ggml-aarch64.c deleted file mode 100644 index 0139120519cea..0000000000000 --- a/ggml/src/ggml-aarch64.c +++ /dev/null @@ -1,129 +0,0 @@ -#define GGML_COMMON_DECL_C -#include "ggml-common.h" - -#include "ggml-aarch64.h" -#include "ggml-impl.h" -#include "ggml-quants.h" -#include - -#define UNUSED GGML_UNUSED - -static block_q4_0x4 make_block_q4_0x4(block_q4_0 * in, unsigned int blck_size_interleave) { - block_q4_0x4 out; - - for (int i = 0; i < 4; i++) { - out.d[i] = in[i].d; - } - - const int end = QK4_0 * 2 / blck_size_interleave; - - if (blck_size_interleave == 8) { - const uint64_t xor_mask = 0x8888888888888888ULL; - for (int i = 0; i < end; ++i) { - int src_id = i % 4; - int src_offset = (i / 4) * blck_size_interleave; - int dst_offset = i * blck_size_interleave; - - uint64_t elems; - // Using memcpy to avoid unaligned memory accesses - memcpy(&elems, &in[src_id].qs[src_offset], sizeof(uint64_t)); - elems ^= xor_mask; - memcpy(&out.qs[dst_offset], &elems, sizeof(uint64_t)); - } - } else if (blck_size_interleave == 4) { - const uint32_t 
xor_mask = 0x88888888; - for (int i = 0; i < end; ++i) { - int src_id = i % 4; - int src_offset = (i / 4) * blck_size_interleave; - int dst_offset = i * blck_size_interleave; - - uint32_t elems; - memcpy(&elems, &in[src_id].qs[src_offset], sizeof(uint32_t)); - elems ^= xor_mask; - memcpy(&out.qs[dst_offset], &elems, sizeof(uint32_t)); - } - } else { - GGML_ASSERT(false); - } - - return out; -} - -// interleave 8 block_q4_0s in blocks of blck_size_interleave -// returns an interleaved block_q4_0x8 -// in the interleaved block_q4_0x8, place deltas for 8 block_q4_0 blocks -// first, then interleave quants from 8 block_q4_0s in blocks of blck_size_interleave -static block_q4_0x8 make_block_q4_0x8(block_q4_0 * in, unsigned int blck_size_interleave) { - block_q4_0x8 out; - - for (int i = 0; i < 8; i++) { - out.d[i] = in[i].d; - } - - const int end = QK4_0 * 4 / blck_size_interleave; - const uint64_t xor_mask = 0x8888888888888888ULL; - - for (int i = 0; i < end; ++i) { - int src_id = i % 8; - int src_offset = (i / 8) * blck_size_interleave; - int dst_offset = i * blck_size_interleave; - - uint64_t elems; - memcpy(&elems, &in[src_id].qs[src_offset], sizeof(uint64_t)); - elems ^= xor_mask; - memcpy(&out.qs[dst_offset], &elems, sizeof(uint64_t)); - } - - return out; -} - -static size_t quantize_q4_0_nr_bl(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, int nrows_interleaved, int blck_size_interleave) { - assert(n_per_row % QK4_0 == 0); - const int nb = n_per_row / QK4_0; - - void * out_ptr = NULL; - if (nrows_interleaved == 8) { - out_ptr = (block_q4_0x8 *) dst; - } - else if (nrows_interleaved == 4) { - out_ptr = (block_q4_0x4 *) dst; - } - assert(nrows_interleaved <= 8); - block_q4_0 dst_tmp[8]; - - for (int b = 0; b < (nrow * n_per_row); b += nrows_interleaved * n_per_row) { - - for (int64_t x = 0; x < nb; x++) { - - for (int i = 0; i < nrows_interleaved; i++ ) { - quantize_row_q4_0_ref(src + b + i * n_per_row + x * QK4_0, (block_q4_0 *) dst_tmp + i, QK4_0); - } - - if (nrows_interleaved == 8) { - *(block_q4_0x8 *) out_ptr = make_block_q4_0x8(dst_tmp, blck_size_interleave); - out_ptr = (block_q4_0x8 *) out_ptr + 1; - } - else if (nrows_interleaved == 4) { - *(block_q4_0x4 *) out_ptr = make_block_q4_0x4(dst_tmp, blck_size_interleave); - out_ptr = (block_q4_0x4 *) out_ptr + 1; - } - } - } - - return ((nrow * n_per_row) / QK4_0 * sizeof(block_q4_0)); -} - -size_t quantize_q4_0_4x4(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { - UNUSED(quant_weights); - return quantize_q4_0_nr_bl(src, dst, nrow, n_per_row, 4, 4); -} - -size_t quantize_q4_0_4x8(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { - UNUSED(quant_weights); - return quantize_q4_0_nr_bl(src, dst, nrow, n_per_row, 4, 8); -} - -size_t quantize_q4_0_8x8(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { - UNUSED(quant_weights); - return quantize_q4_0_nr_bl(src, dst, nrow, n_per_row, 8, 8); -} diff --git a/ggml/src/ggml-aarch64.h b/ggml/src/ggml-aarch64.h deleted file mode 100644 index a578685911be6..0000000000000 --- a/ggml/src/ggml-aarch64.h +++ /dev/null @@ -1,19 +0,0 @@ -#pragma once - -#include "ggml.h" - -// GGML internal header - -#ifdef __cplusplus -extern "C" { -#endif - -// Quantization utilizing an importance matrix (a.k.a. 
"Activation aWare Quantization") -size_t quantize_q4_0_4x4(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); -size_t quantize_q4_0_4x8(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); -size_t quantize_q4_0_8x8(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); - -#ifdef __cplusplus -} -#endif - diff --git a/ggml/src/ggml-amx/CMakeLists.txt b/ggml/src/ggml-amx/CMakeLists.txt deleted file mode 100644 index cf3ade6f020ed..0000000000000 --- a/ggml/src/ggml-amx/CMakeLists.txt +++ /dev/null @@ -1,105 +0,0 @@ -if (CMAKE_OSX_ARCHITECTURES STREQUAL "x86_64" OR CMAKE_GENERATOR_PLATFORM_LWR MATCHES "^(x86_64|i686|amd64|x64|win32)$" OR - (NOT CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_GENERATOR_PLATFORM_LWR AND - CMAKE_SYSTEM_PROCESSOR MATCHES "^(x86_64|i686|AMD64)$") AND - CMAKE_COMPILER_IS_GNUCC AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 11.0) - message(STATUS "Using AMX") - - file(GLOB GGML_HEADERS_AMX "*.h") - list(APPEND GGML_HEADERS_AMX "../../include/ggml-amx.h") - - file(GLOB GGML_SOURCES_AMX "*.cpp") - - ggml_add_backend_library(ggml-amx - ${GGML_HEADERS_AMX} - ${GGML_SOURCES_AMX} - ) - - # this is duplicated from the CPU backend, since the AMX backend also depends on the architecture flags - # TODO: integrate AMX backend into the CPU backend - if (MSVC) - # instruction set detection for MSVC only - if (GGML_NATIVE) - # TODO: improve, should not reference files from the parent folder - include(../ggml-cpu/cmake/FindSIMD.cmake) - endif () - if (GGML_AVX512) - list(APPEND ARCH_FLAGS /arch:AVX512) - # MSVC has no compile-time flags enabling specific - # AVX512 extensions, neither it defines the - # macros corresponding to the extensions. - # Do it manually. 
- if (GGML_AVX512_VBMI) - add_compile_definitions($<$:__AVX512VBMI__>) - add_compile_definitions($<$:__AVX512VBMI__>) - endif() - if (GGML_AVX512_VNNI) - add_compile_definitions($<$:__AVX512VNNI__>) - add_compile_definitions($<$:__AVX512VNNI__>) - endif() - if (GGML_AVX512_BF16) - add_compile_definitions($<$:__AVX512BF16__>) - add_compile_definitions($<$:__AVX512BF16__>) - endif() - if (GGML_AMX_TILE) - add_compile_definitions($<$:__AMX_TILE__>) - add_compile_definitions($<$:__AMX_TILE__>) - endif() - if (GGML_AMX_INT8) - add_compile_definitions($<$:__AMX_INT8__>) - add_compile_definitions($<$:__AMX_INT8__>) - endif() - if (GGML_AMX_BF16) - add_compile_definitions($<$:__AMX_BF16__>) - add_compile_definitions($<$:__AMX_BF16__>) - endif() - elseif (GGML_AVX2) - list(APPEND ARCH_FLAGS /arch:AVX2) - elseif (GGML_AVX) - list(APPEND ARCH_FLAGS /arch:AVX) - endif() - else() - if (GGML_NATIVE) - list(APPEND ARCH_FLAGS -march=native) - endif() - if (GGML_F16C) - list(APPEND ARCH_FLAGS -mf16c) - endif() - if (GGML_FMA) - list(APPEND ARCH_FLAGS -mfma) - endif() - if (GGML_AVX) - list(APPEND ARCH_FLAGS -mavx) - endif() - if (GGML_AVX2) - list(APPEND ARCH_FLAGS -mavx2) - endif() - if (GGML_AVX512) - list(APPEND ARCH_FLAGS -mavx512f) - list(APPEND ARCH_FLAGS -mavx512dq) - list(APPEND ARCH_FLAGS -mavx512bw) - endif() - if (GGML_AVX512_VBMI) - list(APPEND ARCH_FLAGS -mavx512vbmi) - endif() - if (GGML_AVX512_VNNI) - list(APPEND ARCH_FLAGS -mavx512vnni) - endif() - if (GGML_AVX512_BF16) - list(APPEND ARCH_FLAGS -mavx512bf16) - endif() - if (GGML_AMX_TILE) - list(APPEND ARCH_FLAGS -mamx-tile) - endif() - if (GGML_AMX_INT8) - list(APPEND ARCH_FLAGS -mamx-int8) - endif() - if (GGML_AMX_BF16) - list(APPEND ARCH_FLAGS -mamx-bf16) - endif() - endif() - - target_compile_options(ggml-amx PRIVATE ${ARCH_FLAGS}) -else() - set(GGML_AMX OFF PARENT_SCOPE) - message(WARNING "AMX requires x86 and gcc version > 11.0. 
Turning off GGML_AMX.") -endif() diff --git a/ggml/src/ggml-amx/ggml-amx.cpp b/ggml/src/ggml-amx/ggml-amx.cpp deleted file mode 100644 index 6bfb3da274c39..0000000000000 --- a/ggml/src/ggml-amx/ggml-amx.cpp +++ /dev/null @@ -1,449 +0,0 @@ -#include "ggml-amx.h" -#include "ggml-amx/common.h" -#include "ggml-amx/mmq.h" -#include "ggml-backend-impl.h" -#include "ggml-impl.h" - -#if defined(__gnu_linux__) -#include -#include -#endif - -#include -#include -#include - -#if defined(__AMX_INT8__) - -// AMX buffer interface -static void ggml_backend_amx_buffer_free_buffer(ggml_backend_buffer_t buffer) { - free(buffer->context); -} - -static void * ggml_backend_amx_buffer_get_base(ggml_backend_buffer_t buffer) { - return (void *)(buffer->context); -} - -static void ggml_backend_amx_buffer_memset_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, uint8_t value, size_t offset, size_t size) { - memset((char *)tensor->data + offset, value, size); - - GGML_UNUSED(buffer); -} - -static void ggml_backend_amx_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - if (qtype_has_amx_kernels(tensor->type)) { - ggml_backend_amx_convert_weight(tensor, data, offset, size); - } else { - memcpy((char *)tensor->data + offset, data, size); - } - - GGML_UNUSED(buffer); -} - -static void ggml_backend_amx_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { - GGML_ASSERT(!qtype_has_amx_kernels(tensor->type)); - memcpy(data, (const char *)tensor->data + offset, size); - - GGML_UNUSED(buffer); -} - -static bool ggml_backend_amx_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { - if (ggml_backend_buffer_is_host(src->buffer)) { - if (qtype_has_amx_kernels(src->type)) { - ggml_backend_amx_convert_weight(dst, src->data, 0, ggml_backend_amx_get_alloc_size(dst)); - } else { - memcpy(dst->data, src->data, ggml_nbytes(src)); - } - return true; - } - return false; - - GGML_UNUSED(buffer); -} - -static void ggml_backend_amx_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { - memset(buffer->context, value, buffer->size); -} - -static ggml_backend_buffer_i ggml_backend_amx_buffer_interface = { - /* .free_buffer = */ ggml_backend_amx_buffer_free_buffer, - /* .get_base = */ ggml_backend_amx_buffer_get_base, - /* .init_tensor = */ NULL, // no initialization required - /* .memset_tensor = */ ggml_backend_amx_buffer_memset_tensor, - /* .set_tensor = */ ggml_backend_amx_buffer_set_tensor, - /* .get_tensor = */ ggml_backend_amx_buffer_get_tensor, - /* .cpy_tensor = */ ggml_backend_amx_buffer_cpy_tensor, - /* .clear = */ ggml_backend_amx_buffer_clear, - /* .reset = */ NULL, -}; - -static const char * ggml_backend_amx_buffer_type_get_name(ggml_backend_buffer_type_t buft) { - return "AMX"; - - GGML_UNUSED(buft); -} - -static ggml_backend_buffer_t ggml_backend_amx_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { - void * data = aligned_alloc(TENSOR_ALIGNMENT, size); - if (data == NULL) { - fprintf(stderr, "%s: failed to allocate buffer of size %zu\n", __func__, size); - return NULL; - } - - return ggml_backend_buffer_init(buft, ggml_backend_amx_buffer_interface, data, size); -} - -static size_t ggml_backend_amx_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { - return TENSOR_ALIGNMENT; - - GGML_UNUSED(buft); -} - -static size_t 
ggml_backend_amx_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor* tensor) { - return ggml_backend_amx_get_alloc_size(tensor); - - GGML_UNUSED(buft); -} - -static bool ggml_backend_amx_buffer_type_is_host(ggml_backend_buffer_type_t buft) { - return false; - - GGML_UNUSED(buft); -} - -ggml_backend_buffer_type_t ggml_backend_amx_buffer_type() { - static struct ggml_backend_buffer_type ggml_backend_buffer_type_amx = { - /* .iface = */ { - /* .get_name = */ ggml_backend_amx_buffer_type_get_name, - /* .alloc_buffer = */ ggml_backend_amx_buffer_type_alloc_buffer, - /* .get_alignment = */ ggml_backend_amx_buffer_type_get_alignment, - /* .get_max_size = */ NULL, // defaults to SIZE_MAX - /* .get_alloc_size = */ ggml_backend_amx_buffer_type_get_alloc_size, - /* .is_host = */ ggml_backend_amx_buffer_type_is_host, - }, - /* .device = */ ggml_backend_reg_dev_get(ggml_backend_amx_reg(), 0), - /* .context = */ NULL, - }; - - return &ggml_backend_buffer_type_amx; -} - -// backend interface - -static const char * ggml_backend_amx_name(ggml_backend_t backend) { - return "AMX"; - - GGML_UNUSED(backend); -} - -static void ggml_backend_amx_free(ggml_backend_t backend) { - ggml_backend_amx_context * ctx = (ggml_backend_amx_context *)backend->context; - delete ctx; - delete backend; -} - -static enum ggml_status ggml_backend_amx_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { - ggml_backend_amx_context * ctx = (ggml_backend_amx_context *)backend->context; - - for (int i = 0; i < cgraph->n_nodes; i++) { - struct ggml_tensor * node = cgraph->nodes[i]; - - switch (node->op) { - case GGML_OP_MUL_MAT: - ggml_backend_amx_mul_mat(ctx, node); - break; - - case GGML_OP_NONE: - case GGML_OP_RESHAPE: - case GGML_OP_VIEW: - case GGML_OP_PERMUTE: - case GGML_OP_TRANSPOSE: - break; - - default: - fprintf(stderr, "%s: unsupported op %s\n", __func__, ggml_op_desc(node)); - GGML_ASSERT(false); - } - } - - return GGML_STATUS_SUCCESS; - - GGML_UNUSED(backend); -} - -static struct ggml_backend_i ggml_backend_amx_i = { - /* .get_name = */ ggml_backend_amx_name, - /* .free = */ ggml_backend_amx_free, - /* .set_tensor_async = */ NULL, - /* .get_tensor_async = */ NULL, - /* .cpy_tensor_async = */ NULL, - /* .synchronize = */ NULL, - /* .graph_plan_create = */ NULL, - /* .graph_plan_free = */ NULL, - /* .graph_plan_update = */ NULL, - /* .graph_plan_compute = */ NULL, - /* .graph_compute = */ ggml_backend_amx_graph_compute, - /* .event_record = */ NULL, - /* .event_wait = */ NULL, -}; - -static ggml_guid_t ggml_backend_amx_guid() { - static ggml_guid guid = { 0x13, 0xb8, 0xa4, 0xc4, 0xba, 0xfe, 0x51, 0x67, 0x87, 0x44, 0x55, 0x15, 0xb2, 0x35, 0x62, 0x3e }; - return &guid; -} - -#define ARCH_GET_XCOMP_PERM 0x1022 -#define ARCH_REQ_XCOMP_PERM 0x1023 -#define XFEATURE_XTILECFG 17 -#define XFEATURE_XTILEDATA 18 - -static bool ggml_amx_init() { -#if defined(__gnu_linux__) - if (syscall(SYS_arch_prctl, ARCH_REQ_XCOMP_PERM, XFEATURE_XTILEDATA)) { - fprintf(stderr, "AMX is not ready to be used!\n"); - return false; - } - return true; -#elif defined(_WIN32) - return true; -#endif -} - -ggml_backend_t ggml_backend_amx_init() { - - // invoke a Linux system call to request access to AMX features - ggml_amx_init(); - - // backend context - ggml_backend_amx_context * ctx = new ggml_backend_amx_context; - - // ggml amx backend - ggml_backend_t backend = new ggml_backend { - /* .guid = */ ggml_backend_amx_guid(), - /* .interface = */ ggml_backend_amx_i, - /* .device = */ 
ggml_backend_reg_dev_get(ggml_backend_amx_reg(), 0), - /* .context = */ ctx, - }; - - return backend; -} - -bool ggml_backend_is_amx(ggml_backend_t backend) { - return backend != NULL && ggml_guid_matches(backend->guid, ggml_backend_amx_guid()); -} - -void ggml_backend_amx_set_n_threads(ggml_backend_t backend_amx, int n_threads) { - GGML_ASSERT(ggml_backend_is_amx(backend_amx)); - - ggml_backend_amx_context * ctx = (ggml_backend_amx_context *)backend_amx->context; - ctx->n_threads = n_threads; -} - -// device interface - -static const char * ggml_backend_amx_device_get_name(ggml_backend_dev_t dev) { - return "AMX"; - - GGML_UNUSED(dev); -} - -static const char * ggml_backend_amx_device_get_description(ggml_backend_dev_t dev) { - return "Intel Advanced Matrix Extensions"; - - GGML_UNUSED(dev); -} - -static void ggml_backend_amx_device_get_memory(ggml_backend_dev_t dev, size_t * free, size_t * total) { - // TODO - *free = 0; - *total = 0; - - GGML_UNUSED(dev); -} - -static enum ggml_backend_dev_type ggml_backend_amx_device_get_type(ggml_backend_dev_t dev) { - return GGML_BACKEND_DEVICE_TYPE_ACCEL; - - GGML_UNUSED(dev); -} - -static void ggml_backend_amx_device_get_props(ggml_backend_dev_t dev, struct ggml_backend_dev_props * props) { - props->name = ggml_backend_amx_device_get_name(dev); - props->description = ggml_backend_amx_device_get_description(dev); - props->type = ggml_backend_amx_device_get_type(dev); - ggml_backend_amx_device_get_memory(dev, &props->memory_free, &props->memory_total); - - // `buffer_from_host_ptr` is intended to be used in mmap, when memory layout unchanged - props->caps = { - /* .async = */ false, - /* .host_buffer = */ false, - /* .buffer_from_host_ptr = */ false, - /* .events = */ false, - }; -} - -static ggml_backend_t ggml_backend_amx_device_init(ggml_backend_dev_t dev, const char * params) { - return ggml_backend_amx_init(); - - GGML_UNUSED(dev); - GGML_UNUSED(params); -} - -static ggml_backend_buffer_type_t ggml_backend_amx_device_get_buffer_type(ggml_backend_dev_t dev) { - return ggml_backend_amx_buffer_type(); - - GGML_UNUSED(dev); -} - -static bool ggml_backend_amx_device_supports_op(ggml_backend_dev_t dev, const struct ggml_tensor * op) { - - // handle only 2d gemm for now - auto is_contiguous_2d = [](const struct ggml_tensor * t) { - return ggml_is_contiguous(t) && t->ne[3] == 1 && t->ne[2] == 1; - }; - - switch (op->op) { - case GGML_OP_NONE: - case GGML_OP_RESHAPE: - case GGML_OP_VIEW: - case GGML_OP_PERMUTE: - case GGML_OP_TRANSPOSE: - return true; - - case GGML_OP_MUL_MAT: { - const struct ggml_tensor * src0 = op->src[0]; - const struct ggml_tensor * src1 = op->src[1]; - - const enum ggml_type type = src0->type; - const int64_t ne0 = op->ne[0]; - - // amx kernels enables for Q4_0, Q4_1, Q8_0, F16 - // Q4_K, Q5_K, Q6_K, IQ4_XS enabled for QK_K = 256 - bool has_amx_kernels = qtype_has_amx_kernels(type) || (type == GGML_TYPE_F16); - - bool can_use_amx = - is_contiguous_2d(src0) && // src0 must be contiguous - is_contiguous_2d(src1) && // src1 must be contiguous - src1->type == GGML_TYPE_F32 && // src1 must be float32 - has_amx_kernels && // with amx kernel impls - ne0 % (TILE_N * 2) == 0; // out_features is 32x - - return can_use_amx; - } - default: - return false; - } - - GGML_UNUSED(dev); -} - -static bool ggml_backend_amx_device_supports_buft(ggml_backend_dev_t dev, ggml_backend_buffer_type_t buft) { - return buft->iface.get_name == ggml_backend_amx_buffer_type_get_name; - - GGML_UNUSED(dev); -} - -static const struct ggml_backend_device_i 
ggml_backend_amx_device_i = { - /* .get_name = */ ggml_backend_amx_device_get_name, - /* .get_description = */ ggml_backend_amx_device_get_description, - /* .get_memory = */ ggml_backend_amx_device_get_memory, - /* .get_type = */ ggml_backend_amx_device_get_type, - /* .get_props = */ ggml_backend_amx_device_get_props, - /* .init_backend = */ ggml_backend_amx_device_init, - /* .get_buffer_type = */ ggml_backend_amx_device_get_buffer_type, - /* .get_host_buffer_type = */ NULL, - /* .buffer_from_host_ptr = */ NULL, - /* .supports_op = */ ggml_backend_amx_device_supports_op, - /* .supports_buft = */ ggml_backend_amx_device_supports_buft, - /* .offload_op = */ NULL, - /* .event_new = */ NULL, - /* .event_free = */ NULL, - /* .event_synchronize = */ NULL, -}; - -// backend reg interface - -static const char * ggml_backend_amx_reg_get_name(ggml_backend_reg_t reg) { - return "AMX"; - - GGML_UNUSED(reg); -} - -static size_t ggml_backend_amx_reg_get_device_count(ggml_backend_reg_t reg) { - return 1; - - GGML_UNUSED(reg); -} - -static ggml_backend_dev_t ggml_backend_amx_reg_get_device(ggml_backend_reg_t reg, size_t index) { - GGML_ASSERT(index == 0); - - static ggml_backend_device ggml_backend_amx_device = { - /* .iface = */ ggml_backend_amx_device_i, - /* .reg = */ reg, - /* .context = */ nullptr, - }; - - return &ggml_backend_amx_device; - - GGML_UNUSED(reg); - GGML_UNUSED(index); -} - -static void * ggml_backend_amx_get_proc_address(ggml_backend_reg_t reg, const char * name) { - if (std::strcmp(name, "ggml_backend_set_n_threads") == 0) { - return (void *)ggml_backend_amx_set_n_threads; - } - return NULL; - - GGML_UNUSED(reg); - GGML_UNUSED(name); -} - -static const struct ggml_backend_reg_i ggml_backend_amx_reg_i = { - /* .get_name = */ ggml_backend_amx_reg_get_name, - /* .get_device_count = */ ggml_backend_amx_reg_get_device_count, - /* .get_device = */ ggml_backend_amx_reg_get_device, - /* .get_proc_address = */ ggml_backend_amx_get_proc_address, -}; - -ggml_backend_reg_t ggml_backend_amx_reg(void) { - static struct ggml_backend_reg ggml_backend_amx_reg = { - /* .api_version = */ GGML_BACKEND_API_VERSION, - /* .iface = */ ggml_backend_amx_reg_i, - /* .context = */ NULL, - }; - - return &ggml_backend_amx_reg; -} - -#else // if defined(__AMX_INT8__) - -ggml_backend_buffer_type_t ggml_backend_amx_buffer_type(void) { - return nullptr; -} - -bool ggml_backend_is_amx(ggml_backend_t backend) { - GGML_UNUSED(backend); - return false; -} - -ggml_backend_t ggml_backend_amx_init(void) { - fprintf(stderr, "GGML is not compiled with AMX support!\n"); - return nullptr; -} - -void ggml_backend_amx_set_n_threads(ggml_backend_t backend_amx, int n_threads) { - fprintf(stderr, "GGML is not compiled with AMX support!\n"); - - GGML_UNUSED(backend_amx); - GGML_UNUSED(n_threads); -} - -ggml_backend_reg_t ggml_backend_amx_reg(void) { - return nullptr; -} - -#endif - -GGML_BACKEND_DL_IMPL(ggml_backend_amx_reg) diff --git a/ggml/src/ggml-backend-impl.h b/ggml/src/ggml-backend-impl.h index dff7749b416dc..36d72e95f028c 100644 --- a/ggml/src/ggml-backend-impl.h +++ b/ggml/src/ggml-backend-impl.h @@ -211,27 +211,45 @@ extern "C" { GGML_API void ggml_backend_device_register(ggml_backend_dev_t device); // Add backend dynamic loading support to the backend - typedef ggml_backend_reg_t (*ggml_backend_init_t)(void); - #ifdef GGML_BACKEND_DL - #ifdef __cplusplus - # define GGML_BACKEND_DL_IMPL(reg_fn) \ - extern "C" { \ - GGML_BACKEND_API ggml_backend_reg_t ggml_backend_init(void); \ - } \ - ggml_backend_reg_t 
ggml_backend_init(void) { \ - return reg_fn(); \ - } - #else - # define GGML_BACKEND_DL_IMPL(reg_fn) \ - GGML_BACKEND_API ggml_backend_reg_t ggml_backend_init(void); \ - ggml_backend_reg_t ggml_backend_init(void) { \ - return reg_fn(); \ - } - #endif - #else - # define GGML_BACKEND_DL_IMPL(reg_fn) - #endif + // Initialize the backend + typedef ggml_backend_reg_t (*ggml_backend_init_t)(void); + // Optional: obtain a score for the backend based on the system configuration + // Higher scores are preferred, 0 means the backend is not supported in the current system + typedef int (*ggml_backend_score_t)(void); + +#ifdef GGML_BACKEND_DL +# ifdef __cplusplus +# define GGML_BACKEND_DL_IMPL(reg_fn) \ + extern "C" { \ + GGML_BACKEND_API ggml_backend_reg_t ggml_backend_init(void); \ + } \ + ggml_backend_reg_t ggml_backend_init(void) { \ + return reg_fn(); \ + } +# define GGML_BACKEND_DL_SCORE_IMPL(score_fn) \ + extern "C" { \ + GGML_BACKEND_API int ggml_backend_score(void); \ + } \ + int ggml_backend_score(void) { \ + return score_fn(); \ + } +# else +# define GGML_BACKEND_DL_IMPL(reg_fn) \ + GGML_BACKEND_API ggml_backend_reg_t ggml_backend_init(void); \ + ggml_backend_reg_t ggml_backend_init(void) { \ + return reg_fn(); \ + } +# define GGML_BACKEND_DL_SCORE_IMPL(score_fn) \ + GGML_BACKEND_API int ggml_backend_score(void); \ + int ggml_backend_score(void) { \ + return score_fn(); \ + } +# endif +#else +# define GGML_BACKEND_DL_IMPL(reg_fn) +# define GGML_BACKEND_DL_SCORE_IMPL(score_fn) +#endif #ifdef __cplusplus } diff --git a/ggml/src/ggml-backend-reg.cpp b/ggml/src/ggml-backend-reg.cpp index a0e0e2c5852f7..2e7340145eed2 100644 --- a/ggml/src/ggml-backend-reg.cpp +++ b/ggml/src/ggml-backend-reg.cpp @@ -2,8 +2,13 @@ #include "ggml-backend.h" #include "ggml-impl.h" #include <algorithm> +#include <codecvt> #include <cstring> +#include <filesystem> +#include <locale> +#include <memory> #include <string> +#include <type_traits> #include <vector> #ifdef _WIN32 @@ -49,10 +54,6 @@ #include "ggml-rpc.h" #endif -#ifdef GGML_USE_AMX -# include "ggml-amx.h" -#endif - #ifdef GGML_USE_CANN #include "ggml-cann.h" #endif @@ -61,9 +62,71 @@ #include "ggml-kompute.h" #endif +#ifdef _WIN32 + +using dl_handle = std::remove_pointer_t<HMODULE>; + +struct dl_handle_deleter { + void operator()(HMODULE handle) { + FreeLibrary(handle); + } +}; + +static dl_handle * dl_load_library(const std::wstring & path) { + // suppress error dialogs for missing DLLs + DWORD old_mode = SetErrorMode(SEM_FAILCRITICALERRORS); + SetErrorMode(old_mode | SEM_FAILCRITICALERRORS); + + HMODULE handle = LoadLibraryW(path.c_str()); + + SetErrorMode(old_mode); + + return handle; +} + +static dl_handle * dl_load_library(const std::string & path) { + std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter; + return dl_load_library(converter.from_bytes(path)); +} + +static void * dl_get_sym(dl_handle * handle, const char * name) { + DWORD old_mode = SetErrorMode(SEM_FAILCRITICALERRORS); + SetErrorMode(old_mode | SEM_FAILCRITICALERRORS); + + void * p = (void *) GetProcAddress(handle, name); + + SetErrorMode(old_mode); + + return p; +} + +#else + +using dl_handle = void; + +struct dl_handle_deleter { + void operator()(void * handle) { + dlclose(handle); + } +}; + +static void * dl_load_library(const std::string & path) { + dl_handle * handle = dlopen(path.c_str(), RTLD_NOW | RTLD_LOCAL); + + return handle; +} + +static void * dl_get_sym(dl_handle * handle, const char * name) { + return dlsym(handle, name); +} + +#endif + +using dl_handle_ptr = std::unique_ptr<dl_handle, dl_handle_deleter>; + struct ggml_backend_reg_entry { ggml_backend_reg_t reg; - void * handle; + dl_handle_ptr handle; }; struct 
ggml_backend_registry { @@ -92,9 +155,6 @@ struct ggml_backend_registry { #ifdef GGML_USE_RPC register_backend(ggml_backend_rpc_reg()); #endif -#ifdef GGML_USE_AMX - register_backend(ggml_backend_amx_reg()); -#endif #ifdef GGML_USE_KOMPUTE register_backend(ggml_backend_kompute_reg()); #endif @@ -104,13 +164,16 @@ struct ggml_backend_registry { } ~ggml_backend_registry() { - while (!backends.empty()) { - // use silent since the log system may have been destroyed at this point - unload_backend(backends.back().reg, true); + // FIXME: backends cannot be safely unloaded without a function to destroy all the backend resources, + // since backend threads may still be running and accessing resources from the dynamic library + for (auto & entry : backends) { + if (entry.handle) { + entry.handle.release(); // NOLINT + } } } - void register_backend(ggml_backend_reg_t reg, void * handle = nullptr) { + void register_backend(ggml_backend_reg_t reg, dl_handle_ptr handle = nullptr) { if (!reg) { return; } @@ -119,7 +182,7 @@ struct ggml_backend_registry { GGML_LOG_DEBUG("%s: registered backend %s (%zu devices)\n", __func__, ggml_backend_reg_name(reg), ggml_backend_reg_dev_count(reg)); #endif - backends.push_back({ reg, handle }); + backends.push_back({ reg, std::move(handle) }); for (size_t i = 0; i < ggml_backend_reg_dev_count(reg); i++) { register_device(ggml_backend_reg_dev_get(reg, i)); } @@ -133,79 +196,53 @@ struct ggml_backend_registry { } ggml_backend_reg_t load_backend(const char * path, bool silent) { -#ifdef _WIN32 - // suppress error dialogs for missing DLLs - DWORD old_mode = SetErrorMode(SEM_FAILCRITICALERRORS); - SetErrorMode(old_mode | SEM_FAILCRITICALERRORS); - - HMODULE handle = LoadLibraryA(path); - + dl_handle_ptr handle { dl_load_library(path) }; if (!handle) { if (!silent) { - GGML_LOG_ERROR("%s: failed to load %s: %lu\n", __func__, path, GetLastError()); - } - SetErrorMode(old_mode); - return nullptr; - } - - ggml_backend_init_t backend_init = (ggml_backend_init_t) GetProcAddress(handle, "ggml_backend_init"); - - SetErrorMode(old_mode); - - if (!backend_init) { - if (!silent) { - GGML_LOG_ERROR("%s: failed to find ggml_backend_init in %s: %lu\n", __func__, path, GetLastError()); + GGML_LOG_ERROR("%s: failed to load %s\n", __func__, path); } - FreeLibrary(handle); return nullptr; } -#else - void * handle = dlopen(path, RTLD_NOW | RTLD_LOCAL); - if (!handle) { + auto score_fn = (ggml_backend_score_t) dl_get_sym(handle.get(), "ggml_backend_score"); + if (score_fn && score_fn() == 0) { if (!silent) { - GGML_LOG_ERROR("%s: failed to load %s: %s\n", __func__, path, dlerror()); + GGML_LOG_INFO("%s: backend %s is not supported on this system\n", __func__, path); } return nullptr; } - auto * backend_init = (ggml_backend_init_t) dlsym(handle, "ggml_backend_init"); - - if (!backend_init) { + auto backend_init_fn = (ggml_backend_init_t) dl_get_sym(handle.get(), "ggml_backend_init"); + if (!backend_init_fn) { if (!silent) { - GGML_LOG_ERROR("%s: failed to find ggml_backend_init in %s: %s\n", __func__, path, dlerror()); + GGML_LOG_ERROR("%s: failed to find ggml_backend_init in %s\n", __func__, path); } - dlclose(handle); return nullptr; } -#endif - ggml_backend_reg_t reg = backend_init(); + ggml_backend_reg_t reg = backend_init_fn(); if (!reg || reg->api_version != GGML_BACKEND_API_VERSION) { if (!silent) { if (!reg) { GGML_LOG_ERROR("%s: failed to initialize backend from %s: ggml_backend_init returned NULL\n", __func__, path); } else { GGML_LOG_ERROR("%s: failed to initialize backend from %s: 
incompatible API version (backend: %d, current: %d)\n", - __func__, path, reg->api_version, GGML_BACKEND_API_VERSION); + __func__, path, reg->api_version, GGML_BACKEND_API_VERSION); } } -#ifdef _WIN32 - FreeLibrary(handle); -#else - dlclose(handle); -#endif return nullptr; } GGML_LOG_INFO("%s: loaded %s backend from %s\n", __func__, ggml_backend_reg_name(reg), path); - register_backend(reg, handle); + + register_backend(reg, std::move(handle)); + return reg; } void unload_backend(ggml_backend_reg_t reg, bool silent) { auto it = std::find_if(backends.begin(), backends.end(), - [reg](ggml_backend_reg_entry entry) { return entry.reg == reg; }); + [reg](const ggml_backend_reg_entry & entry) { return entry.reg == reg; }); if (it == backends.end()) { if (!silent) { @@ -224,15 +261,6 @@ struct ggml_backend_registry { [reg](ggml_backend_dev_t dev) { return ggml_backend_dev_backend_reg(dev) == reg; }), devices.end()); - // unload library - if (it->handle) { -#ifdef _WIN32 - FreeLibrary((HMODULE) it->handle); -#else - dlclose(it->handle); -#endif - } - // remove backend backends.erase(it); } @@ -348,12 +376,7 @@ void ggml_backend_unload(ggml_backend_reg_t reg) { get_reg().unload_backend(reg, true); } -void ggml_backend_load_all() { - std::vector<std::string> search_prefix; - - // add the executable directory to the search path - // FIXME: this is convenient for development, but it should probably be disabled in production - +static std::string get_executable_path() { #if defined(__APPLE__) // get executable path std::vector<char> path; uint32_t size; @@ -371,7 +394,7 @@ void ggml_backend_load_all() { if (last_slash != std::string::npos) { base_path = base_path.substr(0, last_slash); } - search_prefix.push_back(base_path + "/"); + return base_path + "/"; #elif defined(__linux__) std::string base_path = "."; std::vector<char> path(1024); @@ -393,38 +416,128 @@ void ggml_backend_load_all() { path.resize(path.size() * 2); } - search_prefix.push_back(base_path + "/"); + return base_path + "/"; +#elif defined(_WIN32) + std::vector<char> path(MAX_PATH); + DWORD len = GetModuleFileNameA(NULL, path.data(), path.size()); + if (len == 0) { + return ""; + } + std::string base_path(path.data(), len); + // remove executable name + auto last_slash = base_path.find_last_of('\\'); + if (last_slash != std::string::npos) { + base_path = base_path.substr(0, last_slash); + } + return base_path + "\\"; #endif +} - auto & reg = get_reg(); +static std::string backend_filename_prefix() { +#ifdef _WIN32 + return "ggml-"; +#else + return "libggml-"; +#endif +} - auto try_load = [&](const std::string & name) { - std::string os_name; +static std::string backend_filename_suffix() { #ifdef _WIN32 - os_name = "ggml-" + name + ".dll"; + return ".dll"; #else - os_name = "libggml-" + name + ".so"; + return ".so"; #endif - if (reg.load_backend(os_name.c_str(), true)) { - return; +} + +static ggml_backend_reg_t ggml_backend_load_best(const char * name, bool silent, const char * user_search_path) { + // enumerate all the files that match [lib]ggml-name-*.[so|dll] in the search paths + // TODO: search system paths + std::string file_prefix = backend_filename_prefix() + name + "-"; + std::vector<std::string> search_paths; + if (user_search_path == nullptr) { + search_paths.push_back("./"); + search_paths.push_back(get_executable_path()); + } else { +#if defined(_WIN32) + search_paths.push_back(std::string(user_search_path) + "\\"); +#else + search_paths.push_back(std::string(user_search_path) + "/"); +#endif + } + + int best_score = 0; + std::string best_path; + + namespace fs = std::filesystem; 
+ for (const auto & search_path : search_paths) { + if (!fs::exists(search_path)) { + continue; } - for (const auto & prefix : search_prefix) { - if (reg.load_backend((prefix + os_name).c_str(), true)) { - return; + for (const auto & entry : fs::directory_iterator(search_path)) { + if (entry.is_regular_file()) { + std::string filename = entry.path().filename().string(); + std::string ext = entry.path().extension().string(); + if (filename.find(file_prefix) == 0 && ext == backend_filename_suffix()) { + dl_handle_ptr handle { dl_load_library(entry.path().c_str()) }; + if (!handle && !silent) { + GGML_LOG_ERROR("%s: failed to load %s\n", __func__, entry.path().string().c_str()); + } + if (handle) { + auto score_fn = (ggml_backend_score_t) dl_get_sym(handle.get(), "ggml_backend_score"); + if (score_fn) { + int s = score_fn(); +#ifndef NDEBUG + GGML_LOG_DEBUG("%s: %s score: %d\n", __func__, entry.path().string().c_str(), s); +#endif + if (s > best_score) { + best_score = s; + best_path = entry.path().string(); + } + } else { + if (!silent) { + GGML_LOG_INFO("%s: failed to find ggml_backend_score in %s\n", __func__, entry.path().string().c_str()); + } + } + } + } + } + } + } + + if (best_score == 0) { + // try to load the base backend + for (const auto & search_path : search_paths) { + std::string path = search_path + backend_filename_prefix() + name + backend_filename_suffix(); + if (fs::exists(path)) { + return get_reg().load_backend(path.c_str(), silent); } } - }; - - try_load("amx"); - try_load("blas"); - try_load("cann"); - try_load("cuda"); - try_load("hip"); - try_load("kompute"); - try_load("metal"); - try_load("rpc"); - try_load("sycl"); - try_load("vulkan"); - try_load("musa"); - try_load("cpu"); + return nullptr; + } + + return get_reg().load_backend(best_path.c_str(), silent); +} + +void ggml_backend_load_all() { + ggml_backend_load_all_from_path(nullptr); +} + +void ggml_backend_load_all_from_path(const char * dir_path) { +#ifdef NDEBUG + bool silent = true; +#else + bool silent = false; +#endif + + ggml_backend_load_best("blas", silent, dir_path); + ggml_backend_load_best("cann", silent, dir_path); + ggml_backend_load_best("cuda", silent, dir_path); + ggml_backend_load_best("hip", silent, dir_path); + ggml_backend_load_best("kompute", silent, dir_path); + ggml_backend_load_best("metal", silent, dir_path); + ggml_backend_load_best("rpc", silent, dir_path); + ggml_backend_load_best("sycl", silent, dir_path); + ggml_backend_load_best("vulkan", silent, dir_path); + ggml_backend_load_best("musa", silent, dir_path); + ggml_backend_load_best("cpu", silent, dir_path); } diff --git a/ggml/src/ggml-backend.cpp b/ggml/src/ggml-backend.cpp index 45da0c27daffa..fdb4b986f613b 100644 --- a/ggml/src/ggml-backend.cpp +++ b/ggml/src/ggml-backend.cpp @@ -742,7 +742,8 @@ static int ggml_backend_sched_backend_id_from_cur(ggml_backend_sched_t sched, st if (tensor->buffer || (tensor->view_src && tensor->view_src->buffer)) { // since the tensor is pre-allocated, it cannot be moved to another backend - GGML_ABORT("pre-allocated tensor (%s) in a backend that cannot run the operation", tensor->name); + ggml_backend_buffer_t buffer = tensor->view_src ? 
tensor->view_src->buffer : tensor->buffer; + GGML_ABORT("pre-allocated tensor (%s) in a buffer (%s) that cannot run the operation (%s)", tensor->name, ggml_backend_buffer_name(buffer), ggml_op_name(tensor->op)); } // graph input diff --git a/ggml/src/ggml-cann/ggml-cann.cpp b/ggml/src/ggml-cann/ggml-cann.cpp index 04e25b8ab1a23..fa04ab84f3f15 100644 --- a/ggml/src/ggml-cann/ggml-cann.cpp +++ b/ggml/src/ggml-cann/ggml-cann.cpp @@ -2089,7 +2089,7 @@ static void * ggml_backend_cann_reg_get_proc_address(ggml_backend_reg_t reg, con static const ggml_backend_reg_i ggml_backend_cann_reg_interface = { /* .get_name = */ ggml_backend_cann_reg_get_name, /* .get_device_count = */ ggml_backend_cann_reg_get_device_count, - /* .get_device_get = */ ggml_backend_cann_reg_get_device, + /* .get_device = */ ggml_backend_cann_reg_get_device, /* .get_proc_address = */ ggml_backend_cann_reg_get_proc_address, }; diff --git a/ggml/src/ggml-common.h b/ggml/src/ggml-common.h index 27253a6c2b3ca..f13fd4dea6f41 100644 --- a/ggml/src/ggml-common.h +++ b/ggml/src/ggml-common.h @@ -6,7 +6,20 @@ typedef uint16_t ggml_half; typedef uint32_t ggml_half2; -#define GGML_COMMON_AGGR +#define GGML_COMMON_AGGR_U +#define GGML_COMMON_AGGR_S + +#define GGML_COMMON_DECL +#elif defined(GGML_COMMON_DECL_CPP) +#include + +typedef uint16_t ggml_half; +typedef uint32_t ggml_half2; + +// std-c++ allow anonymous unions but some compiler warn on it +#define GGML_COMMON_AGGR_U data +// std-c++ do not allow it. +#define GGML_COMMON_AGGR_S data #define GGML_COMMON_DECL #elif defined(GGML_COMMON_DECL_METAL) @@ -15,7 +28,8 @@ typedef uint32_t ggml_half2; typedef half ggml_half; typedef half2 ggml_half2; -#define GGML_COMMON_AGGR +#define GGML_COMMON_AGGR_U +#define GGML_COMMON_AGGR_S #define GGML_COMMON_DECL #elif defined(GGML_COMMON_DECL_CUDA) @@ -29,7 +43,8 @@ typedef half2 ggml_half2; typedef half ggml_half; typedef half2 ggml_half2; -#define GGML_COMMON_AGGR data +#define GGML_COMMON_AGGR_U +#define GGML_COMMON_AGGR_S data #define GGML_COMMON_DECL #elif defined(GGML_COMMON_DECL_HIP) @@ -39,7 +54,8 @@ typedef half2 ggml_half2; typedef half ggml_half; typedef half2 ggml_half2; -#define GGML_COMMON_AGGR data +#define GGML_COMMON_AGGR_U +#define GGML_COMMON_AGGR_S data #define GGML_COMMON_DECL #elif defined(GGML_COMMON_DECL_SYCL) @@ -49,7 +65,8 @@ typedef half2 ggml_half2; typedef sycl::half ggml_half; typedef sycl::half2 ggml_half2; -#define GGML_COMMON_AGGR data +#define GGML_COMMON_AGGR_U +#define GGML_COMMON_AGGR_S data #define GGML_COMMON_DECL #endif @@ -154,9 +171,9 @@ typedef struct { struct { ggml_half d; // delta ggml_half m; // min - } GGML_COMMON_AGGR; + } GGML_COMMON_AGGR_S; ggml_half2 dm; - }; + } GGML_COMMON_AGGR_U; uint8_t qs[QK4_1 / 2]; // nibbles / quants } block_q4_1; static_assert(sizeof(block_q4_1) == 2 * sizeof(ggml_half) + QK4_1 / 2, "wrong q4_1 block size/padding"); @@ -175,9 +192,9 @@ typedef struct { struct { ggml_half d; // delta ggml_half m; // min - } GGML_COMMON_AGGR; + } GGML_COMMON_AGGR_S; ggml_half2 dm; - }; + } GGML_COMMON_AGGR_U; uint8_t qh[4]; // 5-th bit of quants uint8_t qs[QK5_1 / 2]; // nibbles / quants } block_q5_1; @@ -196,37 +213,13 @@ typedef struct { struct { ggml_half d; // delta ggml_half s; // d * sum(qs[i]) - } GGML_COMMON_AGGR; + } GGML_COMMON_AGGR_S; ggml_half2 ds; - }; + } GGML_COMMON_AGGR_U; int8_t qs[QK8_1]; // quants } block_q8_1; static_assert(sizeof(block_q8_1) == 2*sizeof(ggml_half) + QK8_1, "wrong q8_1 block size/padding"); -typedef struct { - ggml_half d[4]; // deltas for 4 q4_0 blocks - 
uint8_t qs[QK4_0 * 2]; // nibbles / quants for 4 q4_0 blocks -} block_q4_0x4; -static_assert(sizeof(block_q4_0x4) == 4 * sizeof(ggml_half) + QK4_0 * 2, "wrong q4_0x4 block size/padding"); - -typedef struct { - ggml_half d[8]; // deltas for 8 q4_0 blocks - uint8_t qs[QK4_0 * 4]; // nibbles / quants for 8 q4_0 blocks -} block_q4_0x8; -static_assert(sizeof(block_q4_0x8) == 8 * sizeof(ggml_half) + QK4_0 * 4, "wrong q4_0x8 block size/padding"); - -typedef struct { - ggml_half d[4]; // deltas for 4 q8_0 blocks - int8_t qs[QK8_0 * 4]; // quants for 4 q8_0 blocks -} block_q8_0x4; -static_assert(sizeof(block_q8_0x4) == 4 * sizeof(ggml_half) + QK8_0 * 4, "wrong q8_0x4 block size/padding"); - -typedef struct { - ggml_half d[8]; // deltas for 8 q8_0 blocks - int8_t qs[QK8_0 * 8]; // quants for 8 q8_0 blocks -} block_q8_0x8; -static_assert(sizeof(block_q8_0x8) == 8 * sizeof(ggml_half) + QK8_0 * 8, "wrong q8_0x8 block size/padding"); - // // Ternary quantization // @@ -261,9 +254,9 @@ typedef struct { struct { ggml_half d; // super-block scale for quantized scales ggml_half dmin; // super-block scale for quantized mins - } GGML_COMMON_AGGR; + } GGML_COMMON_AGGR_S; ggml_half2 dm; - }; + } GGML_COMMON_AGGR_U; } block_q2_K; static_assert(sizeof(block_q2_K) == 2*sizeof(ggml_half) + QK_K/16 + QK_K/4, "wrong q2_K block size/padding"); @@ -288,9 +281,9 @@ typedef struct { struct { ggml_half d; // super-block scale for quantized scales ggml_half dmin; // super-block scale for quantized mins - } GGML_COMMON_AGGR; + } GGML_COMMON_AGGR_S; ggml_half2 dm; - }; + } GGML_COMMON_AGGR_U; uint8_t scales[K_SCALE_SIZE]; // scales and mins, quantized with 6 bits uint8_t qs[QK_K/2]; // 4--bit quants } block_q4_K; @@ -305,9 +298,9 @@ typedef struct { struct { ggml_half d; // super-block scale for quantized scales ggml_half dmin; // super-block scale for quantized mins - } GGML_COMMON_AGGR; + } GGML_COMMON_AGGR_S; ggml_half2 dm; - }; + } GGML_COMMON_AGGR_U; uint8_t scales[K_SCALE_SIZE]; // scales and mins, quantized with 6 bits uint8_t qh[QK_K/8]; // quants, high bit uint8_t qs[QK_K/2]; // quants, low 4 bits @@ -418,12 +411,6 @@ typedef struct { } block_iq4_xs; static_assert(sizeof(block_iq4_xs) == sizeof(ggml_half) + sizeof(uint16_t) + QK_K/64 + QK_K/2, "wrong iq4_xs block size/padding"); -typedef struct { - ggml_half d[4]; // deltas for 4 iq4_nl blocks - uint8_t qs[QK4_NL * 2];// nibbles / quants for 4 iq4_nl blocks -} block_iq4_nlx4; -static_assert(sizeof(block_iq4_nlx4) == 4 * sizeof(ggml_half) + QK4_NL * 2, "wrong iq4_nlx4 block size/padding"); - #endif // GGML_COMMON_DECL #endif // GGML_COMMON_DECL @@ -437,6 +424,13 @@ static_assert(sizeof(block_iq4_nlx4) == 4 * sizeof(ggml_half) + QK4_NL * 2, "wro #define GGML_TABLE_BEGIN(type, name, size) static const type name[size] = { #define GGML_TABLE_END() }; +#define GGML_COMMON_IMPL +#elif defined(GGML_COMMON_IMPL_CPP) +#include + +#define GGML_TABLE_BEGIN(type, name, size) static const type name[size] = { +#define GGML_TABLE_END() }; + #define GGML_COMMON_IMPL #elif defined(GGML_COMMON_IMPL_METAL) #include @@ -479,7 +473,7 @@ GGML_TABLE_BEGIN(uint8_t, ksigns_iq2xs, 128) 240, 113, 114, 243, 116, 245, 246, 119, 120, 249, 250, 123, 252, 125, 126, 255, GGML_TABLE_END() -//#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics +//#if __CUDA_ARCH__ >= GGML_CUDA_CC_DP4A // lowest compute capability for integer intrinsics GGML_TABLE_BEGIN(uint64_t, ksigns64, 128) 0x0000000000000000, 0xff000000000000ff, 0xff0000000000ff00, 0x000000000000ffff, 
0xff00000000ff0000, 0x0000000000ff00ff, 0x0000000000ffff00, 0xff00000000ffffff, diff --git a/ggml/src/ggml-cpu/CMakeLists.txt b/ggml/src/ggml-cpu/CMakeLists.txt index 4dbc1f75b647f..0e0556703620f 100644 --- a/ggml/src/ggml-cpu/CMakeLists.txt +++ b/ggml/src/ggml-cpu/CMakeLists.txt @@ -1,310 +1,358 @@ -ggml_add_backend_library(ggml-cpu - ggml-cpu.c - ggml-cpu.cpp - ggml-cpu-aarch64.c - ggml-cpu-aarch64.h - ggml-cpu-quants.c - ggml-cpu-quants.h - ) - -target_include_directories(ggml-cpu PRIVATE .) - -if (APPLE AND GGML_ACCELERATE) - find_library(ACCELERATE_FRAMEWORK Accelerate) - if (ACCELERATE_FRAMEWORK) - message(STATUS "Accelerate framework found") - - add_compile_definitions(GGML_USE_ACCELERATE) - add_compile_definitions(ACCELERATE_NEW_LAPACK) - add_compile_definitions(ACCELERATE_LAPACK_ILP64) - - target_link_libraries(ggml-cpu PRIVATE ${ACCELERATE_FRAMEWORK}) +function(ggml_add_cpu_backend_variant_impl tag_name) + if (tag_name) + set(GGML_CPU_NAME ggml-cpu-${tag_name}) else() - message(WARNING "Accelerate framework not found") + set(GGML_CPU_NAME ggml-cpu) endif() -endif() -if (GGML_OPENMP) - find_package(OpenMP) - if (OpenMP_FOUND) - message(STATUS "OpenMP found") - - add_compile_definitions(GGML_USE_OPENMP) + ggml_add_backend_library(${GGML_CPU_NAME}) + + list (APPEND GGML_CPU_SOURCES + ggml-cpu/ggml-cpu.c + ggml-cpu/ggml-cpu.cpp + ggml-cpu/ggml-cpu-aarch64.cpp + ggml-cpu/ggml-cpu-aarch64.h + ggml-cpu/ggml-cpu-hbm.cpp + ggml-cpu/ggml-cpu-hbm.h + ggml-cpu/ggml-cpu-quants.c + ggml-cpu/ggml-cpu-quants.h + ggml-cpu/ggml-cpu-traits.cpp + ggml-cpu/ggml-cpu-traits.h + ggml-cpu/amx/amx.cpp + ggml-cpu/amx/amx.h + ggml-cpu/amx/mmq.cpp + ggml-cpu/amx/mmq.h + ggml-cpu/ggml-cpu-impl.h + ) + + target_compile_features(${GGML_CPU_NAME} PRIVATE c_std_11 cxx_std_17) + target_include_directories(${GGML_CPU_NAME} PRIVATE . 
ggml-cpu) + + if (APPLE AND GGML_ACCELERATE) + find_library(ACCELERATE_FRAMEWORK Accelerate) + if (ACCELERATE_FRAMEWORK) + message(STATUS "Accelerate framework found") + + target_compile_definitions(${GGML_CPU_NAME} PRIVATE GGML_USE_ACCELERATE) + target_compile_definitions(${GGML_CPU_NAME} PRIVATE ACCELERATE_NEW_LAPACK) + target_compile_definitions(${GGML_CPU_NAME} PRIVATE ACCELERATE_LAPACK_ILP64) + + target_link_libraries(${GGML_CPU_NAME} PRIVATE ${ACCELERATE_FRAMEWORK}) + else() + message(WARNING "Accelerate framework not found") + endif() + endif() - target_link_libraries(ggml-cpu PRIVATE OpenMP::OpenMP_C OpenMP::OpenMP_CXX) + if (GGML_OPENMP) + find_package(OpenMP) + if (OpenMP_FOUND) + target_compile_definitions(${GGML_CPU_NAME} PRIVATE GGML_USE_OPENMP) - # FIXME: should be replaced with a compiler id check - #if (GGML_MUSA) - # list(APPEND GGML_CPU_EXTRA_INCLUDES "/usr/lib/llvm-14/lib/clang/14.0.0/include") - # list(APPEND GGML_CPU_EXTRA_LIBS_PRIVATE "/usr/lib/llvm-14/lib/libomp.so") - #endif() - else() - message(WARNING "OpenMP not found") + target_link_libraries(${GGML_CPU_NAME} PRIVATE OpenMP::OpenMP_C OpenMP::OpenMP_CXX) + else() + message(WARNING "OpenMP not found") + endif() endif() -endif() -if (GGML_LLAMAFILE) - message(STATUS "Using llamafile") + if (GGML_LLAMAFILE) + target_compile_definitions(${GGML_CPU_NAME} PRIVATE GGML_USE_LLAMAFILE) - add_compile_definitions(GGML_USE_LLAMAFILE) - - target_sources(ggml-cpu PRIVATE - llamafile/sgemm.cpp - llamafile/sgemm.h) -endif() + list(APPEND GGML_CPU_SOURCES + ggml-cpu/llamafile/sgemm.cpp + ggml-cpu/llamafile/sgemm.h) + endif() -if (GGML_CPU_HBM) - find_library(memkind memkind REQUIRED) + if (GGML_CPU_HBM) + find_library(memkind memkind REQUIRED) - message(STATUS "Using memkind for CPU HBM") + message(STATUS "Using memkind for CPU HBM") - add_compile_definitions(GGML_USE_CPU_HBM) + target_compile_definitions(${GGML_CPU_NAME} PRIVATE GGML_USE_CPU_HBM) - target_link_libraries(ggml-cpu PUBLIC memkind) -endif() + target_link_libraries(${GGML_CPU_NAME} PUBLIC memkind) + endif() -if (CMAKE_OSX_ARCHITECTURES STREQUAL "arm64" OR - CMAKE_GENERATOR_PLATFORM_LWR STREQUAL "arm64" OR - (NOT CMAKE_OSX_ARCHITECTURES AND - NOT CMAKE_GENERATOR_PLATFORM_LWR AND - CMAKE_SYSTEM_PROCESSOR MATCHES "^(aarch64|arm.*|ARM64)$")) + if (CMAKE_OSX_ARCHITECTURES STREQUAL "arm64" OR + CMAKE_GENERATOR_PLATFORM_LWR STREQUAL "arm64" OR + (NOT CMAKE_OSX_ARCHITECTURES AND + NOT CMAKE_GENERATOR_PLATFORM_LWR AND + CMAKE_SYSTEM_PROCESSOR MATCHES "^(aarch64|arm.*|ARM64)$")) - message(STATUS "ARM detected") + message(STATUS "ARM detected") - if (MSVC) - add_compile_definitions(__aarch64__) # MSVC defines _M_ARM64 instead - add_compile_definitions(__ARM_NEON) - add_compile_definitions(__ARM_FEATURE_FMA) + if (MSVC) + list(APPEND ARCH_DEFINITIONS __aarch64__) # MSVC defines _M_ARM64 instead + list(APPEND ARCH_DEFINITIONS __ARM_NEON) + list(APPEND ARCH_DEFINITIONS __ARM_FEATURE_FMA) - set(CMAKE_REQUIRED_FLAGS_PREV ${CMAKE_REQUIRED_FLAGS}) - string(JOIN " " CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS} "/arch:armv8.2") + set(CMAKE_REQUIRED_FLAGS_PREV ${CMAKE_REQUIRED_FLAGS}) + string(JOIN " " CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS} "/arch:armv8.2") - check_cxx_source_compiles("#include <arm_neon.h>\nint main() { int8x16_t _a, _b; int32x4_t _s = vdotq_s32(_s, _a, _b); return 0; }" GGML_COMPILER_SUPPORT_DOTPROD) - if (GGML_COMPILER_SUPPORT_DOTPROD) - add_compile_definitions(__ARM_FEATURE_DOTPROD) + check_cxx_source_compiles("#include <arm_neon.h>\nint main() { int8x16_t _a, _b; int32x4_t _s = 
vdotq_s32(_s, _a, _b); return 0; }" GGML_COMPILER_SUPPORT_DOTPROD) + if (GGML_COMPILER_SUPPORT_DOTPROD) + list(APPEND ARCH_DEFINITIONS __ARM_FEATURE_DOTPROD) - message(STATUS "ARM feature DOTPROD enabled") - endif () + message(STATUS "ARM feature DOTPROD enabled") + endif () - check_cxx_source_compiles("#include <arm_neon.h>\nint main() { int8x16_t _a, _b; int32x4_t _s = vmmlaq_f32(_s, _a, _b); return 0; }" GGML_COMPILER_SUPPORT_MATMUL_INT8) + check_cxx_source_compiles("#include <arm_neon.h>\nint main() { int8x16_t _a, _b; int32x4_t _s = vmmlaq_f32(_s, _a, _b); return 0; }" GGML_COMPILER_SUPPORT_MATMUL_INT8) - if (GGML_COMPILER_SUPPORT_MATMUL_INT8) - add_compile_definitions(__ARM_FEATURE_MATMUL_INT8) + if (GGML_COMPILER_SUPPORT_MATMUL_INT8) + list(APPEND ARCH_DEFINITIONS __ARM_FEATURE_MATMUL_INT8) - message(STATUS "ARM feature MATMUL_INT8 enabled") - endif () + message(STATUS "ARM feature MATMUL_INT8 enabled") + endif () - check_cxx_source_compiles("#include <arm_neon.h>\nint main() { float16_t _a; float16x8_t _s = vdupq_n_f16(_a); return 0; }" GGML_COMPILER_SUPPORT_FP16_VECTOR_ARITHMETIC) - if (GGML_COMPILER_SUPPORT_FP16_VECTOR_ARITHMETIC) - add_compile_definitions(__ARM_FEATURE_FP16_VECTOR_ARITHMETIC) + check_cxx_source_compiles("#include <arm_neon.h>\nint main() { float16_t _a; float16x8_t _s = vdupq_n_f16(_a); return 0; }" GGML_COMPILER_SUPPORT_FP16_VECTOR_ARITHMETIC) + if (GGML_COMPILER_SUPPORT_FP16_VECTOR_ARITHMETIC) + list(APPEND ARCH_DEFINITIONS __ARM_FEATURE_FP16_VECTOR_ARITHMETIC) - message(STATUS "ARM feature FP16_VECTOR_ARITHMETIC enabled") - endif () + message(STATUS "ARM feature FP16_VECTOR_ARITHMETIC enabled") + endif () - set(CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS_PREV}) - elseif (APPLE) - if (GGML_NATIVE) - set(USER_PROVIDED_MARCH FALSE) - foreach(flag_var IN ITEMS CMAKE_C_FLAGS CMAKE_CXX_FLAGS CMAKE_REQUIRED_FLAGS) - if ("${${flag_var}}" MATCHES "-march=[a-zA-Z0-9+._-]+") - set(USER_PROVIDED_MARCH TRUE) - break() - endif() - endforeach() + set(CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS_PREV}) + elseif (APPLE) + if (GGML_NATIVE) + set(USER_PROVIDED_MARCH FALSE) + foreach(flag_var IN ITEMS CMAKE_C_FLAGS CMAKE_CXX_FLAGS CMAKE_REQUIRED_FLAGS) + if ("${${flag_var}}" MATCHES "-march=[a-zA-Z0-9+._-]+") + set(USER_PROVIDED_MARCH TRUE) + break() + endif() + endforeach() - if (NOT USER_PROVIDED_MARCH) - set(MARCH_FLAGS "-march=armv8.2a") + if (NOT USER_PROVIDED_MARCH) + set(MARCH_FLAGS "-march=armv8.2a") - check_cxx_source_compiles("#include <arm_neon.h>\nint main() { int8x16_t _a, _b; int32x4_t _s = vdotq_s32(_s, _a, _b); return 0; }" GGML_COMPILER_SUPPORT_DOTPROD) - if (GGML_COMPILER_SUPPORT_DOTPROD) - set(MARCH_FLAGS "${MARCH_FLAGS}+dotprod") - add_compile_definitions(__ARM_FEATURE_DOTPROD) + check_cxx_source_compiles("#include <arm_neon.h>\nint main() { int8x16_t _a, _b; int32x4_t _s = vdotq_s32(_s, _a, _b); return 0; }" GGML_COMPILER_SUPPORT_DOTPROD) + if (GGML_COMPILER_SUPPORT_DOTPROD) + set(MARCH_FLAGS "${MARCH_FLAGS}+dotprod") + list(APPEND ARCH_DEFINITIONS __ARM_FEATURE_DOTPROD) - message(STATUS "ARM feature DOTPROD enabled") - endif () + message(STATUS "ARM feature DOTPROD enabled") + endif () - set(TEST_I8MM_FLAGS "-march=armv8.2a+i8mm") + set(TEST_I8MM_FLAGS "-march=armv8.2a+i8mm") - set(CMAKE_REQUIRED_FLAGS_SAVE ${CMAKE_REQUIRED_FLAGS}) - set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${TEST_I8MM_FLAGS}") + set(CMAKE_REQUIRED_FLAGS_SAVE ${CMAKE_REQUIRED_FLAGS}) + set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${TEST_I8MM_FLAGS}") - check_cxx_source_compiles("#include <arm_neon.h>\nint main() { int8x16_t _a, _b; int32x4_t _s = 
vmmlaq_s32(_s, _a, _b); return 0; }" GGML_COMPILER_SUPPORT_MATMUL_INT8) - if (GGML_COMPILER_SUPPORT_MATMUL_INT8) - set(MARCH_FLAGS "${MARCH_FLAGS}+i8mm") - add_compile_definitions(__ARM_FEATURE_MATMUL_INT8) + check_cxx_source_compiles("#include <arm_neon.h>\nint main() { int8x16_t _a, _b; int32x4_t _s = vmmlaq_s32(_s, _a, _b); return 0; }" GGML_COMPILER_SUPPORT_MATMUL_INT8) + if (GGML_COMPILER_SUPPORT_MATMUL_INT8) + set(MARCH_FLAGS "${MARCH_FLAGS}+i8mm") + list(APPEND ARCH_DEFINITIONS __ARM_FEATURE_MATMUL_INT8) - message(STATUS "ARM feature MATMUL_INT8 enabled") - endif () + message(STATUS "ARM feature MATMUL_INT8 enabled") + endif () - set(CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS_SAVE}) + set(CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS_SAVE}) - list(APPEND ARCH_FLAGS "${MARCH_FLAGS}") + list(APPEND ARCH_FLAGS "${MARCH_FLAGS}") + endif () endif () - endif () - else() - check_cxx_compiler_flag(-mfp16-format=ieee COMPILER_SUPPORTS_FP16_FORMAT_I3E) - if (NOT "${COMPILER_SUPPORTS_FP16_FORMAT_I3E}" STREQUAL "") - list(APPEND ARCH_FLAGS -mfp16-format=ieee) - endif() - if (${CMAKE_SYSTEM_PROCESSOR} MATCHES "armv6") - # Raspberry Pi 1, Zero - list(APPEND ARCH_FLAGS -mfpu=neon-fp-armv8 -mno-unaligned-access) - endif() - if (${CMAKE_SYSTEM_PROCESSOR} MATCHES "armv7") - if ("${CMAKE_SYSTEM_NAME}" STREQUAL "Android") - # Android armeabi-v7a - list(APPEND ARCH_FLAGS -mfpu=neon-vfpv4 -mno-unaligned-access -funsafe-math-optimizations) - else() - # Raspberry Pi 2 - list(APPEND ARCH_FLAGS -mfpu=neon-fp-armv8 -mno-unaligned-access -funsafe-math-optimizations) + else() + check_cxx_compiler_flag(-mfp16-format=ieee COMPILER_SUPPORTS_FP16_FORMAT_I3E) + if (NOT "${COMPILER_SUPPORTS_FP16_FORMAT_I3E}" STREQUAL "") + list(APPEND ARCH_FLAGS -mfp16-format=ieee) + endif() + if (${CMAKE_SYSTEM_PROCESSOR} MATCHES "armv6") + # Raspberry Pi 1, Zero + list(APPEND ARCH_FLAGS -mfpu=neon-fp-armv8 -mno-unaligned-access) + endif() + if (${CMAKE_SYSTEM_PROCESSOR} MATCHES "armv7") + if ("${CMAKE_SYSTEM_NAME}" STREQUAL "Android") + # Android armeabi-v7a + list(APPEND ARCH_FLAGS -mfpu=neon-vfpv4 -mno-unaligned-access -funsafe-math-optimizations) + else() + # Raspberry Pi 2 + list(APPEND ARCH_FLAGS -mfpu=neon-fp-armv8 -mno-unaligned-access -funsafe-math-optimizations) + endif() + endif() + if (${CMAKE_SYSTEM_PROCESSOR} MATCHES "armv8") + # Android arm64-v8a + # Raspberry Pi 3, 4, Zero 2 (32-bit) + list(APPEND ARCH_FLAGS -mno-unaligned-access) + endif() + if (GGML_SVE) + list(APPEND ARCH_FLAGS -march=armv8.6-a+sve) endif() endif() - if (${CMAKE_SYSTEM_PROCESSOR} MATCHES "armv8") - # Android arm64-v8a - # Raspberry Pi 3, 4, Zero 2 (32-bit) - list(APPEND ARCH_FLAGS -mno-unaligned-access) - endif() - if (GGML_SVE) - list(APPEND ARCH_FLAGS -march=armv8.6-a+sve) - endif() - endif() -elseif (CMAKE_OSX_ARCHITECTURES STREQUAL "x86_64" OR CMAKE_GENERATOR_PLATFORM_LWR MATCHES "^(x86_64|i686|amd64|x64|win32)$" OR - (NOT CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_GENERATOR_PLATFORM_LWR AND - CMAKE_SYSTEM_PROCESSOR MATCHES "^(x86_64|i686|AMD64)$")) - message(STATUS "x86 detected") - if (MSVC) - # instruction set detection for MSVC only - if (GGML_NATIVE) - # TODO: improve, should not reference files from the parent folder - include(cmake/FindSIMD.cmake) - endif () - if (GGML_AVX512) - list(APPEND ARCH_FLAGS /arch:AVX512) - # MSVC has no compile-time flags enabling specific - # AVX512 extensions, neither it defines the - # macros corresponding to the extensions. - # Do it manually. 
- if (GGML_AVX512_VBMI) - add_compile_definitions($<$:__AVX512VBMI__>) - add_compile_definitions($<$:__AVX512VBMI__>) - if (CMAKE_C_COMPILER_ID STREQUAL "Clang") - list(APPEND ARCH_FLAGS -mavx512vbmi) + elseif (CMAKE_OSX_ARCHITECTURES STREQUAL "x86_64" OR CMAKE_GENERATOR_PLATFORM_LWR MATCHES "^(x86_64|i686|amd64|x64|win32)$" OR + (NOT CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_GENERATOR_PLATFORM_LWR AND + CMAKE_SYSTEM_PROCESSOR MATCHES "^(x86_64|i686|AMD64)$")) + if (MSVC) + # instruction set detection for MSVC only + if (GGML_NATIVE) + include(ggml-cpu/cmake/FindSIMD.cmake) + endif () + if (GGML_AVX512) + list(APPEND ARCH_FLAGS /arch:AVX512) + # /arch:AVX512 includes: __AVX512F__, __AVX512CD__, __AVX512BW__, __AVX512DQ__, and __AVX512VL__ + # MSVC has no compile-time flags enabling specific + # AVX512 extensions, neither it defines the + # macros corresponding to the extensions. + # Do it manually. + list(APPEND ARCH_DEFINITIONS GGML_AVX512) + if (GGML_AVX512_VBMI) + list(APPEND ARCH_DEFINITIONS __AVX512VBMI__) + if (CMAKE_C_COMPILER_ID STREQUAL "Clang") + list(APPEND ARCH_FLAGS -mavx512vbmi) + endif() + endif() + if (GGML_AVX512_VNNI) + list(APPEND ARCH_DEFINITIONS __AVX512VNNI__ GGML_AVX512_VNNI) + if (CMAKE_C_COMPILER_ID STREQUAL "Clang") + list(APPEND ARCH_FLAGS -mavx512vnni) + endif() + endif() + if (GGML_AVX512_BF16) + list(APPEND ARCH_DEFINITIONS __AVX512BF16__ GGML_AVX512_BF16) + if (CMAKE_C_COMPILER_ID STREQUAL "Clang") + list(APPEND ARCH_FLAGS -mavx512bf16) + endif() endif() + if (GGML_AMX_TILE) + list(APPEND ARCH_DEFINITIONS __AMX_TILE__ GGML_AMX_TILE) + endif() + if (GGML_AMX_INT8) + list(APPEND ARCH_DEFINITIONS __AMX_INT8__ GGML_AMX_INT8) + endif() + if (GGML_AMX_BF16) + list(APPEND ARCH_DEFINITIONS __AMX_BF16__ GGML_AMX_BF16) + endif() + elseif (GGML_AVX2) + list(APPEND ARCH_FLAGS /arch:AVX2) + list(APPEND ARCH_DEFINITIONS GGML_AVX2 GGML_FMA GGML_F16C) + elseif (GGML_AVX) + list(APPEND ARCH_FLAGS /arch:AVX) + list(APPEND ARCH_DEFINITIONS GGML_AVX) + else () + list(APPEND ARCH_FLAGS /arch:SSE4.2) + list(APPEND ARCH_DEFINITIONS GGML_SSE42) + endif() + if (GGML_AVX_VNNI) + # MSVC generates AVX512 with AVX-VNNI intrinsics even with /arch:AVX2 + #list(APPEND ARCH_DEFINITIONS __AVXVNNI__ GGML_AVX_VNNI) endif() - if (GGML_AVX512_VNNI) - add_compile_definitions($<$:__AVX512VNNI__>) - add_compile_definitions($<$:__AVX512VNNI__>) - if (CMAKE_C_COMPILER_ID STREQUAL "Clang") + else () + if (GGML_NATIVE) + list(APPEND ARCH_FLAGS -march=native) + else () + list(APPEND ARCH_FLAGS -msse4.2) + list(APPEND ARCH_DEFINITIONS GGML_SSE42) + if (GGML_F16C) + list(APPEND ARCH_FLAGS -mf16c) + list(APPEND ARCH_DEFINITIONS GGML_F16C) + endif() + if (GGML_FMA) + list(APPEND ARCH_FLAGS -mfma) + list(APPEND ARCH_DEFINITIONS GGML_FMA) + endif() + if (GGML_AVX) + list(APPEND ARCH_FLAGS -mavx) + list(APPEND ARCH_DEFINITIONS GGML_AVX) + endif() + if (GGML_AVX2) + list(APPEND ARCH_FLAGS -mavx2) + list(APPEND ARCH_DEFINITIONS GGML_AVX2) + endif() + if (GGML_AVX_VNNI) + list(APPEND ARCH_FLAGS -mavxvnni) + list(APPEND ARCH_DEFINITIONS GGML_AVX_VNNI) + endif() + if (GGML_AVX512) + list(APPEND ARCH_FLAGS -mavx512f) + list(APPEND ARCH_FLAGS -mavx512cd) + list(APPEND ARCH_FLAGS -mavx512vl) + list(APPEND ARCH_FLAGS -mavx512dq) + list(APPEND ARCH_FLAGS -mavx512bw) + list(APPEND ARCH_DEFINITIONS GGML_AVX512) + endif() + if (GGML_AVX512_VBMI) + list(APPEND ARCH_FLAGS -mavx512vbmi) + list(APPEND ARCH_DEFINITIONS GGML_AVX512_VBMI) + endif() + if (GGML_AVX512_VNNI) list(APPEND ARCH_FLAGS -mavx512vnni) + list(APPEND 
ARCH_DEFINITIONS GGML_AVX512_VNNI) endif() - endif() - if (GGML_AVX512_BF16) - add_compile_definitions($<$:__AVX512BF16__>) - add_compile_definitions($<$:__AVX512BF16__>) - if (CMAKE_C_COMPILER_ID STREQUAL "Clang") + if (GGML_AVX512_BF16) list(APPEND ARCH_FLAGS -mavx512bf16) + list(APPEND ARCH_DEFINITIONS GGML_AVX512_BF16) + endif() + if (GGML_AMX_TILE) + list(APPEND ARCH_FLAGS -mamx-tile) + list(APPEND ARCH_DEFINITIONS GGML_AMX_TILE) + endif() + if (GGML_AMX_INT8) + list(APPEND ARCH_FLAGS -mamx-int8) + list(APPEND ARCH_DEFINITIONS GGML_AMX_INT8) + endif() + if (GGML_AMX_BF16) + list(APPEND ARCH_FLAGS -mamx-bf16) + list(APPEND ARCH_DEFINITIONS GGML_AMX_BF16) endif() endif() - if (GGML_AMX_TILE) - add_compile_definitions($<$:__AMX_TILE__>) - add_compile_definitions($<$:__AMX_TILE__>) - endif() - if (GGML_AMX_INT8) - add_compile_definitions($<$:__AMX_INT8__>) - add_compile_definitions($<$:__AMX_INT8__>) - endif() - if (GGML_AMX_BF16) - add_compile_definitions($<$:__AMX_BF16__>) - add_compile_definitions($<$:__AMX_BF16__>) - endif() - elseif (GGML_AVX2) - list(APPEND ARCH_FLAGS /arch:AVX2) - elseif (GGML_AVX) - list(APPEND ARCH_FLAGS /arch:AVX) - endif() - else() - if (GGML_NATIVE) - list(APPEND ARCH_FLAGS -march=native) endif() - if (GGML_F16C) - list(APPEND ARCH_FLAGS -mf16c) + elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "ppc64") + message(STATUS "PowerPC detected") + execute_process(COMMAND bash -c "grep POWER10 /proc/cpuinfo | head -n 1" OUTPUT_VARIABLE POWER10_M) + string(FIND "${POWER10_M}" "POWER10" substring_index) + if (NOT DEFINED substring_index OR "${substring_index}" STREQUAL "") + set(substring_index -1) endif() - if (GGML_FMA) - list(APPEND ARCH_FLAGS -mfma) - endif() - if (GGML_AVX) - list(APPEND ARCH_FLAGS -mavx) - endif() - if (GGML_AVX2) - list(APPEND ARCH_FLAGS -mavx2) - endif() - if (GGML_AVX512) - list(APPEND ARCH_FLAGS -mavx512f) - list(APPEND ARCH_FLAGS -mavx512dq) - list(APPEND ARCH_FLAGS -mavx512bw) - endif() - if (GGML_AVX512_VBMI) - list(APPEND ARCH_FLAGS -mavx512vbmi) - endif() - if (GGML_AVX512_VNNI) - list(APPEND ARCH_FLAGS -mavx512vnni) - endif() - if (GGML_AVX512_BF16) - list(APPEND ARCH_FLAGS -mavx512bf16) + + if (${substring_index} GREATER_EQUAL 0) + list(APPEND ARCH_FLAGS -mcpu=power10) + elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "ppc64le") + list(APPEND ARCH_FLAGS -mcpu=powerpc64le) + else() + list(APPEND ARCH_FLAGS -mcpu=native -mtune=native) + # TODO: Add targets for Power8/Power9 (Altivec/VSX) and Power10(MMA) and query for big endian systems (ppc64/le/be) endif() - if (GGML_AMX_TILE) - list(APPEND ARCH_FLAGS -mamx-tile) + elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "loongarch64") + message(STATUS "loongarch64 detected") + + list(APPEND ARCH_FLAGS -march=loongarch64) + if (GGML_LASX) + list(APPEND ARCH_FLAGS -mlasx) endif() - if (GGML_AMX_INT8) - list(APPEND ARCH_FLAGS -mamx-int8) + if (GGML_LSX) + list(APPEND ARCH_FLAGS -mlsx) endif() - if (GGML_AMX_BF16) - list(APPEND ARCH_FLAGS -mamx-bf16) + elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "riscv64") + message(STATUS "RISC-V detected") + if (GGML_RVV) + list(APPEND ARCH_FLAGS -march=rv64gcv -mabi=lp64d) endif() - endif() -elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "ppc64") - message(STATUS "PowerPC detected") - execute_process(COMMAND bash -c "grep POWER10 /proc/cpuinfo | head -n 1" OUTPUT_VARIABLE POWER10_M) - string(FIND "${POWER10_M}" "POWER10" substring_index) - if (NOT DEFINED substring_index OR "${substring_index}" STREQUAL "") - set(substring_index -1) - endif() - - if (${substring_index} GREATER_EQUAL 0) 
- list(APPEND ARCH_FLAGS -mcpu=power10) - elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "ppc64le") - list(APPEND ARCH_FLAGS -mcpu=powerpc64le) else() - list(APPEND ARCH_FLAGS -mcpu=native -mtune=native) - #TODO: Add targets for Power8/Power9 (Altivec/VSX) and Power10(MMA) and query for big endian systems (ppc64/le/be) + message(STATUS "Unknown architecture") endif() -elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "loongarch64") - message(STATUS "loongarch64 detected") - list(APPEND ARCH_FLAGS -march=loongarch64) - if (GGML_LASX) - list(APPEND ARCH_FLAGS -mlasx) - endif() - if (GGML_LSX) - list(APPEND ARCH_FLAGS -mlsx) + if (GGML_CPU_AARCH64) + target_compile_definitions(${GGML_CPU_NAME} PRIVATE GGML_USE_CPU_AARCH64) endif() -elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "riscv64") - message(STATUS "RISC-V detected") - if (GGML_RVV) - list(APPEND ARCH_FLAGS -march=rv64gcv -mabi=lp64d) - endif() -else() - message(STATUS "Unknown architecture") -endif() - -if (GGML_CPU_AARCH64) - message(STATUS "Using runtime weight conversion of Q4_0 to Q4_0_x_x to enable optimized GEMM/GEMV kernels") - add_compile_definitions(GGML_USE_CPU_AARCH64) -endif() -target_compile_options(ggml-cpu PRIVATE "$<$:${ARCH_FLAGS}>") -target_compile_options(ggml-cpu PRIVATE "$<$:${ARCH_FLAGS}>") + message(STATUS "Adding CPU backend variant ${GGML_CPU_NAME}: ${ARCH_FLAGS} ${ARCH_DEFINITIONS}") + target_sources(${GGML_CPU_NAME} PRIVATE ${GGML_CPU_SOURCES}) + target_compile_options(${GGML_CPU_NAME} PRIVATE ${ARCH_FLAGS}) + target_compile_definitions(${GGML_CPU_NAME} PRIVATE ${ARCH_DEFINITIONS}) + + if (GGML_BACKEND_DL) + # The feature detection code is compiled as a separate target so that + # it can be built without the architecture flags + # Since multiple variants of the CPU backend may be included in the same + # build, using set_source_files_properties() to set the arch flags is not possible + set(GGML_CPU_FEATS_NAME ${GGML_CPU_NAME}-feats) + add_library(${GGML_CPU_FEATS_NAME} OBJECT ggml-cpu/cpu-feats-x86.cpp) + target_include_directories(${GGML_CPU_FEATS_NAME} PRIVATE . .. 
../include) + target_compile_definitions(${GGML_CPU_FEATS_NAME} PRIVATE ${ARCH_DEFINITIONS}) + target_compile_definitions(${GGML_CPU_FEATS_NAME} PRIVATE GGML_BACKEND_DL GGML_BACKEND_BUILD GGML_BACKEND_SHARED) + set_target_properties(${GGML_CPU_FEATS_NAME} PROPERTIES POSITION_INDEPENDENT_CODE ON) + target_link_libraries(${GGML_CPU_NAME} PRIVATE ${GGML_CPU_FEATS_NAME}) + endif() -if (EMSCRIPTEN) - set_target_properties(ggml-cpu PROPERTIES COMPILE_FLAGS "-msimd128") -endif() + if (EMSCRIPTEN) + set_target_properties(${GGML_CPU_NAME} PROPERTIES COMPILE_FLAGS "-msimd128") + endif() +endfunction() diff --git a/ggml/src/ggml-cpu/amx/amx.cpp b/ggml/src/ggml-cpu/amx/amx.cpp new file mode 100644 index 0000000000000..b9074cb3aca55 --- /dev/null +++ b/ggml/src/ggml-cpu/amx/amx.cpp @@ -0,0 +1,220 @@ +#include "amx.h" +#include "common.h" +#include "mmq.h" +#include "ggml-backend-impl.h" +#include "ggml-backend.h" +#include "ggml-impl.h" +#include "ggml-cpu.h" +#include "ggml-cpu-traits.h" + +#if defined(__gnu_linux__) +#include +#include +#endif + +#include +#include +#include + +#if defined(__AMX_INT8__) && defined(__AVX512VNNI__) + +// AMX type_trais +namespace ggml::cpu::amx { +class tensor_traits : public ggml::cpu::tensor_traits { + bool work_size(int /* n_threads */, const struct ggml_tensor * op, size_t & size) override { + size = ggml_backend_amx_desired_wsize(op); + return true; + } + + bool compute_forward(struct ggml_compute_params * params, struct ggml_tensor * op) override { + if (op->op == GGML_OP_MUL_MAT) { + ggml_backend_amx_mul_mat(params, op); + return true; + } + return false; + } +}; + +static ggml::cpu::tensor_traits * get_tensor_traits(ggml_backend_buffer_t, struct ggml_tensor *) { + static tensor_traits traits; + return &traits; +} +} // namespace ggml::cpu::amx + +// AMX buffer interface +static void ggml_backend_amx_buffer_free_buffer(ggml_backend_buffer_t buffer) { + free(buffer->context); +} + +static void * ggml_backend_amx_buffer_get_base(ggml_backend_buffer_t buffer) { + return (void *) (buffer->context); +} + +static void ggml_backend_amx_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { + tensor->extra = (void *) ggml::cpu::amx::get_tensor_traits(buffer, tensor); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_amx_buffer_memset_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, + uint8_t value, size_t offset, size_t size) { + memset((char *) tensor->data + offset, value, size); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_amx_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, + const void * data, size_t offset, size_t size) { + if (qtype_has_amx_kernels(tensor->type)) { + GGML_LOG_DEBUG("%s: amx repack tensor %s of type %s\n", __func__, tensor->name, ggml_type_name(tensor->type)); + ggml_backend_amx_convert_weight(tensor, data, offset, size); + } else { + memcpy((char *) tensor->data + offset, data, size); + } + + GGML_UNUSED(buffer); +} + +/* +// need to figure what we need to do with buffer->extra. 
+static void ggml_backend_amx_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { + GGML_ASSERT(!qtype_has_amx_kernels(tensor->type)); + memcpy(data, (const char *)tensor->data + offset, size); + + GGML_UNUSED(buffer); +} + +static bool ggml_backend_amx_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { + if (ggml_backend_buffer_is_host(src->buffer)) { + if (qtype_has_amx_kernels(src->type)) { + ggml_backend_amx_convert_weight(dst, src->data, 0, ggml_nbytes(dst)); + } else { + memcpy(dst->data, src->data, ggml_nbytes(src)); + } + return true; + } + return false; + + GGML_UNUSED(buffer); +} +*/ + +static void ggml_backend_amx_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + memset(buffer->context, value, buffer->size); +} + +static ggml_backend_buffer_i ggml_backend_amx_buffer_interface = { + /* .free_buffer = */ ggml_backend_amx_buffer_free_buffer, + /* .get_base = */ ggml_backend_amx_buffer_get_base, + /* .init_tensor = */ ggml_backend_amx_buffer_init_tensor, + /* .memset_tensor = */ ggml_backend_amx_buffer_memset_tensor, + /* .set_tensor = */ ggml_backend_amx_buffer_set_tensor, + /* .get_tensor = */ nullptr, + /* .cpy_tensor = */ nullptr, + /* .clear = */ ggml_backend_amx_buffer_clear, + /* .reset = */ nullptr, +}; + +static const char * ggml_backend_amx_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return "AMX"; + + GGML_UNUSED(buft); +} + +static ggml_backend_buffer_t ggml_backend_amx_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + void * data = aligned_alloc(TENSOR_ALIGNMENT, size); + if (data == NULL) { + fprintf(stderr, "%s: failed to allocate buffer of size %zu\n", __func__, size); + return NULL; + } + + return ggml_backend_buffer_init(buft, ggml_backend_amx_buffer_interface, data, size); +} + +static size_t ggml_backend_amx_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { + return TENSOR_ALIGNMENT; + + GGML_UNUSED(buft); +} + +namespace ggml::cpu::amx { +class extra_buffer_type : ggml::cpu::extra_buffer_type { + bool supports_op(ggml_backend_dev_t, const struct ggml_tensor * op) override { + // handle only 2d gemm for now + auto is_contiguous_2d = [](const struct ggml_tensor * t) { + return ggml_is_contiguous(t) && t->ne[3] == 1 && t->ne[2] == 1; + }; + + if (op->op == GGML_OP_MUL_MAT && is_contiguous_2d(op->src[0]) && // src0 must be contiguous + is_contiguous_2d(op->src[1]) && // src1 must be contiguous + op->src[0]->buffer && op->src[0]->buffer->buft == ggml_backend_amx_buffer_type() && + op->ne[0] % (TILE_N * 2) == 0 && // out_features is 32x + (qtype_has_amx_kernels(op->src[0]->type) || (op->src[0]->type == GGML_TYPE_F16))) { + // src1 must be host buffer + if (op->src[1]->buffer && !ggml_backend_buft_is_host(op->src[1]->buffer->buft)) { + return false; + } + // src1 must be float32 + if (op->src[1]->type == GGML_TYPE_F32) { + return true; + } + } + return false; + } + + ggml::cpu::tensor_traits * get_tensor_traits(const struct ggml_tensor * op) override { + if (op->op == GGML_OP_MUL_MAT && op->src[0]->buffer && + op->src[0]->buffer->buft == ggml_backend_amx_buffer_type()) { + return (ggml::cpu::tensor_traits *) op->src[0]->extra; + } + + return nullptr; + } +}; +} // namespace ggml::cpu::amx + +static size_t ggml_backend_amx_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { + return ggml_backend_amx_get_alloc_size(tensor); + + 
GGML_UNUSED(buft); +} + +#define ARCH_GET_XCOMP_PERM 0x1022 +#define ARCH_REQ_XCOMP_PERM 0x1023 +#define XFEATURE_XTILECFG 17 +#define XFEATURE_XTILEDATA 18 + +static bool ggml_amx_init() { +#if defined(__gnu_linux__) + if (syscall(SYS_arch_prctl, ARCH_REQ_XCOMP_PERM, XFEATURE_XTILEDATA)) { + fprintf(stderr, "AMX is not ready to be used!\n"); + return false; + } + return true; +#elif defined(_WIN32) + return true; +#endif +} + +ggml_backend_buffer_type_t ggml_backend_amx_buffer_type() { + static struct ggml_backend_buffer_type ggml_backend_buffer_type_amx = { + /* .iface = */ { + /* .get_name = */ ggml_backend_amx_buffer_type_get_name, + /* .alloc_buffer = */ ggml_backend_amx_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_amx_buffer_type_get_alignment, + /* .get_max_size = */ nullptr, // defaults to SIZE_MAX + /* .get_alloc_size = */ ggml_backend_amx_buffer_type_get_alloc_size, + /* .is_host = */ nullptr, + }, + /* .device = */ ggml_backend_reg_dev_get(ggml_backend_cpu_reg(), 0), + /* .context = */ new ggml::cpu::amx::extra_buffer_type(), + }; + + if (!ggml_amx_init()) { + return nullptr; + } + + return &ggml_backend_buffer_type_amx; +} + +#endif // defined(__AMX_INT8__) && defined(__AVX512VNNI__) diff --git a/ggml/src/ggml-cpu/amx/amx.h b/ggml/src/ggml-cpu/amx/amx.h new file mode 100644 index 0000000000000..5b65d76bdc89c --- /dev/null +++ b/ggml/src/ggml-cpu/amx/amx.h @@ -0,0 +1,8 @@ +#include "ggml-backend.h" +#include "ggml-cpu-impl.h" + +// GGML internal header + +#if defined(__AMX_INT8__) && defined(__AVX512VNNI__) +ggml_backend_buffer_type_t ggml_backend_amx_buffer_type(void); +#endif diff --git a/ggml/src/ggml-amx/common.h b/ggml/src/ggml-cpu/amx/common.h similarity index 67% rename from ggml/src/ggml-amx/common.h rename to ggml/src/ggml-cpu/amx/common.h index 5db8ce30d9bb4..f392e898518a7 100644 --- a/ggml/src/ggml-amx/common.h +++ b/ggml/src/ggml-cpu/amx/common.h @@ -1,14 +1,13 @@ #pragma once #include "ggml.h" -// hack until AMX is moved into the CPU backend -#include "../ggml-cpu/ggml-cpu-impl.h" // +#include "ggml-cpu-impl.h" #include #include #include -#if defined(_OPENMP) +#if defined(GGML_USE_OPENMP) #include #endif @@ -57,11 +56,11 @@ inline void balance211(T n, T nth, T ith, T& n_start, T& n_end) { } template -inline void parallel_for(int nth, int n, const func_t& f) { -#if defined(_OPENMP) -#pragma omp parallel num_threads(nth) +inline void parallel_for(int n, const func_t& f) { +#if defined(GGML_USE_OPENMP) +#pragma omp parallel { - //int nth = omp_get_num_threads(); + int nth = omp_get_num_threads(); int ith = omp_get_thread_num(); int tbegin, tend; balance211(n, nth, ith, tbegin, tend); @@ -69,26 +68,24 @@ inline void parallel_for(int nth, int n, const func_t& f) { } #else f(0, n); - - GGML_UNUSED(nth); #endif } +template +inline void parallel_for_ggml(const ggml_compute_params * params, int n, const func_t & f) { + int tbegin, tend; + balance211(n, params->nth, params->ith, tbegin, tend); + f(tbegin, tend); +} + // quantized types that have AMX support inline bool qtype_has_amx_kernels(const enum ggml_type type) { // TODO: fix padding for vnni format return (type == GGML_TYPE_Q4_0) || - (type == GGML_TYPE_Q4_1); - //(type == GGML_TYPE_Q8_0) || - //(type == GGML_TYPE_Q4_K) || - //(type == GGML_TYPE_Q5_K) || - //(type == GGML_TYPE_Q6_K) || - //(type == GGML_TYPE_IQ4_XS); + (type == GGML_TYPE_Q4_1) || + (type == GGML_TYPE_Q8_0) || + (type == GGML_TYPE_Q4_K) || + (type == GGML_TYPE_Q5_K) || + (type == GGML_TYPE_Q6_K) || + (type == GGML_TYPE_IQ4_XS); } - 
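// parallel_for_ggml above drops the OpenMP region that the old parallel_for
// opened and instead reuses ggml's own worker threads: each thread calls it
// with its params->ith/params->nth, and balance211 hands thread `ith` a
// contiguous slice of the [0, n) work items. The body of balance211 is outside
// this hunk; a minimal stand-in with the same contract (illustrative sketch
// only, not the repository's implementation) would be:

template <typename T>
inline void balance211_sketch(T n, T nth, T ith, T & n_start, T & n_end) {
    T chunk = n / nth;   // base number of items per thread
    T rem   = n % nth;   // the first `rem` threads take one extra item
    n_start = ith * chunk + (ith < rem ? ith : rem);
    n_end   = n_start + chunk + (ith < rem ? T(1) : T(0));
}

// With that contract, parallel_for_ggml(params, NB, f) calls f(tbegin, tend)
// once per compute thread and the per-thread ranges together cover [0, NB),
// which is why the AMX path no longer needs its own thread count or work pool.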
-// ggml backend context -struct ggml_backend_amx_context { - int n_threads = GGML_DEFAULT_N_THREADS; - std::unique_ptr work_data; - size_t work_size = 0; -}; diff --git a/ggml/src/ggml-amx/mmq.cpp b/ggml/src/ggml-cpu/amx/mmq.cpp similarity index 96% rename from ggml/src/ggml-amx/mmq.cpp rename to ggml/src/ggml-cpu/amx/mmq.cpp index 529bee25b5e73..0ea91596bc7e2 100644 --- a/ggml/src/ggml-amx/mmq.cpp +++ b/ggml/src/ggml-cpu/amx/mmq.cpp @@ -4,8 +4,11 @@ #pragma GCC diagnostic ignored "-Wunused-local-typedefs" #endif +#include "amx.h" #include "mmq.h" #include "ggml-impl.h" +#include "ggml-cpu-impl.h" +#include "ggml-cpu-quants.h" #include "ggml-quants.h" #include #include @@ -15,10 +18,6 @@ #include #endif -#if defined(_OPENMP) -#include -#endif - #if (defined(_WIN32) || defined(_WIN64)) #define RESTRICT __restrict #else @@ -33,7 +32,7 @@ #define ALWAYS_INLINE inline #endif -#if defined(__AMX_INT8__) +#if defined(__AMX_INT8__) && defined(__AVX512VNNI__) namespace { @@ -496,13 +495,12 @@ inline void from_float(const float * x, char * vy, int64_t k); template <> inline void from_float(const float * x, char * vy, int64_t k) { - // FIXME: using unoptimized reference impl until moved to CPU backend - quantize_row_q8_0_ref(x, (block_q8_0 *)vy, k); + quantize_row_q8_0(x, (block_q8_0 *)vy, k); } template <> inline void from_float(const float * x, char * vy, int64_t k) { - quantize_row_q8_1_ref(x, (block_q8_1 *)vy, k); + quantize_row_q8_1(x, (block_q8_1 *)vy, k); } template <> @@ -950,7 +948,7 @@ template> void unpack_B(packed_B_t * RESTRICT tile, const void * RESTRICT packed_B) { GGML_UNUSED(tile); GGML_UNUSED(packed_B); -}; +} template <> void unpack_B(int8_t * RESTRICT tile, const void * RESTRICT packed_B) { @@ -1338,21 +1336,19 @@ struct tinygemm_kernel_avx __m512 vb[COLS]; __m512 vc[ROWS * COLS]; - auto loadc = [&](int idx) { + auto loadc = [&](auto idx) { vc[idx] = _mm512_setzero_ps(); }; Unroll{}(loadc); - auto compute = [&](int idx, int k) { - // TODO: use `constexpr` here to get rid of interger div - // when upgraded to C++17 - const int row = idx / COLS; - const int col = idx % COLS; + auto compute = [&](auto idx, auto k) { + constexpr int row = idx / COLS; + constexpr int col = idx % COLS; - if (col == 0) { + if constexpr (col == 0) { va = _mm512_loadu_ps(A + row * K + k); } - if (row == 0) { + if constexpr (row == 0) { vb[col] = _mm512_cvtph_ps(_mm256_loadu_si256((const __m256i *)(B + col * K + k))); } vc[idx] = _mm512_fmadd_ps(va, vb[col], vc[idx]); @@ -1362,9 +1358,9 @@ struct tinygemm_kernel_avx Unroll{}(compute, k); } - auto storec = [&](int idx) { - const int row = idx / COLS; - const int col = idx % COLS; + auto storec = [&](auto idx) { + constexpr int row = idx / COLS; + constexpr int col = idx % COLS; C[row * ldc + col] = _mm512_reduce_add_ps(vc[idx]); }; Unroll{}(storec); @@ -1382,13 +1378,13 @@ struct tinygemm_kernel_avx #define PACKED_INDEX(n, k, KB, tile_size) (n * KB + k) * tile_size template -void convert_B_packed_format(void * RESTRICT packed_B, const TB * RESTRICT B, int N, int K, int n_threads) { +void convert_B_packed_format(void * RESTRICT packed_B, const TB * RESTRICT B, int N, int K) { const int NB = N / TILE_N; const int KB = K / BLOCK_K; const int TILE_SIZE = get_tile_size(); // parallel on NB should be enough - parallel_for(n_threads, NB, [&](int begin, int end) { + parallel_for(NB, [&](int begin, int end) { for (int n = begin; n < end; ++n) { for (int k = 0; k < KB; ++k) { int n0 = n * TILE_N; @@ -1427,14 +1423,14 @@ struct tinygemm_kernel_vnni{}(loadc); - auto 
compute = [&](int col, int i) { + auto compute = [&](auto col, auto i) { // load a and compute compensation - if (col == 0) { + if constexpr (col == 0) { const int32_t * a_ptr = reinterpret_cast(A[0 * KB + i].qs); vcomp = _mm512_setzero_si512(); for (int k = 0; k < 8; ++k) { @@ -1466,7 +1462,7 @@ struct tinygemm_kernel_vnni{}(storec); @@ -1490,14 +1486,14 @@ struct tinygemm_kernel_vnni const __m512i lowMask = _mm512_set1_epi8(0xF); - auto loadc = [&](int col) { + auto loadc = [&](auto col) { vc[col] = _mm512_setzero_ps(); }; Unroll{}(loadc); - auto compute = [&](int col, int i) { + auto compute = [&](auto col, auto i) { // load a - if (col == 0) { + if constexpr (col == 0) { const int32_t * a_ptr = reinterpret_cast(A[0 * KB + i].qs); for (int k = 0; k < 8; ++k) { va[k] = _mm512_set1_epi32(a_ptr[k]); @@ -1531,7 +1527,7 @@ struct tinygemm_kernel_vnni } //store to C - auto storec = [&](int col) { + auto storec = [&](auto col) { _mm512_storeu_ps((__m512i*)(C + 0 * ldc + col * 16), vc[col]); }; Unroll{}(storec); @@ -1562,14 +1558,14 @@ struct tinygemm_kernel_vnni(0x80)); - auto loadc = [&](int col) { + auto loadc = [&](auto col) { vc[col] = _mm512_setzero_ps(); }; Unroll{}(loadc); - auto compute = [&](int col, int i) { + auto compute = [&](auto col, auto i) { // load a and add offset 128 - if (col == 0) { + if constexpr (col == 0) { const int32_t * a_ptr = reinterpret_cast(A[0 * KB + i].qs); for (int k = 0; k < 8; ++k) { va[k] = _mm512_set1_epi32(a_ptr[k]); @@ -1602,7 +1598,7 @@ struct tinygemm_kernel_vnni{}(storec); @@ -1634,7 +1630,7 @@ struct tinygemm_kernel_vnni{}(loadc); @@ -1648,9 +1644,9 @@ struct tinygemm_kernel_vnni{}(storec); @@ -1735,15 +1731,15 @@ struct tinygemm_kernel_vnni{}(loadc); // Q5_K and Q4_K shares the same vnni formats, refer to notes above. 
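// The loadc/compute/storec lambdas in these tinygemm kernels now take their
// index as an `auto` parameter so that `row` and `col` can be `constexpr` and
// the column-0 loads can be guarded with `if constexpr`, removing the runtime
// integer divisions the old TODO mentioned. The Unroll helper itself is not
// shown in this hunk; one common way to make such constexpr locals legal is to
// pass the index as std::integral_constant, roughly as in this hypothetical
// sketch (unroll_sketch/unroll_sketch_impl are not names from the patch):

#include <type_traits>
#include <utility>

template <typename F, int... Is>
static void unroll_sketch_impl(F && f, std::integer_sequence<int, Is...>) {
    (f(std::integral_constant<int, Is>{}), ...);   // invoke f with constants 0 .. N-1
}

template <int N, typename F>
static void unroll_sketch(F && f) {
    unroll_sketch_impl(std::forward<F>(f), std::make_integer_sequence<int, N>{});
}

// Because the argument's value is encoded in its type, the callee may write:
//   unroll_sketch<4>([&](auto idx) {
//       constexpr int row = idx / 2;   // idx converts inside a constant expression
//       constexpr int col = idx % 2;
//       if constexpr (col == 0) { /* done once per row */ }
//   });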
- auto compute = [&](int col, int i) { + auto compute = [&](auto col, auto i) { // load a - if (col == 0) { + if constexpr (col == 0) { for (int k_group = 0; k_group < QK_K / 32; ++k_group) { va[k_group] = _mm512_castsi256_si512(_mm256_loadu_si256((const __m256i *)(A[0 * KB + i].qs + k_group * 32))); } @@ -1808,7 +1804,7 @@ struct tinygemm_kernel_vnni{}(storec); @@ -1841,13 +1837,13 @@ struct tinygemm_kernel_vnni{}(loadc); - auto compute = [&](int col, int i) { - if (col == 0) { + auto compute = [&](auto col, auto i) { + if constexpr (col == 0) { // load a va[0] = _mm512_loadu_si512((const __m512i *)(A[0 * KB + i].qs + 0)); va[1] = _mm512_loadu_si512((const __m512i *)(A[0 * KB + i].qs + 64)); @@ -1959,13 +1955,13 @@ struct tinygemm_kernel_vnni(0x80)); const __m512i values256 = _mm512_add_epi8(values128, off); - auto loadc = [&](int col) { + auto loadc = [&](auto col) { vc[col] = _mm512_setzero_ps(); }; Unroll{}(loadc); - auto compute = [&](int col, int i) { - if (col == 0) { + auto compute = [&](auto col, auto i) { + if constexpr (col == 0) { // load a va[0] = _mm512_loadu_si512((const __m512i *)(A[0 * KB + i].qs + 0)); va[1] = _mm512_loadu_si512((const __m512i *)(A[0 * KB + i].qs + 64)); @@ -2015,7 +2011,7 @@ struct tinygemm_kernel_vnni{}(storec); @@ -2327,25 +2323,39 @@ size_t ggml_backend_amx_get_alloc_size(const struct ggml_tensor * tensor) { // pack weight to vnni format void ggml_backend_amx_convert_weight(struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - - size_t alloc_size = ggml_backend_amx_get_alloc_size(tensor); - GGML_ASSERT(alloc_size == size); + GGML_ASSERT(offset == 0 && size == ggml_nbytes(tensor)); // only full tensor conversion is supported for now const enum ggml_type TYPE = tensor->type; const int K = tensor->ne[0]; // ne0: in_features const int N = tensor->ne[1]; // ne1: out_features -#if defined(_OPENMP) - // the buffer ctx is not initialized when .set_tensor is called - int n_threads = omp_get_num_threads(); -#else - int n_threads = 1; -#endif + GGML_DISPATCH_QTYPES(TYPE, [&] { + convert_B_packed_format((void *)((char *)tensor->data + offset), (const type *)data, N, K); + }); +} + +size_t ggml_backend_amx_desired_wsize(const struct ggml_tensor * dst) { + struct ggml_tensor * src0 = dst->src[0]; + + const enum ggml_type TYPE = src0->type; + + const bool is_floating_type = TYPE == GGML_TYPE_F16; + if (is_floating_type) { + return 0; + } + + const int M = dst->ne[1]; + const int K = src0->ne[0]; + + size_t desired_wsize = 0; GGML_DISPATCH_QTYPES(TYPE, [&] { - convert_B_packed_format((void *)((char *)tensor->data + offset), (const type *)data, N, K, n_threads); + const size_t row_size_A = K / blck_size * sizeof(vec_dot_type); + desired_wsize = M * row_size_A; }); + + return desired_wsize; } // NB: mixed dtype gemm with Advanced Matrix Extensions (Intel AMX) @@ -2356,14 +2366,12 @@ void ggml_backend_amx_convert_weight(struct ggml_tensor * tensor, const void * d // // the function performs: dst = src1 @ src0.T // -void ggml_backend_amx_mul_mat(ggml_backend_amx_context * ctx, struct ggml_tensor * dst) { +void ggml_backend_amx_mul_mat(const ggml_compute_params * params, struct ggml_tensor * dst) { struct ggml_tensor * src0 = dst->src[0]; struct ggml_tensor * src1 = dst->src[1]; const enum ggml_type TYPE = src0->type; - const int n_threads = ctx->n_threads; - // f16 only has avx512 kernels for now, // amx kernels will be added once 6th gen xeon is released. 
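// Shape convention for the code below (ggml's standard mul_mat layout): src0
// holds the quantized/f16 weights with ne0 = K and ne1 = N, src1 holds the f32
// activations with ne0 = K and ne1 = M, and dst is N x M (ne0 = N, ne1 = M),
// i.e. dst = src1 @ src0.T as noted above. The work space sized by
// ggml_backend_amx_desired_wsize() holds src1 ("A") converted row by row to
// the kernel's vec_dot type; further down, thread 0 performs that conversion
// into params->wdata and ggml_barrier() publishes it to the other threads
// before the tiled GEMM runs.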
const bool is_floating_type = TYPE == GGML_TYPE_F16; @@ -2379,7 +2387,7 @@ void ggml_backend_amx_mul_mat(ggml_backend_amx_context * ctx, struct ggml_tensor const int MB = div_up(M, BLOCK_M); const int NB = div_up(N, BLOCK_N); - parallel_for(n_threads, MB * NB, [&](int begin, int end) { + parallel_for_ggml(params, MB * NB, [&](int begin, int end) { GGML_DISPATCH_FLOATING_TYPES(TYPE, [&] { for (int i = begin; i < end; ++i) { int mb = i / NB; @@ -2412,27 +2420,29 @@ void ggml_backend_amx_mul_mat(ggml_backend_amx_context * ctx, struct ggml_tensor } // pointer to work space, used convert A from float to quantized type - void * wdata = nullptr; + void * wdata = params->wdata; //TODO: performance improvement: merge quant A - GGML_DISPATCH_QTYPES(TYPE, [&] { - const size_t row_size_A = K / blck_size * sizeof(vec_dot_type); - const size_t desired_wsize = M * row_size_A; - if (ctx->work_size < desired_wsize) { - ctx->work_data.reset(new char[desired_wsize]); - ctx->work_size = desired_wsize; - } - wdata = ctx->work_data.get(); + if (params->ith == 0) { + GGML_DISPATCH_QTYPES(TYPE, [&] { + const size_t row_size_A = K / blck_size * sizeof(vec_dot_type); + const size_t desired_wsize = M * row_size_A; + if (params->wsize < desired_wsize) { + GGML_ABORT("insufficient work space size"); + } - // Q4_0, Q4_1, Q8_0 handles 1 TILE_K per blck_size - // Q4_K, Q5_K, Q6_K, IQ4_XS handles 8 TILE_K per blck_size - GGML_ASSERT(TILE_K == blck_size || TILE_K * 8 == blck_size); + // Q4_0, Q4_1, Q8_0 handles 1 TILE_K per blck_size + // Q4_K, Q5_K, Q6_K, IQ4_XS handles 8 TILE_K per blck_size + GGML_ASSERT(TILE_K == blck_size || TILE_K * 8 == blck_size); - const float * A_data = static_cast(src1->data); - for (int m = 0; m < M; ++m) { - from_float(A_data + m * K, (char *)wdata + m * row_size_A, K); - } - }); + const float * A_data = static_cast(src1->data); + for (int m = 0; m < M; ++m) { + from_float(A_data + m * K, (char *)wdata + m * row_size_A, K); + } + }); + } + + ggml_barrier(params->threadpool); if (M == 1) { // MB = 1 and handle 8 tiles in each block @@ -2440,7 +2450,7 @@ void ggml_backend_amx_mul_mat(ggml_backend_amx_context * ctx, struct ggml_tensor constexpr int BLOCK_N = TILE_N * kTilesN; const int NB = div_up(N, BLOCK_N); - parallel_for(n_threads, NB, [&](int begin, int end) { + parallel_for_ggml(params, NB, [&](int begin, int end) { GGML_DISPATCH_QTYPES(TYPE, [&] { const int KB = K / blck_size; const int TILE_SIZE = get_tile_size(); @@ -2470,7 +2480,7 @@ void ggml_backend_amx_mul_mat(ggml_backend_amx_context * ctx, struct ggml_tensor const int MB = div_up(M, BLOCK_M); const int NB = div_up(N, BLOCK_N); - parallel_for(n_threads, MB * NB, [&](int begin, int end) { + parallel_for_ggml(params, MB * NB, [&](int begin, int end) { // init tile config for each thread ggml_tile_config_init(); @@ -2498,13 +2508,4 @@ void ggml_backend_amx_mul_mat(ggml_backend_amx_context * ctx, struct ggml_tensor }); } -#else // if defined(__AMX_INT8__) - -void ggml_backend_amx_mul_mat(ggml_backend_amx_context * ctx, struct ggml_tensor * dst) { - fprintf(stderr, "GGML is not compiled with AMX support!\n"); - - GGML_UNUSED(ctx); - GGML_UNUSED(dst); -} - -#endif // if defined(__AMX_INT8__) +#endif // if defined(__AMX_INT8__) && defined(__AVX512VNNI__) diff --git a/ggml/src/ggml-amx/mmq.h b/ggml/src/ggml-cpu/amx/mmq.h similarity index 56% rename from ggml/src/ggml-amx/mmq.h rename to ggml/src/ggml-cpu/amx/mmq.h index cf09206206370..baf7684773453 100644 --- a/ggml/src/ggml-amx/mmq.h +++ b/ggml/src/ggml-cpu/amx/mmq.h @@ -1,17 +1,10 @@ 
#pragma once #include "common.h" -#include -#ifdef __cplusplus -extern "C" { -#endif +size_t ggml_backend_amx_desired_wsize(const struct ggml_tensor * dst); size_t ggml_backend_amx_get_alloc_size(const struct ggml_tensor * tensor); void ggml_backend_amx_convert_weight(struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); -void ggml_backend_amx_mul_mat(ggml_backend_amx_context * ctx, struct ggml_tensor * dst); - -#ifdef __cplusplus -} -#endif +void ggml_backend_amx_mul_mat(const struct ggml_compute_params * params, struct ggml_tensor * dst); diff --git a/ggml/src/ggml-cpu/cpu-feats-x86.cpp b/ggml/src/ggml-cpu/cpu-feats-x86.cpp new file mode 100644 index 0000000000000..e8133d411fd14 --- /dev/null +++ b/ggml/src/ggml-cpu/cpu-feats-x86.cpp @@ -0,0 +1,323 @@ +#include "ggml-backend-impl.h" + +#if defined(__x86_64__) || (defined(_MSC_VER) && defined(_M_AMD64)) + +#ifdef _MSC_VER +#include +#endif + +#include +#include +#include +#include +#include + +// ref: https://cdrdv2-public.intel.com/782156/325383-sdm-vol-2abcd.pdf +struct cpuid_x86 { + bool SSE3(void) { return f_1_ecx[0]; } + bool PCLMULQDQ(void) { return f_1_ecx[1]; } + bool MONITOR(void) { return f_1_ecx[3]; } + bool SSSE3(void) { return f_1_ecx[9]; } + bool FMA(void) { return f_1_ecx[12]; } + bool CMPXCHG16B(void) { return f_1_ecx[13]; } + bool SSE41(void) { return f_1_ecx[19]; } + bool SSE42(void) { return f_1_ecx[20]; } + bool MOVBE(void) { return f_1_ecx[22]; } + bool POPCNT(void) { return f_1_ecx[23]; } + bool AES(void) { return f_1_ecx[25]; } + bool XSAVE(void) { return f_1_ecx[26]; } + bool OSXSAVE(void) { return f_1_ecx[27]; } + bool AVX(void) { return f_1_ecx[28]; } + bool F16C(void) { return f_1_ecx[29]; } + bool RDRAND(void) { return f_1_ecx[30]; } + + bool MSR(void) { return f_1_edx[5]; } + bool CX8(void) { return f_1_edx[8]; } + bool SEP(void) { return f_1_edx[11]; } + bool CMOV(void) { return f_1_edx[15]; } + bool CLFSH(void) { return f_1_edx[19]; } + bool MMX(void) { return f_1_edx[23]; } + bool FXSR(void) { return f_1_edx[24]; } + bool SSE(void) { return f_1_edx[25]; } + bool SSE2(void) { return f_1_edx[26]; } + + bool FSGSBASE(void) { return f_7_ebx[0]; } + bool BMI1(void) { return f_7_ebx[3]; } + bool HLE(void) { return is_intel && f_7_ebx[4]; } + bool AVX2(void) { return f_7_ebx[5]; } + bool BMI2(void) { return f_7_ebx[8]; } + bool ERMS(void) { return f_7_ebx[9]; } + bool INVPCID(void) { return f_7_ebx[10]; } + bool RTM(void) { return is_intel && f_7_ebx[11]; } + bool AVX512F(void) { return f_7_ebx[16]; } + bool AVX512DQ(void) { return f_7_ebx[17]; } + bool RDSEED(void) { return f_7_ebx[18]; } + bool ADX(void) { return f_7_ebx[19]; } + bool AVX512PF(void) { return f_7_ebx[26]; } + bool AVX512ER(void) { return f_7_ebx[27]; } + bool AVX512CD(void) { return f_7_ebx[28]; } + bool AVX512BW(void) { return f_7_ebx[30]; } + bool AVX512VL(void) { return f_7_ebx[31]; } + + bool SHA(void) { return f_7_ebx[29]; } + + bool PREFETCHWT1(void) { return f_7_ecx[0]; } + + bool LAHF(void) { return f_81_ecx[0]; } + bool LZCNT(void) { return is_intel && f_81_ecx[5]; } + bool ABM(void) { return is_amd && f_81_ecx[5]; } + bool SSE4a(void) { return is_amd && f_81_ecx[6]; } + bool XOP(void) { return is_amd && f_81_ecx[11]; } + bool TBM(void) { return is_amd && f_81_ecx[21]; } + + bool SYSCALL(void) { return is_intel && f_81_edx[11]; } + bool MMXEXT(void) { return is_amd && f_81_edx[22]; } + bool RDTSCP(void) { return is_intel && f_81_edx[27]; } + bool _3DNOWEXT(void) { return is_amd && f_81_edx[30]; } + bool 
_3DNOW(void) { return is_amd && f_81_edx[31]; } + + bool AVX512_VBMI(void) { return f_7_ecx[1]; } + bool AVX512_VNNI(void) { return f_7_ecx[11]; } + bool AVX512_FP16(void) { return f_7_edx[23]; } + bool AVX512_BF16(void) { return f_7_1_eax[5]; } + bool AVX_VNNI(void) { return f_7_1_eax[4]; } + + bool AMX_TILE(void) { return f_7_edx[24]; } + bool AMX_INT8(void) { return f_7_edx[25]; } + bool AMX_FP16(void) { return f_7_1_eax[21]; } + bool AMX_BF16(void) { return f_7_edx[22]; } + +#ifdef _MSC_VER + static void cpuid(int cpu_info[4], int eax) { + __cpuid(cpu_info, eax); + } + static void cpuidex(int cpu_info[4], int eax, int ecx) { + __cpuidex(cpu_info, eax, ecx); + } +#else + static void cpuid(int cpu_info[4], int eax) { + __asm__ __volatile__( + "cpuid" + : "=a"(cpu_info[0]), "=b"(cpu_info[1]), "=c"(cpu_info[2]), "=d"(cpu_info[3]) + : "a"(eax), "c"(0)); + } + static void cpuidex(int cpu_info[4], int eax, int ecx) { + __asm__ __volatile__( + "cpuid" + : "=a"(cpu_info[0]), "=b"(cpu_info[1]), "=c"(cpu_info[2]), "=d"(cpu_info[3]) + : "a"(eax), "c"(ecx)); + } +#endif + + cpuid_x86() { + std::array cpui; + std::vector> data; + + // calling __cpuid with 0x0 as the function_id argument + // gets the number of the highest valid function ID. + cpuid(cpui.data(), 0); + int n_ids = cpui[0]; + + for (int i = 0; i <= n_ids; ++i) { + cpuidex(cpui.data(), i, 0); + data.push_back(cpui); + } + + // capture vendor string + char vendor[0x20] = {}; + *reinterpret_cast(vendor) = data[0][1]; + *reinterpret_cast(vendor + 4) = data[0][3]; + *reinterpret_cast(vendor + 8) = data[0][2]; + this->vendor = vendor; + if (this->vendor == "GenuineIntel") { + is_intel = true; + } else if (this->vendor == "AuthenticAMD") { + is_amd = true; + } + + // load bitset with flags for function 0x00000001 + if (n_ids >= 1) { + f_1_ecx = data[1][2]; + f_1_edx = data[1][3]; + } + + // load bitset with flags for function 0x00000007 + if (n_ids >= 7) { + f_7_ebx = data[7][1]; + f_7_ecx = data[7][2]; + f_7_edx = data[7][3]; + cpuidex(cpui.data(), 7, 1); + f_7_1_eax = cpui[0]; + } + + // calling __cpuid with 0x80000000 as the function_id argument + // gets the number of the highest valid extended ID. 
+ cpuid(cpui.data(), 0x80000000); + unsigned int n_ex_ids = cpui[0]; + + std::vector> ext_data; + for (unsigned int i = 0x80000000; i <= n_ex_ids; ++i) { + cpuidex(cpui.data(), i, 0); + ext_data.push_back(cpui); + } + + // load bitset with flags for function 0x80000001 + if (n_ex_ids >= 0x80000001) { + f_81_ecx = ext_data[1][2]; + f_81_edx = ext_data[1][3]; + } + + // interpret CPU brand string if reported + char brand[0x40] = {}; + if (n_ex_ids >= 0x80000004) { + std::memcpy(brand, ext_data[2].data(), sizeof(cpui)); + std::memcpy(brand + 16, ext_data[3].data(), sizeof(cpui)); + std::memcpy(brand + 32, ext_data[4].data(), sizeof(cpui)); + this->brand = brand; + } + } + + bool is_intel = false; + bool is_amd = false; + std::string vendor; + std::string brand; + std::bitset<32> f_1_ecx; + std::bitset<32> f_1_edx; + std::bitset<32> f_7_ebx; + std::bitset<32> f_7_ecx; + std::bitset<32> f_7_edx; + std::bitset<32> f_7_1_eax; + std::bitset<32> f_81_ecx; + std::bitset<32> f_81_edx; +}; + +#if 0 +void test_x86_is() { + cpuid_x86 is; + printf("CPU Vendor: %s\n", is.vendor.c_str()); + printf("Brand: %s\n", is.brand.c_str()); + printf("is_intel: %d\n", is.is_intel); + printf("is_amd: %d\n", is.is_amd); + printf("sse3: %d\n", is.SSE3()); + printf("pclmulqdq: %d\n", is.PCLMULQDQ()); + printf("ssse3: %d\n", is.SSSE3()); + printf("fma: %d\n", is.FMA()); + printf("cmpxchg16b: %d\n", is.CMPXCHG16B()); + printf("sse41: %d\n", is.SSE41()); + printf("sse42: %d\n", is.SSE42()); + printf("movbe: %d\n", is.MOVBE()); + printf("popcnt: %d\n", is.POPCNT()); + printf("aes: %d\n", is.AES()); + printf("xsave: %d\n", is.XSAVE()); + printf("osxsave: %d\n", is.OSXSAVE()); + printf("avx: %d\n", is.AVX()); + printf("f16c: %d\n", is.F16C()); + printf("rdrand: %d\n", is.RDRAND()); + printf("msr: %d\n", is.MSR()); + printf("cx8: %d\n", is.CX8()); + printf("sep: %d\n", is.SEP()); + printf("cmov: %d\n", is.CMOV()); + printf("clflush: %d\n", is.CLFSH()); + printf("mmx: %d\n", is.MMX()); + printf("fxsr: %d\n", is.FXSR()); + printf("sse: %d\n", is.SSE()); + printf("sse2: %d\n", is.SSE2()); + printf("fsgsbase: %d\n", is.FSGSBASE()); + printf("bmi1: %d\n", is.BMI1()); + printf("hle: %d\n", is.HLE()); + printf("avx2: %d\n", is.AVX2()); + printf("bmi2: %d\n", is.BMI2()); + printf("erms: %d\n", is.ERMS()); + printf("invpcid: %d\n", is.INVPCID()); + printf("rtm: %d\n", is.RTM()); + printf("avx512f: %d\n", is.AVX512F()); + printf("rdseed: %d\n", is.RDSEED()); + printf("adx: %d\n", is.ADX()); + printf("avx512pf: %d\n", is.AVX512PF()); + printf("avx512er: %d\n", is.AVX512ER()); + printf("avx512cd: %d\n", is.AVX512CD()); + printf("sha: %d\n", is.SHA()); + printf("prefetchwt1: %d\n", is.PREFETCHWT1()); + printf("lahf: %d\n", is.LAHF()); + printf("lzcnt: %d\n", is.LZCNT()); + printf("abm: %d\n", is.ABM()); + printf("sse4a: %d\n", is.SSE4a()); + printf("xop: %d\n", is.XOP()); + printf("tbm: %d\n", is.TBM()); + printf("syscall: %d\n", is.SYSCALL()); + printf("mmxext: %d\n", is.MMXEXT()); + printf("rdtscp: %d\n", is.RDTSCP()); + printf("3dnowext: %d\n", is._3DNOWEXT()); + printf("3dnow: %d\n", is._3DNOW()); + printf("avx512_vbmi: %d\n", is.AVX512_VBMI()); + printf("avx512_vnni: %d\n", is.AVX512_VNNI()); + printf("avx512_fp16: %d\n", is.AVX512_FP16()); + printf("avx512_bf16: %d\n", is.AVX512_BF16()); + printf("amx_tile: %d\n", is.AMX_TILE()); + printf("amx_int8: %d\n", is.AMX_INT8()); + printf("amx_fp16: %d\n", is.AMX_FP16()); + printf("amx_bf16: %d\n", is.AMX_BF16()); +} +#endif + +static int ggml_backend_cpu_x86_score() { + // FIXME: this does 
not check for OS support + + int score = 0; + cpuid_x86 is; + +#ifdef GGML_FMA + if (!is.FMA()) { return 0; } + score += 1; +#endif +#ifdef GGML_F16C + if (!is.F16C()) { return 0; } + score += 1<<1; +#endif +#ifdef GGML_SSE42 + if (!is.SSE42()) { return 0; } + score += 1<<2; +#endif +#ifdef GGML_AVX + if (!is.AVX()) { return 0; } + score += 1<<4; +#endif +#ifdef GGML_AVX2 + if (!is.AVX2()) { return 0; } + score += 1<<5; +#endif +#ifdef GGML_AVX_VNNI + if (!is.AVX_VNNI()) { return 0; } + score += 1<<6; +#endif +#ifdef GGML_AVX512 + if (!is.AVX512F()) { return 0; } + if (!is.AVX512CD()) { return 0; } + if (!is.AVX512VL()) { return 0; } + if (!is.AVX512DQ()) { return 0; } + if (!is.AVX512BW()) { return 0; } + score += 1<<7; +#endif +#ifdef GGML_AVX512_VBMI + if (!is.AVX512_VBMI()) { return 0; } + score += 1<<8; +#endif +#ifdef GGML_AVX512_BF16 + if (!is.AVX512_BF16()) { return 0; } + score += 1<<9; +#endif +#ifdef GGML_AVX512_VNNI + if (!is.AVX512_VNNI()) { return 0; } + score += 1<<10; +#endif +#ifdef GGML_AMX_INT8 + if (!is.AMX_INT8()) { return 0; } + score += 1<<11; +#endif + + return score; +} + +GGML_BACKEND_DL_SCORE_IMPL(ggml_backend_cpu_x86_score) + +#endif // defined(__x86_64__) || (defined(_MSC_VER) && defined(_M_AMD64)) diff --git a/ggml/src/ggml-cpu/ggml-cpu-aarch64.c b/ggml/src/ggml-cpu/ggml-cpu-aarch64.cpp similarity index 84% rename from ggml/src/ggml-cpu/ggml-cpu-aarch64.c rename to ggml/src/ggml-cpu/ggml-cpu-aarch64.cpp index 69d3d327d1180..a51d1a6c59517 100644 --- a/ggml/src/ggml-cpu/ggml-cpu-aarch64.c +++ b/ggml/src/ggml-cpu/ggml-cpu-aarch64.cpp @@ -1,20 +1,57 @@ -#define GGML_COMMON_IMPL_C +#define GGML_COMMON_IMPL_CPP +#define GGML_COMMON_DECL_CPP #include "ggml-common.h" +#include "ggml-backend-impl.h" #include "ggml-quants.h" #include "ggml-impl.h" #include "ggml-cpu.h" -#include "ggml-cpu/ggml-cpu-impl.h" +#include "ggml-cpu-impl.h" +#include "ggml-cpu-traits.h" -#include -#include -#include -#include -#include // for qsort -#include // for GGML_ASSERT +#include +#include +#include +#include +#include // for qsort +#include // for GGML_ASSERT #include "ggml-cpu-aarch64.h" +// TODO: move to include file? 
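// The block<K, N> template below describes the interleaved storage behind the
// repacked Q4_0_x_x weights and the matching q8_0x4/q8_0x8 activation packing:
// the N per-block deltas are stored first, followed by the quants of N qK_0
// blocks interleaved so the x4/x8 GEMV/GEMM kernels can process N blocks side
// by side. As a size check, for block<4, 4> (block_q4_0x4):
//   sizeof(block<4, 4>) = 4 * sizeof(ggml_half)   // four deltas
//                       + (QK4_0 * 4 * 4) / 8     // 4 blocks of 32 4-bit quants
//                       = 8 + 64 = 72 bytes,
// which matches the static_assert against 4 * sizeof(ggml_half) + QK8_0 * 2.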
+template constexpr int QK_0() { + if constexpr (K == 4) { + return QK4_0; + } + if constexpr (K == 8) { + return QK8_0; + } + return -1; +} + +template struct block { + ggml_half d[N]; // deltas for N qK_0 blocks + int8_t qs[(QK_0() * N * K) / 8]; // quants for N qK_0 blocks +}; + +// control size +static_assert(sizeof(block<4, 4>) == 4 * sizeof(ggml_half) + QK8_0 * 2, "wrong block<4,4> size/padding"); +static_assert(sizeof(block<4, 8>) == 8 * sizeof(ggml_half) + QK8_0 * 4, "wrong block<4,8> size/padding"); +static_assert(sizeof(block<8, 4>) == 4 * sizeof(ggml_half) + QK8_0 * 4, "wrong block<8,4> size/padding"); +static_assert(sizeof(block<8, 8>) == 8 * sizeof(ggml_half) + QK8_0 * 8, "wrong block<8,8> size/padding"); + +using block_q4_0x4 = block<4, 4>; +using block_q4_0x8 = block<4, 8>; +using block_q8_0x4 = block<8, 4>; +using block_q8_0x8 = block<8, 8>; + +struct block_iq4_nlx4 { + ggml_half d[4]; // deltas for 4 iq4_nl blocks + uint8_t qs[QK4_NL * 2]; // nibbles / quants for 4 iq4_nl blocks +}; + +static_assert(sizeof(block_iq4_nlx4) == 4 * sizeof(ggml_half) + QK4_NL * 2, "wrong iq4_nlx4 block size/padding"); + #if defined(__GNUC__) #pragma GCC diagnostic ignored "-Woverlength-strings" #elif defined(_MSC_VER) @@ -128,7 +165,7 @@ static inline __m512i sum_i16_pairs_int_32x16(const __m512i x) { } static inline __m512i mul_sum_us8_pairs_int32x16(const __m512i ax, const __m512i sy) { -#if defined(__AVXVNNI__) || (defined(__AVX512VNNI__) && defined(__AVX512VL__)) +#if defined(__AVX512VNNI__) const __m512i zero = _mm512_setzero_si512(); return _mm512_dpbusd_epi32(zero, ax, sy); #else @@ -185,12 +222,12 @@ static inline __m256i mul_sum_i8_pairs_int32x8(const __m256i x, const __m256i y) static const int8_t kvalues_iq4nl[16] = {-127, -104, -83, -65, -49, -35, -22, -10, 1, 13, 25, 38, 53, 69, 89, 113}; -static void quantize_q8_0_4x4(const float * restrict x, void * restrict vy, int64_t k) { +static void quantize_q8_0_4x4(const float * GGML_RESTRICT x, void * GGML_RESTRICT vy, int64_t k) { assert(QK8_0 == 32); assert(k % QK8_0 == 0); const int nb = k / QK8_0; - block_q8_0x4 * restrict y = (block_q8_0x4 *) vy; + block_q8_0x4 * GGML_RESTRICT y = (block_q8_0x4 *) vy; #if defined(__ARM_NEON) float32x4_t srcv[4][8]; @@ -279,12 +316,12 @@ static void quantize_q8_0_4x4(const float * restrict x, void * restrict vy, int6 #endif } -static void quantize_q8_0_4x8(const float * restrict x, void * restrict vy, int64_t k) { +static void quantize_q8_0_4x8(const float * GGML_RESTRICT x, void * GGML_RESTRICT vy, int64_t k) { assert(QK8_0 == 32); assert(k % QK8_0 == 0); const int nb = k / QK8_0; - block_q8_0x4 * restrict y = (block_q8_0x4 *) vy; + block_q8_0x4 * GGML_RESTRICT y = (block_q8_0x4 *) vy; #if defined(__ARM_NEON) float32x4_t srcv[4][8]; @@ -494,7 +531,7 @@ static void quantize_q8_0_4x8(const float * restrict x, void * restrict vy, int6 #endif } -void quantize_mat_q8_0(const float * restrict x, void * restrict vy, int64_t nrow, int64_t n_per_row, int64_t blck_size_interleave) { +static void quantize_mat_q8_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT vy, int64_t nrow, int64_t n_per_row, int64_t blck_size_interleave) { assert(nrow == 4); UNUSED(nrow); if (blck_size_interleave == 4) { @@ -506,7 +543,7 @@ void quantize_mat_q8_0(const float * restrict x, void * restrict vy, int64_t nro } } -void ggml_gemv_q4_0_4x4_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) { +static void ggml_gemv_q4_0_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t 
bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { const int qk = QK8_0; const int nb = n / qk; const int ncols_interleaved = 4; @@ -525,67 +562,47 @@ void ggml_gemv_q4_0_4x4_q8_0(int n, float * restrict s, size_t bs, const void * UNUSED(ncols_interleaved); UNUSED(blocklen); -#if ! ((defined(_MSC_VER)) && ! defined(__clang__)) && defined(__aarch64__) && defined(__ARM_NEON) +#if ! ((defined(_MSC_VER)) && ! defined(__clang__)) && defined(__aarch64__) && defined(__ARM_NEON) && defined(__ARM_FEATURE_DOTPROD) if (ggml_cpu_has_neon() && ggml_cpu_has_dotprod()) { - const void * b_ptr = vx; - const void * a_ptr = vy; - float * res_ptr = s; - - __asm__ __volatile__( - "movi v31.16b, #0x4\n" - "movi v30.16b, #0xf0\n" - "add %x[b_ptr], %x[b_ptr], #0x8\n" - "1:" // Column loop - "add x22, %x[a_ptr], #0x2\n" - "movi v29.16b, #0x0\n" - "mov x21, %x[nb]\n" - "2:" // Block loop - "ldr q28, [%x[b_ptr], #0x0]\n" - "ldr q27, [x22, #0x0]\n" - "movi v26.4s, #0x0\n" - "sub x20, x22, #0x2\n" - "ldr q25, [x22, #0x10]\n" - "ldr q24, [%x[b_ptr], #0x10]\n" - "sub x21, x21, #0x1\n" - "add x22, x22, #0x22\n" - "ldr q23, [%x[b_ptr], #0x20]\n" - "ldr q22, [%x[b_ptr], #0x30]\n" - "ld1r { v21.8h }, [x20]\n" - "ldr q20, [%x[b_ptr], #-0x8]\n" - "sshl v16.16b, v28.16b, v31.16b\n" - "and v28.16b, v28.16b, v30.16b\n" - "sshl v19.16b, v24.16b, v31.16b\n" - "and v24.16b, v24.16b, v30.16b\n" - "add %x[b_ptr], %x[b_ptr], #0x48\n" - "sshl v18.16b, v23.16b, v31.16b\n" - "and v23.16b, v23.16b, v30.16b\n" - ".inst 0x4f9be21a // sdot v26.4s, v16.16b, v27.4b[0]\n" - "sshl v17.16b, v22.16b, v31.16b\n" - "and v22.16b, v22.16b, v30.16b\n" - "fcvtl v21.4s, v21.4h\n" - "fcvtl v16.4s, v20.4h\n" - ".inst 0x4f99e39a // sdot v26.4s, v28.16b, v25.4b[0]\n" - "fmul v16.4s, v16.4s, v21.4s\n" - ".inst 0x4fbbe27a // sdot v26.4s, v19.16b, v27.4b[1]\n" - ".inst 0x4fb9e31a // sdot v26.4s, v24.16b, v25.4b[1]\n" - ".inst 0x4f9bea5a // sdot v26.4s, v18.16b, v27.4b[2]\n" - ".inst 0x4f99eafa // sdot v26.4s, v23.16b, v25.4b[2]\n" - ".inst 0x4fbbea3a // sdot v26.4s, v17.16b, v27.4b[3]\n" - ".inst 0x4fb9eada // sdot v26.4s, v22.16b, v25.4b[3]\n" - "scvtf v26.4s, v26.4s, #0x4\n" - "fmla v29.4s, v26.4s, v16.4s\n" - "cbnz x21, 2b\n" - "sub %x[nc], %x[nc], #0x4\n" - "str q29, [%x[res_ptr], #0x0]\n" - "add %x[res_ptr], %x[res_ptr], #0x10\n" - "cbnz %x[nc], 1b\n" - : [b_ptr] "+&r" (b_ptr), [res_ptr] "+&r" (res_ptr), [nc] "+&r" (nc) - : [a_ptr] "r" (a_ptr), [nb] "r" (nb) - : "memory", "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31", "x20", "x21", "x22" - ); + const block_q4_0x4 * b_ptr = (const block_q4_0x4 *)vx; + + for (int c = 0; c < nc; c += ncols_interleaved) { + const block_q8_0 * a_ptr = (const block_q8_0 *)vy; + float32x4_t acc = vdupq_n_f32(0); + for (int b = 0; b < nb; b++) { + int8x16_t b0 = vld1q_s8((const int8_t *)b_ptr->qs); + int8x16_t b1 = vld1q_s8((const int8_t *)b_ptr->qs + 16); + int8x16_t b2 = vld1q_s8((const int8_t *)b_ptr->qs + 32); + int8x16_t b3 = vld1q_s8((const int8_t *)b_ptr->qs + 48); + float16x4_t bd = vld1_f16((const __fp16 *)b_ptr->d); + + int8x16_t a0 = vld1q_s8(a_ptr->qs); + int8x16_t a1 = vld1q_s8(a_ptr->qs + qk/2); + float16x4_t ad = vld1_dup_f16((const __fp16 *)&a_ptr->d); + + int32x4_t ret = vdupq_n_s32(0); + + ret = vdotq_laneq_s32(ret, b0 << 4, a0, 0); + ret = vdotq_laneq_s32(ret, b1 << 4, a0, 1); + ret = vdotq_laneq_s32(ret, b2 << 4, a0, 2); + ret = vdotq_laneq_s32(ret, b3 << 4, a0, 3); + + ret = vdotq_laneq_s32(ret, b0 & 0xf0U, a1, 
0); + ret = vdotq_laneq_s32(ret, b1 & 0xf0U, a1, 1); + ret = vdotq_laneq_s32(ret, b2 & 0xf0U, a1, 2); + ret = vdotq_laneq_s32(ret, b3 & 0xf0U, a1, 3); + + acc = vfmaq_f32(acc, vcvtq_n_f32_s32(ret, 4), + vmulq_f32(vcvt_f32_f16(ad), vcvt_f32_f16(bd))); + a_ptr++; + b_ptr++; + } + vst1q_f32(s, acc); + s += ncols_interleaved; + } return; } -#endif // #if ! ((defined(_MSC_VER)) && ! defined(__clang__)) && defined(__aarch64__) && defined(__ARM_NEON) +#endif // #if ! ((defined(_MSC_VER)) && ! defined(__clang__)) && defined(__aarch64__) && defined(__ARM_NEON) && defined(__ARM_FEATURE_DOTPROD) float sumf[4]; int sumi; @@ -611,7 +628,7 @@ void ggml_gemv_q4_0_4x4_q8_0(int n, float * restrict s, size_t bs, const void * } } -void ggml_gemv_q4_0_4x8_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) { +static void ggml_gemv_q4_0_4x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { const int qk = QK8_0; const int nb = n / qk; const int ncols_interleaved = 4; @@ -721,7 +738,7 @@ void ggml_gemv_q4_0_4x8_q8_0(int n, float * restrict s, size_t bs, const void * } } -void ggml_gemv_q4_0_8x8_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) { +static void ggml_gemv_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { const int qk = QK8_0; const int nb = n / qk; const int ncols_interleaved = 8; @@ -994,7 +1011,7 @@ void ggml_gemv_q4_0_8x8_q8_0(int n, float * restrict s, size_t bs, const void * } } -void ggml_gemv_iq4_nl_4x4_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) { +static void ggml_gemv_iq4_nl_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { const int qk = QK8_0; const int nb = n / qk; const int ncols_interleaved = 4; @@ -1020,7 +1037,7 @@ void ggml_gemv_iq4_nl_4x4_q8_0(int n, float * restrict s, size_t bs, const void float * res_ptr = s; for (int x = 0; x < nc / ncols_interleaved; x++) { - const block_q4_0x4 * b_ptr = (const block_q4_0x4 *) vx + (x * nb); + const block_iq4_nlx4 * b_ptr = (const block_iq4_nlx4 *) vx + (x * nb); float32x4_t sumf = vdupq_n_f32(0); for (int l = 0; l < nb; l++) { @@ -1090,7 +1107,7 @@ void ggml_gemv_iq4_nl_4x4_q8_0(int n, float * restrict s, size_t bs, const void } } -void ggml_gemm_q4_0_4x4_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) { +static void ggml_gemm_q4_0_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { const int qk = QK8_0; const int nb = n / qk; const int ncols_interleaved = 4; @@ -1606,7 +1623,7 @@ void ggml_gemm_q4_0_4x4_q8_0(int n, float * restrict s, size_t bs, const void * } } -void ggml_gemm_q4_0_4x8_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) { +static void ggml_gemm_q4_0_4x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { const int qk = QK8_0; const int nb = n / qk; const int ncols_interleaved = 4; @@ -2060,7 +2077,7 @@ void ggml_gemm_q4_0_4x8_q8_0(int n, float * restrict s, size_t bs, const void * } } -void ggml_gemm_q4_0_8x8_q8_0(int n, 
float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) { +static void ggml_gemm_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { const int qk = QK8_0; const int nb = n / qk; const int ncols_interleaved = 8; @@ -2580,31 +2597,31 @@ void ggml_gemm_q4_0_8x8_q8_0(int n, float * restrict s, size_t bs, const void * const __m512i rhs_mat_2367ABEF_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_1, 4), m4bexpanded)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) BA(24-31) BB(24-31) BE(24-31) BF(24-31) // Shuffle pattern one - right side input - const __m512i rhs_mat_014589CD_0_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, 136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) B8(0-3) B9(0-3) B8(0-3) B9(0-3) BC(0-3) BD(0-3) BC(0-3) BD(0-3) - const __m512i rhs_mat_2367ABEF_0_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, 136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) BA(0-3) BB(0-3) BA(0-3) BB(0-3) BE(0-3) BF(0-3) BE(0-3) BF(0-3) + const __m512i rhs_mat_014589CD_0_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, (_MM_PERM_ENUM)136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) B8(0-3) B9(0-3) B8(0-3) B9(0-3) BC(0-3) BD(0-3) BC(0-3) BD(0-3) + const __m512i rhs_mat_2367ABEF_0_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, (_MM_PERM_ENUM)136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) BA(0-3) BB(0-3) BA(0-3) BB(0-3) BE(0-3) BF(0-3) BE(0-3) BF(0-3) - const __m512i rhs_mat_014589CD_1_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, 136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) B8(8-11) B9(8-11) B8(8-11) B9(8-11) BC(8-11) BD(8-11) BC(8-11) BD(8-11) - const __m512i rhs_mat_2367ABEF_1_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, 136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) BA(8-11) BB(8-11) BA(8-11) BB(8-11) BE(8-11) BF(8-11) BE(8-11) BF(8-11) + const __m512i rhs_mat_014589CD_1_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, (_MM_PERM_ENUM)136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) B8(8-11) B9(8-11) B8(8-11) B9(8-11) BC(8-11) BD(8-11) BC(8-11) BD(8-11) + const __m512i rhs_mat_2367ABEF_1_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, (_MM_PERM_ENUM)136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) BA(8-11) BB(8-11) BA(8-11) BB(8-11) BE(8-11) BF(8-11) BE(8-11) BF(8-11) - const __m512i rhs_mat_014589CD_2_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, 136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) B8(16-19) B9(16-19) B8(16-19) B9(16-19) BC(16-19) BD(16-19) BC(16-19) BD(16-19) - const __m512i rhs_mat_2367ABEF_2_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, 136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) BA(16-19) BB(16-19) BA(16-19) BB(16-19) BE(16-19) BF(16-19) BE(16-19) BF(16-19) + const __m512i rhs_mat_014589CD_2_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, (_MM_PERM_ENUM)136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) B8(16-19) B9(16-19) B8(16-19) B9(16-19) BC(16-19) BD(16-19) BC(16-19) BD(16-19) + const __m512i rhs_mat_2367ABEF_2_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, (_MM_PERM_ENUM)136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) BA(16-19) 
BB(16-19) BA(16-19) BB(16-19) BE(16-19) BF(16-19) BE(16-19) BF(16-19) - const __m512i rhs_mat_014589CD_3_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, 136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) B8(24-27) B9(24-27) B8(24-27) B9(24-27) BC(24-27) BD(24-27) BC(24-27) BD(24-27) - const __m512i rhs_mat_2367ABEF_3_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, 136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) BA(24-27) BB(24-27) BA(24-27) BB(24-27) BE(24-27) BF(24-27) BE(24-27) BF(24-27) + const __m512i rhs_mat_014589CD_3_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, (_MM_PERM_ENUM)136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) B8(24-27) B9(24-27) B8(24-27) B9(24-27) BC(24-27) BD(24-27) BC(24-27) BD(24-27) + const __m512i rhs_mat_2367ABEF_3_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, (_MM_PERM_ENUM)136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) BA(24-27) BB(24-27) BA(24-27) BB(24-27) BE(24-27) BF(24-27) BE(24-27) BF(24-27) // Shuffle pattern two - right side input - const __m512i rhs_mat_014589CD_0_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, 221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) B8(4-7) B9(4-7) B8(4-7) B9(4-7) BC(4-7) BD(4-7) BC(4-7) BD(4-7) - const __m512i rhs_mat_2367ABEF_0_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, 221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) BA(4-7) BB(4-7) BA(4-7) BB(4-7) BE(4-7) BF(4-7) BE(4-7) BF(4-7) + const __m512i rhs_mat_014589CD_0_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, (_MM_PERM_ENUM)221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) B8(4-7) B9(4-7) B8(4-7) B9(4-7) BC(4-7) BD(4-7) BC(4-7) BD(4-7) + const __m512i rhs_mat_2367ABEF_0_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, (_MM_PERM_ENUM)221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) BA(4-7) BB(4-7) BA(4-7) BB(4-7) BE(4-7) BF(4-7) BE(4-7) BF(4-7) - const __m512i rhs_mat_014589CD_1_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, 221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) B8(12-15) B9(12-15) B8(12-15) B9(12-15) BC(12-15) BD(12-15) BC(12-15) BD(12-15) - const __m512i rhs_mat_2367ABEF_1_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, 221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) BA(12-15) BB(12-15) BA(12-15) BB(12-15) BE(12-15) BF(12-15) BE(12-15) BF(12-15) + const __m512i rhs_mat_014589CD_1_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, (_MM_PERM_ENUM)221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) B8(12-15) B9(12-15) B8(12-15) B9(12-15) BC(12-15) BD(12-15) BC(12-15) BD(12-15) + const __m512i rhs_mat_2367ABEF_1_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, (_MM_PERM_ENUM)221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) BA(12-15) BB(12-15) BA(12-15) BB(12-15) BE(12-15) BF(12-15) BE(12-15) BF(12-15) - const __m512i rhs_mat_014589CD_2_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, 221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) B8(20-23) B9(20-23) B8(20-23) B9(20-23) BC(20-23) BD(20-23) BC(20-23) BD(20-23) - const __m512i rhs_mat_2367ABEF_2_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, 221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) BA(20-23) BB(20-23) BA(20-23) BB(20-23) BE(20-23) BF(20-23) 
BE(20-23) BF(20-23) + const __m512i rhs_mat_014589CD_2_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, (_MM_PERM_ENUM)221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) B8(20-23) B9(20-23) B8(20-23) B9(20-23) BC(20-23) BD(20-23) BC(20-23) BD(20-23) + const __m512i rhs_mat_2367ABEF_2_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, (_MM_PERM_ENUM)221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) BA(20-23) BB(20-23) BA(20-23) BB(20-23) BE(20-23) BF(20-23) BE(20-23) BF(20-23) - const __m512i rhs_mat_014589CD_3_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, 221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) B8(28-31) B9(28-31) B8(28-31) B9(28-31) BC(28-31) BD(28-31) BC(28-31) BD(28-31) - const __m512i rhs_mat_2367ABEF_3_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, 221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) BA(28-31) BB(28-31) BA(28-31) BB(28-31) BE(28-31) BF(28-31) BE(28-31) BF(28-31) + const __m512i rhs_mat_014589CD_3_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, (_MM_PERM_ENUM)221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) B8(28-31) B9(28-31) B8(28-31) B9(28-31) BC(28-31) BD(28-31) BC(28-31) BD(28-31) + const __m512i rhs_mat_2367ABEF_3_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, (_MM_PERM_ENUM)221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) BA(28-31) BB(28-31) BA(28-31) BB(28-31) BE(28-31) BF(28-31) BE(28-31) BF(28-31) // Scale values - Load the weight scale values of two block_q4_0x8 const __m512 col_scale_f32 = GGML_F32Cx8x2_LOAD(b_ptr_0[b].d, b_ptr_1[b].d); @@ -2638,31 +2655,31 @@ void ggml_gemm_q4_0_8x8_q8_0(int n, float * restrict s, size_t bs, const void * // Shuffle pattern one - left side input - const __m512i lhs_mat_01_0_sp1 = _mm512_shuffle_epi32(lhs_mat_01_0, 160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) - const __m512i lhs_mat_23_0_sp1 = _mm512_shuffle_epi32(lhs_mat_23_0, 160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) + const __m512i lhs_mat_01_0_sp1 = _mm512_shuffle_epi32(lhs_mat_01_0, (_MM_PERM_ENUM)160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) + const __m512i lhs_mat_23_0_sp1 = _mm512_shuffle_epi32(lhs_mat_23_0, (_MM_PERM_ENUM)160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) - const __m512i lhs_mat_01_1_sp1 = _mm512_shuffle_epi32(lhs_mat_01_1, 160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) - const __m512i lhs_mat_23_1_sp1 = _mm512_shuffle_epi32(lhs_mat_23_1, 160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) + const __m512i lhs_mat_01_1_sp1 = _mm512_shuffle_epi32(lhs_mat_01_1, (_MM_PERM_ENUM)160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) + const __m512i lhs_mat_23_1_sp1 = _mm512_shuffle_epi32(lhs_mat_23_1, (_MM_PERM_ENUM)160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) 
A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) - const __m512i lhs_mat_01_2_sp1 = _mm512_shuffle_epi32(lhs_mat_01_2, 160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) - const __m512i lhs_mat_23_2_sp1 = _mm512_shuffle_epi32(lhs_mat_23_2, 160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) + const __m512i lhs_mat_01_2_sp1 = _mm512_shuffle_epi32(lhs_mat_01_2, (_MM_PERM_ENUM)160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) + const __m512i lhs_mat_23_2_sp1 = _mm512_shuffle_epi32(lhs_mat_23_2, (_MM_PERM_ENUM)160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) - const __m512i lhs_mat_01_3_sp1 = _mm512_shuffle_epi32(lhs_mat_01_3, 160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) - const __m512i lhs_mat_23_3_sp1 = _mm512_shuffle_epi32(lhs_mat_23_3, 160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) + const __m512i lhs_mat_01_3_sp1 = _mm512_shuffle_epi32(lhs_mat_01_3, (_MM_PERM_ENUM)160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) + const __m512i lhs_mat_23_3_sp1 = _mm512_shuffle_epi32(lhs_mat_23_3, (_MM_PERM_ENUM)160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) // Shuffle pattern two - left side input - const __m512i lhs_mat_01_0_sp2 = _mm512_shuffle_epi32(lhs_mat_01_0, 245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) - const __m512i lhs_mat_23_0_sp2 = _mm512_shuffle_epi32(lhs_mat_23_0, 245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) + const __m512i lhs_mat_01_0_sp2 = _mm512_shuffle_epi32(lhs_mat_01_0, (_MM_PERM_ENUM)245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) + const __m512i lhs_mat_23_0_sp2 = _mm512_shuffle_epi32(lhs_mat_23_0, (_MM_PERM_ENUM)245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) - const __m512i lhs_mat_01_1_sp2 = _mm512_shuffle_epi32(lhs_mat_01_1, 245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) - const __m512i lhs_mat_23_1_sp2 = _mm512_shuffle_epi32(lhs_mat_23_1, 245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) + const __m512i lhs_mat_01_1_sp2 = _mm512_shuffle_epi32(lhs_mat_01_1, (_MM_PERM_ENUM)245); //A0(12-15) A0(12-15) A1(12-15) 
A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) + const __m512i lhs_mat_23_1_sp2 = _mm512_shuffle_epi32(lhs_mat_23_1, (_MM_PERM_ENUM)245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) - const __m512i lhs_mat_01_2_sp2 = _mm512_shuffle_epi32(lhs_mat_01_2, 245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) - const __m512i lhs_mat_23_2_sp2 = _mm512_shuffle_epi32(lhs_mat_23_2, 245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) + const __m512i lhs_mat_01_2_sp2 = _mm512_shuffle_epi32(lhs_mat_01_2, (_MM_PERM_ENUM)245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) + const __m512i lhs_mat_23_2_sp2 = _mm512_shuffle_epi32(lhs_mat_23_2, (_MM_PERM_ENUM)245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) - const __m512i lhs_mat_01_3_sp2 = _mm512_shuffle_epi32(lhs_mat_01_3, 245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) - const __m512i lhs_mat_23_3_sp2 = _mm512_shuffle_epi32(lhs_mat_23_3, 245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) + const __m512i lhs_mat_01_3_sp2 = _mm512_shuffle_epi32(lhs_mat_01_3, (_MM_PERM_ENUM)245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) + const __m512i lhs_mat_23_3_sp2 = _mm512_shuffle_epi32(lhs_mat_23_3, (_MM_PERM_ENUM)245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane // Resembles MMLAs into 2x2 matrices in ARM Version @@ -2691,10 +2708,10 @@ void ggml_gemm_q4_0_8x8_q8_0(int n, float * restrict s, size_t bs, const void * // Straighten out to make 4 row vectors - __m512i iacc_row_0 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_00, _mm512_shuffle_epi32(iacc_mat_01, 78)); - __m512i iacc_row_1 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_00, 78), iacc_mat_01); - __m512i iacc_row_2 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_10, _mm512_shuffle_epi32(iacc_mat_11, 78)); - __m512i iacc_row_3 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_10, 78), iacc_mat_11); + __m512i iacc_row_0 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_00, _mm512_shuffle_epi32(iacc_mat_01, (_MM_PERM_ENUM)78)); + __m512i iacc_row_1 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_00, (_MM_PERM_ENUM)78), iacc_mat_01); + __m512i iacc_row_2 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_10, _mm512_shuffle_epi32(iacc_mat_11, (_MM_PERM_ENUM)78)); + __m512i 
iacc_row_3 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_10, (_MM_PERM_ENUM)78), iacc_mat_11); // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes const __m128i row_scale_f16 = _mm_shuffle_epi32(_mm_maskload_epi32((int const*)(a_ptrs[rp][b].d), loadMask), 68); @@ -2773,31 +2790,31 @@ void ggml_gemm_q4_0_8x8_q8_0(int n, float * restrict s, size_t bs, const void * const __m512i rhs_mat_2367ABEF_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_1, 4), m4bexpanded)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) BA(24-31) BB(24-31) BE(24-31) BF(24-31) // Shuffle pattern one - right side input - const __m512i rhs_mat_014589CD_0_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, 136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) B8(0-3) B9(0-3) B8(0-3) B9(0-3) BC(0-3) BD(0-3) BC(0-3) BD(0-3) - const __m512i rhs_mat_2367ABEF_0_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, 136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) BA(0-3) BB(0-3) BA(0-3) BB(0-3) BE(0-3) BF(0-3) BE(0-3) BF(0-3) + const __m512i rhs_mat_014589CD_0_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, (_MM_PERM_ENUM)136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) B8(0-3) B9(0-3) B8(0-3) B9(0-3) BC(0-3) BD(0-3) BC(0-3) BD(0-3) + const __m512i rhs_mat_2367ABEF_0_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, (_MM_PERM_ENUM)136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) BA(0-3) BB(0-3) BA(0-3) BB(0-3) BE(0-3) BF(0-3) BE(0-3) BF(0-3) - const __m512i rhs_mat_014589CD_1_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, 136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) B8(8-11) B9(8-11) B8(8-11) B9(8-11) BC(8-11) BD(8-11) BC(8-11) BD(8-11) - const __m512i rhs_mat_2367ABEF_1_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, 136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) BA(8-11) BB(8-11) BA(8-11) BB(8-11) BE(8-11) BF(8-11) BE(8-11) BF(8-11) + const __m512i rhs_mat_014589CD_1_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, (_MM_PERM_ENUM)136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) B8(8-11) B9(8-11) B8(8-11) B9(8-11) BC(8-11) BD(8-11) BC(8-11) BD(8-11) + const __m512i rhs_mat_2367ABEF_1_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, (_MM_PERM_ENUM)136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) BA(8-11) BB(8-11) BA(8-11) BB(8-11) BE(8-11) BF(8-11) BE(8-11) BF(8-11) - const __m512i rhs_mat_014589CD_2_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, 136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) B8(16-19) B9(16-19) B8(16-19) B9(16-19) BC(16-19) BD(16-19) BC(16-19) BD(16-19) - const __m512i rhs_mat_2367ABEF_2_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, 136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) BA(16-19) BB(16-19) BA(16-19) BB(16-19) BE(16-19) BF(16-19) BE(16-19) BF(16-19) + const __m512i rhs_mat_014589CD_2_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, (_MM_PERM_ENUM)136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) B8(16-19) B9(16-19) B8(16-19) B9(16-19) BC(16-19) BD(16-19) BC(16-19) BD(16-19) + const __m512i rhs_mat_2367ABEF_2_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, (_MM_PERM_ENUM)136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) BA(16-19) BB(16-19) BA(16-19) BB(16-19) 
BE(16-19) BF(16-19) BE(16-19) BF(16-19) - const __m512i rhs_mat_014589CD_3_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, 136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) B8(24-27) B9(24-27) B8(24-27) B9(24-27) BC(24-27) BD(24-27) BC(24-27) BD(24-27) - const __m512i rhs_mat_2367ABEF_3_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, 136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) BA(24-27) BB(24-27) BA(24-27) BB(24-27) BE(24-27) BF(24-27) BE(24-27) BF(24-27) + const __m512i rhs_mat_014589CD_3_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, (_MM_PERM_ENUM)136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) B8(24-27) B9(24-27) B8(24-27) B9(24-27) BC(24-27) BD(24-27) BC(24-27) BD(24-27) + const __m512i rhs_mat_2367ABEF_3_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, (_MM_PERM_ENUM)136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) BA(24-27) BB(24-27) BA(24-27) BB(24-27) BE(24-27) BF(24-27) BE(24-27) BF(24-27) // Shuffle pattern two - right side input - const __m512i rhs_mat_014589CD_0_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, 221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) B8(4-7) B9(4-7) B8(4-7) B9(4-7) BC(4-7) BD(4-7) BC(4-7) BD(4-7) - const __m512i rhs_mat_2367ABEF_0_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, 221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) BA(4-7) BB(4-7) BA(4-7) BB(4-7) BE(4-7) BF(4-7) BE(4-7) BF(4-7) + const __m512i rhs_mat_014589CD_0_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, (_MM_PERM_ENUM)221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) B8(4-7) B9(4-7) B8(4-7) B9(4-7) BC(4-7) BD(4-7) BC(4-7) BD(4-7) + const __m512i rhs_mat_2367ABEF_0_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, (_MM_PERM_ENUM)221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) BA(4-7) BB(4-7) BA(4-7) BB(4-7) BE(4-7) BF(4-7) BE(4-7) BF(4-7) - const __m512i rhs_mat_014589CD_1_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, 221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) B8(12-15) B9(12-15) B8(12-15) B9(12-15) BC(12-15) BD(12-15) BC(12-15) BD(12-15) - const __m512i rhs_mat_2367ABEF_1_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, 221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) BA(12-15) BB(12-15) BA(12-15) BB(12-15) BE(12-15) BF(12-15) BE(12-15) BF(12-15) + const __m512i rhs_mat_014589CD_1_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, (_MM_PERM_ENUM)221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) B8(12-15) B9(12-15) B8(12-15) B9(12-15) BC(12-15) BD(12-15) BC(12-15) BD(12-15) + const __m512i rhs_mat_2367ABEF_1_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, (_MM_PERM_ENUM)221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) BA(12-15) BB(12-15) BA(12-15) BB(12-15) BE(12-15) BF(12-15) BE(12-15) BF(12-15) - const __m512i rhs_mat_014589CD_2_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, 221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) B8(20-23) B9(20-23) B8(20-23) B9(20-23) BC(20-23) BD(20-23) BC(20-23) BD(20-23) - const __m512i rhs_mat_2367ABEF_2_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, 221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) BA(20-23) BB(20-23) BA(20-23) BB(20-23) BE(20-23) BF(20-23) BE(20-23) BF(20-23) + const 
__m512i rhs_mat_014589CD_2_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, (_MM_PERM_ENUM)221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) B8(20-23) B9(20-23) B8(20-23) B9(20-23) BC(20-23) BD(20-23) BC(20-23) BD(20-23) + const __m512i rhs_mat_2367ABEF_2_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, (_MM_PERM_ENUM)221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) BA(20-23) BB(20-23) BA(20-23) BB(20-23) BE(20-23) BF(20-23) BE(20-23) BF(20-23) - const __m512i rhs_mat_014589CD_3_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, 221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) B8(28-31) B9(28-31) B8(28-31) B9(28-31) BC(28-31) BD(28-31) BC(28-31) BD(28-31) - const __m512i rhs_mat_2367ABEF_3_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, 221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) BA(28-31) BB(28-31) BA(28-31) BB(28-31) BE(28-31) BF(28-31) BE(28-31) BF(28-31) + const __m512i rhs_mat_014589CD_3_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, (_MM_PERM_ENUM)221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) B8(28-31) B9(28-31) B8(28-31) B9(28-31) BC(28-31) BD(28-31) BC(28-31) BD(28-31) + const __m512i rhs_mat_2367ABEF_3_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, (_MM_PERM_ENUM)221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) BA(28-31) BB(28-31) BA(28-31) BB(28-31) BE(28-31) BF(28-31) BE(28-31) BF(28-31) // Scale values - Load the weight scale values of two block_q4_0x8 @@ -2829,31 +2846,31 @@ void ggml_gemm_q4_0_8x8_q8_0(int n, float * restrict s, size_t bs, const void * // Shuffle pattern one - left side input - const __m512i lhs_mat_01_0_sp1 = _mm512_shuffle_epi32(lhs_mat_01_0, 160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) - const __m512i lhs_mat_23_0_sp1 = _mm512_shuffle_epi32(lhs_mat_23_0, 160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) + const __m512i lhs_mat_01_0_sp1 = _mm512_shuffle_epi32(lhs_mat_01_0, (_MM_PERM_ENUM)160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) + const __m512i lhs_mat_23_0_sp1 = _mm512_shuffle_epi32(lhs_mat_23_0, (_MM_PERM_ENUM)160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) - const __m512i lhs_mat_01_1_sp1 = _mm512_shuffle_epi32(lhs_mat_01_1, 160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) - const __m512i lhs_mat_23_1_sp1 = _mm512_shuffle_epi32(lhs_mat_23_1, 160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) + const __m512i lhs_mat_01_1_sp1 = _mm512_shuffle_epi32(lhs_mat_01_1, (_MM_PERM_ENUM)160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) + const __m512i lhs_mat_23_1_sp1 = _mm512_shuffle_epi32(lhs_mat_23_1, (_MM_PERM_ENUM)160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) - const __m512i 
lhs_mat_01_2_sp1 = _mm512_shuffle_epi32(lhs_mat_01_2, 160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) - const __m512i lhs_mat_23_2_sp1 = _mm512_shuffle_epi32(lhs_mat_23_2, 160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) + const __m512i lhs_mat_01_2_sp1 = _mm512_shuffle_epi32(lhs_mat_01_2, (_MM_PERM_ENUM)160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) + const __m512i lhs_mat_23_2_sp1 = _mm512_shuffle_epi32(lhs_mat_23_2, (_MM_PERM_ENUM)160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) - const __m512i lhs_mat_01_3_sp1 = _mm512_shuffle_epi32(lhs_mat_01_3, 160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) - const __m512i lhs_mat_23_3_sp1 = _mm512_shuffle_epi32(lhs_mat_23_3, 160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) + const __m512i lhs_mat_01_3_sp1 = _mm512_shuffle_epi32(lhs_mat_01_3, (_MM_PERM_ENUM)160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) + const __m512i lhs_mat_23_3_sp1 = _mm512_shuffle_epi32(lhs_mat_23_3, (_MM_PERM_ENUM)160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) // Shuffle pattern two - left side input - const __m512i lhs_mat_01_0_sp2 = _mm512_shuffle_epi32(lhs_mat_01_0, 245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) - const __m512i lhs_mat_23_0_sp2 = _mm512_shuffle_epi32(lhs_mat_23_0, 245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) + const __m512i lhs_mat_01_0_sp2 = _mm512_shuffle_epi32(lhs_mat_01_0, (_MM_PERM_ENUM)245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) + const __m512i lhs_mat_23_0_sp2 = _mm512_shuffle_epi32(lhs_mat_23_0, (_MM_PERM_ENUM)245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) - const __m512i lhs_mat_01_1_sp2 = _mm512_shuffle_epi32(lhs_mat_01_1, 245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) - const __m512i lhs_mat_23_1_sp2 = _mm512_shuffle_epi32(lhs_mat_23_1, 245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) + const __m512i lhs_mat_01_1_sp2 = _mm512_shuffle_epi32(lhs_mat_01_1, (_MM_PERM_ENUM)245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) 
A1(12-15) A1(12-15) + const __m512i lhs_mat_23_1_sp2 = _mm512_shuffle_epi32(lhs_mat_23_1, (_MM_PERM_ENUM)245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) - const __m512i lhs_mat_01_2_sp2 = _mm512_shuffle_epi32(lhs_mat_01_2, 245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) - const __m512i lhs_mat_23_2_sp2 = _mm512_shuffle_epi32(lhs_mat_23_2, 245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) + const __m512i lhs_mat_01_2_sp2 = _mm512_shuffle_epi32(lhs_mat_01_2, (_MM_PERM_ENUM)245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) + const __m512i lhs_mat_23_2_sp2 = _mm512_shuffle_epi32(lhs_mat_23_2, (_MM_PERM_ENUM)245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) - const __m512i lhs_mat_01_3_sp2 = _mm512_shuffle_epi32(lhs_mat_01_3, 245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) - const __m512i lhs_mat_23_3_sp2 = _mm512_shuffle_epi32(lhs_mat_23_3, 245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) + const __m512i lhs_mat_01_3_sp2 = _mm512_shuffle_epi32(lhs_mat_01_3, (_MM_PERM_ENUM)245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) + const __m512i lhs_mat_23_3_sp2 = _mm512_shuffle_epi32(lhs_mat_23_3, (_MM_PERM_ENUM)245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane // Resembles MMLAs into 2x2 matrices in ARM Version @@ -2882,10 +2899,10 @@ void ggml_gemm_q4_0_8x8_q8_0(int n, float * restrict s, size_t bs, const void * // Straighten out to make 4 row vectors - __m512i iacc_row_0 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_00, _mm512_shuffle_epi32(iacc_mat_01, 78)); - __m512i iacc_row_1 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_00, 78), iacc_mat_01); - __m512i iacc_row_2 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_10, _mm512_shuffle_epi32(iacc_mat_11, 78)); - __m512i iacc_row_3 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_10, 78), iacc_mat_11); + __m512i iacc_row_0 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_00, _mm512_shuffle_epi32(iacc_mat_01, (_MM_PERM_ENUM)78)); + __m512i iacc_row_1 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_00, (_MM_PERM_ENUM)78), iacc_mat_01); + __m512i iacc_row_2 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_10, _mm512_shuffle_epi32(iacc_mat_11, (_MM_PERM_ENUM)78)); + __m512i iacc_row_3 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_10, (_MM_PERM_ENUM)78), iacc_mat_11); // 
Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes const __m128i row_scale_f16 = _mm_shuffle_epi32(_mm_maskload_epi32((int const*)(a_ptr[b].d), loadMask), 68); @@ -3480,7 +3497,7 @@ void ggml_gemm_q4_0_8x8_q8_0(int n, float * restrict s, size_t bs, const void * } } -void ggml_gemm_iq4_nl_4x4_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) { +static void ggml_gemm_iq4_nl_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc) { const int qk = QK8_0; const int nb = n / qk; const int ncols_interleaved = 4; @@ -3507,7 +3524,7 @@ void ggml_gemm_iq4_nl_4x4_q8_0(int n, float * restrict s, size_t bs, const void for (int y = 0; y < nr / 4; y++) { const block_q8_0x4 * a_ptr = (const block_q8_0x4 *) vy + (y * nb); for (int x = 0; x < nc / ncols_interleaved; x++) { - const block_q4_0x4 * b_ptr = (const block_q4_0x4 *) vx + (x * nb); + const block_iq4_nlx4 * b_ptr = (const block_iq4_nlx4 *) vx + (x * nb); float32x4_t sumf[4]; for (int m = 0; m < 4; m++) { @@ -3591,7 +3608,6 @@ void ggml_gemm_iq4_nl_4x4_q8_0(int n, float * restrict s, size_t bs, const void } } -// FIXME: this code is duplicated from ggml-aarch64.c static block_q4_0x4 make_block_q4_0x4(block_q4_0 * in, unsigned int blck_size_interleave) { block_q4_0x4 out; @@ -3661,20 +3677,20 @@ static block_q4_0x8 make_block_q4_0x8(block_q4_0 * in, unsigned int blck_size_in return out; } -static int repack_q4_0_to_q4_0_4_bl(struct ggml_tensor * t, int interleave_block, const void * restrict data, size_t data_size) { +static int repack_q4_0_to_q4_0_4_bl(struct ggml_tensor * t, int interleave_block, const void * GGML_RESTRICT data, size_t data_size) { GGML_ASSERT(t->type == GGML_TYPE_Q4_0); GGML_ASSERT(interleave_block == 4 || interleave_block == 8); + constexpr int nrows_interleaved = 4; block_q4_0x4 * dst = (block_q4_0x4 *)t->data; const block_q4_0 * src = (const block_q4_0 *)data; block_q4_0 dst_tmp[4]; - int nrow = t->ne[1]; // Number of rows - int nrows_interleaved = 4; + int nrow = ggml_nrows(t); int nblocks = t->ne[0] / QK4_0; GGML_ASSERT(data_size == nrow * nblocks * sizeof(block_q4_0)); - if (nrow % nrows_interleaved != 0 || t->ne[0] % 8 != 0) { + if (t->ne[1] % nrows_interleaved != 0 || t->ne[0] % 8 != 0) { return -1; } @@ -3692,20 +3708,20 @@ static int repack_q4_0_to_q4_0_4_bl(struct ggml_tensor * t, int interleave_block GGML_UNUSED(data_size); } -static int repack_q4_0_to_q4_0_8_bl(struct ggml_tensor *t, int interleave_block, const void * restrict data, size_t data_size) { +static int repack_q4_0_to_q4_0_8_bl(struct ggml_tensor * t, int interleave_block, const void * GGML_RESTRICT data, size_t data_size) { GGML_ASSERT(t->type == GGML_TYPE_Q4_0); GGML_ASSERT(interleave_block == 8); + constexpr int nrows_interleaved = 8; block_q4_0x8 * dst = (block_q4_0x8*)t->data; const block_q4_0 * src = (const block_q4_0*) data; block_q4_0 dst_tmp[8]; - int nrow = t->ne[1]; // Number of rows - int nrows_interleaved = 8; + int nrow = ggml_nrows(t); int nblocks = t->ne[0] / QK4_0; GGML_ASSERT(data_size == nrow * nblocks * sizeof(block_q4_0)); - if (nrow % nrows_interleaved != 0 || t->ne[0] % 8 != 0) { + if (t->ne[1] % nrows_interleaved != 0 || t->ne[0] % 8 != 0) { return -1; } @@ -3732,16 +3748,18 @@ static block_iq4_nlx4 make_block_iq4_nlx4(block_iq4_nl * in, unsigned int blck_s const int end = QK4_NL * 2 / blck_size_interleave; - if (blck_size_interleave == 8) { - for (int i = 0; i < end; ++i) { 
-            int src_id = i % 4;
-            int src_offset = (i / 4) * blck_size_interleave;
-            int dst_offset = i * blck_size_interleave;
-
-            // Using memcpy to avoid unaligned memory accesses
-            memcpy(&out.qs[dst_offset], &in[src_id].qs[src_offset], sizeof(uint64_t));
-        }
-    } else if (blck_size_interleave == 4) {
+    // TODO: this branch seems wrong
+    //if (blck_size_interleave == 8) {
+    //    for (int i = 0; i < end; ++i) {
+    //        int src_id = i % 4;
+    //        int src_offset = (i / 4) * blck_size_interleave;
+    //        int dst_offset = i * blck_size_interleave;
+
+    //        // Using memcpy to avoid unaligned memory accesses
+    //        memcpy(&out.qs[dst_offset], &in[src_id].qs[src_offset], sizeof(uint64_t));
+    //    }
+    //} else
+    if (blck_size_interleave == 4) {
         for (int i = 0; i < end; ++i) {
             int src_id = i % 4;
             int src_offset = (i / 4) * blck_size_interleave;
@@ -3756,20 +3774,21 @@ static block_iq4_nlx4 make_block_iq4_nlx4(block_iq4_nl * in, unsigned int blck_s
     return out;
 }
-static int repack_iq4_nl_to_iq4_nl_4_bl(struct ggml_tensor * t, int interleave_block, const void * restrict data, size_t data_size) {
+static int repack_iq4_nl_to_iq4_nl_4_bl(struct ggml_tensor * t, int interleave_block, const void * GGML_RESTRICT data, size_t data_size) {
     GGML_ASSERT(t->type == GGML_TYPE_IQ4_NL);
-    GGML_ASSERT(interleave_block == 4 || interleave_block == 8);
+    //GGML_ASSERT(interleave_block == 4 || interleave_block == 8);
+    GGML_ASSERT(interleave_block == 4);
     block_iq4_nlx4 * dst = (block_iq4_nlx4 *)t->data;
     const block_iq4_nl * src = (const block_iq4_nl *)data;
     block_iq4_nl dst_tmp[4];
-    int nrow = t->ne[1]; // Number of rows
+    int nrow = ggml_nrows(t);
     int nrows_interleaved = 4;
     int nblocks = t->ne[0] / QK4_0;
     GGML_ASSERT(data_size == nrow * nblocks * sizeof(block_iq4_nl));
-    if (nrow % nrows_interleaved != 0 || t->ne[0] % 8 != 0) {
+    if (t->ne[1] % nrows_interleaved != 0 || t->ne[0] % 8 != 0) {
         return -1;
     }
@@ -3787,57 +3806,457 @@ static int repack_iq4_nl_to_iq4_nl_4_bl(struct ggml_tensor * t, int interleave_b
     GGML_UNUSED(data_size);
 }
-// Prepare for optimized kernels if applicable
-void ggml_aarch64_repack_tensor(struct ggml_tensor * cur, enum ggml_type repack_type, const void * restrict data, size_t data_size) {
-    if (cur->type == repack_type) {
-        memcpy(cur->data, data, data_size);
-        return;
+namespace ggml::cpu::aarch64 {
+// repack
+template <typename BLOC_TYPE, int64_t INTER_SIZE, int64_t NB_COLS>
+int repack(struct ggml_tensor *, const void *, size_t);
+
+// TODO: generalise.
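+// Each specialization below binds a (block type, interleave size, column count) triple to the
+// matching repack_* helper defined earlier in this file.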
+template <> int repack<block_q4_0, 4, 4>(struct ggml_tensor * t, const void * data, size_t data_size) {
+    return repack_q4_0_to_q4_0_4_bl(t, 4, data, data_size);
+}
+
+template <> int repack<block_q4_0, 8, 4>(struct ggml_tensor * t, const void * data, size_t data_size) {
+    return repack_q4_0_to_q4_0_4_bl(t, 8, data, data_size);
+}
+
+template <> int repack<block_q4_0, 8, 8>(struct ggml_tensor * t, const void * data, size_t data_size) {
+    return repack_q4_0_to_q4_0_8_bl(t, 8, data, data_size);
+}
+
+template <> int repack<block_iq4_nl, 4, 4>(struct ggml_tensor * t, const void * data, size_t data_size) {
+    return repack_iq4_nl_to_iq4_nl_4_bl(t, 4, data, data_size);
+}
+
+// TODO: needs to be revisited
+//template <> int repack<block_iq4_nl, 8, 4>(struct ggml_tensor * t, const void * data, size_t data_size) {
+//    return repack_iq4_nl_to_iq4_nl_4_bl(t, 8, data, data_size);
+//}
+
+// gemv
+template <typename BLOC_TYPE, int64_t INTER_SIZE, int64_t NB_COLS>
+void gemv(int, float *, size_t, const void *, const void *, int, int);
+
+template <> void gemv<block_q4_0, 4, 4>(int n, float * s, size_t bs, const void * vx, const void * vy, int nr, int nc) {
+    ggml_gemv_q4_0_4x4_q8_0(n, s, bs, vx, vy, nr, nc);
+}
+
+template <> void gemv<block_q4_0, 8, 4>(int n, float * s, size_t bs, const void * vx, const void * vy, int nr, int nc) {
+    ggml_gemv_q4_0_4x8_q8_0(n, s, bs, vx, vy, nr, nc);
+}
+
+template <> void gemv<block_q4_0, 8, 8>(int n, float * s, size_t bs, const void * vx, const void * vy, int nr, int nc) {
+    ggml_gemv_q4_0_8x8_q8_0(n, s, bs, vx, vy, nr, nc);
+}
+
+template <>
+void gemv<block_iq4_nl, 4, 4>(int n, float * s, size_t bs, const void * vx, const void * vy, int nr, int nc) {
+    ggml_gemv_iq4_nl_4x4_q8_0(n, s, bs, vx, vy, nr, nc);
+}
+
+// gemm
+template <typename BLOC_TYPE, int64_t INTER_SIZE, int64_t NB_COLS>
+void gemm(int, float *, size_t, const void *, const void *, int, int);
+
+template <> void gemm<block_q4_0, 4, 4>(int n, float * s, size_t bs, const void * vx, const void * vy, int nr, int nc) {
+    ggml_gemm_q4_0_4x4_q8_0(n, s, bs, vx, vy, nr, nc);
+}
+
+template <> void gemm<block_q4_0, 8, 4>(int n, float * s, size_t bs, const void * vx, const void * vy, int nr, int nc) {
+    ggml_gemm_q4_0_4x8_q8_0(n, s, bs, vx, vy, nr, nc);
+}
+
+template <> void gemm<block_q4_0, 8, 8>(int n, float * s, size_t bs, const void * vx, const void * vy, int nr, int nc) {
+    ggml_gemm_q4_0_8x8_q8_0(n, s, bs, vx, vy, nr, nc);
+}
+
+template <>
+void gemm<block_iq4_nl, 4, 4>(int n, float * s, size_t bs, const void * vx, const void * vy, int nr, int nc) {
+    ggml_gemm_iq4_nl_4x4_q8_0(n, s, bs, vx, vy, nr, nc);
+}
+
+class tensor_traits_base : public ggml::cpu::tensor_traits {
+  public:
+    virtual int repack(struct ggml_tensor * t, const void * data, size_t data_size) = 0;
+};
+
+template <typename BLOC_TYPE, int64_t INTER_SIZE, int64_t NB_COLS> class tensor_traits : public tensor_traits_base {
+
+    bool work_size(int /* n_threads */, const struct ggml_tensor * op, size_t & size) override {
+        // not really a GGML_TYPE_Q8_0, but same size.
+        switch (op->op) {
+            case GGML_OP_MUL_MAT:
+                size = ggml_row_size(GGML_TYPE_Q8_0, ggml_nelements(op->src[1]));
+                return true;
+            case GGML_OP_MUL_MAT_ID:
+                size = ggml_row_size(GGML_TYPE_Q8_0, ggml_nelements(op->src[1]));
+                size = GGML_PAD(size, sizeof(int64_t));  // + padding for next block.
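+                // extra space for the per-expert row counts and the row-mapping table that
+                // forward_mul_mat_id stores after the quantized src1 data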
+ size += sizeof(int64_t) * (1+op->src[0]->ne[2]) * op->src[1]->ne[2]; + return true; + default: + // GGML_ABORT("fatal error"); + break; + } + return false; } - if (cur->type == GGML_TYPE_Q4_0) { - switch (repack_type) { - case GGML_TYPE_Q4_0_8_8: - repack_q4_0_to_q4_0_8_bl(cur, 8, data, data_size); - break; - case GGML_TYPE_Q4_0_4_8: - repack_q4_0_to_q4_0_4_bl(cur, 8, data, data_size); - break; - case GGML_TYPE_Q4_0_4_4: - repack_q4_0_to_q4_0_4_bl(cur, 4, data, data_size); - break; - default: - GGML_ABORT("Unsupported type"); + bool compute_forward(struct ggml_compute_params * params, struct ggml_tensor * op) override { + switch (op->op) { + case GGML_OP_MUL_MAT: + forward_mul_mat(params, op); + return true; + case GGML_OP_MUL_MAT_ID: + forward_mul_mat_id(params, op); + return true; + default: + // GGML_ABORT("fatal error"); + break; } - } else if (cur->type == GGML_TYPE_IQ4_NL) { - switch (repack_type) { - case GGML_TYPE_IQ4_NL_4_4: - repack_iq4_nl_to_iq4_nl_4_bl(cur, 4, data, data_size); - break; - default: - GGML_ABORT("Unsupported type"); + return false; + } + + void forward_mul_mat(ggml_compute_params * params, ggml_tensor * op) { + const ggml_tensor * src0 = op->src[0]; + const ggml_tensor * src1 = op->src[1]; + ggml_tensor * dst = op; + + GGML_TENSOR_BINARY_OP_LOCALS + + const int ith = params->ith; + const int nth = params->nth; + + GGML_ASSERT(ne0 == ne01); + GGML_ASSERT(ne1 == ne11); + GGML_ASSERT(ne2 == ne12); + GGML_ASSERT(ne3 == ne13); + + // dst cannot be transposed or permuted + GGML_ASSERT(nb0 == sizeof(float)); + GGML_ASSERT(nb0 <= nb1); + GGML_ASSERT(nb1 <= nb2); + GGML_ASSERT(nb2 <= nb3); + + GGML_ASSERT(src1->type == GGML_TYPE_F32); + + GGML_ASSERT(ggml_n_dims(op->src[0]) == 2); + // GGML_ASSERT(ggml_n_dims(op->src[1]) == 2); + + char * wdata = static_cast(params->wdata); + const size_t nbw1 = ggml_row_size(GGML_TYPE_Q8_0, ne10); + + assert(params->wsize >= nbw1 * ne11); + + const ggml_from_float_t from_float = ggml_get_type_traits_cpu(GGML_TYPE_Q8_0)->from_float; + + int64_t i11_processed = 0; + for (int64_t i11 = ith * 4; i11 < ne11 - ne11 % 4; i11 += nth * 4) { + quantize_mat_q8_0((float *) ((char *) src1->data + i11 * nb11), (void *) (wdata + i11 * nbw1), 4, ne10, + INTER_SIZE); + } + i11_processed = ne11 - ne11 % 4; + for (int64_t i11 = i11_processed + ith; i11 < ne11; i11 += nth) { + from_float((float *) ((char *) src1->data + i11 * nb11), (void *) (wdata + i11 * nbw1), ne10); + } + + ggml_barrier(params->threadpool); + + const void * src1_wdata = params->wdata; + const size_t src1_col_stride = ggml_row_size(GGML_TYPE_Q8_0, ne10); + int64_t src0_start = (ith * ne01) / nth; + int64_t src0_end = ((ith + 1) * ne01) / nth; + src0_start = (src0_start % NB_COLS) ? src0_start + NB_COLS - (src0_start % NB_COLS) : src0_start; + src0_end = (src0_end % NB_COLS) ? src0_end + NB_COLS - (src0_end % NB_COLS) : src0_end; + if (src0_start >= src0_end) { + return; + } + + // If there are more than three rows in src1, use gemm; otherwise, use gemv. 
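+        // gemm consumes src1 rows in groups of 4 (ne11 - ne11 % 4); any leftover rows
+        // are handled one at a time by the gemv loop below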
+ if (ne11 > 3) { + gemm(ne00, (float *) ((char *) dst->data) + src0_start, ne01, + (const char *) src0->data + src0_start * nb01, + (const char *) src1_wdata, ne11 - ne11 % 4, src0_end - src0_start); + } + for (int iter = ne11 - ne11 % 4; iter < ne11; iter++) { + gemv(ne00, (float *) ((char *) dst->data + (iter * nb1)) + src0_start, ne01, + (const char *) src0->data + src0_start * nb01, + (const char *) src1_wdata + (src1_col_stride * iter), 1, + src0_end - src0_start); } - } else { - GGML_ABORT("Unsupported type"); } -} -enum ggml_type ggml_aarch64_get_optimal_repack_type(const struct ggml_tensor * cur) { + void forward_mul_mat_id(ggml_compute_params * params, ggml_tensor * op) { + const ggml_tensor * src0 = op->src[0]; + const ggml_tensor * src1 = op->src[1]; + const ggml_tensor * ids = op->src[2]; + ggml_tensor * dst = op; + + GGML_TENSOR_BINARY_OP_LOCALS + + const int ith = params->ith; + const int nth = params->nth; + + const ggml_from_float_t from_float = ggml_get_type_traits_cpu(GGML_TYPE_Q8_0)->from_float; + + // we don't support permuted src0 or src1 + GGML_ASSERT(nb00 == ggml_type_size(src0->type)); + GGML_ASSERT(nb10 == ggml_type_size(src1->type)); + + // dst cannot be transposed or permuted + GGML_ASSERT(nb0 == sizeof(float)); + GGML_ASSERT(nb0 <= nb1); + GGML_ASSERT(nb1 <= nb2); + GGML_ASSERT(nb2 <= nb3); + + GGML_ASSERT(ne03 == 1); + GGML_ASSERT(ne13 == 1); + GGML_ASSERT(ne3 == 1); + + GGML_ASSERT(src1->type == GGML_TYPE_F32); + + // row groups + const int n_ids = ids->ne[0]; // n_expert_used + const int n_as = ne02; // n_expert + + const size_t nbw1 = ggml_row_size(GGML_TYPE_Q8_0, ne10); + const size_t nbw2 = nbw1*ne11; + const size_t nbw3 = nbw2*ne12; + + struct mmid_row_mapping { + int32_t i1; + int32_t i2; + }; + + GGML_ASSERT(params->wsize >= (GGML_PAD(nbw3, sizeof(int64_t)) + n_as * sizeof(int64_t) + + n_as * ne12 * sizeof(mmid_row_mapping))); + + auto wdata = (char *) params->wdata; + auto wdata_src1_end = (char *) wdata + GGML_PAD(nbw3, sizeof(int64_t)); + int64_t * matrix_row_counts = (int64_t *) (wdata_src1_end); // [n_as] + struct mmid_row_mapping * matrix_rows = (struct mmid_row_mapping *) (matrix_row_counts + n_as); // [n_as][ne12] + + // src1: float32 => block_q8_0 + for (int64_t i12 = 0; i12 < ne12; ++i12) { + for (int64_t i11 = ith; i11 < ne11; i11 += nth) { + from_float((float *)((char *) src1->data + i12 * nb12 + i11 * nb11), + (void *) (wdata + i12 * nbw2 + i11 * nbw1), + ne10); + } + } + +#define MMID_MATRIX_ROW(row_id, i1) matrix_rows[(row_id) * ne12 + (i1)] + + if (ith == 0) { + // initialize matrix_row_counts + memset(matrix_row_counts, 0, n_as * sizeof(int64_t)); + + // group rows by src0 matrix + for (int32_t iid1 = 0; iid1 < ids->ne[1]; ++iid1) { + for (int32_t id = 0; id < n_ids; ++id) { + const int32_t i02 = + *(const int32_t *) ((const char *) ids->data + iid1 * ids->nb[1] + id * ids->nb[0]); + + GGML_ASSERT(i02 >= 0 && i02 < n_as); + + MMID_MATRIX_ROW(i02, matrix_row_counts[i02]) = { id, iid1 }; + matrix_row_counts[i02] += 1; + } + } + } + + ggml_barrier(params->threadpool); + + // compute each matrix multiplication in sequence + for (int cur_a = 0; cur_a < n_as; ++cur_a) { + const int64_t cne1 = matrix_row_counts[cur_a]; + + if (cne1 == 0) { + continue; + } + + auto src0_cur = (const char *) src0->data + cur_a*nb02; + + //const int64_t nr0 = ne01; // src0 rows + const int64_t nr1 = cne1; // src1 rows + + int64_t src0_cur_start = (ith * ne01) / nth; + int64_t src0_cur_end = ((ith + 1) * ne01) / nth; + src0_cur_start = + (src0_cur_start % 
NB_COLS) ? src0_cur_start + NB_COLS - (src0_cur_start % NB_COLS) : src0_cur_start; + src0_cur_end = (src0_cur_end % NB_COLS) ? src0_cur_end + NB_COLS - (src0_cur_end % NB_COLS) : src0_cur_end; + + if (src0_cur_start >= src0_cur_end) return; + + for (int ir1 = 0; ir1 < nr1; ir1++) { + struct mmid_row_mapping row_mapping = MMID_MATRIX_ROW(cur_a, ir1); + const int id = row_mapping.i1; // selected expert index + + const int64_t i11 = id % ne11; + const int64_t i12 = row_mapping.i2; // row index in src1 + + const int64_t i1 = id; // selected expert index + const int64_t i2 = i12; // row + + auto src1_col = (const char *) wdata + (i11 * nbw1 + i12 * nbw2); + + gemv( + ne00, (float *)((char *) dst->data + (i1 * nb1 + i2 * nb2)) + src0_cur_start, + ne01, src0_cur + src0_cur_start * nb01, + src1_col, 1, src0_cur_end - src0_cur_start); + } + } +#undef MMID_MATRIX_ROW + } + + int repack(struct ggml_tensor * t, const void * data, size_t data_size) override { + GGML_LOG_DEBUG("%s: repack tensor %s with %s_%dx%d\n", __func__, t->name, ggml_type_name(t->type), + (int) NB_COLS, (int) INTER_SIZE); + return ggml::cpu::aarch64::repack(t, data, data_size); + } +}; + +// instance for Q4 +static const tensor_traits q4_0_4x4_q8_0; +static const tensor_traits q4_0_4x8_q8_0; +static const tensor_traits q4_0_8x8_q8_0; + +// instance for IQ4 +static const tensor_traits iq4_nl_4x4_q8_0; + +} // namespace ggml::cpu::aarch64 + +static const ggml::cpu::tensor_traits * ggml_aarch64_get_optimal_repack_type(const struct ggml_tensor * cur) { if (cur->type == GGML_TYPE_Q4_0) { - // TODO: enable for AVX2 - currently disabled due to bad gemv performance - if (/* ggml_cpu_has_avx2() || */ (ggml_cpu_has_sve() && ggml_cpu_has_matmul_int8() && ggml_cpu_get_sve_cnt() == QK8_0)) { - return GGML_TYPE_Q4_0_8_8; + if (ggml_cpu_has_avx2() || (ggml_cpu_has_sve() && ggml_cpu_has_matmul_int8() && ggml_cpu_get_sve_cnt() == QK8_0)) { + if (cur->ne[1] % 8 == 0) { + return &ggml::cpu::aarch64::q4_0_8x8_q8_0; + } } if (ggml_cpu_has_neon() && ggml_cpu_has_matmul_int8()) { - return GGML_TYPE_Q4_0_4_8; + if (cur->ne[1] % 4 == 0) { + return &ggml::cpu::aarch64::q4_0_4x8_q8_0; + } } if (ggml_cpu_has_neon() && ggml_cpu_has_dotprod()) { - return GGML_TYPE_Q4_0_4_4; + if (cur->ne[1] % 4 == 0) { + return &ggml::cpu::aarch64::q4_0_4x4_q8_0; + } } } else if (cur->type == GGML_TYPE_IQ4_NL) { if (ggml_cpu_has_neon() && ggml_cpu_has_dotprod()) { - return GGML_TYPE_IQ4_NL_4_4; + if (cur->ne[1] % 4 == 0) { + return &ggml::cpu::aarch64::iq4_nl_4x4_q8_0; + } + } + } + + return nullptr; +} + +static void ggml_backend_cpu_aarch64_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { + tensor->extra = (void *) const_cast(ggml_aarch64_get_optimal_repack_type(tensor)); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_cpu_aarch64_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, + const void * data, size_t offset, size_t size) { + GGML_ASSERT(offset == 0); + GGML_ASSERT(size == ggml_nbytes(tensor)); + + auto tensor_traits = (ggml::cpu::aarch64::tensor_traits_base *) tensor->extra; + auto OK = tensor_traits->repack(tensor, data, size); + + GGML_ASSERT(OK == 0); + GGML_UNUSED(buffer); +} + +static const char * ggml_backend_cpu_aarch64_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return "CPU_AARCH64"; + + GGML_UNUSED(buft); +} + +static ggml_backend_buffer_t ggml_backend_cpu_aarch64_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + ggml_backend_buffer_t buffer = 
ggml_backend_buft_alloc_buffer(ggml_backend_cpu_buffer_type(), size); + + if (buffer == nullptr) { + return nullptr; + } + + buffer->buft = buft; + buffer->iface.init_tensor = ggml_backend_cpu_aarch64_buffer_init_tensor; + buffer->iface.set_tensor = ggml_backend_cpu_aarch64_buffer_set_tensor; + return buffer; +} + +static size_t ggml_backend_cpu_aarch64_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { + return TENSOR_ALIGNMENT; + + GGML_UNUSED(buft); +} + +namespace ggml::cpu::aarch64 { +class extra_buffer_type : ggml::cpu::extra_buffer_type { + bool supports_op(ggml_backend_dev_t, const struct ggml_tensor * op) override { + if ( op->op == GGML_OP_MUL_MAT && + op->src[0]->buffer && + (ggml_n_dims(op->src[0]) == 2) && + op->src[0]->buffer->buft == ggml_backend_cpu_aarch64_buffer_type() && + ggml_aarch64_get_optimal_repack_type(op->src[0]) + ) { + if (op->src[1]->buffer && !ggml_backend_buft_is_host(op->src[1]->buffer->buft)) { + return false; + } + if (op->src[1]->type == GGML_TYPE_F32) { + return true; + } + //if (op->src[1]->type == GGML_TYPE_Q8_0) { + // return true; + //} + // may be possible if Q8_0 packed... + } else if (op->op == GGML_OP_MUL_MAT_ID + && op->src[0]->buffer + && (ggml_n_dims(op->src[0]) == 3) + && op->src[0]->buffer->buft == ggml_backend_cpu_aarch64_buffer_type() + && ggml_aarch64_get_optimal_repack_type(op->src[0]) + ) { + if (op->src[1]->buffer && !ggml_backend_buft_is_host(op->src[1]->buffer->buft)) { + return false; + } + if (op->src[1]->type == GGML_TYPE_F32) { + return true; + } + //if (op->src[1]->type == GGML_TYPE_Q8_0) { + // return true; + //} } + return false; } - return cur->type; + ggml::cpu::tensor_traits * get_tensor_traits(const struct ggml_tensor * op) override { + if (op->op == GGML_OP_MUL_MAT || op->op == GGML_OP_MUL_MAT_ID) { + if (op->src[0]->buffer && op->src[0]->buffer->buft == ggml_backend_cpu_aarch64_buffer_type()) { + return (ggml::cpu::tensor_traits *) op->src[0]->extra; + } + } + return nullptr; + } +}; +} // namespace ggml::cpu::aarch64 + +ggml_backend_buffer_type_t ggml_backend_cpu_aarch64_buffer_type(void) { + static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type_aarch64 = { + /* .iface = */ { + /* .get_name = */ ggml_backend_cpu_aarch64_buffer_type_get_name, + /* .alloc_buffer = */ ggml_backend_cpu_aarch64_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cpu_aarch64_buffer_type_get_alignment, + /* .get_max_size = */ nullptr, // defaults to SIZE_MAX + /* .get_alloc_size = */ nullptr, // defaults to ggml_nbytes + /* .is_host = */ nullptr, + }, + /* .device = */ ggml_backend_reg_dev_get(ggml_backend_cpu_reg(), 0), + /* .context = */ new ggml::cpu::aarch64::extra_buffer_type(), + }; + + return &ggml_backend_cpu_buffer_type_aarch64; } diff --git a/ggml/src/ggml-cpu/ggml-cpu-aarch64.h b/ggml/src/ggml-cpu/ggml-cpu-aarch64.h index 3d9db6a19eb87..6e84c826b4091 100644 --- a/ggml/src/ggml-cpu/ggml-cpu-aarch64.h +++ b/ggml/src/ggml-cpu/ggml-cpu-aarch64.h @@ -1,32 +1,8 @@ #pragma once +#include "ggml-cpu-traits.h" #include "ggml.h" // GGML internal header -#ifdef __cplusplus -extern "C" { -#endif - -// Quantization -void quantize_mat_q8_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t nrows, int64_t n_per_row, int64_t blck_size_interleave); - -// GEMV -void ggml_gemv_q4_0_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); -void ggml_gemv_q4_0_4x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * 
GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); -void ggml_gemv_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); -void ggml_gemv_iq4_nl_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); - -// GEMM -void ggml_gemm_q4_0_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); -void ggml_gemm_q4_0_4x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); -void ggml_gemm_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); -void ggml_gemm_iq4_nl_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); - -void ggml_aarch64_repack_tensor(struct ggml_tensor * cur, enum ggml_type repack_type, const void * data, size_t data_size); -enum ggml_type ggml_aarch64_get_optimal_repack_type(const struct ggml_tensor * cur); - -#ifdef __cplusplus -} -#endif - +ggml_backend_buffer_type_t ggml_backend_cpu_aarch64_buffer_type(void); diff --git a/ggml/src/ggml-cpu/ggml-cpu-hbm.cpp b/ggml/src/ggml-cpu/ggml-cpu-hbm.cpp new file mode 100644 index 0000000000000..fa8dea2af9c72 --- /dev/null +++ b/ggml/src/ggml-cpu/ggml-cpu-hbm.cpp @@ -0,0 +1,55 @@ +#ifdef GGML_USE_CPU_HBM + +#include "ggml-backend.h" +#include "ggml-backend-impl.h" +#include "ggml-cpu.h" +#include "ggml-impl.h" + +#include "ggml-cpu-hbm.h" + +// buffer type HBM + +#include + +static const char * ggml_backend_cpu_hbm_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return "CPU_HBM"; + + GGML_UNUSED(buft); +} + +static void ggml_backend_cpu_hbm_buffer_free_buffer(ggml_backend_buffer_t buffer) { + hbw_free(buffer->context); +} + +static ggml_backend_buffer_t ggml_backend_cpu_hbm_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, + size_t size) { + void * ptr; + int result = hbw_posix_memalign(&ptr, ggml_backend_cpu_buffer_type_get_alignment(buft), size); + if (result != 0) { + GGML_LOG_ERROR("failed to allocate HBM buffer of size %zu\n", size); + return NULL; + } + + ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); + buffer->buft = buft; + buffer->iface.free_buffer = ggml_backend_cpu_hbm_buffer_free_buffer; + + return buffer; +} + +ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type(void) { + static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type_hbm = { + /* .iface = */ { + /* .get_name = */ ggml_backend_cpu_hbm_buffer_type_get_name, + /* .alloc_buffer = */ ggml_backend_cpu_hbm_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, + /* .get_max_size = */ nullptr, // defaults to SIZE_MAX + /* .get_alloc_size = */ nullptr, // defaults to ggml_nbytes + /* .is_host = */ ggml_backend_cpu_buffer_type_is_host, + }, + /* .context = */ nullptr, + }; + + return &ggml_backend_cpu_buffer_type_hbm; +} +#endif diff --git a/ggml/src/ggml-cpu/ggml-cpu-hbm.h b/ggml/src/ggml-cpu/ggml-cpu-hbm.h new file mode 100644 index 0000000000000..09a1f09d72be2 --- /dev/null +++ b/ggml/src/ggml-cpu/ggml-cpu-hbm.h @@ -0,0 +1,8 @@ +#pragma once + +#include "ggml-backend.h" +#include "ggml.h" + +// GGML CPU internal header + +ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type(void); diff --git 
a/ggml/src/ggml-cpu/ggml-cpu-impl.h b/ggml/src/ggml-cpu/ggml-cpu-impl.h index 27a530b227954..d71076ad12b1f 100644 --- a/ggml/src/ggml-cpu/ggml-cpu-impl.h +++ b/ggml/src/ggml-cpu/ggml-cpu-impl.h @@ -15,6 +15,18 @@ extern "C" { #endif +struct ggml_compute_params { + // ith = thread index, nth = number of threads + int ith, nth; + + // work buffer for all threads + size_t wsize; + void * wdata; + + struct ggml_threadpool * threadpool; +}; + + #if defined(_MSC_VER) #define m512bh(p) p @@ -366,6 +378,9 @@ static __m256 __lasx_xvreplfr2vr_s(float val) { } #endif +// TODO: move to ggml-threading +void ggml_barrier(struct ggml_threadpool * tp); + #ifdef __cplusplus } #endif diff --git a/ggml/src/ggml-cpu/ggml-cpu-quants.c b/ggml/src/ggml-cpu/ggml-cpu-quants.c index 11e8df253d5ca..634c5fa1162c3 100644 --- a/ggml/src/ggml-cpu/ggml-cpu-quants.c +++ b/ggml/src/ggml-cpu/ggml-cpu-quants.c @@ -1791,11 +1791,12 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r const int8x16_t y1_l = vld1q_s8(b_y1->qs); const int8x16_t y1_h = vld1q_s8(b_y1->qs + 16); - float32_t _scale[4] = { GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), - GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), - GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), - GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d)}; - + float32_t _scale[4] = { + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d) + }; float32x4_t scale = vld1q_f32(_scale); int8x16_t l0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); @@ -1811,7 +1812,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r int8x16_t r3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); sumv0 = vmlaq_f32(sumv0,(vcvtq_f32_s32(vmmlaq_s32((vmmlaq_s32((vmmlaq_s32((vmmlaq_s32(vdupq_n_s32(0), l0, r0)), - l1, r1)), l2, r2)), l3, r3))), scale); + l1, r1)), l2, r2)), l3, r3))), scale); } float32x4_t sumv1 = vextq_f32 (sumv0, sumv0, 2); @@ -2347,10 +2348,12 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r const block_q8_1 * restrict b_y0 = &vy0[i]; const block_q8_1 * restrict b_y1 = &vy1[i]; - float32_t summs_t[4] = {GGML_FP16_TO_FP32(b_x0->m) * GGML_FP16_TO_FP32(b_y0->s), - GGML_FP16_TO_FP32(b_x1->m) * GGML_FP16_TO_FP32(b_y0->s), - GGML_FP16_TO_FP32(b_x0->m) * GGML_FP16_TO_FP32(b_y1->s), - GGML_FP16_TO_FP32(b_x1->m) * GGML_FP16_TO_FP32(b_y1->s)}; + float32_t summs_t[4] = { + GGML_FP16_TO_FP32(b_x0->m) * GGML_FP16_TO_FP32(b_y0->s), + GGML_FP16_TO_FP32(b_x1->m) * GGML_FP16_TO_FP32(b_y0->s), + GGML_FP16_TO_FP32(b_x0->m) * GGML_FP16_TO_FP32(b_y1->s), + GGML_FP16_TO_FP32(b_x1->m) * GGML_FP16_TO_FP32(b_y1->s) + }; summs0 = vaddq_f32(summs0, vld1q_f32(summs_t)); const uint8x16_t m4b = vdupq_n_u8(0x0F); @@ -2371,10 +2374,12 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r const int8x16_t y1_h = vld1q_s8(b_y1->qs + 16); // mmla into int32x4_t - float32_t _scale[4] = {GGML_FP16_TO_FP32(b_x0->d)*b_y0->d, - GGML_FP16_TO_FP32(b_x0->d)*b_y1->d, - GGML_FP16_TO_FP32(b_x1->d)*b_y0->d, - GGML_FP16_TO_FP32(b_x1->d)*b_y1->d}; + float32_t _scale[4] = { + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), + 
GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d) + }; float32x4_t scale = vld1q_f32(_scale); int8x16_t l0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); @@ -2389,15 +2394,17 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r int8x16_t r2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); int8x16_t r3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); sumv0 = vmlaq_f32(sumv0,(vcvtq_f32_s32(vmmlaq_s32((vmmlaq_s32((vmmlaq_s32((vmmlaq_s32(vdupq_n_s32(0), l0, r0)), - l1, r1)), l2, r2)), l3, r3))), scale); + l1, r1)), l2, r2)), l3, r3))), scale); } - float32x4_t sumv1 = vextq_f32(sumv0, sumv0, 2); + float32x4_t sumv1 = vextq_f32 (sumv0, sumv0, 2); float32x4_t sumv2 = vzip1q_f32(sumv0, sumv1); + sumv2 = vaddq_f32(sumv2, summs0); vst1_f32(s, vget_low_f32 (sumv2)); vst1_f32(s + bs, vget_high_f32(sumv2)); + return; } #endif @@ -3374,10 +3381,12 @@ void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * r const int8x16_t y1_l = vld1q_s8(b_y1->qs); const int8x16_t y1_h = vld1q_s8(b_y1->qs + 16); - float32_t _scale[4] = {GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), - GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), - GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), - GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d)}; + float32_t _scale[4] = { + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d) + }; float32x4_t scale = vld1q_f32(_scale); int8x16_t l0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); @@ -3393,13 +3402,15 @@ void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * r int8x16_t r3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); sumv0 = vmlaq_f32(sumv0,(vcvtq_f32_s32(vmmlaq_s32((vmmlaq_s32((vmmlaq_s32((vmmlaq_s32(vdupq_n_s32(0), l0, r0)), - l1, r1)), l2, r2)), l3, r3))), scale); + l1, r1)), l2, r2)), l3, r3))), scale); } - float32x4_t sumv1 = vextq_f32(sumv0, sumv0, 2); + + float32x4_t sumv1 = vextq_f32 (sumv0, sumv0, 2); float32x4_t sumv2 = vzip1q_f32(sumv0, sumv1); - vst1_f32(s, vget_low_f32(sumv2)); + vst1_f32(s, vget_low_f32 (sumv2)); vst1_f32(s + bs, vget_high_f32(sumv2)); + return; } #endif diff --git a/ggml/src/ggml-cpu/ggml-cpu-traits.cpp b/ggml/src/ggml-cpu/ggml-cpu-traits.cpp new file mode 100644 index 0000000000000..62a0712dabbf6 --- /dev/null +++ b/ggml/src/ggml-cpu/ggml-cpu-traits.cpp @@ -0,0 +1,36 @@ +#include "ggml-cpu-traits.h" + +#include "ggml-backend-impl.h" +#include "ggml-backend.h" + +namespace ggml::cpu { +tensor_traits::~tensor_traits() {} + +extra_buffer_type::~extra_buffer_type() {} +} // namespace ggml::cpu + +bool ggml_cpu_extra_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * op) { + for (auto extra : ggml_backend_cpu_get_extra_buffers_type()) { + if (extra && extra->context) { + auto buf_extra = (ggml::cpu::extra_buffer_type *) extra->context; + auto tensor_traits = buf_extra->get_tensor_traits(op); + if (tensor_traits && tensor_traits->compute_forward(params, op)) { + return true; + } + } + } + return false; +} + +bool ggml_cpu_extra_work_size(int n_threads, const struct ggml_tensor * op, size_t * size) { + for (auto extra : 
ggml_backend_cpu_get_extra_buffers_type()) { + if (extra && extra->context) { + auto buf_extra = (ggml::cpu::extra_buffer_type *) extra->context; + auto tensor_traits = buf_extra->get_tensor_traits(op); + if (tensor_traits && tensor_traits->work_size(n_threads, op, *size)) { + return true; + } + } + } + return false; +} diff --git a/ggml/src/ggml-cpu/ggml-cpu-traits.h b/ggml/src/ggml-cpu/ggml-cpu-traits.h new file mode 100644 index 0000000000000..99a6186b1d6b5 --- /dev/null +++ b/ggml/src/ggml-cpu/ggml-cpu-traits.h @@ -0,0 +1,38 @@ +#pragma once +#include "ggml-backend-impl.h" +#include "ggml-cpu-impl.h" +#include "ggml.h" + +#ifdef __cplusplus +# include +extern "C" { +#endif + +// return true if op part of extra "accelerator" +bool ggml_cpu_extra_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * op); +bool ggml_cpu_extra_work_size(int n_threads, const struct ggml_tensor * op, size_t * size); + +#ifdef __cplusplus +} + +namespace ggml::cpu { +// register in tensor->extra +class tensor_traits { + public: + virtual ~tensor_traits(); + virtual bool work_size(int n_threads, const struct ggml_tensor * op, size_t & size) = 0; + virtual bool compute_forward(struct ggml_compute_params * params, struct ggml_tensor * op) = 0; +}; + +class extra_buffer_type { + public: + virtual ~extra_buffer_type(); + virtual bool supports_op(ggml_backend_dev_t dev, const struct ggml_tensor * op) = 0; + virtual tensor_traits * get_tensor_traits(const struct ggml_tensor * op) = 0; +}; +} // namespace ggml::cpu + +// implemented in ggml-cpu.cpp. +std::vector & ggml_backend_cpu_get_extra_buffers_type(); + +#endif diff --git a/ggml/src/ggml-cpu/ggml-cpu.c b/ggml/src/ggml-cpu/ggml-cpu.c index a87db53a45c86..8c950f503aa2a 100644 --- a/ggml/src/ggml-cpu/ggml-cpu.c +++ b/ggml/src/ggml-cpu/ggml-cpu.c @@ -3,13 +3,14 @@ #include "ggml-backend-impl.h" #include "ggml-backend.h" -#include "ggml-cpu-aarch64.h" +#include "ggml-cpu-traits.h" #include "ggml-cpu-impl.h" #include "ggml-cpu.h" #include "ggml-impl.h" #include "ggml-quants.h" #include "ggml-cpu-quants.h" #include "ggml-threading.h" +#include "amx/amx.h" #include "ggml.h" #if defined(_MSC_VER) || defined(__MINGW32__) @@ -223,10 +224,6 @@ typedef void * thread_ret_t; typedef pthread_t ggml_thread_t; -#ifdef GGML_USE_CPU_HBM -#include -#endif - #if defined(__APPLE__) #include #include @@ -300,7 +297,6 @@ static const struct ggml_type_traits_cpu type_traits_cpu[GGML_TYPE_COUNT] = { }, [GGML_TYPE_Q8_0] = { .from_float = quantize_row_q8_0, - .from_float_to_mat = quantize_mat_q8_0, .vec_dot = ggml_vec_dot_q8_0_q8_0, .vec_dot_type = GGML_TYPE_Q8_0, #if defined (__ARM_FEATURE_MATMUL_INT8) @@ -408,33 +404,6 @@ static const struct ggml_type_traits_cpu type_traits_cpu[GGML_TYPE_COUNT] = { .vec_dot_type = GGML_TYPE_BF16, .nrows = 1, }, - [GGML_TYPE_Q4_0_4_4] = { - .from_float = NULL, - .vec_dot = NULL, - .vec_dot_type = GGML_TYPE_Q8_0, - .nrows = 1, - .ncols = 4, - .gemv = ggml_gemv_q4_0_4x4_q8_0, - .gemm = ggml_gemm_q4_0_4x4_q8_0, - }, - [GGML_TYPE_Q4_0_4_8] = { - .from_float = NULL, - .vec_dot = NULL, - .vec_dot_type = GGML_TYPE_Q8_0, - .nrows = 1, - .ncols = 4, - .gemv = ggml_gemv_q4_0_4x8_q8_0, - .gemm = ggml_gemm_q4_0_4x8_q8_0, - }, - [GGML_TYPE_Q4_0_8_8] = { - .from_float = NULL, - .vec_dot = NULL, - .vec_dot_type = GGML_TYPE_Q8_0, - .nrows = 1, - .ncols = 8, - .gemv = ggml_gemv_q4_0_8x8_q8_0, - .gemm = ggml_gemm_q4_0_8x8_q8_0, - }, [GGML_TYPE_TQ1_0] = { .from_float = quantize_row_tq1_0, .vec_dot = ggml_vec_dot_tq1_0_q8_K, @@ -447,15 +416,6 @@ static 
const struct ggml_type_traits_cpu type_traits_cpu[GGML_TYPE_COUNT] = { .vec_dot_type = GGML_TYPE_Q8_K, .nrows = 1, }, - [GGML_TYPE_IQ4_NL_4_4] = { - .from_float = NULL, - .vec_dot = NULL, - .vec_dot_type = GGML_TYPE_Q8_0, - .nrows = 1, - .ncols = 4, - .gemv = ggml_gemv_iq4_nl_4x4_q8_0, - .gemm = ggml_gemm_iq4_nl_4x4_q8_0, - }, }; const struct ggml_type_traits_cpu * ggml_get_type_traits_cpu(enum ggml_type type) { @@ -624,7 +584,7 @@ do { \ for (int i = 0; i < offset; ++i) { \ x[i] = _mm512_add_ps(x[i], x[offset+i]); \ } \ - res = _mm512_reduce_add_ps(x[0]); \ + res = (ggml_float) _mm512_reduce_add_ps(x[0]); \ } while (0) // TODO: is this optimal ? @@ -674,7 +634,7 @@ do { \ for (int i = 0; i < offset; ++i) { \ x[i] = _mm512_add_ps(x[i], x[offset+i]); \ } \ - res = _mm512_reduce_add_ps(x[0]); \ + res = (ggml_float) _mm512_reduce_add_ps(x[0]); \ } while (0) #define GGML_F16_VEC GGML_F32Cx16 @@ -685,8 +645,8 @@ do { \ #define GGML_F16_VEC_FMA GGML_F32Cx16_FMA #define GGML_F16_VEC_ADD GGML_F32Cx16_ADD #define GGML_F16_VEC_MUL GGML_F32Cx16_MUL -#define GGML_F16_VEC_REDUCE GGML_F32Cx16_REDUCE +#define GGML_F16_VEC_REDUCE GGML_F32Cx16_REDUCE #elif defined(__AVX__) #define GGML_SIMD @@ -755,7 +715,7 @@ do { \ #define GGML_F32Cx8_LOAD(x) _mm256_cvtph_ps(_mm_loadu_si128((const __m128i *)(x))) #define GGML_F32Cx8_STORE(x, y) _mm_storeu_si128((__m128i *)(x), _mm256_cvtps_ph(y, 0)) #else -static inline __m256 __avx_f32cx8_load(ggml_fp16_t *x) { +static inline __m256 __avx_f32cx8_load(const ggml_fp16_t * x) { float tmp[8]; for (int i = 0; i < 8; i++) { @@ -1178,28 +1138,28 @@ static inline void __lasx_f32cx8_store(ggml_fp16_t * x, __m256 y) { #define GGML_F32x4_FMA(a, b, c) __lsx_vfmadd_s(b, c, a) #define GGML_F32x4_ADD __lsx_vfadd_s #define GGML_F32x4_MUL __lsx_vfmul_s -#define GGML_F32x4_REDUCE(res, x) \ -{ \ - int offset = GGML_F32_ARR >> 1; \ - for (int i = 0; i < offset; ++i) { \ - x[i] = __lsx_vfadd_s(x[i], x[offset+i]); \ - } \ - offset >>= 1; \ - for (int i = 0; i < offset; ++i) { \ - x[i] = __lsx_vfadd_s(x[i], x[offset+i]); \ - } \ - offset >>= 1; \ - for (int i = 0; i < offset; ++i) { \ - x[i] = __lsx_vfadd_s(x[i], x[offset+i]); \ - } \ - __m128i tmp = __lsx_vsrli_d((__m128i)x[0], 32); \ - tmp = (__m128i)__lsx_vfadd_s((__m128)tmp, x[0]); \ - tmp = __lsx_vpickev_w(__lsx_vldi(0), tmp); \ - const __m128 t0 = __lsx_vshuf4i_w(tmp, 0x88); \ - tmp = __lsx_vsrli_d((__m128i)t0, 32); \ - tmp = (__m128i)__lsx_vfadd_s((__m128)tmp, t0); \ - tmp = __lsx_vpickev_w(__lsx_vldi(0), tmp); \ - res = (ggml_float) __lsx_vpickve2gr_w(__lsx_vshuf4i_w(tmp, 0x88), 0); \ +#define GGML_F32x4_REDUCE(res, x) \ +{ \ + int offset = GGML_F32_ARR >> 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = __lsx_vfadd_s(x[i], x[offset + i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = __lsx_vfadd_s(x[i], x[offset + i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = __lsx_vfadd_s(x[i], x[offset + i]); \ + } \ + __m128i tmp = __lsx_vsrli_d((__m128i) x[0], 32); \ + tmp = (__m128i) __lsx_vfadd_s((__m128) tmp, x[0]); \ + tmp = __lsx_vpickev_w(__lsx_vldi(0), tmp); \ + const __m128 t0 = __lsx_vshuf4i_w(tmp, 0x88); \ + tmp = __lsx_vsrli_d((__m128i) t0, 32); \ + tmp = (__m128i) __lsx_vfadd_s((__m128) tmp, t0); \ + tmp = __lsx_vpickev_w(__lsx_vldi(0), tmp); \ + res = (ggml_float) __lsx_vpickve2gr_w(__lsx_vshuf4i_w(tmp, 0x88), 0); \ } #define GGML_F32_VEC GGML_F32x4 @@ -1367,31 +1327,18 @@ struct ggml_compute_state { int ith; }; -struct ggml_compute_params { - // ith = thread index, nth 
= number of threads - int ith, nth; - - // work buffer for all threads - size_t wsize; - void * wdata; - - struct ggml_threadpool * threadpool; -}; - // // fundamental operations // inline static void ggml_vec_set_i8(const int n, int8_t * x, const int8_t v) { for (int i = 0; i < n; ++i) x[i] = v; } - inline static void ggml_vec_set_i16(const int n, int16_t * x, const int16_t v) { for (int i = 0; i < n; ++i) x[i] = v; } -inline static void ggml_vec_set_i32(const int n, int32_t * x, const int32_t v) { for (int i = 0; i < n; ++i) x[i] = v; } +inline static void ggml_vec_set_i32(const int n, int32_t * x, const int32_t v) { for (int i = 0; i < n; ++i) x[i] = v; } +inline static void ggml_vec_cpy_i32(const int n, int32_t * y, const int32_t * x) { for (int i = 0; i < n; ++i) y[i] = x[i]; } inline static void ggml_vec_set_f16(const int n, ggml_fp16_t * x, const int32_t v) { for (int i = 0; i < n; ++i) x[i] = v; } - inline static void ggml_vec_set_bf16(const int n, ggml_bf16_t * x, const ggml_bf16_t v) { for (int i = 0; i < n; ++i) x[i] = v; } - inline static void ggml_vec_add_f32 (const int n, float * z, const float * x, const float * y) { for (int i = 0; i < n; ++i) z[i] = x[i] + y[i]; } inline static void ggml_vec_add1_f32(const int n, float * z, const float * x, const float v) { for (int i = 0; i < n; ++i) z[i] = x[i] + v; } inline static void ggml_vec_acc_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] += x[i]; } @@ -2286,7 +2233,7 @@ struct ggml_state { static struct ggml_state g_state = {0}; -static void ggml_barrier(struct ggml_threadpool * tp) { +void ggml_barrier(struct ggml_threadpool * tp) { int n_threads = atomic_load_explicit(&tp->n_threads_cur, memory_order_relaxed); if (n_threads == 1) { return; @@ -2440,7 +2387,7 @@ bool ggml_is_numa(void) { #endif #if !defined(HWCAP2_I8MM) -#define HWCAP2_I8MM 0 +#define HWCAP2_I8MM (1 << 13) #endif static void ggml_init_arm_arch_features(void) { @@ -4521,9 +4468,6 @@ static void ggml_compute_forward_add( case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: - case GGML_TYPE_Q4_0_4_4: - case GGML_TYPE_Q4_0_4_8: - case GGML_TYPE_Q4_0_8_8: { ggml_compute_forward_add_q_f32(params, dst); } break; @@ -4901,9 +4845,6 @@ static void ggml_compute_forward_add1( case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: - case GGML_TYPE_Q4_0_4_4: - case GGML_TYPE_Q4_0_4_8: - case GGML_TYPE_Q4_0_8_8: { ggml_compute_forward_add1_q_f32(params, dst); } break; @@ -5031,9 +4972,6 @@ static void ggml_compute_forward_acc( case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: - case GGML_TYPE_Q4_0_4_4: - case GGML_TYPE_Q4_0_4_8: - case GGML_TYPE_Q4_0_8_8: default: { GGML_ABORT("fatal error"); @@ -7449,20 +7387,9 @@ static void ggml_compute_forward_mul_mat( const int ith = params->ith; const int nth = params->nth; - enum ggml_type type = src0->type; - - if (src0->buffer && ggml_backend_cpu_buft_is_aarch64(src0->buffer->buft)) { - type = (enum ggml_type)(intptr_t)src0->extra; - } - - enum ggml_type const vec_dot_type = type_traits_cpu[type].vec_dot_type; + enum ggml_type const vec_dot_type = type_traits_cpu[src0->type].vec_dot_type; ggml_from_float_t const from_float = type_traits_cpu[vec_dot_type].from_float; - ggml_from_float_to_mat_t const from_float_to_mat = type_traits_cpu[vec_dot_type].from_float_to_mat; - int64_t const vec_dot_num_rows = type_traits_cpu[type].nrows; - int64_t const matmul_num_cols = type_traits_cpu[type].ncols; - int64_t const blck_size_interleave = 
ggml_get_type_traits(type)->blck_size_interleave; - ggml_gemv_t const gemv = type_traits_cpu[type].gemv; - ggml_gemm_t const gemm = type_traits_cpu[type].gemm; + int64_t const vec_dot_num_rows = type_traits_cpu[src0->type].nrows; GGML_ASSERT(ne0 == ne01); GGML_ASSERT(ne1 == ne11); @@ -7470,7 +7397,7 @@ static void ggml_compute_forward_mul_mat( GGML_ASSERT(ne3 == ne13); // we don't support permuted src0 or src1 - GGML_ASSERT(nb00 == ggml_type_size(type)); + GGML_ASSERT(nb00 == ggml_type_size(src0->type)); GGML_ASSERT(nb10 == ggml_type_size(src1->type)); // dst cannot be transposed or permuted @@ -7482,6 +7409,7 @@ static void ggml_compute_forward_mul_mat( // nb01 >= nb00 - src0 is not transposed // compute by src0 rows + // TODO: extract to "extra_op" #if GGML_USE_LLAMAFILE // broadcast factors const int64_t r2 = ne12 / ne02; @@ -7492,15 +7420,15 @@ static void ggml_compute_forward_mul_mat( if (src1_cont) { for (int64_t i13 = 0; i13 < ne13; i13++) for (int64_t i12 = 0; i12 < ne12; i12++) - if (!llamafile_sgemm(ne01, ne11, ne00/ggml_blck_size(type), + if (!llamafile_sgemm(ne01, ne11, ne00/ggml_blck_size(src0->type), (const char *)src0->data + i12/r2*nb02 + i13/r3*nb03, - nb01/ggml_type_size(type), + nb01/ggml_type_size(src0->type), (const char *)src1->data + i12*nb12 + i13*nb13, nb11/ggml_type_size(src1->type), (char *)dst->data + i12*nb2 + i13*nb3, nb1/ggml_type_size(dst->type), ith, nth, - type, + src0->type, src1->type, dst->type)) goto UseGgmlGemm1; @@ -7521,19 +7449,10 @@ UseGgmlGemm1:; for (int64_t i13 = 0; i13 < ne13; ++i13) { for (int64_t i12 = 0; i12 < ne12; ++i12) { - int64_t i11_processed = 0; - if ((ggml_n_dims(src1) == 2) && from_float_to_mat && gemm) { - for (int64_t i11 = ith * 4; i11 < ne11 - ne11 % 4; i11 += nth * 4) { - from_float_to_mat((float *)((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11), - (void *) (wdata + i13*nbw3 + i12*nbw2 + i11*nbw1), - 4, ne10, blck_size_interleave); - } - i11_processed = ne11 - ne11 % 4; - } - for (int64_t i11 = i11_processed + ith; i11 < ne11; i11 += nth) { + for (int64_t i11 = ith; i11 < ne11; i11 += nth) { from_float((float *)((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11), - (void *) (wdata + i13*nbw3 + i12*nbw2 + i11*nbw1), - ne10); + (void *) (wdata + i13*nbw3 + i12*nbw2 + i11*nbw1), + ne10); } } } @@ -7553,15 +7472,15 @@ UseGgmlGemm1:; for (int64_t i13 = 0; i13 < ne13; i13++) for (int64_t i12 = 0; i12 < ne12; i12++) - if (!llamafile_sgemm(ne01, ne11, ne00/ggml_blck_size(type), + if (!llamafile_sgemm(ne01, ne11, ne00/ggml_blck_size(src0->type), (const char *)src0->data + i12/r2*nb02 + i13/r3*nb03, - nb01/ggml_type_size(type), + nb01/ggml_type_size(src0->type), (const char *)wdata + (i12*ne11 + i13*ne12*ne11)*row_size, row_size/ggml_type_size(vec_dot_type), (char *)dst->data + i12*nb2 + i13*nb3, nb1/ggml_type_size(dst->type), ith, nth, - type, + src0->type, vec_dot_type, dst->type)) goto UseGgmlGemm2; @@ -7603,28 +7522,6 @@ UseGgmlGemm2:; const int64_t dr0 = (nr0 + nchunk0 - 1) / nchunk0; const int64_t dr1 = (nr1 + nchunk1 - 1) / nchunk1; - if ((ggml_n_dims(src0) == 2) && gemv) { - const void * src1_wdata = (src1->type == vec_dot_type) ? src1->data : params->wdata; - const size_t src1_col_stride = ggml_is_contiguous(src1) || src1->type != vec_dot_type ? ggml_row_size(vec_dot_type, ne10) : nb11; - int64_t src0_start = (ith * ne01) / nth; - int64_t src0_end = ((ith + 1) * ne01) / nth; - src0_start = (src0_start % matmul_num_cols) ? 
src0_start + matmul_num_cols - (src0_start % matmul_num_cols): src0_start; - src0_end = (src0_end % matmul_num_cols) ? src0_end + matmul_num_cols - (src0_end % matmul_num_cols): src0_end; - if (src0_start >= src0_end) return; - - // If there are more than three rows in src1, use gemm; otherwise, use gemv. - if (gemm && (ne11 > 3)) { - gemm(ne00, (float *)((char *) dst->data) + src0_start, ne01, (const char *) src0->data + src0_start * nb01, - (const char *) src1_wdata, ne11 - ne11 % 4, src0_end - src0_start); - } - for (int iter = gemm ? ne11 - ne11 % 4 : 0; iter < ne11; iter++) { - gemv(ne00, (float *)((char *) dst->data + (iter * nb1)) + src0_start, ne01, - (const char *) src0->data + src0_start * nb01, (const char *) src1_wdata + (src1_col_stride * iter), 1, - src0_end - src0_start); - } - return; - } - // The first chunk comes from our thread_id, the rest will get auto-assigned. int current_chunk = ith; @@ -7641,13 +7538,13 @@ UseGgmlGemm2:; // dot kernels can handle 1 row and col at a time, but mmla kernels can process 2 rows and cols int64_t num_rows_per_vec_dot = vec_dot_num_rows; - // TODO: currently the mmla kernels support only even numbered rows/cols. - // this check can be removed once they are extended to support odd numbered rows/cols too + // these checks are needed to avoid crossing dim1 boundaries + // can be optimized, but the logic would become more complicated, so keeping it like this for simplicity if ((nr0 % 2 != 0) || (ne11 % 2 != 0) || ((ir0_end - ir0_start) % 2 != 0) || ((ir1_end - ir1_start) % 2 != 0)) { num_rows_per_vec_dot = 1; } - ggml_compute_forward_mul_mat_one_chunk(params, dst, type, num_rows_per_vec_dot, ir0_start, ir0_end, ir1_start, ir1_end); + ggml_compute_forward_mul_mat_one_chunk(params, dst, src0->type, num_rows_per_vec_dot, ir0_start, ir0_end, ir1_start, ir1_end); if (nth >= nchunk0 * nchunk1) { break; @@ -7679,8 +7576,6 @@ static void ggml_compute_forward_mul_mat_id( ggml_vec_dot_t const vec_dot = type_traits_cpu[type].vec_dot; enum ggml_type const vec_dot_type = type_traits_cpu[type].vec_dot_type; ggml_from_float_t const from_float = type_traits_cpu[vec_dot_type].from_float; - int64_t const matmul_num_cols = type_traits_cpu[type].ncols; - ggml_gemv_t const gemv = type_traits_cpu[type].gemv; // we don't support permuted src0 or src1 GGML_ASSERT(nb00 == ggml_type_size(type)); @@ -7766,34 +7661,6 @@ static void ggml_compute_forward_mul_mat_id( const int64_t nr0 = ne01; // src0 rows const int64_t nr1 = cne1; // src1 rows - if (((ggml_n_dims(src0) - 1) == 2) && gemv) { - int64_t src0_cur_start = (ith * ne01) / nth; - int64_t src0_cur_end = ((ith + 1) * ne01) / nth; - src0_cur_start = (src0_cur_start % matmul_num_cols) ? src0_cur_start + matmul_num_cols - (src0_cur_start % matmul_num_cols): src0_cur_start; - src0_cur_end = (src0_cur_end % matmul_num_cols) ? src0_cur_end + matmul_num_cols - (src0_cur_end % matmul_num_cols): src0_cur_end; - if (src0_cur_start >= src0_cur_end) return; - - for (int ir1 = 0; ir1 < nr1; ir1++) { - struct mmid_row_mapping row_mapping = MMID_MATRIX_ROW(cur_a, ir1); - const int id = row_mapping.i1; // selected expert index - - const int64_t i11 = id % ne11; - const int64_t i12 = row_mapping.i2; // row index in src1 - - const int64_t i1 = id; // selected expert index - const int64_t i2 = i12; // row - - const char * src1_col = (const char *) wdata + - (src1_cont || src1->type != vec_dot_type - ? 
(i11 + i12 * ne11) * row_size - : (i11 * nb11 + i12 * nb12)); - - gemv(ne00, (float *)((char *) dst->data + (i1 * nb1 + i2 * nb2)) + src0_cur_start, ne01, - (const char *) src0_cur + src0_cur_start * nb01, src1_col, 1, src0_cur_end - src0_cur_start); - } - continue; - } - // distribute the thread work across the inner or outer loop based on which one is larger const int64_t nth0 = nr0 > nr1 ? nth : 1; // parallelize by src0 rows @@ -8101,9 +7968,6 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: - case GGML_TYPE_Q4_0_4_4: - case GGML_TYPE_Q4_0_4_8: - case GGML_TYPE_Q4_0_8_8: { ggml_compute_forward_out_prod_q_f32(params, dst); } break; @@ -8256,6 +8120,77 @@ static void ggml_compute_forward_set_f32( } } +static void ggml_compute_forward_set_i32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); + + // view src0 and dst with these strides and data offset inbytes during set + // nb0 is implicitly element_size because src0 and dst are contiguous + size_t nb1 = ((int32_t *) dst->op_params)[0]; + size_t nb2 = ((int32_t *) dst->op_params)[1]; + size_t nb3 = ((int32_t *) dst->op_params)[2]; + size_t offset = ((int32_t *) dst->op_params)[3]; + bool inplace = (bool) ((int32_t *) dst->op_params)[4]; + + if (!inplace) { + if (params->ith == 0) { + // memcpy needs to be synchronized across threads to avoid race conditions. + // => do it in INIT phase + memcpy( + ((char *) dst->data), + ((char *) src0->data), + ggml_nbytes(dst)); + } + ggml_barrier(params->threadpool); + } + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src1); + const int nc = src1->ne[0]; + + GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne) + GGML_TENSOR_LOCALS(size_t, nb1, src1, nb) + + // src0 and dst as viewed during set + const size_t nb0 = ggml_element_size(src0); + + const int im0 = (ne10 == 0 ? 0 : ne10-1); + const int im1 = (ne11 == 0 ? 0 : ne11-1); + const int im2 = (ne12 == 0 ? 0 : ne12-1); + const int im3 = (ne13 == 0 ? 
0 : ne13-1); + + GGML_ASSERT(offset + im0*nb0 + im1*nb1 + im2*nb2 + im3*nb3 <= ggml_nbytes(dst)); + + GGML_ASSERT(nb10 == sizeof(int32_t)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + for (int ir = ir0; ir < ir1; ++ir) { + // src0 and dst are viewed with shape of src1 and offset + // => same indices + const int i3 = ir/(ne12*ne11); + const int i2 = (ir - i3*ne12*ne11)/ne11; + const int i1 = (ir - i3*ne12*ne11 - i2*ne11); + + ggml_vec_cpy_i32(nc, + (int32_t *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + offset), + (int32_t *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11)); + } +} + static void ggml_compute_forward_set( const struct ggml_compute_params * params, struct ggml_tensor * dst) { @@ -8267,6 +8202,10 @@ static void ggml_compute_forward_set( { ggml_compute_forward_set_f32(params, dst); } break; + case GGML_TYPE_I32: + { + ggml_compute_forward_set_i32(params, dst); + } break; case GGML_TYPE_F16: case GGML_TYPE_BF16: case GGML_TYPE_Q4_0: @@ -8291,9 +8230,6 @@ static void ggml_compute_forward_set( case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: - case GGML_TYPE_Q4_0_4_4: - case GGML_TYPE_Q4_0_4_8: - case GGML_TYPE_Q4_0_8_8: default: { GGML_ABORT("fatal error"); @@ -8555,9 +8491,6 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: - case GGML_TYPE_Q4_0_4_4: - case GGML_TYPE_Q4_0_4_8: - case GGML_TYPE_Q4_0_8_8: { ggml_compute_forward_get_rows_q(params, dst); } break; @@ -9147,10 +9080,6 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: case GGML_TYPE_Q8_K: - case GGML_TYPE_Q4_0_4_4: - case GGML_TYPE_Q4_0_4_8: - case GGML_TYPE_Q4_0_8_8: - case GGML_TYPE_IQ4_NL_4_4: case GGML_TYPE_I8: case GGML_TYPE_I16: case GGML_TYPE_I32: @@ -10625,6 +10554,40 @@ static void ggml_compute_forward_pad( } } +// ggml_compute_forward_pad_reflect_1d + +static void ggml_compute_forward_pad_reflect_1d( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + const int ith = params->ith; + const int nth = params->nth; + + const int32_t * opts = (const int32_t *) dst->op_params; + const int p0 = opts[0]; + const int p1 = opts[1]; + + GGML_TENSOR_UNARY_OP_LOCALS + + for (int64_t i3 = 0; i3 < ne3; i3++) { + for (int64_t i2 = 0; i2 < ne2; i2++) { + for (int64_t i1 = ith; i1 < ne1; i1 += nth) { + float * left = (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + p0*nb0); + float * right = (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + (ne0-p1-1)*nb0); + + ggml_vec_cpy_f32(ne00, left, (float *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01)); + + for (int i0 = 1; i0 <= p0; i0++) { left[-i0] = left[i0]; } + for (int i0 = 1; i0 <= p1; i0++) { right[i0] = right[-i0]; } + } + } + } +} // ggml_compute_forward_arange @@ -12500,6 +12463,9 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm return; } + // extra_buffer op? 
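// ----------------------------------------------------------------------------------------
// Illustrative sketch (not part of the patch; all names below are invented): the call added
// just below routes every op to the registered "extra" buffer types before the regular
// switch. ggml_cpu_extra_compute_forward() walks ggml_backend_cpu_get_extra_buffers_type()
// and asks each entry's tensor_traits to handle the op (see ggml-cpu-traits.cpp above).
// A backend extension would plug into this interface roughly as follows:
#include "ggml-cpu-traits.h"

namespace ggml::cpu::example { // hypothetical namespace, for illustration only

class repack_tensor_traits : public tensor_traits {
  public:
    bool work_size(int n_threads, const struct ggml_tensor * op, size_t & size) override {
        GGML_UNUSED(n_threads);
        if (op->op != GGML_OP_MUL_MAT) {
            return false; // not handled here: the generic work-size logic applies
        }
        // scratch for converting src1 to the kernel's preferred layout (illustrative choice)
        size = ggml_row_size(GGML_TYPE_Q8_0, ggml_nelements(op->src[1]));
        return true;
    }

    bool compute_forward(struct ggml_compute_params * params, struct ggml_tensor * op) override {
        if (op->op != GGML_OP_MUL_MAT) {
            return false; // fall through to the regular switch in ggml_compute_forward()
        }
        // ... run the specialized kernel using params->ith / params->nth / params->wdata ...
        GGML_UNUSED(params);
        return true;
    }
};

class repack_extra_buffer_type : public extra_buffer_type {
  public:
    bool supports_op(ggml_backend_dev_t dev, const struct ggml_tensor * op) override {
        GGML_UNUSED(dev);
        return op->op == GGML_OP_MUL_MAT;
    }

    tensor_traits * get_tensor_traits(const struct ggml_tensor * op) override {
        static repack_tensor_traits traits;
        return op->op == GGML_OP_MUL_MAT ? &traits : nullptr;
    }
};

} // namespace ggml::cpu::example
// ------------------------------------------------------------------- [end of sketch] ----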
+ if (ggml_cpu_extra_compute_forward(params, tensor)) return; + switch (tensor->op) { case GGML_OP_DUP: { @@ -12721,6 +12687,10 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm { ggml_compute_forward_pad(params, tensor); } break; + case GGML_OP_PAD_REFLECT_1D: + { + ggml_compute_forward_pad_reflect_1d(params, tensor); + } break; case GGML_OP_ARANGE: { ggml_compute_forward_arange(params, tensor); @@ -13063,6 +13033,7 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { } break; case GGML_OP_UPSCALE: case GGML_OP_PAD: + case GGML_OP_PAD_REFLECT_1D: case GGML_OP_ARANGE: case GGML_OP_TIMESTEP_EMBEDDING: case GGML_OP_ARGSORT: @@ -13442,140 +13413,142 @@ struct ggml_cplan ggml_graph_plan( size_t cur = 0; - switch (node->op) { - case GGML_OP_CPY: - case GGML_OP_DUP: - { - if (ggml_is_quantized(node->type) || - // F16 -> BF16 and BF16 -> F16 copies go through intermediate F32 - (node->src[0]->type == GGML_TYPE_F16 && node->src[1] && node->src[1]->type == GGML_TYPE_BF16) || - (node->src[0]->type == GGML_TYPE_BF16 && node->src[1] && node->src[1]->type == GGML_TYPE_F16)) { + if (!ggml_cpu_extra_work_size(n_threads, node, &cur)) { + + switch (node->op) { + case GGML_OP_CPY: + case GGML_OP_DUP: + { + if (ggml_is_quantized(node->type) || + // F16 -> BF16 and BF16 -> F16 copies go through intermediate F32 + (node->src[0]->type == GGML_TYPE_F16 && node->src[1] && node->src[1]->type == GGML_TYPE_BF16) || + (node->src[0]->type == GGML_TYPE_BF16 && node->src[1] && node->src[1]->type == GGML_TYPE_F16)) { + cur = ggml_type_size(GGML_TYPE_F32) * node->ne[0] * n_tasks; + } + } break; + case GGML_OP_ADD: + case GGML_OP_ADD1: + { + if (ggml_is_quantized(node->src[0]->type)) { + cur = ggml_type_size(GGML_TYPE_F32) * node->src[0]->ne[0] * n_tasks; + } + } break; + case GGML_OP_ACC: + { + if (ggml_is_quantized(node->src[0]->type)) { + cur = ggml_type_size(GGML_TYPE_F32) * node->src[1]->ne[0] * n_tasks; + } + } break; + case GGML_OP_COUNT_EQUAL: + { + cur = ggml_type_size(node->type)*n_tasks; + } break; + case GGML_OP_MUL_MAT: + { + const enum ggml_type vec_dot_type = type_traits_cpu[node->src[0]->type].vec_dot_type; + + if (node->src[1]->type != vec_dot_type) { + cur = ggml_row_size(vec_dot_type, ggml_nelements(node->src[1])); + } + } break; + case GGML_OP_MUL_MAT_ID: + { + cur = 0; + const struct ggml_tensor * src0 = node->src[0]; + const struct ggml_tensor * src1 = node->src[1]; + const enum ggml_type vec_dot_type = type_traits_cpu[src0->type].vec_dot_type; + if (src1->type != vec_dot_type) { + cur += ggml_row_size(vec_dot_type, ggml_nelements(src1)); + } + const int n_as = src0->ne[2]; + cur += GGML_PAD(cur, sizeof(int64_t)); // align + cur += n_as * sizeof(int64_t); // matrix_row_counts + cur += n_as * src1->ne[2] * sizeof(int64_t); // matrix_rows + } break; + case GGML_OP_OUT_PROD: + { + if (ggml_is_quantized(node->src[0]->type)) { + cur = ggml_type_size(GGML_TYPE_F32) * node->src[0]->ne[0] * n_tasks; + } + } break; + case GGML_OP_SOFT_MAX: + case GGML_OP_ROPE: + { cur = ggml_type_size(GGML_TYPE_F32) * node->ne[0] * n_tasks; - } - } break; - case GGML_OP_ADD: - case GGML_OP_ADD1: - { - if (ggml_is_quantized(node->src[0]->type)) { - cur = ggml_type_size(GGML_TYPE_F32) * node->src[0]->ne[0] * n_tasks; - } - } break; - case GGML_OP_ACC: - { - if (ggml_is_quantized(node->src[0]->type)) { - cur = ggml_type_size(GGML_TYPE_F32) * node->src[1]->ne[0] * n_tasks; - } - } break; - case GGML_OP_COUNT_EQUAL: - { - cur = ggml_type_size(node->type)*n_tasks; - } break; - 
case GGML_OP_MUL_MAT: - { - const enum ggml_type vec_dot_type = type_traits_cpu[node->src[0]->type].vec_dot_type; + } break; + case GGML_OP_CONV_TRANSPOSE_1D: + { + GGML_ASSERT(node->src[0]->ne[3] == 1); + GGML_ASSERT(node->src[1]->ne[2] == 1); + GGML_ASSERT(node->src[1]->ne[3] == 1); + + const int64_t ne00 = node->src[0]->ne[0]; // K + const int64_t ne01 = node->src[0]->ne[1]; // Cout + const int64_t ne02 = node->src[0]->ne[2]; // Cin + const int64_t ne10 = node->src[1]->ne[0]; // L + const int64_t ne11 = node->src[1]->ne[1]; // Cin + + if ((node->src[0]->type == GGML_TYPE_F16 || + node->src[0]->type == GGML_TYPE_BF16) && + node->src[1]->type == GGML_TYPE_F32) { + cur += sizeof(ggml_fp16_t)*ne00*ne01*ne02; + cur += sizeof(ggml_fp16_t)*ne10*ne11; + } else if (node->src[0]->type == GGML_TYPE_F32 && + node->src[1]->type == GGML_TYPE_F32) { + cur += sizeof(float)*ne00*ne01*ne02; + cur += sizeof(float)*ne10*ne11; + } else { + GGML_ABORT("fatal error"); + } + } break; + case GGML_OP_CONV_TRANSPOSE_2D: + { + const int64_t ne00 = node->src[0]->ne[0]; // W + const int64_t ne01 = node->src[0]->ne[1]; // H + const int64_t ne02 = node->src[0]->ne[2]; // Channels Out + const int64_t ne03 = node->src[0]->ne[3]; // Channels In - if (node->src[1]->type != vec_dot_type) { - cur = ggml_row_size(vec_dot_type, ggml_nelements(node->src[1])); - } - } break; - case GGML_OP_MUL_MAT_ID: - { - cur = 0; - const struct ggml_tensor * src0 = node->src[0]; - const struct ggml_tensor * src1 = node->src[1]; - const enum ggml_type vec_dot_type = type_traits_cpu[src0->type].vec_dot_type; - if (src1->type != vec_dot_type) { - cur += ggml_row_size(vec_dot_type, ggml_nelements(src1)); - } - const int n_as = src0->ne[2]; - cur += GGML_PAD(cur, sizeof(int64_t)); // align - cur += n_as * sizeof(int64_t); // matrix_row_counts - cur += n_as * src1->ne[2] * sizeof(int64_t); // matrix_rows - } break; - case GGML_OP_OUT_PROD: - { - if (ggml_is_quantized(node->src[0]->type)) { - cur = ggml_type_size(GGML_TYPE_F32) * node->src[0]->ne[0] * n_tasks; - } - } break; - case GGML_OP_SOFT_MAX: - case GGML_OP_ROPE: - { - cur = ggml_type_size(GGML_TYPE_F32) * node->ne[0] * n_tasks; - } break; - case GGML_OP_CONV_TRANSPOSE_1D: - { - GGML_ASSERT(node->src[0]->ne[3] == 1); - GGML_ASSERT(node->src[1]->ne[2] == 1); - GGML_ASSERT(node->src[1]->ne[3] == 1); - - const int64_t ne00 = node->src[0]->ne[0]; // K - const int64_t ne01 = node->src[0]->ne[1]; // Cout - const int64_t ne02 = node->src[0]->ne[2]; // Cin - - const int64_t ne10 = node->src[1]->ne[0]; // L - const int64_t ne11 = node->src[1]->ne[1]; // Cin - - if ((node->src[0]->type == GGML_TYPE_F16 || - node->src[0]->type == GGML_TYPE_BF16) && - node->src[1]->type == GGML_TYPE_F32) { - cur += sizeof(ggml_fp16_t)*ne00*ne01*ne02; - cur += sizeof(ggml_fp16_t)*ne10*ne11; - } else if (node->src[0]->type == GGML_TYPE_F32 && - node->src[1]->type == GGML_TYPE_F32) { - cur += sizeof(float)*ne00*ne01*ne02; - cur += sizeof(float)*ne10*ne11; - } else { - GGML_ABORT("fatal error"); - } - } break; - case GGML_OP_CONV_TRANSPOSE_2D: - { - const int64_t ne00 = node->src[0]->ne[0]; // W - const int64_t ne01 = node->src[0]->ne[1]; // H - const int64_t ne02 = node->src[0]->ne[2]; // Channels Out - const int64_t ne03 = node->src[0]->ne[3]; // Channels In - - const int64_t ne10 = node->src[1]->ne[0]; // W - const int64_t ne11 = node->src[1]->ne[1]; // H - const int64_t ne12 = node->src[1]->ne[2]; // Channels In - - cur += sizeof(ggml_fp16_t)*ne00*ne01*ne02*ne03; - cur += sizeof(ggml_fp16_t)*ne10*ne11*ne12; - } break; - 
case GGML_OP_FLASH_ATTN_EXT: - { - const int64_t ne00 = node->src[0]->ne[0]; // D + const int64_t ne10 = node->src[1]->ne[0]; // W + const int64_t ne11 = node->src[1]->ne[1]; // H + const int64_t ne12 = node->src[1]->ne[2]; // Channels In - cur = 3*sizeof(float)*ne00*n_tasks; // 3x head size/thread - } break; - case GGML_OP_FLASH_ATTN_BACK: - { - const int64_t D = node->src[0]->ne[0]; - const int64_t ne11 = ggml_up(node->src[1]->ne[1], GGML_SOFT_MAX_UNROLL); - const int64_t mxDn = MAX(D, ne11) * 2; // *2 because of S and SM in ggml_compute_forward_flash_attn_back - if (node->src[1]->type == GGML_TYPE_F32) { - cur = sizeof(float)*mxDn*n_tasks; // TODO: this can become (n_tasks-1) - cur += sizeof(float)*mxDn*n_tasks; // this is overestimated by x2 - } else if (node->src[1]->type == GGML_TYPE_F16) { - cur = sizeof(float)*mxDn*n_tasks; // TODO: this can become (n_tasks-1) - cur += sizeof(float)*mxDn*n_tasks; // this is overestimated by x2 - } else if (node->src[1]->type == GGML_TYPE_BF16) { - cur = sizeof(float)*mxDn*n_tasks; // TODO: this can become (n_tasks-1) - cur += sizeof(float)*mxDn*n_tasks; // this is overestimated by x2 - } - } break; + cur += sizeof(ggml_fp16_t)*ne00*ne01*ne02*ne03; + cur += sizeof(ggml_fp16_t)*ne10*ne11*ne12; + } break; + case GGML_OP_FLASH_ATTN_EXT: + { + const int64_t ne00 = node->src[0]->ne[0]; // D - case GGML_OP_CROSS_ENTROPY_LOSS: - { - cur = ggml_type_size(node->type)*(n_tasks + node->src[0]->ne[0]*n_tasks); - } break; - case GGML_OP_COUNT: - { - GGML_ABORT("fatal error"); - } - default: - break; + cur = 3*sizeof(float)*ne00*n_tasks; // 3x head size/thread + } break; + case GGML_OP_FLASH_ATTN_BACK: + { + const int64_t D = node->src[0]->ne[0]; + const int64_t ne11 = ggml_up(node->src[1]->ne[1], GGML_SOFT_MAX_UNROLL); + const int64_t mxDn = MAX(D, ne11) * 2; // *2 because of S and SM in ggml_compute_forward_flash_attn_back + if (node->src[1]->type == GGML_TYPE_F32) { + cur = sizeof(float)*mxDn*n_tasks; // TODO: this can become (n_tasks-1) + cur += sizeof(float)*mxDn*n_tasks; // this is overestimated by x2 + } else if (node->src[1]->type == GGML_TYPE_F16) { + cur = sizeof(float)*mxDn*n_tasks; // TODO: this can become (n_tasks-1) + cur += sizeof(float)*mxDn*n_tasks; // this is overestimated by x2 + } else if (node->src[1]->type == GGML_TYPE_BF16) { + cur = sizeof(float)*mxDn*n_tasks; // TODO: this can become (n_tasks-1) + cur += sizeof(float)*mxDn*n_tasks; // this is overestimated by x2 + } + } break; + + case GGML_OP_CROSS_ENTROPY_LOSS: + { + cur = ggml_type_size(node->type)*(n_tasks + node->src[0]->ne[0]*n_tasks); + } break; + case GGML_OP_COUNT: + { + GGML_ABORT("fatal error"); + } + default: + break; + } } work_size = MAX(work_size, cur); diff --git a/ggml/src/ggml-cpu/ggml-cpu.cpp b/ggml/src/ggml-cpu/ggml-cpu.cpp index 44d99089a490c..c390957afa8e3 100644 --- a/ggml/src/ggml-cpu/ggml-cpu.cpp +++ b/ggml/src/ggml-cpu/ggml-cpu.cpp @@ -2,11 +2,18 @@ #include "ggml-backend-impl.h" #include "ggml-cpu.h" #include "ggml-cpu-aarch64.h" +#include "ggml-cpu-traits.h" #include "ggml-impl.h" +#include "amx/amx.h" + #include #include #include +#ifdef GGML_USE_CPU_HBM +#include "ggml-cpu-hbm.h" +#endif + #if defined(__APPLE__) #include #include @@ -22,124 +29,20 @@ // ggml-backend interface -#ifdef GGML_USE_CPU_HBM - -// buffer type HBM - -#include - -static const char * ggml_backend_cpu_hbm_buffer_type_get_name(ggml_backend_buffer_type_t buft) { - return "CPU_HBM"; - - GGML_UNUSED(buft); -} - -static void ggml_backend_cpu_hbm_buffer_free_buffer(ggml_backend_buffer_t 
buffer) { - hbw_free(buffer->context); -} - -static ggml_backend_buffer_t ggml_backend_cpu_hbm_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { - void * ptr; - int result = hbw_posix_memalign(&ptr, ggml_backend_cpu_buffer_type_get_alignment(buft), size); - if (result != 0) { - GGML_LOG_ERROR("failed to allocate HBM buffer of size %zu\n", size); - return NULL; - } - - ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); - buffer->buft = buft; - buffer->iface.free_buffer = ggml_backend_cpu_hbm_buffer_free_buffer; - - return buffer; -} - -ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type(void) { - static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type_hbm = { - /* .iface = */ { - /* .get_name = */ ggml_backend_cpu_hbm_buffer_type_get_name, - /* .alloc_buffer = */ ggml_backend_cpu_hbm_buffer_type_alloc_buffer, - /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, - /* .get_max_size = */ NULL, // defaults to SIZE_MAX - /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes - /* .is_host = */ ggml_backend_cpu_buffer_type_is_host, - }, - /* .context = */ NULL, - }; - - return &ggml_backend_cpu_buffer_type_hbm; -} -#endif - -// buffer type AARCH64 - -static void ggml_backend_cpu_aarch64_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { - tensor->extra = (void *)ggml_aarch64_get_optimal_repack_type(tensor); // NOLINT - - GGML_UNUSED(buffer); -} - -static void ggml_backend_cpu_aarch64_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - GGML_ASSERT(offset == 0); - GGML_ASSERT(size == ggml_nbytes(tensor)); - - enum ggml_type repack_type = (enum ggml_type)(intptr_t)tensor->extra; - - ggml_aarch64_repack_tensor(tensor, repack_type, data, size); - - GGML_UNUSED(buffer); -} - -static const char * ggml_backend_cpu_aarch64_buffer_type_get_name(ggml_backend_buffer_type_t buft) { - return "CPU_AARCH64"; - - GGML_UNUSED(buft); -} - -static ggml_backend_buffer_t ggml_backend_cpu_aarch64_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { - auto * buffer = ggml_backend_buft_alloc_buffer(ggml_backend_cpu_buffer_type(), size); - - if (buffer == NULL) { - return NULL; - } - - buffer->buft = buft; - buffer->iface.init_tensor = ggml_backend_cpu_aarch64_buffer_init_tensor; - buffer->iface.set_tensor = ggml_backend_cpu_aarch64_buffer_set_tensor; - - return buffer; -} - -ggml_backend_buffer_type_t ggml_backend_cpu_aarch64_buffer_type(void) { - static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type_aarch64 = { - /* .iface = */ { - /* .get_name = */ ggml_backend_cpu_aarch64_buffer_type_get_name, - /* .alloc_buffer = */ ggml_backend_cpu_aarch64_buffer_type_alloc_buffer, - /* .get_alignment = */ ggml_backend_cpu_buffer_type()->iface.get_alignment, - /* .get_max_size = */ NULL, // defaults to SIZE_MAX - /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes - /* .is_host = */ NULL, - }, - /* .device = */ ggml_backend_reg_dev_get(ggml_backend_cpu_reg(), 0), - /* .context = */ NULL, - }; - - return &ggml_backend_cpu_buffer_type_aarch64; -} - -bool ggml_backend_cpu_buft_is_aarch64(ggml_backend_buffer_type_t buft) { - return buft == ggml_backend_cpu_aarch64_buffer_type(); -} - -static ggml_backend_buffer_type_t * ggml_backend_cpu_get_extra_bufts(ggml_backend_dev_t device) { +std::vector& ggml_backend_cpu_get_extra_buffers_type() { static std::vector bufts = []() { std::vector bufts; -#ifdef 
GGML_USE_CPU_HBM - bufts.push_back(ggml_backend_cpu_hbm_buffer_type()); +#if defined(__AMX_INT8__) && defined(__AVX512VNNI__) + if (ggml_backend_amx_buffer_type()) { + bufts.push_back(ggml_backend_amx_buffer_type()); + } #endif #ifdef GGML_USE_CPU_AARCH64 - bufts.push_back(ggml_backend_cpu_aarch64_buffer_type()); + if (ggml_backend_cpu_aarch64_buffer_type()) { + bufts.push_back(ggml_backend_cpu_aarch64_buffer_type()); + } #endif bufts.push_back(NULL); @@ -147,11 +50,22 @@ static ggml_backend_buffer_type_t * ggml_backend_cpu_get_extra_bufts(ggml_backen return bufts; }(); - return bufts.data(); + return bufts; +} + +static ggml_backend_buffer_type_t * ggml_backend_cpu_device_get_extra_buffers_type(ggml_backend_dev_t device) { + return ggml_backend_cpu_get_extra_buffers_type().data(); GGML_UNUSED(device); } +static bool ggml_backend_cpu_is_extra_buffer_type(ggml_backend_buffer_type_t buft) { + for (auto extra : ggml_backend_cpu_get_extra_buffers_type()) { + if (extra && extra == buft) return true; + } + return false; +} + // CPU backend - backend (stream) struct ggml_backend_cpu_context { @@ -456,14 +370,23 @@ static bool ggml_backend_cpu_device_supports_op(ggml_backend_dev_t dev, const st const struct ggml_tensor * src0 = op->src[0]; const struct ggml_tensor * src1 = op->src[1]; - if (src0 && src0->buffer && ggml_backend_cpu_buft_is_aarch64(src0->buffer->buft)) { - if (op->op != GGML_OP_MUL_MAT || src0->type == ggml_aarch64_get_optimal_repack_type(src0)) { - return false; + if (op->op == GGML_OP_NONE || op->op == GGML_OP_RESHAPE || op->op == GGML_OP_VIEW || op->op == GGML_OP_PERMUTE || op->op == GGML_OP_TRANSPOSE) { + return true; + } + + // extra_buffer_op? + for (auto extra : ggml_backend_cpu_get_extra_buffers_type()) { + if (extra) { + auto buf_extra = (ggml::cpu::extra_buffer_type*) extra->context; + if (buf_extra && buf_extra->supports_op(dev, op)) { + return true; + } } } - for (int i = 1; i < GGML_MAX_SRC; i++) { - if (op->src[i] && op->src[i]->buffer && ggml_backend_cpu_buft_is_aarch64(op->src[i]->buffer->buft)) { + // the other case need host buffer. 
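// ----------------------------------------------------------------------------------------
// Illustrative sketch (not part of the patch): the extra buffer types consulted just above
// are also what the backend registry hands out through the "ggml_backend_dev_get_extra_bufts"
// proc address registered further down in this file. A minimal way for application code to
// enumerate them, assuming only the public ggml-backend API:
#include "ggml-backend.h"
#include <cstdio>

static void print_cpu_extra_buffer_types(void) {
    ggml_backend_dev_t dev = ggml_backend_dev_by_type(GGML_BACKEND_DEVICE_TYPE_CPU);
    if (!dev) {
        return;
    }
    ggml_backend_reg_t reg = ggml_backend_dev_backend_reg(dev);

    auto get_extra_bufts = (ggml_backend_dev_get_extra_bufts_t)
        ggml_backend_reg_get_proc_address(reg, "ggml_backend_dev_get_extra_bufts");
    if (!get_extra_bufts) {
        return;
    }

    // the CPU backend returns a NULL-terminated array (see the lambda above)
    for (ggml_backend_buffer_type_t * bufts = get_extra_bufts(dev); bufts && *bufts; ++bufts) {
        printf("extra CPU buffer type: %s\n", ggml_backend_buft_name(*bufts));
    }
}
// ------------------------------------------------------------------- [end of sketch] ----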
+ for (int i = 0; i < GGML_MAX_SRC; i++) { + if (op->src[i] && op->src[i]->buffer && !ggml_backend_buft_is_host(op->src[i]->buffer->buft)) { return false; } } @@ -486,13 +409,10 @@ static bool ggml_backend_cpu_device_supports_op(ggml_backend_dev_t dev, const st default: return true; } - - GGML_UNUSED(dev); } static bool ggml_backend_cpu_device_supports_buft(ggml_backend_dev_t dev, ggml_backend_buffer_type_t buft) { - return ggml_backend_buft_is_host(buft) || ggml_backend_cpu_buft_is_aarch64(buft); - + return ggml_backend_buft_is_host(buft) || ggml_backend_cpu_is_extra_buffer_type(buft); GGML_UNUSED(dev); } @@ -615,7 +535,15 @@ static ggml_backend_feature * ggml_backend_cpu_get_features(ggml_backend_reg_t r if (ggml_cpu_has_llamafile()) { features.push_back({ "LLAMAFILE", "1" }); } - // TODO: rename this + #ifdef GGML_USE_ACCELERATE + features.push_back({ "ACCELERATE", "1" }); + #endif + #ifdef GGML_USE_CPU_HBM + features.push_back({ "CPU_HBM", "1" }); + #endif + #ifdef GGML_USE_OPENMP + features.push_back({ "OPENMP", "1" }); + #endif #ifdef GGML_USE_CPU_AARCH64 features.push_back({ "AARCH64_REPACK", "1" }); #endif @@ -632,10 +560,12 @@ static ggml_backend_feature * ggml_backend_cpu_get_features(ggml_backend_reg_t r static void * ggml_backend_cpu_get_proc_address(ggml_backend_reg_t reg, const char * name) { if (strcmp(name, "ggml_backend_set_n_threads") == 0) { - return (void *)ggml_backend_cpu_set_n_threads; + ggml_backend_set_n_threads_t fct = ggml_backend_cpu_set_n_threads; + return (void *)fct; } if (strcmp(name, "ggml_backend_dev_get_extra_bufts") == 0) { - return (void *)ggml_backend_cpu_get_extra_bufts; + ggml_backend_dev_get_extra_bufts_t fct = ggml_backend_cpu_device_get_extra_buffers_type; + return (void *)fct; } if (strcmp(name, "ggml_backend_get_features") == 0) { return (void *)ggml_backend_cpu_get_features; diff --git a/ggml/src/ggml-cpu/llamafile/sgemm.cpp b/ggml/src/ggml-cpu/llamafile/sgemm.cpp index b2ce2e6649479..da4146ec4f688 100644 --- a/ggml/src/ggml-cpu/llamafile/sgemm.cpp +++ b/ggml/src/ggml-cpu/llamafile/sgemm.cpp @@ -50,8 +50,7 @@ #include "sgemm.h" #include "ggml-impl.h" -// hack until moved into the CPU backend -#include "../ggml-cpu-impl.h" +#include "ggml-cpu-impl.h" #include "ggml-quants.h" #ifdef _MSC_VER diff --git a/ggml/src/ggml-cuda/common.cuh b/ggml/src/ggml-cuda/common.cuh index 535118d87928e..2c0a562267281 100644 --- a/ggml/src/ggml-cuda/common.cuh +++ b/ggml/src/ggml-cuda/common.cuh @@ -41,28 +41,28 @@ #define CUDART_HMAX 11070 // CUDA 11.7, min. ver. for which __hmax and __hmax2 are known to work (may be higher than needed) #define CUDART_HMASK 12000 // CUDA 12.0, min. ver. 
for half2 -> uint mask comparisons -#define CC_PASCAL 600 -#define MIN_CC_DP4A 610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products -#define CC_VOLTA 700 -#define CC_TURING 750 -#define CC_AMPERE 800 -#define CC_OFFSET_AMD 1000000 +#define GGML_CUDA_CC_PASCAL 600 +#define GGML_CUDA_CC_DP4A 610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products +#define GGML_CUDA_CC_VOLTA 700 +#define GGML_CUDA_CC_TURING 750 +#define GGML_CUDA_CC_AMPERE 800 +#define GGML_CUDA_CC_OFFSET_AMD 1000000 // GCN/CNDA, wave size is 64 -#define CC_GCN4 (CC_OFFSET_AMD + 803) // Tonga, Fiji, Polaris, minimum for fast fp16 -#define CC_VEGA (CC_OFFSET_AMD + 900) // Vega56/64, minimum for fp16 dual issue -#define CC_VEGA20 (CC_OFFSET_AMD + 906) // MI50/Radeon VII, minimum for dp4a -#define CC_CDNA (CC_OFFSET_AMD + 908) // MI100, minimum for MFMA, acc registers -#define CC_CDNA2 (CC_OFFSET_AMD + 910) // MI210, minimum acc register renameing -#define CC_CDNA3 (CC_OFFSET_AMD + 942) // MI300 +#define GGML_CUDA_CC_GCN4 (GGML_CUDA_CC_OFFSET_AMD + 803) // Tonga, Fiji, Polaris, minimum for fast fp16 +#define GGML_CUDA_CC_VEGA (GGML_CUDA_CC_OFFSET_AMD + 900) // Vega56/64, minimum for fp16 dual issue +#define GGML_CUDA_CC_VEGA20 (GGML_CUDA_CC_OFFSET_AMD + 906) // MI50/Radeon VII, minimum for dp4a +#define GGML_CUDA_CC_CDNA (GGML_CUDA_CC_OFFSET_AMD + 908) // MI100, minimum for MFMA, acc registers +#define GGML_CUDA_CC_CDNA2 (GGML_CUDA_CC_OFFSET_AMD + 910) // MI210, minimum acc register renameing +#define GGML_CUDA_CC_CDNA3 (GGML_CUDA_CC_OFFSET_AMD + 942) // MI300 // RNDA removes MFMA, dp4a, xnack, acc registers, wave size is 32 -#define CC_RDNA1 (CC_OFFSET_AMD + 1010) // RX 5000 -#define CC_RDNA2 (CC_OFFSET_AMD + 1030) // RX 6000, minimum for dp4a -#define CC_RDNA3 (CC_OFFSET_AMD + 1100) // RX 7000, minimum for WMMA +#define GGML_CUDA_CC_RDNA1 (GGML_CUDA_CC_OFFSET_AMD + 1010) // RX 5000 +#define GGML_CUDA_CC_RDNA2 (GGML_CUDA_CC_OFFSET_AMD + 1030) // RX 6000, minimum for dp4a +#define GGML_CUDA_CC_RDNA3 (GGML_CUDA_CC_OFFSET_AMD + 1100) // RX 7000, minimum for WMMA -#define CC_QY1 210 -#define CC_QY2 220 +#define GGML_CUDA_CC_QY1 210 +#define GGML_CUDA_CC_QY2 220 #define MATRIX_ROW_PADDING 512 // last row of quant. 
matrices is a multiple of this to avoid out-of-bounds memory accesses @@ -131,36 +131,36 @@ typedef float dfloat; // dequantize float typedef float2 dfloat2; #endif // GGML_CUDA_F16 -#if (defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) || __CUDA_ARCH__ >= CC_PASCAL +#if (defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) || __CUDA_ARCH__ >= GGML_CUDA_CC_PASCAL #define FP16_AVAILABLE -#endif // (defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) || __CUDA_ARCH__ >= CC_PASCAL +#endif // (defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) || __CUDA_ARCH__ >= GGML_CUDA_CC_PASCAL #if defined(FP16_AVAILABLE) && __CUDA_ARCH__ != 610 #define FAST_FP16_AVAILABLE #endif // defined(FP16_AVAILABLE) && __CUDA_ARCH__ != 610 -#if !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_VOLTA +#if !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= GGML_CUDA_CC_VOLTA #define FP16_MMA_AVAILABLE -#endif // !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_VOLTA +#endif // !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= GGML_CUDA_CC_VOLTA -#if !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_TURING +#if !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= GGML_CUDA_CC_TURING #define INT8_MMA_AVAILABLE -#endif // !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_TURING +#endif // !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= GGML_CUDA_CC_TURING -#if !(defined(GGML_USE_MUSA) && __MUSA_ARCH__ <= CC_QY1) +#if !(defined(GGML_USE_MUSA) && __MUSA_ARCH__ <= GGML_CUDA_CC_QY1) #define FLASH_ATTN_AVAILABLE -#endif // !(defined(GGML_USE_MUSA) && __MUSA_ARCH__ <= CC_QY1) +#endif // !(defined(GGML_USE_MUSA) && __MUSA_ARCH__ <= GGML_CUDA_CC_QY1) static constexpr bool fast_fp16_available(const int cc) { - return cc >= CC_PASCAL && cc != 610; + return cc >= GGML_CUDA_CC_PASCAL && cc != 610; } static constexpr bool fp16_mma_available(const int cc) { - return cc < CC_OFFSET_AMD && cc >= CC_VOLTA; + return cc < GGML_CUDA_CC_OFFSET_AMD && cc >= GGML_CUDA_CC_VOLTA; } static constexpr bool int8_mma_available(const int cc) { - return cc < CC_OFFSET_AMD && cc >= CC_TURING; + return cc < GGML_CUDA_CC_OFFSET_AMD && cc >= GGML_CUDA_CC_TURING; } [[noreturn]] @@ -187,7 +187,7 @@ static __device__ void no_device_code( #endif // __CUDA_ARCH__ static __device__ __forceinline__ int warp_reduce_sum(int x) { -#if !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_AMPERE +#if !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= GGML_CUDA_CC_AMPERE return __reduce_add_sync(0xffffffff, x); #else #pragma unroll @@ -195,7 +195,7 @@ static __device__ __forceinline__ int warp_reduce_sum(int x) { x += __shfl_xor_sync(0xffffffff, x, offset, 32); } return x; -#endif // !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_AMPERE +#endif // !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= GGML_CUDA_CC_AMPERE } static __device__ __forceinline__ float warp_reduce_sum(float x) { @@ -284,7 +284,7 @@ static __device__ __forceinline__ half2 ggml_cuda_hmax2(const half2 a, const hal } static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { -#if !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +#if !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) 
&& __CUDA_ARCH__ >= GGML_CUDA_CC_PASCAL #pragma unroll for (int offset = 16; offset > 0; offset >>= 1) { x = ggml_cuda_hmax2(x, __shfl_xor_sync(0xffffffff, x, offset, 32)); @@ -293,7 +293,7 @@ static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { #else GGML_UNUSED(x); NO_DEVICE_CODE; -#endif // !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +#endif // !(defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= GGML_CUDA_CC_PASCAL } #if CUDART_VERSION < CUDART_HMASK @@ -333,13 +333,13 @@ static __device__ __forceinline__ int ggml_cuda_dp4a(const int a, const int b, i #else // defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__) -#if __CUDA_ARCH__ >= MIN_CC_DP4A +#if __CUDA_ARCH__ >= GGML_CUDA_CC_DP4A return __dp4a(a, b, c); -#else // __CUDA_ARCH__ >= MIN_CC_DP4A +#else // __CUDA_ARCH__ >= GGML_CUDA_CC_DP4A const int8_t * a8 = (const int8_t *) &a; const int8_t * b8 = (const int8_t *) &b; return c + a8[0]*b8[0] + a8[1]*b8[1] + a8[2]*b8[2] + a8[3]*b8[3]; -#endif // __CUDA_ARCH__ >= MIN_CC_DP4A +#endif // __CUDA_ARCH__ >= GGML_CUDA_CC_DP4A #endif // defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__) } diff --git a/ggml/src/ggml-cuda/convert.cu b/ggml/src/ggml-cuda/convert.cu index c0a4447075c6e..3896f956d7338 100644 --- a/ggml/src/ggml-cuda/convert.cu +++ b/ggml/src/ggml-cuda/convert.cu @@ -26,7 +26,7 @@ static __global__ void dequantize_block(const void * __restrict__ vx, dst_t * __ template static __global__ void dequantize_block_q8_0_f16(const void * __restrict__ vx, half * __restrict__ y, const int64_t k) { -#if __CUDA_ARCH__ >= CC_PASCAL +#if __CUDA_ARCH__ >= GGML_CUDA_CC_PASCAL constexpr int nint = CUDA_Q8_0_NE_ALIGN/sizeof(int) + WARP_SIZE; const int64_t i0 = CUDA_Q8_0_NE_ALIGN*blockIdx.x; @@ -64,7 +64,7 @@ static __global__ void dequantize_block_q8_0_f16(const void * __restrict__ vx, h GGML_UNUSED(y); GGML_UNUSED(k); NO_DEVICE_CODE; -#endif // __CUDA_ARCH__ >= CC_PASCAL +#endif // __CUDA_ARCH__ >= GGML_CUDA_CC_PASCAL } template @@ -599,7 +599,7 @@ to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { case GGML_TYPE_Q5_1: return dequantize_block_cuda; case GGML_TYPE_Q8_0: - if (ggml_cuda_info().devices[ggml_cuda_get_device()].cc >= CC_PASCAL) { + if (ggml_cuda_info().devices[ggml_cuda_get_device()].cc >= GGML_CUDA_CC_PASCAL) { return dequantize_block_q8_0_f16_cuda; } return dequantize_block_cuda; diff --git a/ggml/src/ggml-cuda/fattn-vec-f16.cuh b/ggml/src/ggml-cuda/fattn-vec-f16.cuh index 5ec3b91ae2b39..34a2992c769b9 100644 --- a/ggml/src/ggml-cuda/fattn-vec-f16.cuh +++ b/ggml/src/ggml-cuda/fattn-vec-f16.cuh @@ -220,7 +220,6 @@ static __global__ void flash_attn_vec_ext_f16( for (int j = 0; j < ncols; ++j) { half kqmax_new_j = ncols == 1 ? 
kqmax_new : kqmax_new_arr[j]; - kqmax_new_j = warp_reduce_max(kqmax_new_j); if (threadIdx.x == 0) { kqmax_shared[j][threadIdx.y] = kqmax_new_j; } diff --git a/ggml/src/ggml-cuda/fattn-vec-f32.cuh b/ggml/src/ggml-cuda/fattn-vec-f32.cuh index 3d93f4a8acdf2..a28fc8b7fc893 100644 --- a/ggml/src/ggml-cuda/fattn-vec-f32.cuh +++ b/ggml/src/ggml-cuda/fattn-vec-f32.cuh @@ -206,7 +206,6 @@ static __global__ void flash_attn_vec_ext_f32( for (int j = 0; j < ncols; ++j) { float kqmax_new_j = kqmax_new_arr[j]; - kqmax_new_j = warp_reduce_max(kqmax_new_j); if (threadIdx.x == 0) { kqmax_shared[j][threadIdx.y] = kqmax_new_j; } diff --git a/ggml/src/ggml-cuda/fattn.cu b/ggml/src/ggml-cuda/fattn.cu index 0e7ebbc539352..0b26b0f8e0595 100644 --- a/ggml/src/ggml-cuda/fattn.cu +++ b/ggml/src/ggml-cuda/fattn.cu @@ -304,7 +304,7 @@ void ggml_cuda_flash_attn_ext(ggml_backend_cuda_context & ctx, ggml_tensor * dst const enum ggml_prec prec = ggml_flash_attn_ext_get_prec(KQV); // On AMD the tile kernels perform poorly, use the vec kernel instead: - if (cc >= CC_OFFSET_AMD) { + if (cc >= GGML_CUDA_CC_OFFSET_AMD) { if (prec == GGML_PREC_DEFAULT && fast_fp16_available(cc)) { ggml_cuda_flash_attn_ext_vec_f16(ctx, dst); } else { diff --git a/ggml/src/ggml-cuda/ggml-cuda.cu b/ggml/src/ggml-cuda/ggml-cuda.cu index d6e4bfdd0d437..c180adc84b861 100644 --- a/ggml/src/ggml-cuda/ggml-cuda.cu +++ b/ggml/src/ggml-cuda/ggml-cuda.cu @@ -177,7 +177,7 @@ static ggml_cuda_device_info ggml_cuda_init() { info.devices[id].smpb = prop.sharedMemPerBlock; #if defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__) info.devices[id].smpbo = prop.sharedMemPerBlock; - info.devices[id].cc = 100*prop.major + 10*prop.minor + CC_OFFSET_AMD; + info.devices[id].cc = 100*prop.major + 10*prop.minor + GGML_CUDA_CC_OFFSET_AMD; #else info.devices[id].smpbo = prop.sharedMemPerBlockOptin; info.devices[id].cc = 100*prop.major + 10*prop.minor; @@ -1081,7 +1081,7 @@ static void ggml_cuda_op_mul_mat_cublas( const int compute_capability = ggml_cuda_info().devices[id].cc; - if (compute_capability >= CC_VOLTA && (src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && ggml_is_contiguous(src0) && row_diff == src0->ne[1] && dst->op_params[0] == GGML_PREC_DEFAULT) { + if (compute_capability >= GGML_CUDA_CC_VOLTA && (src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && ggml_is_contiguous(src0) && row_diff == src0->ne[1] && dst->op_params[0] == GGML_PREC_DEFAULT) { // convert src0 and src1 to fp16, multiply as fp16, convert dst to fp32 ggml_cuda_pool_alloc src0_as_f16(ctx.pool(id)); if (src0->type != GGML_TYPE_F16) { @@ -1108,7 +1108,7 @@ static void ggml_cuda_op_mul_mat_cublas( const half beta_f16 = 0.0f; cublasComputeType_t cu_compute_type = CUBLAS_COMPUTE_16F; - if (ggml_cuda_info().devices[ctx.device].cc == CC_CDNA) { + if (ggml_cuda_info().devices[ctx.device].cc == GGML_CUDA_CC_CDNA) { cu_compute_type = CUBLAS_COMPUTE_32F; } @@ -1612,7 +1612,7 @@ static void ggml_cuda_mul_mat_batched_cublas(ggml_backend_cuda_context & ctx, co cublasComputeType_t cu_compute_type = CUBLAS_COMPUTE_16F; cudaDataType_t cu_data_type = CUDA_R_16F; - if (ggml_cuda_info().devices[ctx.device].cc == CC_CDNA) { + if (ggml_cuda_info().devices[ctx.device].cc == GGML_CUDA_CC_CDNA) { cu_compute_type = CUBLAS_COMPUTE_32F; } @@ -2357,7 +2357,7 @@ static enum ggml_status ggml_backend_cuda_graph_compute(ggml_backend_t backend, std::vector ggml_cuda_cpy_fn_ptrs; if (cuda_ctx->cuda_graph->graph == nullptr) { - if (ggml_cuda_info().devices[cuda_ctx->device].cc < CC_AMPERE) { + if 
(ggml_cuda_info().devices[cuda_ctx->device].cc < GGML_CUDA_CC_AMPERE) { cuda_ctx->cuda_graph->disable_due_to_gpu_arch = true; #ifndef NDEBUG GGML_LOG_DEBUG("%s: disabling CUDA graphs due to GPU architecture\n", __func__); @@ -3028,7 +3028,7 @@ static bool ggml_backend_cuda_device_supports_op(ggml_backend_dev_t dev, const g return true; } const int cc = ggml_cuda_info().devices[dev_ctx->device].cc; - return cc >= CC_VOLTA && cc < CC_OFFSET_AMD && op->src[1]->type == GGML_TYPE_F16 && op->src[2]->type == GGML_TYPE_F16; + return cc >= GGML_CUDA_CC_VOLTA && cc < GGML_CUDA_CC_OFFSET_AMD && op->src[1]->type == GGML_TYPE_F16 && op->src[2]->type == GGML_TYPE_F16; } case GGML_OP_CROSS_ENTROPY_LOSS: case GGML_OP_CROSS_ENTROPY_LOSS_BACK: @@ -3210,7 +3210,7 @@ static void * ggml_backend_cuda_reg_get_proc_address(ggml_backend_reg_t reg, con static const ggml_backend_reg_i ggml_backend_cuda_reg_interface = { /* .get_name = */ ggml_backend_cuda_reg_get_name, /* .get_device_count = */ ggml_backend_cuda_reg_get_device_count, - /* .get_device_get = */ ggml_backend_cuda_reg_get_device, + /* .get_device = */ ggml_backend_cuda_reg_get_device, /* .get_proc_address = */ ggml_backend_cuda_reg_get_proc_address, }; diff --git a/ggml/src/ggml-cuda/mma.cuh b/ggml/src/ggml-cuda/mma.cuh index a452a3cc3d152..7d11540afd237 100644 --- a/ggml/src/ggml-cuda/mma.cuh +++ b/ggml/src/ggml-cuda/mma.cuh @@ -171,7 +171,7 @@ struct mma_int_C_I16J8 { __device__ __forceinline__ void mma_K4(const mma_int_A_I16K4 & mma_A, const mma_int_B_J8K4 & mma_B) { #ifdef INT8_MMA_AVAILABLE -#if __CUDA_ARCH__ >= CC_AMPERE +#if __CUDA_ARCH__ >= GGML_CUDA_CC_AMPERE asm("mma.sync.aligned.m16n8k16.row.col.s32.s8.s8.s32 {%0, %1, %2, %3}, {%4, %5}, {%6}, {%0, %1, %2, %3};" : "+r"(x[0]), "+r"(x[1]), "+r"(x[2]), "+r"(x[3]) : "r"(mma_A.x[0]), "r"(mma_A.x[1]), "r"(mma_B.x[0])); @@ -183,7 +183,7 @@ struct mma_int_C_I16J8 { asm("mma.sync.aligned.m8n8k16.row.col.s32.s8.s8.s32 {%0, %1}, {%2}, {%3}, {%0, %1};" : "+r"(x[2]), "+r"(x[3]) : "r"(mma_A.x[1]), "r"(mma_B.x[0])); -#endif // __CUDA_ARCH__ >= CC_AMPERE +#endif // __CUDA_ARCH__ >= GGML_CUDA_CC_AMPERE #else GGML_UNUSED(mma_A); GGML_UNUSED(mma_B); @@ -193,7 +193,7 @@ struct mma_int_C_I16J8 { __device__ __forceinline__ void mma_K8(const mma_int_A_I16K8 & mma_A, const mma_int_B_J8K8 & mma_B) { #ifdef INT8_MMA_AVAILABLE -#if __CUDA_ARCH__ >= CC_AMPERE +#if __CUDA_ARCH__ >= GGML_CUDA_CC_AMPERE asm("mma.sync.aligned.m16n8k32.row.col.s32.s8.s8.s32 {%0, %1, %2, %3}, {%4, %5, %6, %7}, {%8, %9}, {%0, %1, %2, %3};" : "+r"(x[0]), "+r"(x[1]), "+r"(x[2]), "+r"(x[3]) : "r"(mma_A.x[0]), "r"(mma_A.x[1]), "r"(mma_A.x[2]), "r"(mma_A.x[3]), "r"(mma_B.x[0]), "r"(mma_B.x[1])); @@ -211,7 +211,7 @@ struct mma_int_C_I16J8 { asm("mma.sync.aligned.m8n8k16.row.col.s32.s8.s8.s32 {%0, %1}, {%2}, {%3}, {%0, %1};" : "+r"(x[2]), "+r"(x[3]) : "r"(mma_A.x[3]), "r"(mma_B.x[1])); -#endif // __CUDA_ARCH__ >= CC_AMPERE +#endif // __CUDA_ARCH__ >= GGML_CUDA_CC_AMPERE #else GGML_UNUSED(mma_A); GGML_UNUSED(mma_B); diff --git a/ggml/src/ggml-cuda/mmq.cu b/ggml/src/ggml-cuda/mmq.cu index 7f7c8c90b6fe2..270251df4f115 100644 --- a/ggml/src/ggml-cuda/mmq.cu +++ b/ggml/src/ggml-cuda/mmq.cu @@ -27,7 +27,7 @@ void ggml_cuda_op_mul_mat_q( // The stream-k decomposition is only faster for recent NVIDIA GPUs. // Also its fixup needs to allocate a temporary buffer in the memory pool. // There are multiple parallel CUDA streams for src1_ncols != ne11 which would introduce a race condition for this buffer. 
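Note on the renaming above: the bare CC_* constants become GGML_CUDA_CC_*, but the scheme stays the same, the compute capability is a single integer encoded as 100*major + 10*minor, with a large offset added for AMD devices under HIP so that one range check can tell vendors and generations apart (as in the use_stream_k condition that follows). A minimal C++ sketch of that comparison; the numeric values are assumptions for illustration, not taken from the ggml headers:

```cpp
// Illustrative sketch of how the renamed GGML_CUDA_CC_* constants are compared.
// The values below are assumed for demonstration only.
#include <cstdio>

constexpr int GGML_CUDA_CC_PASCAL     = 600;      // assumed value
constexpr int GGML_CUDA_CC_VOLTA      = 700;      // assumed value
constexpr int GGML_CUDA_CC_OFFSET_AMD = 1000000;  // assumed value

// cc = 100*major + 10*minor; AMD devices add the offset, so a single
// integer range check separates "recent NVIDIA" from everything else.
static bool is_nvidia_volta_or_newer(int cc) {
    return cc >= GGML_CUDA_CC_VOLTA && cc < GGML_CUDA_CC_OFFSET_AMD;
}

int main() {
    const int cc_turing = 100*7 + 10*5;                           // e.g. sm_75
    const int cc_amd    = 100*9 + 10*0 + GGML_CUDA_CC_OFFSET_AMD; // e.g. an AMD device
    printf("turing: %d, amd: %d\n",
           is_nvidia_volta_or_newer(cc_turing),
           is_nvidia_volta_or_newer(cc_amd));
    return 0;
}
```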
- const bool use_stream_k = compute_capability >= CC_VOLTA && compute_capability < CC_OFFSET_AMD && src1_ncols == ne11; + const bool use_stream_k = compute_capability >= GGML_CUDA_CC_VOLTA && compute_capability < GGML_CUDA_CC_OFFSET_AMD && src1_ncols == ne11; const mmq_args args = {src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stride00, src1_padded_row_size, src1_ncols, ne11, nrows_dst, use_stream_k}; switch (src0->type) { @@ -136,7 +136,7 @@ bool ggml_cuda_should_use_mmq(enum ggml_type type, int cc, int64_t ne11) { return true; } - if (cc < MIN_CC_DP4A) { + if (cc < GGML_CUDA_CC_DP4A) { return false; } @@ -144,9 +144,9 @@ bool ggml_cuda_should_use_mmq(enum ggml_type type, int cc, int64_t ne11) { return true; #endif //GGML_CUDA_FORCE_MMQ - if (cc < CC_OFFSET_AMD) { - return cc < CC_VOLTA || ne11 < MMQ_DP4A_MAX_BATCH_SIZE; + if (cc < GGML_CUDA_CC_OFFSET_AMD) { + return cc < GGML_CUDA_CC_VOLTA || ne11 < MMQ_DP4A_MAX_BATCH_SIZE; } - return (cc < CC_RDNA3 && cc != CC_CDNA && cc != CC_VEGA20) || ne11 < MMQ_DP4A_MAX_BATCH_SIZE; + return (cc < GGML_CUDA_CC_RDNA3 && cc != GGML_CUDA_CC_CDNA && cc != GGML_CUDA_CC_VEGA20) || ne11 < MMQ_DP4A_MAX_BATCH_SIZE; } diff --git a/ggml/src/ggml-cuda/mmq.cuh b/ggml/src/ggml-cuda/mmq.cuh index 8d8867121f321..3cd508a1d4a1c 100644 --- a/ggml/src/ggml-cuda/mmq.cuh +++ b/ggml/src/ggml-cuda/mmq.cuh @@ -89,9 +89,9 @@ struct tile_x_sizes { static constexpr int get_mmq_x_max_host(const int cc) { return int8_mma_available(cc) ? 128 : #ifdef GGML_CUDA_FORCE_MMQ - cc >= CC_VOLTA && cc < CC_OFFSET_AMD ? 128 : 64; + cc >= GGML_CUDA_CC_VOLTA && cc < GGML_CUDA_CC_OFFSET_AMD ? 128 : 64; #else - cc >= CC_VOLTA && cc < CC_OFFSET_AMD ? MMQ_DP4A_MAX_BATCH_SIZE : 64; + cc >= GGML_CUDA_CC_VOLTA && cc < GGML_CUDA_CC_OFFSET_AMD ? MMQ_DP4A_MAX_BATCH_SIZE : 64; #endif // GGML_CUDA_FORCE_MMQ } @@ -104,23 +104,23 @@ static constexpr __device__ int get_mmq_x_max_device() { return 128; #else // defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__) -#if __CUDA_ARCH__ >= CC_VOLTA +#if __CUDA_ARCH__ >= GGML_CUDA_CC_VOLTA #ifdef GGML_CUDA_FORCE_MMQ return MMQ_DP4A_MAX_BATCH_SIZE; #else // GGML_CUDA_FORCE_MMQ return 128; #endif // GGML_CUDA_FORCE_MMQ -#else // __CUDA_ARCH__ >= CC_VOLTA +#else // __CUDA_ARCH__ >= GGML_CUDA_CC_VOLTA return 64; -#endif // __CUDA_ARCH__ >= CC_VOLTA +#endif // __CUDA_ARCH__ >= GGML_CUDA_CC_VOLTA #endif // defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__) #endif // INT8_MMA_AVAILABLE } static constexpr int get_mmq_y_host(const int cc) { - return cc >= CC_OFFSET_AMD ? (cc == CC_RDNA1 ? 64 : 128) : (cc >= CC_VOLTA ? 128 : 64); + return cc >= GGML_CUDA_CC_OFFSET_AMD ? (cc == GGML_CUDA_CC_RDNA1 ? 64 : 128) : (cc >= GGML_CUDA_CC_VOLTA ? 
128 : 64); } static constexpr __device__ int get_mmq_y_device() { @@ -131,11 +131,11 @@ static constexpr __device__ int get_mmq_y_device() { return 128; #endif // defined RDNA1 #else -#if __CUDA_ARCH__ >= CC_VOLTA +#if __CUDA_ARCH__ >= GGML_CUDA_CC_VOLTA return 128; #else return 64; -#endif // __CUDA_ARCH__ >= CC_VOLTA +#endif // __CUDA_ARCH__ >= GGML_CUDA_CC_VOLTA #endif // defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__) } @@ -2574,11 +2574,11 @@ template __launch_bounds__(WARP_SIZE*nwarps, 2) #endif // defined(RDNA3) || defined(RDNA2) || defined(CDNA) || defined(GCN) #else -#if __CUDA_ARCH__ >= CC_VOLTA +#if __CUDA_ARCH__ >= GGML_CUDA_CC_VOLTA __launch_bounds__(WARP_SIZE*nwarps, 1) #else __launch_bounds__(WARP_SIZE*nwarps, 2) -#endif // __CUDA_ARCH__ >= CC_VOLTA +#endif // __CUDA_ARCH__ >= GGML_CUDA_CC_VOLTA #endif // defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__) static __global__ void mul_mat_q( const char * __restrict__ x, const char * __restrict__ yc, float * __restrict__ dst, float * __restrict__ tmp_fixup, @@ -2594,7 +2594,7 @@ static __global__ void mul_mat_q( constexpr int mmq_y = get_mmq_y_device(); // On AMD or old CUDA the performance with stream-k was worse, use conventional tiling instead: -#if (defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) || __CUDA_ARCH__ < CC_VOLTA +#if (defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) || __CUDA_ARCH__ < GGML_CUDA_CC_VOLTA { constexpr bool fixup = false; mul_mat_q_process_tile @@ -2602,7 +2602,7 @@ static __global__ void mul_mat_q( blockIdx.x, blockIdx.y, 0, ne00/qk); return; } -#endif // (defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) || __CUDA_ARCH__ < CC_VOLTA +#endif // (defined(GGML_USE_HIP) && defined(__HIP_PLATFORM_AMD__)) || __CUDA_ARCH__ < GGML_CUDA_CC_VOLTA const int64_t blocks_per_ne00 = ne00 / qk; constexpr int blocks_per_iter = MMQ_ITER_K / qk; @@ -2825,7 +2825,7 @@ void mul_mat_q_case(ggml_backend_cuda_context & ctx, const mmq_args & args, cuda const int mmq_x_max = get_mmq_x_max_host(cc); const int mmq_y = get_mmq_y_host(cc); const int block_num_y = (args.ne01 + mmq_y - 1) / mmq_y; - const bool use_stream_k = cc >= CC_VOLTA && cc < CC_OFFSET_AMD; + const bool use_stream_k = cc >= GGML_CUDA_CC_VOLTA && cc < GGML_CUDA_CC_OFFSET_AMD; int mmq_x_best = 0; int nparts_best = INT_MAX; diff --git a/ggml/src/ggml-cuda/mmv.cu b/ggml/src/ggml-cuda/mmv.cu index cfe91f4283fa6..a4b4f6bc10d98 100644 --- a/ggml/src/ggml-cuda/mmv.cu +++ b/ggml/src/ggml-cuda/mmv.cu @@ -57,7 +57,7 @@ static __global__ void mul_mat_vec( if (block_size > WARP_SIZE) { buf_iw[tid/WARP_SIZE] = sumf; __syncthreads(); - if (tid > WARP_SIZE) { + if (tid >= WARP_SIZE) { return; } sumf = buf_iw[tid]; diff --git a/ggml/src/ggml-cuda/mmvq.cu b/ggml/src/ggml-cuda/mmvq.cu index 02d1509836c40..e3b912d875b5b 100644 --- a/ggml/src/ggml-cuda/mmvq.cu +++ b/ggml/src/ggml-cuda/mmvq.cu @@ -142,7 +142,7 @@ static void mul_mat_vec_q_cuda( int64_t nwarps = 1; int64_t rows_per_cuda_block = 1; - if (ggml_cuda_info().devices[id].cc < CC_CDNA || ggml_cuda_info().devices[id].cc == CC_RDNA1) { // NVIDIA and AMD older than RDNA2 but not CDNA + if (ggml_cuda_info().devices[id].cc < GGML_CUDA_CC_CDNA || ggml_cuda_info().devices[id].cc == GGML_CUDA_CC_RDNA1) { // NVIDIA and AMD older than RDNA2 but not CDNA switch(ncols_y) { case 1: nwarps = 4; diff --git a/ggml/src/ggml-cuda/sum.cu b/ggml/src/ggml-cuda/sum.cu index 31cfe53941fe0..e0dafc1d2049d 100644 --- a/ggml/src/ggml-cuda/sum.cu +++ b/ggml/src/ggml-cuda/sum.cu @@ -3,8 +3,6 @@ #endif // 
!defined(GGML_USE_HIP) && !defined(GGML_USE_MUSA) && CUDART_VERSION >= 11700 #ifdef USE_CUB -// On Windows CUB uses libraries with variables called CC_PASCAL which conflict with the define in common.cuh. -// For this reason CUB must be included BEFORE anything else. #include using namespace cub; #endif // USE_CUB diff --git a/ggml/src/ggml-impl.h b/ggml/src/ggml-impl.h index f39b7a88c8c4b..00a1546a7d5d0 100644 --- a/ggml/src/ggml-impl.h +++ b/ggml/src/ggml-impl.h @@ -30,11 +30,13 @@ extern "C" { #endif -#undef MIN -#undef MAX +#ifndef MIN +# define MIN(a, b) ((a) < (b) ? (a) : (b)) +#endif -#define MIN(a, b) ((a) < (b) ? (a) : (b)) -#define MAX(a, b) ((a) > (b) ? (a) : (b)) +#ifndef MAX +# define MAX(a, b) ((a) > (b) ? (a) : (b)) +#endif // required for mmap as gguf only guarantees 32-byte alignment #define TENSOR_ALIGNMENT 32 @@ -308,14 +310,14 @@ void ggml_aligned_free(void * ptr, size_t size); // FP16 to FP32 conversion #if defined(__ARM_NEON) - #ifdef _MSC_VER + #if defined(_MSC_VER) || (defined(__CUDACC__) && __CUDACC_VER_MAJOR__ <= 11) typedef uint16_t ggml_fp16_internal_t; #else typedef __fp16 ggml_fp16_internal_t; #endif #endif -#if defined(__ARM_NEON) && !defined(_MSC_VER) +#if defined(__ARM_NEON) && !defined(_MSC_VER) && !(defined(__CUDACC__) && __CUDACC_VER_MAJOR__ <= 11) #define GGML_COMPUTE_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) #define GGML_COMPUTE_FP32_TO_FP16(x) ggml_compute_fp32_to_fp16(x) diff --git a/ggml/src/ggml-metal/ggml-metal-impl.h b/ggml/src/ggml-metal/ggml-metal-impl.h index 53c13549650c8..e3dc25f1686fb 100644 --- a/ggml/src/ggml-metal/ggml-metal-impl.h +++ b/ggml/src/ggml-metal/ggml-metal-impl.h @@ -102,6 +102,21 @@ typedef struct { uint64_t nb3; } ggml_metal_kargs_cpy; +typedef struct { + int64_t ne10; + int64_t ne11; + int64_t ne12; + uint64_t nb10; + uint64_t nb11; + uint64_t nb12; + uint64_t nb13; + uint64_t nb1; + uint64_t nb2; + uint64_t nb3; + uint64_t offs; + bool inplace; +} ggml_metal_kargs_set; + typedef struct { int32_t ne00; int32_t ne01; @@ -192,6 +207,30 @@ typedef struct { int16_t r3; } ggml_metal_kargs_mul_mv; +typedef struct { + int32_t ne00; + int32_t ne01; + int32_t ne02; + uint64_t nb00; + uint64_t nb01; + uint64_t nb02; + uint64_t nb03; + int32_t ne10; + int32_t ne11; + int32_t ne12; + uint64_t nb10; + uint64_t nb11; + uint64_t nb12; + uint64_t nb13; + int32_t ne0; + int32_t ne1; + int16_t r2; + int16_t r3; + int16_t nsg; + int16_t nxpsg; + int16_t r1ptg; +} ggml_metal_kargs_mul_mv_ext; + typedef struct { int32_t nei0; int32_t nei1; diff --git a/ggml/src/ggml-metal/ggml-metal.m b/ggml/src/ggml-metal/ggml-metal.m index ae6b25edea95a..34fe5778e4f09 100644 --- a/ggml/src/ggml-metal/ggml-metal.m +++ b/ggml/src/ggml-metal/ggml-metal.m @@ -175,6 +175,46 @@ static void ggml_backend_metal_device_rel(struct ggml_backend_metal_device_conte GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_F16_F32_R1_2, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_F16_F32_R1_3, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_F16_F32_R1_4, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_F16_F32_R1_5, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_0_F32_R1_2, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_0_F32_R1_3, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_0_F32_R1_4, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_0_F32_R1_5, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_1_F32_R1_2, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_1_F32_R1_3, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_1_F32_R1_4, + 
GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_1_F32_R1_5, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_0_F32_R1_2, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_0_F32_R1_3, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_0_F32_R1_4, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_0_F32_R1_5, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_1_F32_R1_2, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_1_F32_R1_3, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_1_F32_R1_4, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_1_F32_R1_5, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q8_0_F32_R1_2, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q8_0_F32_R1_3, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q8_0_F32_R1_4, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q8_0_F32_R1_5, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_K_F32_R1_2, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_K_F32_R1_3, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_K_F32_R1_4, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_K_F32_R1_5, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_K_F32_R1_2, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_K_F32_R1_3, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_K_F32_R1_4, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_K_F32_R1_5, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q6_K_F32_R1_2, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q6_K_F32_R1_3, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q6_K_F32_R1_4, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q6_K_F32_R1_5, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_IQ4_NL_F32_R1_2, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_IQ4_NL_F32_R1_3, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_IQ4_NL_F32_R1_4, + GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_IQ4_NL_F32_R1_5, GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32, @@ -266,8 +306,11 @@ static void ggml_backend_metal_device_rel(struct ggml_backend_metal_device_conte GGML_METAL_KERNEL_TYPE_IM2COL_F32, GGML_METAL_KERNEL_TYPE_IM2COL_EXT_F16, GGML_METAL_KERNEL_TYPE_IM2COL_EXT_F32, + GGML_METAL_KERNEL_TYPE_CONV_TRANSPOSE_1D_F32_F32, + GGML_METAL_KERNEL_TYPE_CONV_TRANSPOSE_1D_F16_F32, GGML_METAL_KERNEL_TYPE_UPSCALE_F32, GGML_METAL_KERNEL_TYPE_PAD_F32, + GGML_METAL_KERNEL_TYPE_PAD_REFLECT_1D_F32, GGML_METAL_KERNEL_TYPE_ARANGE_F32, GGML_METAL_KERNEL_TYPE_TIMESTEP_EMBEDDING_F32, GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, @@ -329,6 +372,8 @@ static void ggml_backend_metal_device_rel(struct ggml_backend_metal_device_conte GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_Q5_0_H256, GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_Q5_1_H256, GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_Q8_0_H256, + GGML_METAL_KERNEL_TYPE_SET_I32, + GGML_METAL_KERNEL_TYPE_SET_F32, GGML_METAL_KERNEL_TYPE_CPY_F32_F32, GGML_METAL_KERNEL_TYPE_CPY_F32_F16, GGML_METAL_KERNEL_TYPE_CPY_F32_BF16, @@ -350,6 +395,7 @@ static void ggml_backend_metal_device_rel(struct ggml_backend_metal_device_conte GGML_METAL_KERNEL_TYPE_SUM_ROWS, GGML_METAL_KERNEL_TYPE_POOL_2D_AVG_F32, GGML_METAL_KERNEL_TYPE_POOL_2D_MAX_F32, + GGML_METAL_KERNEL_TYPE_ARGMAX, GGML_METAL_KERNEL_TYPE_COUNT }; @@ -464,6 +510,35 @@ @implementation GGMLMetalClass #endif NSString * path_lib = [bundle pathForResource:@"default" ofType:@"metallib"]; + if (path_lib == nil) { + // Try to find the resource in the directory where the current binary located. 
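The Objective-C block that follows implements this fallback: when the bundle lookup fails, look for default.metallib next to the running binary, and if that file is a symlink, resolve it (relative targets are taken relative to the binary's directory) before giving up and falling back to compiling the Metal sources. A rough std::filesystem model of that search order, assuming C++17; names and structure here are illustrative only, not the actual ggml-metal code:

```cpp
// Sketch of the default.metallib lookup order, expressed with std::filesystem.
#include <filesystem>
#include <optional>

namespace fs = std::filesystem;

static std::optional<fs::path> find_default_metallib(const fs::path & exe_path) {
    fs::path candidate = exe_path.parent_path() / "default.metallib";
    if (!fs::exists(candidate)) {
        return std::nullopt; // not next to the binary either
    }
    if (fs::is_symlink(candidate)) {
        fs::path target = fs::read_symlink(candidate);
        // relative link targets are resolved against the binary's directory
        if (target.is_relative()) {
            target = candidate.parent_path() / target;
        }
        if (!fs::exists(target)) {
            return std::nullopt; // dangling link: fall back to source compilation
        }
        return target;
    }
    return candidate;
}
```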
+ NSString * current_binary = [[NSProcessInfo processInfo] arguments][0]; + NSString * bin_dir = [current_binary stringByDeletingLastPathComponent]; + NSString * default_metallib_path = [NSString pathWithComponents:@[bin_dir, @"default.metallib"]]; + if ([[NSFileManager defaultManager] isReadableFileAtPath:default_metallib_path]) { + GGML_LOG_INFO("%s: found '%s'\n", __func__, [default_metallib_path UTF8String]); + NSDictionary * atts = [[NSFileManager defaultManager] attributesOfItemAtPath:default_metallib_path error:&error]; + if (atts && atts[NSFileType] == NSFileTypeSymbolicLink) { + // Optionally, if this is a symlink, try to resolve it. + default_metallib_path = [[NSFileManager defaultManager] destinationOfSymbolicLinkAtPath:default_metallib_path error:&error]; + if (default_metallib_path && [default_metallib_path length] > 0 && ![[default_metallib_path substringToIndex:1] isEqualToString:@"/"]) { + // It is a relative path, adding the binary directory as directory prefix. + default_metallib_path = [NSString pathWithComponents:@[bin_dir, default_metallib_path]]; + } + if (!default_metallib_path || ![[NSFileManager defaultManager] isReadableFileAtPath:default_metallib_path]) { + // Link to the resource could not be resolved. + default_metallib_path = nil; + } else { + GGML_LOG_INFO("%s: symlink resolved '%s'\n", __func__, [default_metallib_path UTF8String]); + } + } + } else { + // The resource couldn't be found in the binary's directory. + default_metallib_path = nil; + } + path_lib = default_metallib_path; + } + if (try_metallib && path_lib != nil) { // pre-compiled library found NSURL * libURL = [NSURL fileURLWithPath:path_lib]; @@ -699,6 +774,46 @@ @implementation GGMLMetalClass GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32, mul_mv_q5_0_f32, has_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32, mul_mv_q5_1_f32, has_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32, mul_mv_q8_0_f32, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_F16_F32_R1_2, mul_mv_ext_f16_f32_r1_2, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_F16_F32_R1_3, mul_mv_ext_f16_f32_r1_3, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_F16_F32_R1_4, mul_mv_ext_f16_f32_r1_4, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_F16_F32_R1_5, mul_mv_ext_f16_f32_r1_5, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_0_F32_R1_2, mul_mv_ext_q4_0_f32_r1_2, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_0_F32_R1_3, mul_mv_ext_q4_0_f32_r1_3, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_0_F32_R1_4, mul_mv_ext_q4_0_f32_r1_4, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_0_F32_R1_5, mul_mv_ext_q4_0_f32_r1_5, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_1_F32_R1_2, mul_mv_ext_q4_1_f32_r1_2, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_1_F32_R1_3, mul_mv_ext_q4_1_f32_r1_3, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_1_F32_R1_4, mul_mv_ext_q4_1_f32_r1_4, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_1_F32_R1_5, mul_mv_ext_q4_1_f32_r1_5, has_simdgroup_reduction); + 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_0_F32_R1_2, mul_mv_ext_q5_0_f32_r1_2, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_0_F32_R1_3, mul_mv_ext_q5_0_f32_r1_3, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_0_F32_R1_4, mul_mv_ext_q5_0_f32_r1_4, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_0_F32_R1_5, mul_mv_ext_q5_0_f32_r1_5, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_1_F32_R1_2, mul_mv_ext_q5_1_f32_r1_2, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_1_F32_R1_3, mul_mv_ext_q5_1_f32_r1_3, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_1_F32_R1_4, mul_mv_ext_q5_1_f32_r1_4, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_1_F32_R1_5, mul_mv_ext_q5_1_f32_r1_5, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q8_0_F32_R1_2, mul_mv_ext_q8_0_f32_r1_2, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q8_0_F32_R1_3, mul_mv_ext_q8_0_f32_r1_3, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q8_0_F32_R1_4, mul_mv_ext_q8_0_f32_r1_4, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q8_0_F32_R1_5, mul_mv_ext_q8_0_f32_r1_5, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_K_F32_R1_2, mul_mv_ext_q4_K_f32_r1_2, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_K_F32_R1_3, mul_mv_ext_q4_K_f32_r1_3, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_K_F32_R1_4, mul_mv_ext_q4_K_f32_r1_4, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_K_F32_R1_5, mul_mv_ext_q4_K_f32_r1_5, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_K_F32_R1_2, mul_mv_ext_q5_K_f32_r1_2, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_K_F32_R1_3, mul_mv_ext_q5_K_f32_r1_3, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_K_F32_R1_4, mul_mv_ext_q5_K_f32_r1_4, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_K_F32_R1_5, mul_mv_ext_q5_K_f32_r1_5, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q6_K_F32_R1_2, mul_mv_ext_q6_K_f32_r1_2, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q6_K_F32_R1_3, mul_mv_ext_q6_K_f32_r1_3, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q6_K_F32_R1_4, mul_mv_ext_q6_K_f32_r1_4, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q6_K_F32_R1_5, mul_mv_ext_q6_K_f32_r1_5, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_IQ4_NL_F32_R1_2, mul_mv_ext_iq4_nl_f32_r1_2, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_IQ4_NL_F32_R1_3, mul_mv_ext_iq4_nl_f32_r1_3, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_IQ4_NL_F32_R1_4, mul_mv_ext_iq4_nl_f32_r1_4, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_IQ4_NL_F32_R1_5, mul_mv_ext_iq4_nl_f32_r1_5, has_simdgroup_reduction); 
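Every pipeline registered above follows one naming scheme: kernel_mul_mv_ext_<src0 type>_f32_r1_<N>, where N in [2, 5] is the number of src1 rows processed per threadgroup. The host later picks one of these by (source type, r1ptg) through the fixed enum; purely to illustrate the convention (this helper does not exist in the tree), the mapping could be written as:

```cpp
// Illustration of the MUL_MV_EXT naming convention; the real code indexes a
// fixed enum rather than building strings at runtime.
#include <stdexcept>
#include <string>

static std::string mul_mv_ext_kernel_name(const std::string & type, int r1ptg) {
    if (r1ptg < 2 || r1ptg > 5) {
        throw std::invalid_argument("r1ptg must be in [2, 5]");
    }
    return "kernel_mul_mv_ext_" + type + "_f32_r1_" + std::to_string(r1ptg);
}

// e.g. mul_mv_ext_kernel_name("q4_0", 3) -> "kernel_mul_mv_ext_q4_0_f32_r1_3"
```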
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32, mul_mv_q2_K_f32, has_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32, mul_mv_q3_K_f32, has_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32, mul_mv_q4_K_f32, has_simdgroup_reduction); @@ -790,8 +905,11 @@ @implementation GGMLMetalClass GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F32, im2col_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_EXT_F16, im2col_ext_f16, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_EXT_F32, im2col_ext_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CONV_TRANSPOSE_1D_F32_F32, conv_transpose_1d_f32_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CONV_TRANSPOSE_1D_F16_F32, conv_transpose_1d_f16_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_UPSCALE_F32, upscale_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_PAD_F32, pad_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_PAD_REFLECT_1D_F32, pad_reflect_1d_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_TIMESTEP_EMBEDDING_F32, timestep_embedding_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARANGE_F32, arange_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, argsort_f32_i32_asc, true); @@ -853,6 +971,8 @@ @implementation GGMLMetalClass GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_Q5_0_H256, flash_attn_ext_vec_q5_0_h256, has_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_Q5_1_H256, flash_attn_ext_vec_q5_1_h256, has_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_Q8_0_H256, flash_attn_ext_vec_q8_0_h256, has_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SET_F32, set_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SET_I32, set_i32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_F32, cpy_f32_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_F16, cpy_f32_f16, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_BF16, cpy_f32_bf16, use_bfloat); @@ -872,6 +992,7 @@ @implementation GGMLMetalClass GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SIN, sin, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_COS, cos, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SUM_ROWS, sum_rows, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGMAX, argmax, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_POOL_2D_AVG_F32, pool_2d_avg_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_POOL_2D_MAX_F32, pool_2d_max_f32, true); } @@ -989,6 +1110,7 @@ static bool ggml_metal_supports_op(const struct ggml_backend_metal_device_contex case GGML_OP_REPEAT: case GGML_OP_SCALE: case GGML_OP_CLAMP: + case GGML_OP_CONV_TRANSPOSE_1D: return true; case GGML_OP_SQR: case GGML_OP_SQRT: @@ -1001,6 +1123,7 @@ static bool ggml_metal_supports_op(const struct ggml_backend_metal_device_contex return has_simdgroup_reduction; case GGML_OP_RMS_NORM: return has_simdgroup_reduction && (op->ne[0] % 4 == 0); + case GGML_OP_ARGMAX: case GGML_OP_NORM: case GGML_OP_ROPE: return true; @@ -1011,6 +1134,7 @@ static bool ggml_metal_supports_op(const struct ggml_backend_metal_device_contex case GGML_OP_POOL_2D: case GGML_OP_UPSCALE: case GGML_OP_PAD: + case GGML_OP_PAD_REFLECT_1D: case GGML_OP_ARANGE: case GGML_OP_TIMESTEP_EMBEDDING: case GGML_OP_ARGSORT: @@ -1068,6 +1192,16 @@ static bool ggml_metal_supports_op(const struct 
ggml_backend_metal_device_contex return false; }; } + case GGML_OP_SET: + { + switch (op->src[0]->type) { + case GGML_TYPE_F32: + case GGML_TYPE_I32: + return true; + default: + return false; + }; + } case GGML_OP_DIAG_MASK_INF: case GGML_OP_GET_ROWS: { @@ -1928,30 +2062,180 @@ static void ggml_metal_encode_node( // find the break-even point where the matrix-matrix kernel becomes more efficient compared // to the matrix-vector kernel - int ne11_mm_min = 4; + const int ne11_mm_min = 4; + + // first try to use small-batch mat-mv kernels + // these should be efficient for BS [2, ~8] + if (src1t == GGML_TYPE_F32 && (ne00%256 == 0) && + ( + ( + ( + src0t == GGML_TYPE_F16 || // TODO: helper function + src0t == GGML_TYPE_Q4_0 || + src0t == GGML_TYPE_Q4_1 || + src0t == GGML_TYPE_Q5_0 || + src0t == GGML_TYPE_Q5_1 || + src0t == GGML_TYPE_Q8_0 || + src0t == GGML_TYPE_IQ4_NL || + false) && (ne11 >= 2 && ne11 <= 8) + ) || + ( + ( + src0t == GGML_TYPE_Q4_K || + src0t == GGML_TYPE_Q5_K || + src0t == GGML_TYPE_Q6_K || + false) && (ne11 >= 4 && ne11 <= 8) + ) + ) + ) { + // TODO: determine the optimal parameters based on grid utilization + // I still don't know why we should not always use the maximum available threads: + // + // nsg = pipeline.maxTotalThreadsPerThreadgroup / 32 + // + // my current hypothesis is that the work grid is not evenly divisible for different nsg + // values and there can be some tail effects when nsg is high. need to confirm this + // + const int nsg = 2; // num simdgroups per threadgroup + const int nxpsg = ne11 < 3 ? 16 : 8; // num threads along row per simdgroup + const int nypsg = 32/nxpsg; // num threads along col per simdgroup (i.e. a simdgroup processes that many src0 rows at a time) + const int r0ptg = nypsg*nsg; // num src0 rows per threadgroup + int r1ptg = 4; // num src1 rows per threadgroup + + // note: not sure how optimal are those across all different hardware. 
there might be someting cleverer + switch (ne11) { + case 2: + r1ptg = 2; break; + case 3: + case 6: + r1ptg = 3; break; + case 4: + case 7: + case 8: + r1ptg = 4; break; + case 5: + r1ptg = 5; break; + }; -#if 0 - // the numbers below are measured on M2 Ultra for 7B and 13B models - // these numbers do not translate to other devices or model sizes - // TODO: need to find a better approach - if ([device.name isEqualToString:@"Apple M2 Ultra"]) { - switch (src0t) { - case GGML_TYPE_F16: ne11_mm_min = 2; break; - case GGML_TYPE_Q8_0: ne11_mm_min = 7; break; - case GGML_TYPE_Q2_K: ne11_mm_min = 15; break; - case GGML_TYPE_Q3_K: ne11_mm_min = 7; break; - case GGML_TYPE_Q4_0: - case GGML_TYPE_Q4_1: ne11_mm_min = 15; break; - case GGML_TYPE_Q4_K: ne11_mm_min = 11; break; - case GGML_TYPE_Q5_0: // not tested yet - case GGML_TYPE_Q5_1: ne11_mm_min = 13; break; // not tested yet - case GGML_TYPE_Q5_K: ne11_mm_min = 7; break; - case GGML_TYPE_Q6_K: ne11_mm_min = 7; break; - default: ne11_mm_min = 1; break; - } - } -#endif + id pipeline = nil; + switch (src0->type) { + case GGML_TYPE_F16: + switch (r1ptg) { + case 2: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_F16_F32_R1_2].pipeline; break; + case 3: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_F16_F32_R1_3].pipeline; break; + case 4: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_F16_F32_R1_4].pipeline; break; + case 5: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_F16_F32_R1_5].pipeline; break; + default: GGML_ABORT("not implemented"); + } break; + case GGML_TYPE_Q4_0: + switch (r1ptg) { + case 2: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_0_F32_R1_2].pipeline; break; + case 3: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_0_F32_R1_3].pipeline; break; + case 4: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_0_F32_R1_4].pipeline; break; + case 5: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_0_F32_R1_5].pipeline; break; + default: GGML_ABORT("not implemented"); + } break; + case GGML_TYPE_Q4_1: + switch (r1ptg) { + case 2: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_1_F32_R1_2].pipeline; break; + case 3: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_1_F32_R1_3].pipeline; break; + case 4: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_1_F32_R1_4].pipeline; break; + case 5: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_1_F32_R1_5].pipeline; break; + default: GGML_ABORT("not implemented"); + } break; + case GGML_TYPE_Q5_0: + switch (r1ptg) { + case 2: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_0_F32_R1_2].pipeline; break; + case 3: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_0_F32_R1_3].pipeline; break; + case 4: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_0_F32_R1_4].pipeline; break; + case 5: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_0_F32_R1_5].pipeline; break; + default: GGML_ABORT("not implemented"); + } break; + case GGML_TYPE_Q5_1: + switch (r1ptg) { + case 2: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_1_F32_R1_2].pipeline; break; + case 3: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_1_F32_R1_3].pipeline; break; + case 4: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_1_F32_R1_4].pipeline; break; + case 5: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_1_F32_R1_5].pipeline; break; + default: GGML_ABORT("not implemented"); + } break; + case 
GGML_TYPE_Q8_0: + switch (r1ptg) { + case 2: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q8_0_F32_R1_2].pipeline; break; + case 3: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q8_0_F32_R1_3].pipeline; break; + case 4: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q8_0_F32_R1_4].pipeline; break; + case 5: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q8_0_F32_R1_5].pipeline; break; + default: GGML_ABORT("not implemented"); + } break; + case GGML_TYPE_Q4_K: + switch (r1ptg) { + case 2: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_K_F32_R1_2].pipeline; break; + case 3: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_K_F32_R1_3].pipeline; break; + case 4: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_K_F32_R1_4].pipeline; break; + case 5: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q4_K_F32_R1_5].pipeline; break; + default: GGML_ABORT("not implemented"); + } break; + case GGML_TYPE_Q5_K: + switch (r1ptg) { + case 2: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_K_F32_R1_2].pipeline; break; + case 3: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_K_F32_R1_3].pipeline; break; + case 4: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_K_F32_R1_4].pipeline; break; + case 5: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q5_K_F32_R1_5].pipeline; break; + default: GGML_ABORT("not implemented"); + } break; + case GGML_TYPE_Q6_K: + switch (r1ptg) { + case 2: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q6_K_F32_R1_2].pipeline; break; + case 3: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q6_K_F32_R1_3].pipeline; break; + case 4: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q6_K_F32_R1_4].pipeline; break; + case 5: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_Q6_K_F32_R1_5].pipeline; break; + default: GGML_ABORT("not implemented"); + } break; + case GGML_TYPE_IQ4_NL: + switch (r1ptg) { + case 2: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_IQ4_NL_F32_R1_2].pipeline; break; + case 3: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_IQ4_NL_F32_R1_3].pipeline; break; + case 4: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_IQ4_NL_F32_R1_4].pipeline; break; + case 5: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_EXT_IQ4_NL_F32_R1_5].pipeline; break; + default: GGML_ABORT("not implemented"); + } break; + default: GGML_ABORT("not implemented"); + } + + ggml_metal_kargs_mul_mv_ext args = { + /*.ne00 =*/ ne00, + /*.ne01 =*/ ne01, + /*.ne02 =*/ ne02, + /*.nb00 =*/ nb00, + /*.nb01 =*/ nb01, + /*.nb02 =*/ nb02, + /*.nb03 =*/ nb03, + /*.ne10 =*/ ne10, + /*.ne11 =*/ ne11, + /*.ne12 =*/ ne12, + /*.nb10 =*/ nb10, + /*.nb11 =*/ nb11, + /*.nb12 =*/ nb12, + /*.nb13 =*/ nb13, + /*.ne0 =*/ ne0, + /*.ne1 =*/ ne1, + /*.r2 =*/ r2, + /*.r3 =*/ r3, + /*.nsg =*/ nsg, + /*.nxpsg =*/ nxpsg, + /*.r1ptg =*/ r1ptg, + }; + + [encoder setComputePipelineState:pipeline]; + [encoder setBytes:&args length:sizeof(args) atIndex:0]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:1]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:2]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:3]; + + //printf("ne01 = %lld nr0ptg = %d\n", ne01, nr0ptg); + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + r0ptg - 1)/r0ptg, (ne11 + r1ptg - 1)/r1ptg, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(32, nsg, 1)]; + } else // for now the matrix-matrix multiplication kernel only works on A14+/M1+ 
SoCs // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel if ([device supportsFamily:MTLGPUFamilyApple7] && @@ -2908,6 +3192,49 @@ static void ggml_metal_encode_node( [encoder dispatchThreadgroups:MTLSizeMake(IC, OH, OW) threadsPerThreadgroup:MTLSizeMake(N, KH, KW)]; } } break; + case GGML_OP_CONV_TRANSPOSE_1D: + { + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(ggml_is_contiguous(src1)); + GGML_ASSERT(src0->type == GGML_TYPE_F16 || src0->type == GGML_TYPE_F32); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; + + const int32_t IC = src1->ne[1]; + const int32_t IL = src1->ne[0]; + + const int32_t K = src0->ne[0]; + + const int32_t OL = dst->ne[0]; + const int32_t OC = dst->ne[1]; + + id pipeline; + + switch (src0->type) { + case GGML_TYPE_F32: { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CONV_TRANSPOSE_1D_F32_F32].pipeline; + } break; + case GGML_TYPE_F16: { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CONV_TRANSPOSE_1D_F16_F32].pipeline; + } break; + default: GGML_ABORT("fatal error"); + }; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&IC length:sizeof( int32_t) atIndex:3]; + [encoder setBytes:&IL length:sizeof( int32_t) atIndex:4]; + [encoder setBytes:&K length:sizeof( int32_t) atIndex:5]; + [encoder setBytes:&s0 length:sizeof( int32_t) atIndex:6]; + [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:8]; + + [encoder dispatchThreadgroups:MTLSizeMake(OL, OC, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; case GGML_OP_UPSCALE: { GGML_ASSERT(src0->type == GGML_TYPE_F32); @@ -2977,6 +3304,38 @@ static void ggml_metal_encode_node( const int nth = MIN(1024, ne0); + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_PAD_REFLECT_1D: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + const int32_t p0 = ((const int32_t *)(dst->op_params))[0]; + const int32_t p1 = ((const int32_t *)(dst->op_params))[1]; + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_PAD_REFLECT_1D_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:8]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:9]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:10]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:11]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:12]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:13]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:14]; + [encoder setBytes:&p0 length:sizeof(p0) atIndex:15]; + [encoder setBytes:&p1 length:sizeof(p1) atIndex:16]; + + const int nth = MIN(1024, ne0); + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) 
threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; case GGML_OP_ARANGE: @@ -3508,6 +3867,68 @@ static void ggml_metal_encode_node( [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; + case GGML_OP_SET: + { + GGML_ASSERT(ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); + + // src0 and dst as viewed during set + const size_t dst_nb0 = ggml_element_size(src0); + + const size_t dst_nb1 = ((int32_t *) dst->op_params)[0]; + const size_t dst_nb2 = ((int32_t *) dst->op_params)[1]; + const size_t dst_nb3 = ((int32_t *) dst->op_params)[2]; + const size_t offset = ((int32_t *) dst->op_params)[3]; + const bool inplace = (bool) ((int32_t *) dst->op_params)[4]; + + if (!inplace) { + memcpy(((char *) dst->data), ((char *) src0->data), ggml_nbytes(dst)); + } + + const int im0 = (ne10 == 0 ? 0 : ne10-1); + const int im1 = (ne11 == 0 ? 0 : ne11-1); + const int im2 = (ne12 == 0 ? 0 : ne12-1); + const int im3 = (ne13 == 0 ? 0 : ne13-1); + + GGML_ASSERT(offset + im0*dst_nb0 + im1*dst_nb1 + im2*dst_nb2 + im3*dst_nb3 <= ggml_nbytes(dst)); + + id pipeline = nil; + + switch (src0t) { + case GGML_TYPE_F32: + GGML_ASSERT(nb10 == sizeof(float)); + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SET_F32].pipeline; break; + case GGML_TYPE_I32: + GGML_ASSERT(nb10 == sizeof(int32_t)); + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SET_I32].pipeline; break; + default: GGML_ABORT("fatal error"); + } + + ggml_metal_kargs_set args = { + /*.ne10 =*/ ne10, + /*.ne11 =*/ ne11, + /*.ne12 =*/ ne12, + /*.nb10 =*/ nb10, + /*.nb11 =*/ nb11, + /*.nb12 =*/ nb12, + /*.nb13 =*/ nb13, + /*.nb1 =*/ dst_nb1, + /*.nb2 =*/ dst_nb2, + /*.nb3 =*/ dst_nb3, + /*.offs =*/ offset, + /*.inplace =*/ inplace, + }; + + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne10); + + [encoder setComputePipelineState:pipeline]; + [encoder setBytes:&args length:sizeof(args) atIndex:0]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:1]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:2]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:3]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne11, ne12, ne13) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; case GGML_OP_POOL_2D: { GGML_ASSERT(ggml_is_contiguous(src0)); @@ -3567,6 +3988,31 @@ static void ggml_metal_encode_node( [encoder dispatchThreadgroups:MTLSizeMake(n_tg, 1, 1) threadsPerThreadgroup:MTLSizeMake(n_threads, 1, 1)]; } break; + case GGML_OP_ARGMAX: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT(ggml_is_contiguous_1(src0)); + GGML_ASSERT(nb00 == ggml_type_size(src0->type)); + + const int64_t nrows = ggml_nrows(src0); + + int nth = 32; // SIMD width + while (nth < ne00 && nth*ne01*ne02*ne03 < 256) { + nth *= 2; + } + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGMAX].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; + [encoder setThreadgroupMemoryLength:32*sizeof(int32_t) atIndex:1]; + + [encoder dispatchThreadgroups:MTLSizeMake(nrows, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; default: { GGML_LOG_ERROR("%s: error: node %3d, op = %8s not implemented\n", __func__, idx, ggml_op_name(dst->op)); 
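Before the Metal shader changes below, a note on the GGML_OP_SET encoding above: the destination is re-viewed with byte strides and an offset taken from dst->op_params, and the assertion guarantees that the last element written from src1 still lands inside dst. A standalone C++ model of that bounds check follows; the names mirror the encoder code, but this is a sketch, not the ggml API:

```cpp
// Reference model of the GGML_OP_SET bounds check.
#include <cstddef>
#include <cstdint>

struct set_view {
    size_t nb0, nb1, nb2, nb3; // byte strides of the dst view
    size_t offset;             // byte offset of the view inside dst
};

static bool set_view_fits(const set_view & v, const int64_t ne1[4], size_t dst_nbytes) {
    const int64_t im0 = ne1[0] ? ne1[0] - 1 : 0;
    const int64_t im1 = ne1[1] ? ne1[1] - 1 : 0;
    const int64_t im2 = ne1[2] ? ne1[2] - 1 : 0;
    const int64_t im3 = ne1[3] ? ne1[3] - 1 : 0;
    // byte offset of the last src1 element written into the view; the encoder
    // asserts this does not exceed ggml_nbytes(dst)
    const size_t last = v.offset + im0*v.nb0 + im1*v.nb1 + im2*v.nb2 + im3*v.nb3;
    return last <= dst_nbytes;
}
```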
diff --git a/ggml/src/ggml-metal/ggml-metal.metal b/ggml/src/ggml-metal/ggml-metal.metal index eaca38864bd65..8ba43904d0c1b 100644 --- a/ggml/src/ggml-metal/ggml-metal.metal +++ b/ggml/src/ggml-metal/ggml-metal.metal @@ -47,6 +47,11 @@ void dequantize_f16(device const half4x4 * src, short il, thread type4x4 & reg) reg = (type4x4)(*src); } +template +void dequantize_f16_t4(device const half4 * src, short il, thread type4 & reg) { + reg = (type4)(*(src + il)); +} + #if defined(GGML_METAL_USE_BF16) template void dequantize_bf16(device const bfloat4x4 * src, short il, thread type4x4 & reg) { @@ -55,7 +60,7 @@ void dequantize_bf16(device const bfloat4x4 * src, short il, thread type4x4 & re #endif template -void dequantize_q4_0(device const block_q4_0 *xb, short il, thread type4x4 & reg) { +void dequantize_q4_0(device const block_q4_0 * xb, short il, thread type4x4 & reg) { device const uint16_t * qs = ((device const uint16_t *)xb + 1); const float d1 = il ? (xb->d / 16.h) : xb->d; const float d2 = d1 / 256.f; @@ -73,8 +78,23 @@ void dequantize_q4_0(device const block_q4_0 *xb, short il, thread type4x4 & reg reg = (type4x4) reg_f; } +template +void dequantize_q4_0_t4(device const block_q4_0 * xb, short il, thread type4 & reg) { + device const uint16_t * qs = ((device const uint16_t *)xb + 1); + const float d1 = (il/4) ? (xb->d / 16.h) : xb->d; + const float d2 = d1 / 256.f; + const float md = -8.h * xb->d; + const ushort mask0 = (il/4) ? 0x00F0 : 0x000F; + const ushort mask1 = mask0 << 8; + + for (int i = 0; i < 2; i++) { + reg[2*i + 0] = d1 * (qs[2*(il%4) + i] & mask0) + md; + reg[2*i + 1] = d2 * (qs[2*(il%4) + i] & mask1) + md; + } +} + template -void dequantize_q4_1(device const block_q4_1 *xb, short il, thread type4x4 & reg) { +void dequantize_q4_1(device const block_q4_1 * xb, short il, thread type4x4 & reg) { device const uint16_t * qs = ((device const uint16_t *)xb + 2); const float d1 = il ? (xb->d / 16.h) : xb->d; const float d2 = d1 / 256.f; @@ -92,8 +112,23 @@ void dequantize_q4_1(device const block_q4_1 *xb, short il, thread type4x4 & reg reg = (type4x4) reg_f; } +template +void dequantize_q4_1_t4(device const block_q4_1 * xb, short il, thread type4 & reg) { + device const uint16_t * qs = ((device const uint16_t *)xb + 2); + const float d1 = (il/4) ? (xb->d / 16.h) : xb->d; + const float d2 = d1 / 256.f; + const float m = xb->m; + const ushort mask0 = (il/4) ? 0x00F0 : 0x000F; + const ushort mask1 = mask0 << 8; + + for (int i = 0; i < 2; i++) { + reg[2*i + 0] = d1 * (qs[2*(il%4) + i] & mask0) + m; + reg[2*i + 1] = d2 * (qs[2*(il%4) + i] & mask1) + m; + } +} + template -void dequantize_q5_0(device const block_q5_0 *xb, short il, thread type4x4 & reg) { +void dequantize_q5_0(device const block_q5_0 * xb, short il, thread type4x4 & reg) { device const uint16_t * qs = ((device const uint16_t *)xb + 3); const float d = xb->d; const float md = -16.h * xb->d; @@ -124,8 +159,38 @@ void dequantize_q5_0(device const block_q5_0 *xb, short il, thread type4x4 & reg reg = (type4x4) reg_f; } +template +void dequantize_q5_0_t4(device const block_q5_0 * xb, short il, thread type4 & reg) { + device const uint16_t * qs = ((device const uint16_t *)xb + 3); + const float d = xb->d; + const float md = -16.h * xb->d; + const ushort mask = (il/4) ? 0x00F0 : 0x000F; + + const uint32_t qh = *((device const uint32_t *)xb->qh); + + const int x_mv = (il/4) ? 4 : 0; + + const int gh_mv = (il/4) ? 12 : 0; + const int gh_bk = (il/4) ? 
0 : 4; + + for (int ii = 0; ii < 2; ii++) { + int i = 2*(il%4) + ii; + + // extract the 5-th bits for x0 and x1 + const uint8_t xh_0 = ((qh >> (gh_mv + 2*i )) << gh_bk) & 0x10; + const uint8_t xh_1 = ((qh >> (gh_mv + 2*i+1)) << gh_bk) & 0x10; + + // combine the 4-bits from qs with the 5th bit + const int32_t x0 = ((((qs[i] ) & mask) >> x_mv) | xh_0); + const int32_t x1 = ((((qs[i] >> 8) & mask) >> x_mv) | xh_1); + + reg[2*ii + 0] = d * x0 + md; + reg[2*ii + 1] = d * x1 + md; + } +} + template -void dequantize_q5_1(device const block_q5_1 *xb, short il, thread type4x4 & reg) { +void dequantize_q5_1(device const block_q5_1 * xb, short il, thread type4x4 & reg) { device const uint16_t * qs = ((device const uint16_t *)xb + 4); const float d = xb->d; const float m = xb->m; @@ -156,10 +221,40 @@ void dequantize_q5_1(device const block_q5_1 *xb, short il, thread type4x4 & reg reg = (type4x4) reg_f; } +template +void dequantize_q5_1_t4(device const block_q5_1 * xb, short il, thread type4 & reg) { + device const uint16_t * qs = ((device const uint16_t *)xb + 4); + const float d = xb->d; + const float m = xb->m; + const ushort mask = (il/4) ? 0x00F0 : 0x000F; + + const uint32_t qh = *((device const uint32_t *)xb->qh); + + const int x_mv = (il/4) ? 4 : 0; + + const int gh_mv = (il/4) ? 12 : 0; + const int gh_bk = (il/4) ? 0 : 4; + + for (int ii = 0; ii < 2; ii++) { + int i = 2*(il%4) + ii; + + // extract the 5-th bits for x0 and x1 + const uint8_t xh_0 = ((qh >> (gh_mv + 2*i )) << gh_bk) & 0x10; + const uint8_t xh_1 = ((qh >> (gh_mv + 2*i+1)) << gh_bk) & 0x10; + + // combine the 4-bits from qs with the 5th bit + const int32_t x0 = ((((qs[i] ) & mask) >> x_mv) | xh_0); + const int32_t x1 = ((((qs[i] >> 8) & mask) >> x_mv) | xh_1); + + reg[2*ii + 0] = d * x0 + m; + reg[2*ii + 1] = d * x1 + m; + } +} + template void dequantize_q8_0(device const block_q8_0 *xb, short il, thread type4x4 & reg) { device const int8_t * qs = ((device const int8_t *)xb->qs); - const half d = xb->d; + const float d = xb->d; float4x4 reg_f; @@ -170,6 +265,16 @@ void dequantize_q8_0(device const block_q8_0 *xb, short il, thread type4x4 & reg reg = (type4x4) reg_f; } +template +void dequantize_q8_0_t4(device const block_q8_0 *xb, short il, thread type4 & reg) { + device const int8_t * qs = ((device const int8_t *)xb->qs); + const float d = xb->d; + + for (int i = 0; i < 4; i++) { + reg[i] = (qs[4*(il%4) + i + 16*(il/4)] * d); + } +} + template void dequantize_q2_K(device const block_q2_K *xb, short il, thread type4x4 & reg) { const float d = xb->d; @@ -224,7 +329,7 @@ static inline uchar2 get_scale_min_k4_just2(int j, int k, device const uchar * q } template -void dequantize_q4_K(device const block_q4_K *xb, short il, thread type4x4 & reg) { +void dequantize_q4_K(device const block_q4_K * xb, short il, thread type4x4 & reg) { device const uchar * q = xb->qs; short is = (il/4) * 2; @@ -236,7 +341,7 @@ void dequantize_q4_K(device const block_q4_K *xb, short il, thread type4x4 & reg const float dl = d * sc[0]; const float ml = min * sc[1]; - const ushort mask = il<2 ? 0x0F : 0xF0; + const ushort mask = il < 2 ? 
0x0F : 0xF0; for (int i = 0; i < 16; ++i) { reg[i/4][i%4] = dl * (q[i] & mask) - ml; } @@ -469,6 +574,19 @@ void dequantize_iq4_nl(device const block_iq4_nl * xb, short il, thread type4x4 } } +template +void dequantize_iq4_nl_t4(device const block_iq4_nl * xb, short il, thread type4 & reg) { + device const uint16_t * q4 = (device const uint16_t *)xb->qs; + const float d = xb->d; + uint32_t aux32; + thread const uint8_t * q8 = (thread const uint8_t *)&aux32; + aux32 = ((q4[2*(il%4)] | (q4[2*(il%4)+1] << 16)) >> 4*(il/4)) & 0x0f0f0f0f; + reg[0] = d * kvalues_iq4nl_f[q8[0]]; + reg[1] = d * kvalues_iq4nl_f[q8[1]]; + reg[2] = d * kvalues_iq4nl_f[q8[2]]; + reg[3] = d * kvalues_iq4nl_f[q8[3]]; +} + template void dequantize_iq4_xs(device const block_iq4_xs * xb, short il, thread type4x4 & reg) { // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 @@ -1248,6 +1366,63 @@ kernel void kernel_ssm_scan_f32( } } +kernel void kernel_argmax( + device const void * x, + device int32_t * dst, + constant int64_t & ncols, + constant uint64_t & nb01, + threadgroup float * shared_maxval [[threadgroup(0)]], + threadgroup int32_t * shared_argmax [[threadgroup(1)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint ntg[[threads_per_threadgroup]]) { + device const float * x_row = (device const float *) ((device const char *) x + tgpig * nb01); + + float lmax = -INFINITY; + int32_t larg = -1; + + for (int i00 = tpitg; i00 < ncols; i00 += ntg) { + if (x_row[i00] > lmax) { + lmax = x_row[i00]; + larg = i00; + } + } + + // find the argmax value in the block + float max_val = simd_max(lmax); + int32_t arg_val = simd_max(select(-1, larg, lmax == max_val)); + + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + shared_maxval[tiisg] = -INFINITY; + shared_argmax[tiisg] = -1; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + shared_maxval[sgitg] = max_val; + shared_argmax[sgitg] = arg_val; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + max_val = shared_maxval[tiisg]; + arg_val = shared_argmax[tiisg]; + + float max_val_reduced = simd_max(max_val); + int32_t arg_val_reduced = simd_max(select(-1, arg_val, max_val == max_val_reduced)); + + dst[tgpig] = arg_val_reduced; + + return; + } + + dst[tgpig] = arg_val; +} + kernel void kernel_norm( constant ggml_metal_kargs_norm & args, device const char * src0, @@ -1752,6 +1927,301 @@ kernel void kernel_mul_mv_q8_0_f32( kernel_mul_mv_q8_0_f32_impl(args, src0, src1, dst, nullptr, tgpig, tiisg, sgitg); } +// mat-vec kernel processing in chunks of float4 +// chpb - chunks per quantization block +template +void kernel_mul_mv_ext_q4_f32_impl( + constant ggml_metal_kargs_mul_mv_ext & args, + device const char * src0, + device const char * src1, + device char * dst, + uint3 tgpig[[threadgroup_position_in_grid]], + ushort tiisg[[thread_index_in_simdgroup]], + ushort sgitg[[simdgroup_index_in_threadgroup]]) { + const short chpt = 4; // chunks per thread + + //const short nxpsg = (32); + const short nypsg = (32/nxpsg); + + const short tx = tiisg%nxpsg; + const short ty = tiisg/nxpsg; + + const int i01 = tgpig.x*(nypsg*args.nsg) + nypsg*sgitg + ty; + const int i11 = tgpig.y*r1ptg; + const int i1m = tgpig.z; + + const int i12 = i1m%args.ne12; + const int i13 = i1m/args.ne12; + + const uint64_t offset0 = i01*args.nb01 + (i12/args.r2)*args.nb02 + (i13/args.r3)*args.nb03; + const uint64_t offset1 = i11*args.nb11 
+ (i12 )*args.nb12 + (i13 )*args.nb13; + + device const q_t * xq = (i01 < args.ne01) ? (device const q_t *) (src0 + offset0) + tx/chpb : (device const q_t *) src0; + + device const float4 * y4[r1ptg]; + + for (int ir1 = 0; ir1 < r1ptg; ++ir1) { + y4[ir1] = (i11 + ir1 < args.ne11) ? (device const float4 *) (src1 + offset1 + ir1*args.nb11) + tx : (device const float4 *) src1; + } + + float sumf[r1ptg] = { [ 0 ... r1ptg - 1 ] = 0.0f }; + + short cch = tx%chpb; // current chunk index + + for (int ich = tx; 4*ich < args.ne00; ich += chpt*nxpsg) { + float4 lx[chpt]; + +#pragma unroll(chpt) + for (short ch = 0; ch < chpt; ++ch) { + deq_t4(xq, cch, lx[ch]); + + cch += nxpsg; + if (cch >= chpb) { + xq += cch/chpb; + cch %= chpb; + } + } + +#pragma unroll(chpt) + for (short ch = 0; ch < chpt; ++ch) { +#pragma unroll(r1ptg) + for (short ir1 = 0; ir1 < r1ptg; ++ir1) { + sumf[ir1] += dot(lx[ch], y4[ir1][ch*nxpsg]); + + } + } + +#pragma unroll(r1ptg) + for (short ir1 = 0; ir1 < r1ptg; ++ir1) { + y4[ir1] += chpt*nxpsg; + } + } + + // reduce only the threads in each row + for (short ir1 = 0; ir1 < r1ptg; ++ir1) { + if (nxpsg >= 32) { + sumf[ir1] += simd_shuffle_down(sumf[ir1], 16); + } + if (nxpsg >= 16) { + sumf[ir1] += simd_shuffle_down(sumf[ir1], 8); + } + if (nxpsg >= 8) { + sumf[ir1] += simd_shuffle_down(sumf[ir1], 4); + } + if (nxpsg >= 4) { + sumf[ir1] += simd_shuffle_down(sumf[ir1], 2); + } + if (nxpsg >= 2) { + sumf[ir1] += simd_shuffle_down(sumf[ir1], 1); + } + + //sumf[ir1] = simd_sum(sumf[ir1]); + } + + if (tx == 0) { + for (short ir1 = 0; ir1 < r1ptg && i11 + ir1 < args.ne11; ++ir1) { + device float * dst_f32 = (device float *) dst + (uint64_t)i1m*args.ne0*args.ne1 + (uint64_t)(i11 + ir1)*args.ne0; + + if (i01 < args.ne01) { + dst_f32[i01] = sumf[ir1]; + } + } + } +} + +// mat-vec kernel processing in chunks of float4x4 +template +void kernel_mul_mv_ext_q4x4_f32_impl( + constant ggml_metal_kargs_mul_mv_ext & args, + device const char * src0, + device const char * src1, + device char * dst, + uint3 tgpig[[threadgroup_position_in_grid]], + ushort tiisg[[thread_index_in_simdgroup]], + ushort sgitg[[simdgroup_index_in_threadgroup]]) { + const short chpt = 1; + + //const short nxpsg = (32); + const short nypsg = (32/nxpsg); + + const short tx = tiisg%nxpsg; + const short ty = tiisg/nxpsg; + + const int i01 = tgpig.x*(nypsg*args.nsg) + nypsg*sgitg + ty; + const int i11 = tgpig.y*r1ptg; + const int i1m = tgpig.z; + + const int i12 = i1m%args.ne12; + const int i13 = i1m/args.ne12; + + const uint64_t offset0 = i01*args.nb01 + (i12/args.r2)*args.nb02 + (i13/args.r3)*args.nb03; + const uint64_t offset1 = i11*args.nb11 + (i12 )*args.nb12 + (i13 )*args.nb13; + + device const q_t * xq = (i01 < args.ne01) ? (device const q_t *) (src0 + offset0) + tx/chpb : (device const q_t *) src0; + + device const float4x4 * y4x4[r1ptg]; + + for (int ir1 = 0; ir1 < r1ptg; ++ir1) { + y4x4[ir1] = (i11 + ir1 < args.ne11) ? (device const float4x4 *) (src1 + offset1 + ir1*args.nb11) + tx : (device const float4x4 *) src1; + } + + float sumf[r1ptg] = { [ 0 ... 
r1ptg - 1 ] = 0.0f }; + + short cch = tx%chpb; + + for (int ich = tx; 16*ich < args.ne00; ich += chpt*nxpsg) { + float4x4 lx[chpt]; + +#pragma unroll(chpt) + for (short ch = 0; ch < chpt; ++ch) { + deq_t4x4(xq, cch, lx[ch]); + + cch += nxpsg; + if (cch >= chpb) { + xq += cch/chpb; + cch %= chpb; + } + } + +#pragma unroll(chpt) + for (short ch = 0; ch < chpt; ++ch) { +#pragma unroll(r1ptg) + for (short ir1 = 0; ir1 < r1ptg; ++ir1) { + sumf[ir1] += + dot(lx[ch][0], y4x4[ir1][ch*nxpsg][0]) + + dot(lx[ch][1], y4x4[ir1][ch*nxpsg][1]) + + dot(lx[ch][2], y4x4[ir1][ch*nxpsg][2]) + + dot(lx[ch][3], y4x4[ir1][ch*nxpsg][3]); + + } + } + +#pragma unroll(r1ptg) + for (short ir1 = 0; ir1 < r1ptg; ++ir1) { + y4x4[ir1] += chpt*nxpsg; + } + } + + for (short ir1 = 0; ir1 < r1ptg; ++ir1) { + if (nxpsg >= 32) { + sumf[ir1] += simd_shuffle_down(sumf[ir1], 16); + } + if (nxpsg >= 16) { + sumf[ir1] += simd_shuffle_down(sumf[ir1], 8); + } + if (nxpsg >= 8) { + sumf[ir1] += simd_shuffle_down(sumf[ir1], 4); + } + if (nxpsg >= 4) { + sumf[ir1] += simd_shuffle_down(sumf[ir1], 2); + } + if (nxpsg >= 2) { + sumf[ir1] += simd_shuffle_down(sumf[ir1], 1); + } + + //sumf[ir1] = simd_sum(sumf[ir1]); + } + + if (tx == 0) { + for (short ir1 = 0; ir1 < r1ptg && i11 + ir1 < args.ne11; ++ir1) { + device float * dst_f32 = (device float *) dst + (uint64_t)i1m*args.ne0*args.ne1 + (uint64_t)(i11 + ir1)*args.ne0; + + if (i01 < args.ne01) { + dst_f32[i01] = sumf[ir1]; + } + } + } +} + +// dispatchers needed for compile-time nxpsg +// epb - elements per quantization block +template +kernel void kernel_mul_mv_ext_q4_f32_disp( + constant ggml_metal_kargs_mul_mv_ext & args, + device const char * src0, + device const char * src1, + device char * dst, + uint3 tgpig[[threadgroup_position_in_grid]], + ushort tiisg[[thread_index_in_simdgroup]], + ushort sgitg[[simdgroup_index_in_threadgroup]]) { + switch (args.nxpsg) { + case 4: kernel_mul_mv_ext_q4_f32_impl<4, r1ptg, q_t, epb/4, deq_t4>(args, src0, src1, dst, tgpig, tiisg, sgitg); break; + case 8: kernel_mul_mv_ext_q4_f32_impl<8, r1ptg, q_t, epb/4, deq_t4>(args, src0, src1, dst, tgpig, tiisg, sgitg); break; + case 16: kernel_mul_mv_ext_q4_f32_impl<16, r1ptg, q_t, epb/4, deq_t4>(args, src0, src1, dst, tgpig, tiisg, sgitg); break; + case 32: kernel_mul_mv_ext_q4_f32_impl<32, r1ptg, q_t, epb/4, deq_t4>(args, src0, src1, dst, tgpig, tiisg, sgitg); break; + } +} + +template +kernel void kernel_mul_mv_ext_q4x4_f32_disp( + constant ggml_metal_kargs_mul_mv_ext & args, + device const char * src0, + device const char * src1, + device char * dst, + uint3 tgpig[[threadgroup_position_in_grid]], + ushort tiisg[[thread_index_in_simdgroup]], + ushort sgitg[[simdgroup_index_in_threadgroup]]) { + switch (args.nxpsg) { + case 4: kernel_mul_mv_ext_q4x4_f32_impl<4, r1ptg, q_t, epb/16, deq_t4x4>(args, src0, src1, dst, tgpig, tiisg, sgitg); break; + case 8: kernel_mul_mv_ext_q4x4_f32_impl<8, r1ptg, q_t, epb/16, deq_t4x4>(args, src0, src1, dst, tgpig, tiisg, sgitg); break; + case 16: kernel_mul_mv_ext_q4x4_f32_impl<16, r1ptg, q_t, epb/16, deq_t4x4>(args, src0, src1, dst, tgpig, tiisg, sgitg); break; + case 32: kernel_mul_mv_ext_q4x4_f32_impl<32, r1ptg, q_t, epb/16, deq_t4x4>(args, src0, src1, dst, tgpig, tiisg, sgitg); break; + } +} + +typedef decltype(kernel_mul_mv_ext_q4_f32_disp <2, block_q8_0, 32, dequantize_q8_0_t4>) mul_mv_ext_q4_f32_t; +typedef decltype(kernel_mul_mv_ext_q4x4_f32_disp<2, block_q4_K, 256, dequantize_q4_K>) mul_mv_ext_q4x4_f32_t; + +template [[host_name("kernel_mul_mv_ext_f16_f32_r1_2")]] 
kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<2, half4, 4, dequantize_f16_t4>; +template [[host_name("kernel_mul_mv_ext_f16_f32_r1_3")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<3, half4, 4, dequantize_f16_t4>; +template [[host_name("kernel_mul_mv_ext_f16_f32_r1_4")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<4, half4, 4, dequantize_f16_t4>; +template [[host_name("kernel_mul_mv_ext_f16_f32_r1_5")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<5, half4, 4, dequantize_f16_t4>; + +template [[host_name("kernel_mul_mv_ext_q4_0_f32_r1_2")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<2, block_q4_0, 32, dequantize_q4_0_t4>; +template [[host_name("kernel_mul_mv_ext_q4_0_f32_r1_3")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<3, block_q4_0, 32, dequantize_q4_0_t4>; +template [[host_name("kernel_mul_mv_ext_q4_0_f32_r1_4")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<4, block_q4_0, 32, dequantize_q4_0_t4>; +template [[host_name("kernel_mul_mv_ext_q4_0_f32_r1_5")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<5, block_q4_0, 32, dequantize_q4_0_t4>; + +template [[host_name("kernel_mul_mv_ext_q4_1_f32_r1_2")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<2, block_q4_1, 32, dequantize_q4_1_t4>; +template [[host_name("kernel_mul_mv_ext_q4_1_f32_r1_3")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<3, block_q4_1, 32, dequantize_q4_1_t4>; +template [[host_name("kernel_mul_mv_ext_q4_1_f32_r1_4")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<4, block_q4_1, 32, dequantize_q4_1_t4>; +template [[host_name("kernel_mul_mv_ext_q4_1_f32_r1_5")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<5, block_q4_1, 32, dequantize_q4_1_t4>; + +template [[host_name("kernel_mul_mv_ext_q5_0_f32_r1_2")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<2, block_q5_0, 32, dequantize_q5_0_t4>; +template [[host_name("kernel_mul_mv_ext_q5_0_f32_r1_3")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<3, block_q5_0, 32, dequantize_q5_0_t4>; +template [[host_name("kernel_mul_mv_ext_q5_0_f32_r1_4")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<4, block_q5_0, 32, dequantize_q5_0_t4>; +template [[host_name("kernel_mul_mv_ext_q5_0_f32_r1_5")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<5, block_q5_0, 32, dequantize_q5_0_t4>; + +template [[host_name("kernel_mul_mv_ext_q5_1_f32_r1_2")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<2, block_q5_1, 32, dequantize_q5_1_t4>; +template [[host_name("kernel_mul_mv_ext_q5_1_f32_r1_3")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<3, block_q5_1, 32, dequantize_q5_1_t4>; +template [[host_name("kernel_mul_mv_ext_q5_1_f32_r1_4")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<4, block_q5_1, 32, dequantize_q5_1_t4>; +template [[host_name("kernel_mul_mv_ext_q5_1_f32_r1_5")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<5, block_q5_1, 32, dequantize_q5_1_t4>; + +template [[host_name("kernel_mul_mv_ext_q8_0_f32_r1_2")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<2, block_q8_0, 32, dequantize_q8_0_t4>; +template [[host_name("kernel_mul_mv_ext_q8_0_f32_r1_3")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<3, block_q8_0, 32, dequantize_q8_0_t4>; +template [[host_name("kernel_mul_mv_ext_q8_0_f32_r1_4")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<4, block_q8_0, 32, dequantize_q8_0_t4>; +template 
[[host_name("kernel_mul_mv_ext_q8_0_f32_r1_5")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<5, block_q8_0, 32, dequantize_q8_0_t4>; + +template [[host_name("kernel_mul_mv_ext_iq4_nl_f32_r1_2")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<2, block_iq4_nl, 32, dequantize_iq4_nl_t4>; +template [[host_name("kernel_mul_mv_ext_iq4_nl_f32_r1_3")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<3, block_iq4_nl, 32, dequantize_iq4_nl_t4>; +template [[host_name("kernel_mul_mv_ext_iq4_nl_f32_r1_4")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<4, block_iq4_nl, 32, dequantize_iq4_nl_t4>; +template [[host_name("kernel_mul_mv_ext_iq4_nl_f32_r1_5")]] kernel mul_mv_ext_q4_f32_t kernel_mul_mv_ext_q4_f32_disp<5, block_iq4_nl, 32, dequantize_iq4_nl_t4>; + +template [[host_name("kernel_mul_mv_ext_q4_K_f32_r1_2")]] kernel mul_mv_ext_q4x4_f32_t kernel_mul_mv_ext_q4x4_f32_disp<2, block_q4_K, 256, dequantize_q4_K>; +template [[host_name("kernel_mul_mv_ext_q4_K_f32_r1_3")]] kernel mul_mv_ext_q4x4_f32_t kernel_mul_mv_ext_q4x4_f32_disp<3, block_q4_K, 256, dequantize_q4_K>; +template [[host_name("kernel_mul_mv_ext_q4_K_f32_r1_4")]] kernel mul_mv_ext_q4x4_f32_t kernel_mul_mv_ext_q4x4_f32_disp<4, block_q4_K, 256, dequantize_q4_K>; +template [[host_name("kernel_mul_mv_ext_q4_K_f32_r1_5")]] kernel mul_mv_ext_q4x4_f32_t kernel_mul_mv_ext_q4x4_f32_disp<5, block_q4_K, 256, dequantize_q4_K>; + +template [[host_name("kernel_mul_mv_ext_q5_K_f32_r1_2")]] kernel mul_mv_ext_q4x4_f32_t kernel_mul_mv_ext_q4x4_f32_disp<2, block_q5_K, 256, dequantize_q5_K>; +template [[host_name("kernel_mul_mv_ext_q5_K_f32_r1_3")]] kernel mul_mv_ext_q4x4_f32_t kernel_mul_mv_ext_q4x4_f32_disp<3, block_q5_K, 256, dequantize_q5_K>; +template [[host_name("kernel_mul_mv_ext_q5_K_f32_r1_4")]] kernel mul_mv_ext_q4x4_f32_t kernel_mul_mv_ext_q4x4_f32_disp<4, block_q5_K, 256, dequantize_q5_K>; +template [[host_name("kernel_mul_mv_ext_q5_K_f32_r1_5")]] kernel mul_mv_ext_q4x4_f32_t kernel_mul_mv_ext_q4x4_f32_disp<5, block_q5_K, 256, dequantize_q5_K>; + +template [[host_name("kernel_mul_mv_ext_q6_K_f32_r1_2")]] kernel mul_mv_ext_q4x4_f32_t kernel_mul_mv_ext_q4x4_f32_disp<2, block_q6_K, 256, dequantize_q6_K>; +template [[host_name("kernel_mul_mv_ext_q6_K_f32_r1_3")]] kernel mul_mv_ext_q4x4_f32_t kernel_mul_mv_ext_q4x4_f32_disp<3, block_q6_K, 256, dequantize_q6_K>; +template [[host_name("kernel_mul_mv_ext_q6_K_f32_r1_4")]] kernel mul_mv_ext_q4x4_f32_t kernel_mul_mv_ext_q4x4_f32_disp<4, block_q6_K, 256, dequantize_q6_K>; +template [[host_name("kernel_mul_mv_ext_q6_K_f32_r1_5")]] kernel mul_mv_ext_q4x4_f32_t kernel_mul_mv_ext_q4x4_f32_disp<5, block_q6_K, 256, dequantize_q6_K>; + #define N_MV_T_T 4 template @@ -2258,6 +2728,79 @@ kernel void kernel_im2col_ext( template [[host_name("kernel_im2col_ext_f32")]] kernel im2col_ext_t kernel_im2col_ext; template [[host_name("kernel_im2col_ext_f16")]] kernel im2col_ext_t kernel_im2col_ext; +typedef void (conv_transpose_1d_t)( + device const float * src0, + device const float * src1, + device char * dst, + constant int32_t & IC, + constant int32_t & IL, + constant int32_t & K, + constant int32_t & s0, + constant uint64_t & nb0, + constant uint64_t & nb1, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tgpg[[threadgroups_per_grid]]); + +template +kernel void kernel_conv_transpose_1d( + device const T * src0, + device const float * src1, + device char * dst, + constant int32_t & IC, + constant int32_t & IL, + constant int32_t & K, + constant int32_t & s0, + constant 
uint64_t & nb0, + constant uint64_t & nb1, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tgpg[[threadgroups_per_grid]]) { + + float v = 0.0f; + + for (int64_t c = 0; c < IC; c++) { + const int32_t kernel_offset = c * tgpg[1] * K + K * tgpig[1]; + const int32_t input_offset = c * IL; + + for (int64_t i = 0; i < IL; i++) { + if (tgpig[0] >= i * s0 && tgpig[0] < i * s0 + K) { + v += src0[kernel_offset + tgpig[0] - i * s0] * src1[input_offset + i]; + } + } + } + + device float * dst_ptr = (device float *) (dst + tgpig[0] * nb0 + tgpig[1] * nb1); + + dst_ptr[0] = v; +} + +template [[host_name("kernel_conv_transpose_1d_f32_f32")]] +kernel void kernel_conv_transpose_1d( + device const float * src0, + device const float * src1, + device char * dst, + constant int32_t & IC, + constant int32_t & IL, + constant int32_t & K, + constant int32_t & s0, + constant uint64_t & nb0, + constant uint64_t & nb1, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tgpg[[threadgroups_per_grid]]); + +template [[host_name("kernel_conv_transpose_1d_f16_f32")]] +kernel void kernel_conv_transpose_1d( + device const half * src0, + device const float * src1, + device char * dst, + constant int32_t & IC, + constant int32_t & IL, + constant int32_t & K, + constant int32_t & s0, + constant uint64_t & nb0, + constant uint64_t & nb1, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tgpg[[threadgroups_per_grid]]); + kernel void kernel_upscale_f32( device const char * src0, device char * dst, @@ -2354,6 +2897,53 @@ kernel void kernel_pad_f32( } } +kernel void kernel_pad_reflect_1d_f32( + device const char * src0, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant int64_t & ne0, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + constant int32_t & p0, + constant int32_t & p1, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tgpg[[threadgroups_per_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + + const int64_t i3 = tgpig.z; + const int64_t i2 = tgpig.y; + const int64_t i1 = tgpig.x; + + const int64_t i03 = i3; + const int64_t i02 = i2; + const int64_t i01 = i1; + + device const float * src0_ptr = (device const float *) (src0 + i03*nb03 + i02*nb02 + i01*nb01); + device float * dst_ptr = (device float *) (dst + i3*nb3 + i2*nb2 + i1*nb1); + + if (i1 < ne01 && i2 < ne02 && i3 < ne03) { + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + if (i0 < p0) { + dst_ptr[i0] = src0_ptr[p0 - i0]; + } else if (i0 < ne0 - p1) { + dst_ptr[i0] = src0_ptr[i0 - p0]; + } else { + dst_ptr[i0] = src0_ptr[(ne0 - p1 - p0) - (p1 + 1 - (ne0 - i0)) - 1]; + } + } + } +} + kernel void kernel_arange_f32( device char * dst, constant int64_t & ne0, @@ -3337,6 +3927,38 @@ template [[host_name("kernel_flash_attn_ext_vec_q8_0_h256")]] kernel flash_attn_ #undef FA_TYPES +template +kernel void kernel_set( + constant ggml_metal_kargs_set & args, + device const char * src0, + device const char * src1, + device char * dst, + uint3 tgpig[[threadgroup_position_in_grid]], + ushort3 tpitg[[thread_position_in_threadgroup]], + ushort3 ntg[[threads_per_threadgroup]]) { + const int i13 = tgpig[2]; + const int i12 = tgpig[1]; + const int i11 = tgpig[0]; + + const int64_t n = i13*args.ne12*args.ne11*args.ne10 + i12*args.ne11*args.ne10 + 
i11*args.ne10; + + const int64_t i3 = n / (args.ne12*args.ne11*args.ne10); + const int64_t i2 = (n - i3*args.ne12*args.ne11*args.ne10) / (args.ne11*args.ne10); + const int64_t i1 = (n - i3*args.ne12*args.ne11*args.ne10 - i2*args.ne11*args.ne10) / args.ne10; + + device T * dst_data = (device T *) (dst + i3*args.nb3 + i2*args.nb2 + i1*args.nb1 + args.offs); + + for (int64_t i10 = tpitg.x; i10 < args.ne10; i10 += ntg.x) { + device const T * src = (device T *) (src1 + i13*args.nb13 + i12*args.nb12 + i11*args.nb11 + i10*args.nb10); + dst_data[i10] = (T) src[0]; + } +} + +typedef decltype(kernel_set) kernel_set_t; + +template [[host_name("kernel_set_f32")]] kernel kernel_set_t kernel_set; +template [[host_name("kernel_set_i32")]] kernel kernel_set_t kernel_set; + template kernel void kernel_cpy( constant ggml_metal_kargs_cpy & args, diff --git a/ggml/src/ggml-quants.c b/ggml/src/ggml-quants.c index 7301a9c6caab8..7918388ae9f2e 100644 --- a/ggml/src/ggml-quants.c +++ b/ggml/src/ggml-quants.c @@ -5220,15 +5220,6 @@ bool ggml_validate_row_data(enum ggml_type type, const void * data, size_t nbyte { VALIDATE_ROW_DATA_D_F16_IMPL(block_iq4_nl, data, nb); } break; - case GGML_TYPE_Q4_0_4_4: - case GGML_TYPE_Q4_0_4_8: - { - VALIDATE_ROW_DATA_DVEC_F16_IMPL(block_q4_0x4, data, nbytes / sizeof(block_q4_0x4), 4); - } break; - case GGML_TYPE_Q4_0_8_8: - { - VALIDATE_ROW_DATA_DVEC_F16_IMPL(block_q4_0x8, data, nbytes / sizeof(block_q4_0x8), 8); - } break; case GGML_TYPE_I8: case GGML_TYPE_I16: diff --git a/ggml/src/ggml-sycl/CMakeLists.txt b/ggml/src/ggml-sycl/CMakeLists.txt index 83f223fd7b6fc..3579a311aac07 100644 --- a/ggml/src/ggml-sycl/CMakeLists.txt +++ b/ggml/src/ggml-sycl/CMakeLists.txt @@ -68,7 +68,8 @@ else() target_link_libraries(ggml-sycl PRIVATE sycl OpenCL mkl_core pthread m dl mkl_sycl_blas mkl_intel_ilp64 mkl_tbb_thread) elseif (GGML_SYCL_TARGET STREQUAL "NVIDIA") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsycl-targets=nvptx64-nvidia-cuda") - target_link_libraries(ggml-sycl PRIVATE sycl pthread m dl onemkl) + add_compile_definitions(GGML_SYCL_NVIDIA) + target_link_libraries(ggml-sycl PRIVATE sycl pthread m dl onemkl_blas_cublas) elseif (GGML_SYCL_TARGET STREQUAL "AMD") if (NOT GGML_SYCL_DEVICE_ARCH) message(ERROR "Can't enable SYCL hip backend, GGML_SYCL_DEVICE_ARCH has not been set.") diff --git a/ggml/src/ggml-sycl/dpct/helper.hpp b/ggml/src/ggml-sycl/dpct/helper.hpp index c2f28bb49579e..d1b5dd87c6922 100644 --- a/ggml/src/ggml-sycl/dpct/helper.hpp +++ b/ggml/src/ggml-sycl/dpct/helper.hpp @@ -1689,9 +1689,14 @@ namespace dpct auto data_a = get_memory(a); auto data_b = get_memory(b); auto data_c = get_memory(c); - oneapi::mkl::blas::column_major::gemm( - q, a_trans, b_trans, m, n, k, alpha_value, data_a, lda, - data_b, ldb, beta_value, data_c, ldc); +#ifdef GGML_SYCL_NVIDIA + oneapi::mkl::blas::column_major::gemm(oneapi::mkl::backend_selector{ q }, + a_trans, b_trans, m, n, k, alpha_value, data_a, lda, data_b, ldb, + beta_value, data_c, ldc); +#else + oneapi::mkl::blas::column_major::gemm(q, a_trans, b_trans, m, n, k, alpha_value, data_a, lda, data_b, ldb, + beta_value, data_c, ldc); +#endif } template @@ -1754,14 +1759,22 @@ namespace dpct matrix_info->ld_info[2] = ldc; matrix_info->groupsize_info = batch_size; +#ifdef GGML_SYCL_NVIDIA + sycl::event e = oneapi::mkl::blas::column_major::gemm_batch( + oneapi::mkl::backend_selector{ q }, matrix_info->transpose_info, + matrix_info->transpose_info + 1, matrix_info->size_info, matrix_info->size_info + 1, + matrix_info->size_info + 2, 
matrix_info->value_info, reinterpret_cast(a), + matrix_info->ld_info, reinterpret_cast(b), matrix_info->ld_info + 1, + matrix_info->value_info + 1, reinterpret_cast(c), matrix_info->ld_info + 2, 1, + &(matrix_info->groupsize_info)); +#else sycl::event e = oneapi::mkl::blas::column_major::gemm_batch( - q, matrix_info->transpose_info, matrix_info->transpose_info + 1, - matrix_info->size_info, matrix_info->size_info + 1, - matrix_info->size_info + 2, matrix_info->value_info, - reinterpret_cast(a), matrix_info->ld_info, - reinterpret_cast(b), matrix_info->ld_info + 1, - matrix_info->value_info + 1, reinterpret_cast(c), + q, matrix_info->transpose_info, matrix_info->transpose_info + 1, matrix_info->size_info, + matrix_info->size_info + 1, matrix_info->size_info + 2, matrix_info->value_info, + reinterpret_cast(a), matrix_info->ld_info, reinterpret_cast(b), + matrix_info->ld_info + 1, matrix_info->value_info + 1, reinterpret_cast(c), matrix_info->ld_info + 2, 1, &(matrix_info->groupsize_info)); +#endif q.submit([&](sycl::handler &cgh) { @@ -1783,10 +1796,16 @@ namespace dpct auto data_a = get_memory(a); auto data_b = get_memory(b); auto data_c = get_memory(c); +#ifdef GGML_SYCL_NVIDIA oneapi::mkl::blas::column_major::gemm_batch( - q, a_trans, b_trans, m, n, k, alpha_value, data_a, lda, - stride_a, data_b, ldb, stride_b, beta_value, - data_c, ldc, stride_c, batch_size); + oneapi::mkl::backend_selector{ q }, a_trans, b_trans, m, n, k, + alpha_value, data_a, lda, stride_a, data_b, ldb, stride_b, beta_value, data_c, ldc, stride_c, + batch_size); +#else + oneapi::mkl::blas::column_major::gemm_batch(q, a_trans, b_trans, m, n, k, alpha_value, data_a, lda, + stride_a, data_b, ldb, stride_b, beta_value, data_c, ldc, + stride_c, batch_size); +#endif } } // namespace detail diff --git a/ggml/src/ggml-sycl/ggml-sycl.cpp b/ggml/src/ggml-sycl/ggml-sycl.cpp index b6392ed8dcc6a..ae3baedc7b6c3 100644 --- a/ggml/src/ggml-sycl/ggml-sycl.cpp +++ b/ggml/src/ggml-sycl/ggml-sycl.cpp @@ -47,7 +47,7 @@ static ggml_sycl_device_info ggml_sycl_init() { info.device_count = dpct::dev_mgr::instance().device_count(); if (info.device_count == 0) { - fprintf(stderr, "%s: failed to initialize " GGML_SYCL_NAME ": %s\n", __func__); + GGML_LOG_ERROR("%s: failed to initialize " GGML_SYCL_NAME ": %s\n", __func__); return info; } @@ -55,16 +55,16 @@ static ggml_sycl_device_info ggml_sycl_init() { int64_t total_vram = 0; #if defined(GGML_SYCL_FORCE_MMQ) - fprintf(stderr, "%s: GGML_SYCL_FORCE_MMQ: yes\n", __func__); + GGML_LOG_INFO("%s: GGML_SYCL_FORCE_MMQ: yes\n", __func__); #else - fprintf(stderr, "%s: GGML_SYCL_FORCE_MMQ: no\n", __func__); + GGML_LOG_INFO("%s: GGML_SYCL_FORCE_MMQ: no\n", __func__); #endif #if defined(SYCL_USE_XMX) - fprintf(stderr, "%s: SYCL_USE_XMX: yes\n", __func__); + GGML_LOG_INFO("%s: SYCL_USE_XMX: yes\n", __func__); #else - fprintf(stderr, "%s: SYCL_USE_XMX: no\n", __func__); + GGML_LOG_INFO("%s: SYCL_USE_XMX: no\n", __func__); #endif - fprintf(stderr, "%s: found %d " GGML_SYCL_NAME " devices:\n", __func__, info.device_count); + GGML_LOG_INFO("%s: found %d " GGML_SYCL_NAME " devices:\n", __func__, info.device_count); for (int i = 0; i < info.device_count; ++i) { info.devices[i].vmm = 0; @@ -110,7 +110,7 @@ void print_device_detail(int id, sycl::device &device, std::string device_type) auto global_mem_size = prop.get_global_mem_size()/1000000; - fprintf(stderr, "|%2d|%19s|%39s|%7s|%7d|%8d|%5d|%6luM|%21s|\n", id, device_type.c_str(), + GGML_LOG_INFO("|%2d|%19s|%39s|%7s|%7d|%8d|%5d|%6luM|%21s|\n", id, 
device_type.c_str(), name.c_str(), version.c_str(), prop.get_max_compute_units(), prop.get_max_work_group_size(), prop.get_max_sub_group_size(), global_mem_size, device.get_info().c_str()); @@ -120,19 +120,30 @@ void ggml_backend_sycl_print_sycl_devices() { GGML_SYCL_DEBUG("[SYCL] call ggml_backend_sycl_print_sycl_devices\n"); int device_count = dpct::dev_mgr::instance().device_count(); std::map DeviceNums; - fprintf(stderr, "found %d SYCL devices:\n", device_count); - fprintf(stderr, "| | | | |Max | |Max |Global | |\n"); - fprintf(stderr, "| | | | |compute|Max work|sub |mem | |\n"); - fprintf(stderr, "|ID| Device Type| Name|Version|units |group |group|size | Driver version|\n"); - fprintf(stderr, "|--|-------------------|---------------------------------------|-------|-------|--------|-----|-------|---------------------|\n"); + GGML_LOG_INFO("Found %d SYCL devices:\n", device_count); + + GGML_LOG_INFO( + "| | | | " + " |Max | |Max |Global | |\n"); + GGML_LOG_INFO( + "| | | | " + " |compute|Max work|sub |mem | |\n"); + GGML_LOG_INFO( + "|ID| Device Type| " + "Name|Version|units |group |group|size | Driver version|\n"); + GGML_LOG_INFO( + "|--|-------------------|---------------------------------------|------" + "-|-------|--------|-----|-------|---------------------|\n"); + for (int id = 0; id < device_count; ++id) { - sycl::device device = dpct::dev_mgr::instance().get_device(id); - sycl::backend backend = device.get_backend(); - std::string backend_type = get_device_backend_and_type(device); - int type_id=DeviceNums[backend_type]++; - std::stringstream device_type; - device_type << "[" << backend_type << ":" << std::to_string(type_id) << "]"; - print_device_detail(id, device, device_type.str()); + sycl::device device = dpct::dev_mgr::instance().get_device(id); + sycl::backend backend = device.get_backend(); + std::string backend_type = get_device_backend_and_type(device); + int type_id = DeviceNums[backend_type]++; + std::stringstream device_type; + device_type << "[" << backend_type << ":" << std::to_string(type_id) + << "]"; + print_device_detail(id, device, device_type.str()); } } @@ -154,15 +165,14 @@ static void ggml_check_sycl() try { static bool initialized = false; if (!initialized) { - fprintf(stderr, "[SYCL] call ggml_check_sycl\n"); + GGML_LOG_INFO("[SYCL] call ggml_check_sycl\n"); g_ggml_sycl_debug = get_sycl_env("GGML_SYCL_DEBUG", 0); - - fprintf(stderr, "%s: GGML_SYCL_DEBUG: %d\n", __func__, g_ggml_sycl_debug); + GGML_LOG_INFO("%s: GGML_SYCL_DEBUG: %d\n", __func__, g_ggml_sycl_debug); #if defined(GGML_SYCL_F16) - fprintf(stderr, "%s: GGML_SYCL_F16: yes\n", __func__); + GGML_LOG_INFO("%s: GGML_SYCL_F16: yes\n", __func__); #else - fprintf(stderr, "%s: GGML_SYCL_F16: no\n", __func__); + GGML_LOG_INFO("%s: GGML_SYCL_F16: no\n", __func__); #endif /* NOT REMOVE, keep it for next optimize for XMX. 
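The SYCL hunks above replace direct fprintf(stderr, ...) calls with the GGML_LOG_INFO / GGML_LOG_WARN / GGML_LOG_ERROR macros, so these messages flow through ggml's logging callback instead of going straight to stderr. A minimal sketch of how a host application could capture that output, assuming the ggml_log_set() / ggml_log_callback API declared in ggml.h (names taken from that header; verify against the version actually in use):

```cpp
// Sketch only: route ggml backend log output (including the SYCL device
// table and error messages above) into an application-side handler.
#include <cstdio>
#include "ggml.h"

// Matches the ggml_log_callback signature: (level, text, user_data).
static void app_log_handler(enum ggml_log_level level, const char * text, void * user_data) {
    FILE * out = (FILE *) user_data;
    // Forward warnings and errors, drop info-level chatter such as the device table.
    if (level == GGML_LOG_LEVEL_WARN || level == GGML_LOG_LEVEL_ERROR) {
        fputs(text, out);
        fflush(out);
    }
}

int main(void) {
    // Install the callback before initializing any backend so early messages
    // (e.g. from ggml_check_sycl) are not lost.
    ggml_log_set(app_log_handler, stderr);
    // ... normal backend/context initialization follows ...
    return 0;
}
```

If no callback is installed, the default handler still writes to stderr, so the visible behaviour of the messages above should be unchanged by this patch.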
@@ -180,9 +190,10 @@ static void ggml_check_sycl() try { return; } GGML_ASSERT(g_all_sycl_device_count <= GGML_SYCL_MAX_DEVICES); - ggml_backend_sycl_print_sycl_devices(); + initialized = true; g_sycl_loaded = true; + ggml_backend_sycl_print_sycl_devices(); } } catch (sycl::exception const &exc) { @@ -205,7 +216,7 @@ inline void check_allow_gpu_index(const int device_index) { __func__, device_index, ggml_sycl_info().device_count - 1); - fprintf(stderr, "%s\n", error_buf); + GGML_LOG_ERROR("%s\n", error_buf); assert(false); } } @@ -475,8 +486,8 @@ ggml_backend_sycl_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, SYCL_CHECK(CHECK_TRY_ERROR(dev_ptr = (void *)sycl::malloc_device( size, *stream))); if (!dev_ptr) { - fprintf(stderr, "%s: can't malloc %lu Bytes memory on device", __func__, size); - return nullptr; + GGML_LOG_ERROR("%s: can't allocate %lu Bytes of memory on device\n", __func__, size); + return nullptr; } ggml_backend_sycl_buffer_context * ctx = new ggml_backend_sycl_buffer_context(buft_ctx->device, dev_ptr, buft_ctx->stream); return ggml_backend_buffer_init(buft, ggml_backend_sycl_buffer_interface, ctx, size); @@ -752,7 +763,7 @@ ggml_backend_sycl_split_buffer_init_tensor(ggml_backend_buffer_t buffer, size, *stream))); if (!buf) { char err_buf[1024]; - snprintf(err_buf, 1023, "%s: can't malloc %lu Bytes memory on device", __func__, size); + snprintf(err_buf, 1023, "%s: can't allocate %lu Bytes of memory on device\n", __func__, size); throw std::runtime_error(err_buf); } // set padding to 0 to avoid possible NaN values @@ -1142,17 +1153,18 @@ struct ggml_sycl_pool_leg : public ggml_sycl_pool { CHECK_TRY_ERROR(ptr = (void *)sycl::malloc_device( look_ahead_size, *qptr))); if (!ptr) { - fprintf(stderr, "%s: can't malloc %lu Bytes memory on device", __func__, look_ahead_size); + GGML_LOG_ERROR("%s: can't allocate %lu Bytes of memory on device/GPU\n", __func__, look_ahead_size); return nullptr; } *actual_size = look_ahead_size; pool_size += look_ahead_size; - #ifdef DEBUG_SYCL_MALLOC - fprintf(stderr, "%s[%d]: %d buffers, max_size = %u MB, pool_size = %u MB, requested %u MB\n", __func__, id, nnz, +#ifdef DEBUG_SYCL_MALLOC + GGML_LOG_DEBUG("%s[%d]: %d buffers, max_size = %u MB, pool_size = %u MB, requested %u MB\n", __func__, id, nnz, (uint32_t)(max_size/1024/1024), (uint32_t)(g_sycl_pool_size[id]/1024/1024), (uint32_t)(size/1024/1024)); - #endif +#endif + // GGML_SYCL_DEBUG("ggml_sycl_pool_malloc_leg look_ahead_size=%lu, return %p\n", look_ahead_size, ptr); return ptr; } @@ -1166,7 +1178,7 @@ struct ggml_sycl_pool_leg : public ggml_sycl_pool { return; } } - fprintf(stderr, "WARNING: sycl buffer pool full, increase MAX_sycl_BUFFERS\n"); + GGML_LOG_WARN("WARNING: sycl buffer pool full, increase MAX_sycl_BUFFERS\n"); SYCL_CHECK(CHECK_TRY_ERROR(sycl::free(ptr, *qptr))); pool_size -= size; } @@ -2437,7 +2449,7 @@ static void ggml_sycl_op_get_rows(ggml_backend_sycl_context & ctx, const ggml_te break; default: // TODO: k-quants - fprintf(stderr, "%s: unsupported type: %s\n", __func__, ggml_type_name(src0->type)); + GGML_LOG_ERROR("%s: unsupported type: %s\n", __func__, ggml_type_name(src0->type)); GGML_ABORT("fatal error"); break; } @@ -2561,12 +2573,17 @@ inline void ggml_sycl_op_mul_mat_sycl( const float alpha = 1.0f; const float beta = 0.0f; #if !GGML_SYCL_DNNL +# ifdef GGML_SYCL_NVIDIA SYCL_CHECK(CHECK_TRY_ERROR(oneapi::mkl::blas::column_major::gemm( - *stream, oneapi::mkl::transpose::trans, - oneapi::mkl::transpose::nontrans, row_diff, src1_ncols, ne10, - dpct::get_value(&alpha, 
*stream), src0_ddf_i, ne00, - src1_ddf1_i, ne10, dpct::get_value(&beta, *stream), + oneapi::mkl::backend_selector{ *stream }, oneapi::mkl::transpose::trans, + oneapi::mkl::transpose::nontrans, row_diff, src1_ncols, ne10, dpct::get_value(&alpha, *stream), src0_ddf_i, + ne00, src1_ddf1_i, ne10, dpct::get_value(&beta, *stream), dst_dd_i, ldc))); +# else + SYCL_CHECK(CHECK_TRY_ERROR(oneapi::mkl::blas::column_major::gemm( + *stream, oneapi::mkl::transpose::trans, oneapi::mkl::transpose::nontrans, row_diff, src1_ncols, ne10, + dpct::get_value(&alpha, *stream), src0_ddf_i, ne00, src1_ddf1_i, ne10, dpct::get_value(&beta, *stream), dst_dd_i, ldc))); +# endif #else auto dnnl_stream = ctx.stream_dnnl(stream); DnnlGemmWrapper::row_gemm(dnnl_stream, false, true, src1_ncols, row_diff, ne10, src1_ddf1_i, DnnlGemmWrapper::to_dt(), @@ -3447,8 +3464,15 @@ static void ggml_sycl_mul_mat(ggml_backend_sycl_context & ctx, const ggml_tensor use_dequantize_mul_mat_vec = use_dequantize_mul_mat_vec && !use_mul_mat_vec_q; if (!split && src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { - // KQ single-batch - ggml_sycl_mul_mat_vec_p021(ctx, src0, src1, dst); + // TODO: Refactor and cleanup of mul mat dispatching. + if (src0->ne[3] == 1 && src1->ne[3] == 1) { + // KQ single-batch + // mmv p021 was specific for these dimensions + ggml_sycl_mul_mat_vec_p021(ctx, src0, src1, dst); + } else { + // The kernel from the if path is faster for that specific case, but does not support all mul mats. + ggml_sycl_mul_mat_batched_sycl(ctx, src0, src1, dst); + } } else if (!split && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { // KQV single-batch ggml_sycl_mul_mat_vec_nc(ctx, src0, src1, dst); @@ -3743,7 +3767,7 @@ static void ggml_sycl_cpy(ggml_backend_sycl_context & ctx, const ggml_tensor *sr } else if (src0->type == GGML_TYPE_I32 && src1->type == GGML_TYPE_I32) { ggml_cpy_i32_i32_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else { - fprintf(stderr, "%s: unsupported type combination (%s to %s)\n", __func__, + GGML_LOG_ERROR("%s: unsupported type combination (%s to %s)\n", __func__, ggml_type_name(src0->type), ggml_type_name(src1->type)); GGML_ABORT("fatal error"); } @@ -3818,7 +3842,7 @@ void ggml_sycl_set_main_device(const int main_device) try { dpct::device_info prop; SYCL_CHECK(CHECK_TRY_ERROR(dpct::get_device_info( prop, dpct::dev_mgr::instance().get_device(main_device)))); - fprintf(stderr, "Using device %d (%s) as main device\n", + GGML_LOG_INFO("Using device %d (%s) as main device\n", main_device, prop.get_name()); } } @@ -4165,7 +4189,7 @@ static ggml_status ggml_backend_sycl_graph_compute(ggml_backend_t backend, ggml_ #endif bool ok = ggml_sycl_compute_forward(*sycl_ctx, node); if (!ok) { - fprintf(stderr, "%s: error: op not supported %s (%s)\n", __func__, node->name, ggml_op_name(node->op)); + GGML_LOG_ERROR("%s: error: op not supported %s (%s)\n", __func__, node->name, ggml_op_name(node->op)); } GGML_ASSERT(ok); } @@ -4486,7 +4510,7 @@ static bool ggml_backend_sycl_device_supports_buft(ggml_backend_dev_t dev, ggml_ static int64_t get_op_batch_size(const ggml_tensor * op) { switch (op->op) { case GGML_OP_GET_ROWS: - return op->ne[1]; // this will increse the speed of prefill in test + return 0; case GGML_OP_MUL_MAT: return op->ne[1]; case GGML_OP_MUL_MAT_ID: @@ -4606,7 +4630,7 @@ static void 
*ggml_backend_sycl_reg_get_proc_address(ggml_backend_reg_t reg, cons static const ggml_backend_reg_i ggml_backend_sycl_reg_interface = { /* .get_name = */ ggml_backend_sycl_reg_get_name, /* .get_device_count = */ ggml_backend_sycl_reg_get_device_count, - /* .get_device_get = */ ggml_backend_sycl_reg_get_device, + /* .get_device = */ ggml_backend_sycl_reg_get_device, /* .get_proc_address = */ ggml_backend_sycl_reg_get_proc_address, }; @@ -4665,7 +4689,7 @@ ggml_backend_t ggml_backend_sycl_init(int device) { ggml_backend_sycl_context * ctx = new ggml_backend_sycl_context(device); if (ctx == nullptr) { - fprintf(stderr, "%s: error: failed to allocate context\n", __func__); + GGML_LOG_ERROR("%s: error: failed to allocate context\n", __func__); return nullptr; }; diff --git a/ggml/src/ggml-sycl/outprod.cpp b/ggml/src/ggml-sycl/outprod.cpp index e61cdc2ca5d53..ef9af0b7633ab 100644 --- a/ggml/src/ggml-sycl/outprod.cpp +++ b/ggml/src/ggml-sycl/outprod.cpp @@ -40,14 +40,14 @@ void ggml_sycl_op_out_prod(ggml_backend_sycl_context& ctx, const ggml_tensor* sr try { // Perform matrix multiplication using oneMKL GEMM - oneapi::mkl::blas::column_major::gemm(*stream, - oneapi::mkl::transpose::nontrans, src1_op, - ne0, ne1, ne01, - alpha, - src0_d, ne00, - src1_d, ldb, - beta, - dst_d, ne0); +#ifdef GGML_SYCL_NVIDIA + oneapi::mkl::blas::column_major::gemm(oneapi::mkl::backend_selector{ *stream }, + oneapi::mkl::transpose::nontrans, src1_op, ne0, ne1, ne01, alpha, src0_d, + ne00, src1_d, ldb, beta, dst_d, ne0); +#else + oneapi::mkl::blas::column_major::gemm(*stream, oneapi::mkl::transpose::nontrans, src1_op, ne0, ne1, ne01, alpha, + src0_d, ne00, src1_d, ldb, beta, dst_d, ne0); +#endif } catch (sycl::exception const& exc) { std::cerr << exc.what() << std::endl; diff --git a/ggml/src/ggml-vulkan/CMakeLists.txt b/ggml/src/ggml-vulkan/CMakeLists.txt index ae0485e04255d..6d46e5f24c169 100644 --- a/ggml/src/ggml-vulkan/CMakeLists.txt +++ b/ggml/src/ggml-vulkan/CMakeLists.txt @@ -8,6 +8,20 @@ if (Vulkan_FOUND) ../../include/ggml-vulkan.h ) + # Compile a test shader to determine whether GL_NV_cooperative_matrix2 is supported. + # If it's not, there will be an error to stderr. 
+ # If it's supported, set a define to indicate that we should compile those shaders + execute_process(COMMAND ${Vulkan_GLSLC_EXECUTABLE} -o - -fshader-stage=compute --target-env=vulkan1.3 "${CMAKE_CURRENT_SOURCE_DIR}/vulkan-shaders/test_coopmat2_support.comp" + OUTPUT_VARIABLE glslc_output + ERROR_VARIABLE glslc_error) + + if (${glslc_error} MATCHES ".*extension not supported: GL_NV_cooperative_matrix2.*") + message(STATUS "GL_NV_cooperative_matrix2 not supported by glslc") + else() + message(STATUS "GL_NV_cooperative_matrix2 supported by glslc") + add_compile_definitions(GGML_VULKAN_COOPMAT2_GLSLC_SUPPORT) + endif() + target_link_libraries(ggml-vulkan PRIVATE Vulkan::Vulkan) target_include_directories(ggml-vulkan PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) @@ -67,7 +81,7 @@ if (Vulkan_FOUND) --target-cpp ${_ggml_vk_source} --no-clean - DEPENDS ${_ggml_vk_shader_deps} + DEPENDS ${_ggml_vk_shader_deps} ${_ggml_vk_genshaders_cmd} COMMENT "Generate vulkan shaders" ) diff --git a/ggml/src/ggml-vulkan/ggml-vulkan.cpp b/ggml/src/ggml-vulkan/ggml-vulkan.cpp index 849c119237bba..a8ae58ee2ce85 100644 --- a/ggml/src/ggml-vulkan/ggml-vulkan.cpp +++ b/ggml/src/ggml-vulkan/ggml-vulkan.cpp @@ -1,7 +1,8 @@ #include "ggml-vulkan.h" #include -#if defined(GGML_VULKAN_RUN_TESTS) || defined(GGML_VULKAN_PERF) +#if defined(GGML_VULKAN_RUN_TESTS) || defined(GGML_VULKAN_PERF) || defined(GGML_VULKAN_CHECK_RESULTS) #include +#include "ggml-cpu.h" #endif #include @@ -43,12 +44,6 @@ #define MAX_VK_BUFFERS 256 -#ifndef K_QUANTS_PER_ITERATION -#define K_QUANTS_PER_ITERATION 1 -#else -static_assert(K_QUANTS_PER_ITERATION == 1 || K_QUANTS_PER_ITERATION == 2, "K_QUANTS_PER_ITERATION must be 1 or 2"); -#endif - #define VK_CHECK(err, msg) \ do { \ vk::Result err_ = (err); \ @@ -165,23 +160,41 @@ struct vk_device_struct { vk_queue transfer_queue; bool single_queue; uint32_t subgroup_size; + uint32_t shader_core_count; bool uma; + bool float_controls_rte_fp16; + bool coopmat2; + + bool coopmat_support; + bool coopmat_acc_f32_support; + bool coopmat_acc_f16_support; + uint32_t coopmat_m; + uint32_t coopmat_n; + uint32_t coopmat_k; size_t idx; + bool mul_mat_l; + bool mul_mat_m; + bool mul_mat_s; + bool mul_mat_id_l; + bool mul_mat_id_m; + bool mul_mat_id_s; + vk_matmul_pipeline pipeline_matmul_f32; vk_matmul_pipeline pipeline_matmul_f32_f16; vk_matmul_pipeline2 pipeline_matmul_f16; vk_matmul_pipeline2 pipeline_matmul_f16_f32; vk_pipeline pipeline_matmul_split_k_reduce; + vk_matmul_pipeline2 pipeline_dequant_mul_mat_mat_f16[GGML_TYPE_COUNT]; vk_matmul_pipeline2 pipeline_dequant_mul_mat_mat[GGML_TYPE_COUNT]; vk_matmul_pipeline pipeline_matmul_id_f32; - vk_matmul_pipeline pipeline_matmul_id_f16; - vk_matmul_pipeline pipeline_matmul_id_f16_f32; + vk_matmul_pipeline2 pipeline_matmul_id_f16; + vk_matmul_pipeline2 pipeline_matmul_id_f16_f32; - vk_matmul_pipeline pipeline_dequant_mul_mat_mat_id[GGML_TYPE_COUNT]; + vk_matmul_pipeline2 pipeline_dequant_mul_mat_mat_id[GGML_TYPE_COUNT]; vk_pipeline pipeline_dequant[GGML_TYPE_COUNT]; vk_pipeline pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_COUNT]; @@ -228,6 +241,14 @@ struct vk_device_struct { vk_pipeline pipeline_timestep_embedding_f32; vk_pipeline pipeline_pool2d_f32; + // [2][2][2] is for {f16acc,f32acc}x{large,small_rows}x{unaligned, aligned} + vk_pipeline pipeline_flash_attn_f32_f16_D64[GGML_TYPE_COUNT][2][2][2]; + vk_pipeline pipeline_flash_attn_f32_f16_D80[GGML_TYPE_COUNT][2][2][2]; + vk_pipeline pipeline_flash_attn_f32_f16_D96[GGML_TYPE_COUNT][2][2][2]; + vk_pipeline 
pipeline_flash_attn_f32_f16_D112[GGML_TYPE_COUNT][2][2][2]; + vk_pipeline pipeline_flash_attn_f32_f16_D128[GGML_TYPE_COUNT][2][2][2]; + vk_pipeline pipeline_flash_attn_f32_f16_D256[GGML_TYPE_COUNT][2][2][2]; + std::unordered_map pipelines; std::unordered_map pipeline_descriptor_set_requirements; @@ -339,6 +360,40 @@ struct vk_mat_vec_id_push_constants { uint32_t nei0; uint32_t ne11; }; +struct vk_flash_attn_push_constants { + uint32_t N; + uint32_t KV; + + uint32_t ne1; + uint32_t ne2; + uint32_t ne3; + + uint32_t neq2; + uint32_t neq3; + uint32_t nek2; + uint32_t nek3; + uint32_t nev2; + uint32_t nev3; + uint32_t nem1; + + uint32_t nb02; + uint32_t nb03; + uint32_t nb12; + uint32_t nb13; + uint32_t nb22; + uint32_t nb23; + uint32_t nb31; + + float scale; + float max_bias; + float logit_softcap; + + uint32_t mask; + uint32_t n_head_log2; + float m0; + float m1; +}; + struct vk_op_push_constants { uint32_t KX; uint32_t KY; @@ -352,7 +407,46 @@ struct vk_op_unary_push_constants { uint32_t ne10; uint32_t ne11; uint32_t ne12; uint32_t ne13; uint32_t nb10; uint32_t nb11; uint32_t nb12; uint32_t nb13; uint32_t d_offset; float param1; float param2; + uint32_t ne0_012mp; uint32_t ne0_012L; + uint32_t ne0_01mp; uint32_t ne0_01L; + uint32_t ne0_0mp; uint32_t ne0_0L; + uint32_t ne1_012mp; uint32_t ne1_012L; + uint32_t ne1_01mp; uint32_t ne1_01L; + uint32_t ne1_0mp; uint32_t ne1_0L; }; +static_assert(sizeof(vk_op_unary_push_constants) <= 128, "sizeof(vk_op_unary_push_constants) must be <= 128"); + +// See https://gmplib.org/~tege/divcnst-pldi94.pdf figure 4.1. +// Precompute mp (m' in the paper) and L such that division +// can be computed using a multiply (high 32b of 64b result) +// and a shift: +// +// n/d = (mulhi(n, mp) + n) >> L; +static void init_fastdiv_values(uint32_t d, uint32_t &mp, uint32_t &L) +{ + // compute L = ceil(log2(d)); + L = 0; + while (L < 32 && (uint32_t{1} << L) < d) { + L++; + } + + mp = (uint32_t)((uint64_t{1} << 32) * ((uint64_t{1} << L) - d) / d + 1); +} + +template void init_pushconst_fastdiv(T &p) { + GGML_UNUSED(p); + static_assert(!std::is_const::value, "unexpected type"); +} + +template <> void init_pushconst_fastdiv(vk_op_unary_push_constants &p) { + // Compute magic values to divide by these six numbers. + init_fastdiv_values(p.ne02*p.ne01*p.ne00, p.ne0_012mp, p.ne0_012L); + init_fastdiv_values(p.ne01*p.ne00, p.ne0_01mp, p.ne0_01L); + init_fastdiv_values(p.ne00, p.ne0_0mp, p.ne0_0L); + init_fastdiv_values(p.ne12*p.ne11*p.ne10, p.ne1_012mp, p.ne1_012L); + init_fastdiv_values(p.ne11*p.ne10, p.ne1_01mp, p.ne1_01L); + init_fastdiv_values(p.ne10, p.ne1_0mp, p.ne1_0L); +} struct vk_op_binary_push_constants { uint32_t ne; @@ -1226,85 +1320,180 @@ static void ggml_vk_wait_events(vk_context& ctx, std::vector&& events ); } +// number of rows/cols for flash attention shader +static constexpr uint32_t flash_attention_num_small_rows = 32; +static std::array fa_rows_cols(uint32_t D, uint32_t clamp, ggml_type type, bool small_rows) { + GGML_UNUSED(clamp); + + // small rows, large cols + if (small_rows) { + return {flash_attention_num_small_rows, 128}; + } + // small cols to reduce register count + if (ggml_is_quantized(type) || D == 256) { + return {64, 32}; + } + return {64, 64}; +}; + +static bool ggml_vk_matmul_shmem_support(const vk_device& device, const std::vector& warptile, bool mul_mat_id) { + // Needs to be kept up to date on shader changes + const uint32_t bank_conflict_offset = device->coopmat_support ? 8 : 1; + const uint32_t type_size = device->fp16 ? 
sizeof(ggml_fp16_t) : sizeof(float); + const uint32_t warps = warptile[0] / device->subgroup_size; + + const uint32_t load_bufs = (warptile[1] + warptile[2]) * (warptile[3] + bank_conflict_offset) * type_size; + const uint32_t mmid_row_ids = mul_mat_id ? 3072 * sizeof(uint32_t) : 0; + const uint32_t coopmat_stage = device->coopmat_support ? warptile[7] * warptile[8] / warps * sizeof(float) : 0; + + return (load_bufs + mmid_row_ids + coopmat_stage) <= device->properties.limits.maxComputeSharedMemorySize; +} + static void ggml_vk_load_shaders(vk_device& device) { VK_LOG_DEBUG("ggml_vk_load_shaders(" << device->name << ")"); std::cerr << "ggml_vulkan: Compiling shaders"; + // some shaders require the subgroup size to be 16 or larger + const uint32_t subgroup_size_16 = std::max(device->subgroup_size, 16u); + // mulmat std::vector l_warptile, m_warptile, s_warptile, - l_warptile_mmq, m_warptile_mmq, s_warptile_mmq; + l_warptile_mmq, m_warptile_mmq, s_warptile_mmq, + l_warptile_mmq_k, m_warptile_mmq_k, s_warptile_mmq_k, + l_warptile_mmqid, m_warptile_mmqid, s_warptile_mmqid; std::array l_wg_denoms, m_wg_denoms, s_wg_denoms, - l_mmq_wg_denoms, m_mmq_wg_denoms, s_mmq_wg_denoms; - uint32_t l_align, m_align, s_align; + l_mmq_wg_denoms, m_mmq_wg_denoms, s_mmq_wg_denoms, + l_mmq_wg_denoms_k, m_mmq_wg_denoms_k, s_mmq_wg_denoms_k, + l_mmqid_wg_denoms, m_mmqid_wg_denoms, s_mmqid_wg_denoms; - l_warptile = { 128, 128, 128, 16, device->subgroup_size * 2, 64, 2, 4, 4, device->subgroup_size }; - m_warptile = { 128, 64, 64, 16, device->subgroup_size, 32, 2, 4, 2, device->subgroup_size }; - s_warptile = { std::max(device->subgroup_size, 16u), 32, 32, 16, 32, 32, 2, 2, 2, device->subgroup_size }; - - l_warptile_mmq = { 128, 128, 128, 32, device->subgroup_size * 2, 64, 2, 4, 4, device->subgroup_size }; - m_warptile_mmq = { 128, 64, 64, 32, device->subgroup_size, 32, 2, 4, 2, device->subgroup_size }; - s_warptile_mmq = { std::max(device->subgroup_size, 16u), 32, 32, 32, 32, 32, 2, 2, 2, device->subgroup_size }; - - l_mmq_wg_denoms = l_wg_denoms = {128, 128, 1 }; - m_mmq_wg_denoms = m_wg_denoms = { 64, 64, 1 }; - s_mmq_wg_denoms = s_wg_denoms = { 32, 32, 1 }; - - l_align = 128; - m_align = 64; - s_align = 32; - - // Fallback to smaller sizes if there's not enough shared memory. Given the current shaders - // and tile sizes, this should handle 16KB, 32KB, and 48KB+. - // This logic doesn't explicitly account for the 12KB row_ids in the mul_mat_mat_id shaders. - // But the numbers happen to work out for 32KB shared memory size that when using the medium - // size there's enough room for everything, and we assert for this. - uint32_t shmem_needed = (l_warptile[1] + l_warptile[2]) * (l_warptile[3] + 1) * sizeof(float); - if (shmem_needed > device->properties.limits.maxComputeSharedMemorySize) { - l_warptile = m_warptile; - l_wg_denoms = m_wg_denoms; - shmem_needed = (l_warptile[1] + l_warptile[2]) * (l_warptile[3] + 1) * sizeof(float); - GGML_ASSERT(shmem_needed <= device->properties.limits.maxComputeSharedMemorySize); - } - if (device->properties.limits.maxComputeSharedMemorySize >= 32768) { - // assert mul_mat_mat_id shaders will fit. 
- GGML_ASSERT(shmem_needed + 3072*4 <= device->properties.limits.maxComputeSharedMemorySize); - } - - shmem_needed = (l_warptile_mmq[1] + l_warptile_mmq[2]) * (l_warptile_mmq[3] + 1) * sizeof(float); - if (shmem_needed > device->properties.limits.maxComputeSharedMemorySize) { - if (device->properties.limits.maxComputeSharedMemorySize == 32768) { - l_warptile_mmq = m_warptile_mmq; - l_mmq_wg_denoms = m_mmq_wg_denoms; - } else { - l_warptile_mmq = s_warptile_mmq; - l_mmq_wg_denoms = s_mmq_wg_denoms; + uint32_t l_align, m_align, s_align; + if (device->coopmat2) { + // spec constants and tile sizes for non-quant matmul/matmul_id + l_warptile = { 256, 128, 256, 64 }; + m_warptile = { 256, 128, 128, 64 }; + s_warptile = { 128, 32, 16, 64 }; + l_wg_denoms = {128, 256, 1 }; + m_wg_denoms = {128, 128, 1 }; + s_wg_denoms = { 32, 16, 1 }; + + // spec constants and tile sizes for quant matmul (non-Qi_K) + l_warptile_mmq = { 256, 128, 256, 64 }; + m_warptile_mmq = { 256, 128, 128, 64 }; + s_warptile_mmq = { 256, 128, 128, 64 }; + l_mmq_wg_denoms = { 128, 256, 1 }; + m_mmq_wg_denoms = { 128, 128, 1 }; + s_mmq_wg_denoms = { 128, 128, 1 }; + + // spec constants and tile sizes for quant matmul (Qi_K) + l_warptile_mmq_k = { 256, 128, 512, 16 }; + m_warptile_mmq_k = { 256, 128, 256, 16 }; + s_warptile_mmq_k = { 256, 32, 128, 64 }; + l_mmq_wg_denoms_k = { 128, 512, 1 }; + m_mmq_wg_denoms_k = { 128, 256, 1 }; + s_mmq_wg_denoms_k = { 32, 128, 1 }; + + // spec constants and tile sizes for quant matmul_id + l_warptile_mmqid = { 256, 128, 128, 16 }; + m_warptile_mmqid = { 256, 128, 64, 16 }; + s_warptile_mmqid = { 256, 64, 64, 16 }; + l_mmqid_wg_denoms = { 128, 128, 1 }; + m_mmqid_wg_denoms = { 128, 64, 1 }; + s_mmqid_wg_denoms = { 64, 64, 1 }; + + l_align = 128; + m_align = 64; + s_align = 32; + } else { + // Matrix cores require different warp group sizes + const uint32_t tm_l = device->coopmat_support ? device->coopmat_m : 4; + const uint32_t tm_m = device->coopmat_support ? device->coopmat_m : 4; + const uint32_t tm_s = device->coopmat_support ? device->coopmat_m : 2; + const uint32_t tn_l = device->coopmat_support ? device->coopmat_n : 4; + const uint32_t tn_m = device->coopmat_support ? device->coopmat_n : 2; + const uint32_t tn_s = device->coopmat_support ? device->coopmat_n : 2; + const uint32_t tk_l = device->coopmat_support ? device->coopmat_k : 1; + const uint32_t tk_m = device->coopmat_support ? device->coopmat_k : 1; + const uint32_t tk_s = device->coopmat_support ? device->coopmat_k : 1; + + l_warptile = { 128, 128, 128, 16, device->subgroup_size * 2, 64, 2, tm_l, tn_l, tk_l, device->subgroup_size }; + m_warptile = { 128, 64, 64, 16, device->subgroup_size, 32, 2, tm_m, tn_m, tk_m, device->subgroup_size }; + s_warptile = { subgroup_size_16, 32, 32, 16, 32, 32, 2, tm_s, tn_s, tk_s, device->subgroup_size }; + + l_warptile_mmq = { 128, 128, 128, 32, device->subgroup_size * 2, 64, 2, tm_l, tn_l, tk_l, device->subgroup_size }; + m_warptile_mmq = { 128, 64, 64, 32, device->subgroup_size, 32, 2, tm_m, tn_m, tk_m, device->subgroup_size }; + s_warptile_mmq = { subgroup_size_16, 32, 32, 32, 32, 32, 2, tm_s, tn_s, tk_s, device->subgroup_size }; + + l_mmq_wg_denoms = l_wg_denoms = {128, 128, 1 }; + m_mmq_wg_denoms = m_wg_denoms = { 64, 64, 1 }; + s_mmq_wg_denoms = s_wg_denoms = { 32, 32, 1 }; + l_align = 128; + m_align = 64; + s_align = 32; + + // Fallback to smaller sizes if there's not enough shared memory. Given the current shaders + // and tile sizes, this should handle 16KB, 32KB, and 48KB+. 
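As a concrete check on that claim (an annotation, not part of the patch; numbers taken from the non-coopmat warptile assignments above, assuming 4-byte floats), the large tile overshoots a 16 KB limit while the medium tile fits:

```cpp
// l_warptile = {128, 128, 128, 16, ...}:
//   shmem_needed = (warptile[1] + warptile[2]) * (warptile[3] + 1) * sizeof(float)
//                = (128 + 128) * (16 + 1) * 4 = 17408 bytes  -> exceeds 16384 (16 KB)
// m_warptile = {128, 64, 64, 16, ...}:
//                = (64 + 64) * (16 + 1) * 4  =  8704 bytes   -> fits in 16 KB
// With the 3072 * 4 = 12288 byte (12 KB) row_ids buffer added for mul_mat_id,
// the medium tile still fits a 32 KB limit: 8704 + 12288 = 20992 <= 32768.
static_assert((128 + 128) * (16 + 1) * 4 == 17408, "large tile shared-memory estimate");
static_assert(( 64 +  64) * (16 + 1) * 4 ==  8704, "medium tile shared-memory estimate");
```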
+ // This logic doesn't explicitly account for the 12KB row_ids in the mul_mat_mat_id shaders. + // But the numbers happen to work out for 32KB shared memory size that when using the medium + // size there's enough room for everything, and we assert for this. + uint32_t shmem_needed = (l_warptile[1] + l_warptile[2]) * (l_warptile[3] + 1) * sizeof(float); + if (shmem_needed > device->properties.limits.maxComputeSharedMemorySize) { + l_warptile = m_warptile; + l_wg_denoms = m_wg_denoms; + shmem_needed = (l_warptile[1] + l_warptile[2]) * (l_warptile[3] + 1) * sizeof(float); + GGML_ASSERT(shmem_needed <= device->properties.limits.maxComputeSharedMemorySize); } + if (device->properties.limits.maxComputeSharedMemorySize >= 32768) { + // assert mul_mat_mat_id shaders will fit. + GGML_ASSERT(shmem_needed + 3072*4 <= device->properties.limits.maxComputeSharedMemorySize); + } + shmem_needed = (l_warptile_mmq[1] + l_warptile_mmq[2]) * (l_warptile_mmq[3] + 1) * sizeof(float); - GGML_ASSERT(shmem_needed <= device->properties.limits.maxComputeSharedMemorySize); - } - if (device->properties.limits.maxComputeSharedMemorySize >= 32768) { - // assert mul_mat_mat_id shaders will fit. - GGML_ASSERT(shmem_needed + 3072*4 <= device->properties.limits.maxComputeSharedMemorySize); + if (shmem_needed > device->properties.limits.maxComputeSharedMemorySize) { + if (device->properties.limits.maxComputeSharedMemorySize == 32768) { + l_warptile_mmq = m_warptile_mmq; + l_mmq_wg_denoms = m_mmq_wg_denoms; + } else { + l_warptile_mmq = s_warptile_mmq; + l_mmq_wg_denoms = s_mmq_wg_denoms; + } + shmem_needed = (l_warptile_mmq[1] + l_warptile_mmq[2]) * (l_warptile_mmq[3] + 1) * sizeof(float); + GGML_ASSERT(shmem_needed <= device->properties.limits.maxComputeSharedMemorySize); + } + if (device->properties.limits.maxComputeSharedMemorySize >= 32768) { + // assert mul_mat_mat_id shaders will fit. + GGML_ASSERT(shmem_needed + 3072*4 <= device->properties.limits.maxComputeSharedMemorySize); + } + // Disable medium and large matrix multiplication if not enough shared memory is available + // Check mmq warptiles as the largest configuration + // Throw an error if not enough for any matrix multiplication is available + if (!ggml_vk_matmul_shmem_support(device, s_warptile_mmq, false)) { + std::cerr << "ggml_vulkan: Error: Shared memory size too small for matrix multiplication." 
<< std::endl; + throw std::runtime_error("Shared memory size too small for matrix multiplication."); + } else if (!ggml_vk_matmul_shmem_support(device, m_warptile_mmq, false)) { + device->mul_mat_m = false; + device->mul_mat_l = false; + } else if (!ggml_vk_matmul_shmem_support(device, l_warptile_mmq, false)) { + device->mul_mat_l = false; + } + + // Disable mul_mat_id if not enough shared memory is available + if (!ggml_vk_matmul_shmem_support(device, s_warptile_mmq, true)) { + device->mul_mat_id_s = false; + device->mul_mat_id_m = false; + device->mul_mat_id_l = false; + } else if (!ggml_vk_matmul_shmem_support(device, m_warptile_mmq, true)) { + device->mul_mat_id_m = false; + device->mul_mat_id_l = false; + } else if (!ggml_vk_matmul_shmem_support(device, l_warptile_mmq, true)) { + device->mul_mat_id_l = false; + } } device->pipeline_matmul_f32 = std::make_shared(); device->pipeline_matmul_f32_f16 = std::make_shared(); device->pipeline_matmul_id_f32 = std::make_shared(); - device->pipeline_matmul_id_f16_f32 = std::make_shared(); - device->pipeline_matmul_id_f16 = std::make_shared(); - device->pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0] = std::make_shared(); - device->pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1] = std::make_shared(); - device->pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0] = std::make_shared(); - device->pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1] = std::make_shared(); - device->pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0] = std::make_shared(); - device->pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K] = std::make_shared(); - device->pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K] = std::make_shared(); - device->pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K] = std::make_shared(); - device->pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K] = std::make_shared(); - device->pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K] = std::make_shared(); - device->pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL] = std::make_shared(); std::vector> compiles; auto const &ggml_vk_create_pipeline = [&](vk_device& device, vk_pipeline& pipeline, const std::string &name, size_t spv_size, const void* spv_data, const std::string &entrypoint, uint32_t parameter_count, uint32_t push_constant_size, std::array wg_denoms, const std::vector& specialization_constants, uint32_t align, bool disable_robustness = false) { @@ -1320,101 +1509,272 @@ static void ggml_vk_load_shaders(vk_device& device) { compiles.push_back(std::async(ggml_vk_create_pipeline_func, std::ref(device), std::ref(pipeline), name, spv_size, spv_data, entrypoint, parameter_count, push_constant_size, wg_denoms, specialization_constants, align, disable_robustness)); }; - if (device->fp16) { +#if defined(VK_NV_cooperative_matrix2) && defined(GGML_VULKAN_COOPMAT2_GLSLC_SUPPORT) + if (device->coopmat2) { + + auto const &fa_wg_denoms = [&](uint32_t D, uint32_t clamp, ggml_type type, bool small_rows) -> std::array { + return {fa_rows_cols(D, clamp, type, small_rows)[0], 1, 1}; + }; + + auto const &fa_spec_constants = [&](uint32_t D, uint32_t clamp, ggml_type type, bool small_rows) -> std::vector { + // For large number of rows, 128 invocations seems to work best. + // For small number of rows (e.g. N==1), 256 works better. But matrix granularity for 256 is 32, so we + // can't use 256 for D==80. + uint32_t wg_size = (small_rows && (D % 32) == 0) ? 
256 : 128; + auto rows_cols = fa_rows_cols(D, clamp, type, small_rows); + return {wg_size, rows_cols[0], rows_cols[1], (D), clamp}; + }; + +#define CREATE_FA2(TYPE, NAMELC, D) \ + ggml_vk_create_pipeline(device, device->pipeline_flash_attn_f32_f16_D ## D[TYPE][0][0][0], "flash_attn_f32_f16_D" #D "_f16acc" #NAMELC, flash_attn_f32_f16_ ## NAMELC ## _f16acc_cm2_len, flash_attn_f32_f16_ ## NAMELC ## _f16acc_cm2_data, "main", 5, sizeof(vk_flash_attn_push_constants), fa_wg_denoms(D,1,TYPE,false), fa_spec_constants(D,1,TYPE,false), 1); \ + ggml_vk_create_pipeline(device, device->pipeline_flash_attn_f32_f16_D ## D[TYPE][0][0][1], "flash_attn_f32_f16_D" #D "_aligned_f16acc" #NAMELC, flash_attn_f32_f16_ ## NAMELC ## _f16acc_cm2_len, flash_attn_f32_f16_ ## NAMELC ## _f16acc_cm2_data, "main", 5, sizeof(vk_flash_attn_push_constants), fa_wg_denoms(D,0,TYPE,false), fa_spec_constants(D,0,TYPE,false), fa_rows_cols(D,0,TYPE,false)[1]); \ + ggml_vk_create_pipeline(device, device->pipeline_flash_attn_f32_f16_D ## D[TYPE][1][0][0], "flash_attn_f32_f16_D" #D "_f32acc" #NAMELC, flash_attn_f32_f16_ ## NAMELC ## _cm2_len, flash_attn_f32_f16_ ## NAMELC ## _cm2_data, "main", 5, sizeof(vk_flash_attn_push_constants), fa_wg_denoms(D,1,TYPE,false), fa_spec_constants(D,1,TYPE,false), 1); \ + ggml_vk_create_pipeline(device, device->pipeline_flash_attn_f32_f16_D ## D[TYPE][1][0][1], "flash_attn_f32_f16_D" #D "_aligned_f32acc" #NAMELC, flash_attn_f32_f16_ ## NAMELC ## _cm2_len, flash_attn_f32_f16_ ## NAMELC ## _cm2_data, "main", 5, sizeof(vk_flash_attn_push_constants), fa_wg_denoms(D,0,TYPE,false), fa_spec_constants(D,0,TYPE,false), fa_rows_cols(D,0,TYPE,false)[1]); \ + ggml_vk_create_pipeline(device, device->pipeline_flash_attn_f32_f16_D ## D[TYPE][0][1][0], "flash_attn_f32_f16_D" #D "_f16acc_smallrows" #NAMELC, flash_attn_f32_f16_ ## NAMELC ## _f16acc_cm2_len, flash_attn_f32_f16_ ## NAMELC ## _f16acc_cm2_data, "main", 5, sizeof(vk_flash_attn_push_constants), fa_wg_denoms(D,1,TYPE,true), fa_spec_constants(D,1,TYPE,true), 1); \ + ggml_vk_create_pipeline(device, device->pipeline_flash_attn_f32_f16_D ## D[TYPE][0][1][1], "flash_attn_f32_f16_D" #D "_aligned_f16acc_smallrows" #NAMELC, flash_attn_f32_f16_ ## NAMELC ## _f16acc_cm2_len, flash_attn_f32_f16_ ## NAMELC ## _f16acc_cm2_data, "main", 5, sizeof(vk_flash_attn_push_constants), fa_wg_denoms(D,0,TYPE,true), fa_spec_constants(D,0,TYPE,true), fa_rows_cols(D,0,TYPE,true)[1]); \ + ggml_vk_create_pipeline(device, device->pipeline_flash_attn_f32_f16_D ## D[TYPE][1][1][0], "flash_attn_f32_f16_D" #D "_f32acc_smallrows" #NAMELC, flash_attn_f32_f16_ ## NAMELC ## _cm2_len, flash_attn_f32_f16_ ## NAMELC ## _cm2_data, "main", 5, sizeof(vk_flash_attn_push_constants), fa_wg_denoms(D,1,TYPE,true), fa_spec_constants(D,1,TYPE,true), 1); \ + ggml_vk_create_pipeline(device, device->pipeline_flash_attn_f32_f16_D ## D[TYPE][1][1][1], "flash_attn_f32_f16_D" #D "_aligned_f32acc_smallrows" #NAMELC, flash_attn_f32_f16_ ## NAMELC ## _cm2_len, flash_attn_f32_f16_ ## NAMELC ## _cm2_data, "main", 5, sizeof(vk_flash_attn_push_constants), fa_wg_denoms(D,0,TYPE,true), fa_spec_constants(D,0,TYPE,true), fa_rows_cols(D,0,TYPE,true)[1]); \ + +#define CREATE_FA(TYPE, NAMELC) \ + CREATE_FA2(TYPE, NAMELC, 64) \ + CREATE_FA2(TYPE, NAMELC, 80) \ + CREATE_FA2(TYPE, NAMELC, 96) \ + CREATE_FA2(TYPE, NAMELC, 112) \ + CREATE_FA2(TYPE, NAMELC, 128) \ + CREATE_FA2(TYPE, NAMELC, 256) + + CREATE_FA(GGML_TYPE_F16, f16) + CREATE_FA(GGML_TYPE_Q4_0, q4_0) + CREATE_FA(GGML_TYPE_Q4_1, q4_1) + CREATE_FA(GGML_TYPE_Q5_0, q5_0) + 
CREATE_FA(GGML_TYPE_Q5_1, q5_1) + CREATE_FA(GGML_TYPE_Q8_0, q8_0) + // K dequants currently disabled because D dimension is rounded up to 256 and runs inefficiently + //CREATE_FA(GGML_TYPE_Q2_K, q2_k) + //CREATE_FA(GGML_TYPE_Q3_K, q3_k) + //CREATE_FA(GGML_TYPE_Q4_K, q4_k) + //CREATE_FA(GGML_TYPE_Q5_K, q5_k) + //CREATE_FA(GGML_TYPE_Q6_K, q6_k) + CREATE_FA(GGML_TYPE_IQ4_NL, iq4_nl) +#undef CREATE_FA + // Create 6 variants, {s,m,l}x{unaligned,aligned} #define CREATE_MM(PIPELINE_NAME, NAMELC, F16ACC, WG_DENOMS, WARPTILE, PUSHCONST, PARAMCOUNT) \ - ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->l, #NAMELC #F16ACC "_l", NAMELC ## F16ACC ## _len, NAMELC ## F16ACC ## _data, "main", PARAMCOUNT, sizeof(PUSHCONST), l_ ## WG_DENOMS, l_ ## WARPTILE, 1); \ - ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->m, #NAMELC #F16ACC "_m", NAMELC ## F16ACC ## _len, NAMELC ## F16ACC ## _data, "main", PARAMCOUNT, sizeof(PUSHCONST), m_ ## WG_DENOMS, m_ ## WARPTILE, 1); \ - ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->s, #NAMELC #F16ACC "_s", NAMELC ## F16ACC ## _len, NAMELC ## F16ACC ## _data, "main", PARAMCOUNT, sizeof(PUSHCONST), s_ ## WG_DENOMS, s_ ## WARPTILE, 1); \ - ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_l, #NAMELC #F16ACC "_aligned_l", NAMELC ## _aligned ## F16ACC ## _len, NAMELC ## _aligned ## F16ACC ## _data, "main", PARAMCOUNT, sizeof(PUSHCONST), l_ ## WG_DENOMS, l_ ## WARPTILE, l_align); \ - ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_m, #NAMELC #F16ACC "_aligned_m", NAMELC ## _aligned ## F16ACC ## _len, NAMELC ## _aligned ## F16ACC ## _data, "main", PARAMCOUNT, sizeof(PUSHCONST), m_ ## WG_DENOMS, m_ ## WARPTILE, m_align); \ - ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_s, #NAMELC #F16ACC "_aligned_s", NAMELC ## _aligned ## F16ACC ## _len, NAMELC ## _aligned ## F16ACC ## _data, "main", PARAMCOUNT, sizeof(PUSHCONST), s_ ## WG_DENOMS, s_ ## WARPTILE, s_align); \ - - CREATE_MM(pipeline_matmul_f32, matmul_f32_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_matmul_f32_f16, matmul_f32_f16, , wg_denoms, warptile, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_matmul_f16.f32acc, matmul_f16, , wg_denoms, warptile, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_matmul_f16_f32.f32acc, matmul_f16_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 3); - - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0].f32acc, matmul_q4_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1].f32acc, matmul_q4_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0].f32acc, matmul_q5_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1].f32acc, matmul_q5_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0].f32acc, matmul_q8_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q2_K].f32acc, matmul_q2_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q3_K].f32acc, matmul_q3_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_K].f32acc, matmul_q4_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - 
CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_K].f32acc, matmul_q5_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q6_K].f32acc, matmul_q6_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_IQ4_NL].f32acc, matmul_iq4_nl_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->l, #NAMELC #F16ACC "_l", NAMELC ## F16ACC ## _cm2_len, NAMELC ## F16ACC ## _cm2_data, "main", PARAMCOUNT, sizeof(PUSHCONST), l_ ## WG_DENOMS, l_ ## WARPTILE, 1); \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->m, #NAMELC #F16ACC "_m", NAMELC ## F16ACC ## _cm2_len, NAMELC ## F16ACC ## _cm2_data, "main", PARAMCOUNT, sizeof(PUSHCONST), m_ ## WG_DENOMS, m_ ## WARPTILE, 1); \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->s, #NAMELC #F16ACC "_s", NAMELC ## F16ACC ## _cm2_len, NAMELC ## F16ACC ## _cm2_data, "main", PARAMCOUNT, sizeof(PUSHCONST), s_ ## WG_DENOMS, s_ ## WARPTILE, 1); \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_l, #NAMELC #F16ACC "_aligned_l", NAMELC ## _aligned ## F16ACC ## _cm2_len, NAMELC ## _aligned ## F16ACC ## _cm2_data, "main", PARAMCOUNT, sizeof(PUSHCONST), l_ ## WG_DENOMS, l_ ## WARPTILE, l_align); \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_m, #NAMELC #F16ACC "_aligned_m", NAMELC ## _aligned ## F16ACC ## _cm2_len, NAMELC ## _aligned ## F16ACC ## _cm2_data, "main", PARAMCOUNT, sizeof(PUSHCONST), m_ ## WG_DENOMS, m_ ## WARPTILE, m_align); \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_s, #NAMELC #F16ACC "_aligned_s", NAMELC ## _aligned ## F16ACC ## _cm2_len, NAMELC ## _aligned ## F16ACC ## _cm2_data, "main", PARAMCOUNT, sizeof(PUSHCONST), s_ ## WG_DENOMS, s_ ## WARPTILE, s_align); \ + + // Create 2 variants, {f16,f32} accumulator +#define CREATE_MM2(PIPELINE_NAME, NAMELC, WG_DENOMS, WARPTILE, PUSHCONST, PARAMCOUNT) \ + CREATE_MM(PIPELINE_NAME . f16acc, NAMELC, _f16acc, WG_DENOMS, WARPTILE, PUSHCONST, PARAMCOUNT) \ + CREATE_MM(PIPELINE_NAME . 
f32acc, NAMELC, , WG_DENOMS, WARPTILE, PUSHCONST, PARAMCOUNT) \ + + CREATE_MM(pipeline_matmul_f32, matmul_f32_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 3) + CREATE_MM(pipeline_matmul_f32_f16, matmul_f32_f16, , wg_denoms, warptile, vk_mat_mat_push_constants, 3) + + CREATE_MM2(pipeline_matmul_f16, matmul_f16, wg_denoms, warptile, vk_mat_mat_push_constants, 3) + CREATE_MM2(pipeline_matmul_f16_f32, matmul_f16_f32, wg_denoms, warptile, vk_mat_mat_push_constants, 3) + CREATE_MM(pipeline_dequant_mul_mat_mat_f16[GGML_TYPE_Q4_0].f16acc, matmul_q4_0_f16, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3) + CREATE_MM(pipeline_dequant_mul_mat_mat_f16[GGML_TYPE_Q4_1].f16acc, matmul_q4_1_f16, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3) + CREATE_MM(pipeline_dequant_mul_mat_mat_f16[GGML_TYPE_Q5_0].f16acc, matmul_q5_0_f16, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3) + CREATE_MM(pipeline_dequant_mul_mat_mat_f16[GGML_TYPE_Q5_1].f16acc, matmul_q5_1_f16, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3) + CREATE_MM(pipeline_dequant_mul_mat_mat_f16[GGML_TYPE_Q8_0].f16acc, matmul_q8_0_f16, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3) + CREATE_MM(pipeline_dequant_mul_mat_mat_f16[GGML_TYPE_Q2_K].f16acc, matmul_q2_k_f16, _f16acc, mmq_wg_denoms_k, warptile_mmq_k, vk_mat_mat_push_constants, 3) + CREATE_MM(pipeline_dequant_mul_mat_mat_f16[GGML_TYPE_Q3_K].f16acc, matmul_q3_k_f16, _f16acc, mmq_wg_denoms_k, warptile_mmq_k, vk_mat_mat_push_constants, 3) + CREATE_MM(pipeline_dequant_mul_mat_mat_f16[GGML_TYPE_Q4_K].f16acc, matmul_q4_k_f16, _f16acc, mmq_wg_denoms_k, warptile_mmq_k, vk_mat_mat_push_constants, 3) + CREATE_MM(pipeline_dequant_mul_mat_mat_f16[GGML_TYPE_Q5_K].f16acc, matmul_q5_k_f16, _f16acc, mmq_wg_denoms_k, warptile_mmq_k, vk_mat_mat_push_constants, 3) + CREATE_MM(pipeline_dequant_mul_mat_mat_f16[GGML_TYPE_Q6_K].f16acc, matmul_q6_k_f16, _f16acc, mmq_wg_denoms_k, warptile_mmq_k, vk_mat_mat_push_constants, 3) + CREATE_MM(pipeline_dequant_mul_mat_mat_f16[GGML_TYPE_IQ4_NL].f16acc, matmul_iq4_nl_f16, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3) + + CREATE_MM(pipeline_matmul_id_f32, matmul_id_f32_f32, , wg_denoms, warptile, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_matmul_id_f16, matmul_id_f16, wg_denoms, warptile, vk_mat_mat_id_push_constants, 4) + CREATE_MM2(pipeline_matmul_id_f16_f32, matmul_id_f16_f32, wg_denoms, warptile, vk_mat_mat_id_push_constants, 4) + + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0].f16acc, matmul_id_q4_0_f32, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1].f16acc, matmul_id_q4_1_f32, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0].f16acc, matmul_id_q5_0_f32, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1].f16acc, matmul_id_q5_1_f32, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0].f16acc, matmul_id_q8_0_f32, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K].f16acc, matmul_id_q2_k_f32, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K].f16acc, matmul_id_q3_k_f32, 
, mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K].f16acc, matmul_id_q4_k_f32, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K].f16acc, matmul_id_q5_k_f32, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K].f16acc, matmul_id_q6_k_f32, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL].f16acc, matmul_id_iq4_nl_f32, , mmqid_wg_denoms, warptile_mmqid, vk_mat_mat_id_push_constants, 4) +#undef CREATE_MM +#undef CREATE_MM2 + } else +#endif // defined(VK_NV_cooperative_matrix2) && defined(GGML_VULKAN_COOPMAT2_GLSLC_SUPPORT) + if (device->coopmat_support) { + // Create 6 variants, {s,m,l}x{unaligned,aligned} +#define CREATE_MM(PIPELINE_NAME, NAMELC, F16ACC, WG_DENOMS, WARPTILE, PUSHCONST, PARAMCOUNT, ID) \ + if (device->mul_mat ## ID ## _l) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->l, #NAMELC #F16ACC "_l", NAMELC ## F16ACC ## _coopmat_len, NAMELC ## F16ACC ## _coopmat_data, "main", PARAMCOUNT, sizeof(PUSHCONST), l_ ## WG_DENOMS, l_ ## WARPTILE, 1); \ + if (device->mul_mat ## ID ## _m) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->m, #NAMELC #F16ACC "_m", NAMELC ## F16ACC ## _coopmat_len, NAMELC ## F16ACC ## _coopmat_data, "main", PARAMCOUNT, sizeof(PUSHCONST), m_ ## WG_DENOMS, m_ ## WARPTILE, 1); \ + if (device->mul_mat ## ID ## _s) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->s, #NAMELC #F16ACC "_s", NAMELC ## F16ACC ## _coopmat_len, NAMELC ## F16ACC ## _coopmat_data, "main", PARAMCOUNT, sizeof(PUSHCONST), s_ ## WG_DENOMS, s_ ## WARPTILE, 1); \ + if (device->mul_mat ## ID ## _l) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_l, #NAMELC #F16ACC "_aligned_l", NAMELC ## _aligned ## F16ACC ## _coopmat_len, NAMELC ## _aligned ## F16ACC ## _coopmat_data, "main", PARAMCOUNT, sizeof(PUSHCONST), l_ ## WG_DENOMS, l_ ## WARPTILE, l_align); \ + if (device->mul_mat ## ID ## _m) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_m, #NAMELC #F16ACC "_aligned_m", NAMELC ## _aligned ## F16ACC ## _coopmat_len, NAMELC ## _aligned ## F16ACC ## _coopmat_data, "main", PARAMCOUNT, sizeof(PUSHCONST), m_ ## WG_DENOMS, m_ ## WARPTILE, m_align); \ + if (device->mul_mat ## ID ## _s) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_s, #NAMELC #F16ACC "_aligned_s", NAMELC ## _aligned ## F16ACC ## _coopmat_len, NAMELC ## _aligned ## F16ACC ## _coopmat_data, "main", PARAMCOUNT, sizeof(PUSHCONST), s_ ## WG_DENOMS, s_ ## WARPTILE, s_align); \ + + // Create 2 variants, {f16,f32} accumulator +#define CREATE_MM2(PIPELINE_NAME, NAMELC, WG_DENOMS, WARPTILE, PUSHCONST, PARAMCOUNT, ID) \ + CREATE_MM(PIPELINE_NAME . f16acc, NAMELC, _f16acc, WG_DENOMS, WARPTILE, PUSHCONST, PARAMCOUNT, ID) \ + CREATE_MM(PIPELINE_NAME . 
f32acc, NAMELC, , WG_DENOMS, WARPTILE, PUSHCONST, PARAMCOUNT, ID) \ + + CREATE_MM(pipeline_matmul_f32, matmul_f32_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_matmul_f32_f16, matmul_f32_f16, , wg_denoms, warptile, vk_mat_mat_push_constants, 3, ); + CREATE_MM2(pipeline_matmul_f16, matmul_f16, wg_denoms, warptile, vk_mat_mat_push_constants, 3, ); + CREATE_MM2(pipeline_matmul_f16_f32, matmul_f16_f32, wg_denoms, warptile, vk_mat_mat_push_constants, 3, ); + + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0].f16acc, matmul_q4_0_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1].f16acc, matmul_q4_1_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0].f16acc, matmul_q5_0_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1].f16acc, matmul_q5_1_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0].f16acc, matmul_q8_0_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q2_K].f16acc, matmul_q2_k_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q3_K].f16acc, matmul_q3_k_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_K].f16acc, matmul_q4_k_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_K].f16acc, matmul_q5_k_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q6_K].f16acc, matmul_q6_k_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_IQ4_NL].f16acc, matmul_iq4_nl_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); // If there's not enough shared memory for row_ids and the result tile, don't create these pipelines. 
- if (device->properties.limits.maxComputeSharedMemorySize >= 32768) { - CREATE_MM(pipeline_matmul_id_f32, matmul_id_f32_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 4); - CREATE_MM(pipeline_matmul_id_f16, matmul_id_f16, , wg_denoms, warptile, vk_mat_mat_push_constants, 4); - CREATE_MM(pipeline_matmul_id_f16_f32, matmul_id_f16_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 4); - - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0], matmul_id_q4_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1], matmul_id_q4_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0], matmul_id_q5_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1], matmul_id_q5_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0], matmul_id_q8_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K], matmul_id_q2_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K], matmul_id_q3_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K], matmul_id_q4_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K], matmul_id_q5_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K], matmul_id_q6_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL], matmul_id_iq4_nl_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); + if (device->mul_mat_id_s || device->mul_mat_id_m || device->mul_mat_id_l) { + CREATE_MM(pipeline_matmul_id_f32, matmul_id_f32_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 4, _id); + CREATE_MM2(pipeline_matmul_id_f16, matmul_id_f16, wg_denoms, warptile, vk_mat_mat_push_constants, 4, _id); + CREATE_MM2(pipeline_matmul_id_f16_f32, matmul_id_f16_f32, wg_denoms, warptile, vk_mat_mat_push_constants, 4, _id); + + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0].f16acc, matmul_id_q4_0_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1].f16acc, matmul_id_q4_1_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0].f16acc, matmul_id_q5_0_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1].f16acc, matmul_id_q5_1_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0].f16acc, matmul_id_q8_0_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K].f16acc, matmul_id_q2_k_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K].f16acc, matmul_id_q3_k_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, 
_id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K].f16acc, matmul_id_q4_k_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K].f16acc, matmul_id_q5_k_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K].f16acc, matmul_id_q6_k_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL].f16acc, matmul_id_iq4_nl_f32, _f16acc, wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); } #undef CREATE_MM - } else { + } else if (device->fp16) { // Create 6 variants, {s,m,l}x{unaligned,aligned} -#define CREATE_MM(PIPELINE_NAME, NAMELC, F16ACC, WG_DENOMS, WARPTILE, PUSHCONST, PARAMCOUNT) \ - ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->l, #NAMELC #F16ACC "_l", NAMELC ## F16ACC ## _fp32_len, NAMELC ## F16ACC ## _fp32_data, "main", PARAMCOUNT, sizeof(PUSHCONST), l_ ## WG_DENOMS, l_ ## WARPTILE, 1); \ - ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->m, #NAMELC #F16ACC "_m", NAMELC ## F16ACC ## _fp32_len, NAMELC ## F16ACC ## _fp32_data, "main", PARAMCOUNT, sizeof(PUSHCONST), m_ ## WG_DENOMS, m_ ## WARPTILE, 1); \ - ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->s, #NAMELC #F16ACC "_s", NAMELC ## F16ACC ## _fp32_len, NAMELC ## F16ACC ## _fp32_data, "main", PARAMCOUNT, sizeof(PUSHCONST), s_ ## WG_DENOMS, s_ ## WARPTILE, 1); \ - ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_l, #NAMELC #F16ACC "_aligned_l", NAMELC ## _aligned ## F16ACC ## _fp32_len, NAMELC ## _aligned ## F16ACC ## _fp32_data, "main", PARAMCOUNT, sizeof(PUSHCONST), l_ ## WG_DENOMS, l_ ## WARPTILE, l_align); \ - ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_m, #NAMELC #F16ACC "_aligned_m", NAMELC ## _aligned ## F16ACC ## _fp32_len, NAMELC ## _aligned ## F16ACC ## _fp32_data, "main", PARAMCOUNT, sizeof(PUSHCONST), m_ ## WG_DENOMS, m_ ## WARPTILE, m_align); \ - ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_s, #NAMELC #F16ACC "_aligned_s", NAMELC ## _aligned ## F16ACC ## _fp32_len, NAMELC ## _aligned ## F16ACC ## _fp32_data, "main", PARAMCOUNT, sizeof(PUSHCONST), s_ ## WG_DENOMS, s_ ## WARPTILE, s_align); \ - - CREATE_MM(pipeline_matmul_f32, matmul_f32_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_matmul_f32_f16, matmul_f32_f16, , wg_denoms, warptile, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_matmul_f16.f32acc, matmul_f16, , wg_denoms, warptile, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_matmul_f16_f32.f32acc, matmul_f16_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 3); - - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0].f32acc, matmul_q4_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1].f32acc, matmul_q4_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0].f32acc, matmul_q5_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1].f32acc, matmul_q5_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0].f32acc, matmul_q8_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q2_K].f32acc, 
matmul_q2_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q3_K].f32acc, matmul_q3_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_K].f32acc, matmul_q4_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_K].f32acc, matmul_q5_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q6_K].f32acc, matmul_q6_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); - CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_IQ4_NL].f32acc, matmul_iq4_nl_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3); +#define CREATE_MM(PIPELINE_NAME, NAMELC, F16ACC, WG_DENOMS, WARPTILE, PUSHCONST, PARAMCOUNT, ID) \ + if (device->mul_mat ## ID ## _l) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->l, #NAMELC #F16ACC "_l", NAMELC ## F16ACC ## _len, NAMELC ## F16ACC ## _data, "main", PARAMCOUNT, sizeof(PUSHCONST), l_ ## WG_DENOMS, l_ ## WARPTILE, 1); \ + if (device->mul_mat ## ID ## _m) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->m, #NAMELC #F16ACC "_m", NAMELC ## F16ACC ## _len, NAMELC ## F16ACC ## _data, "main", PARAMCOUNT, sizeof(PUSHCONST), m_ ## WG_DENOMS, m_ ## WARPTILE, 1); \ + if (device->mul_mat ## ID ## _s) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->s, #NAMELC #F16ACC "_s", NAMELC ## F16ACC ## _len, NAMELC ## F16ACC ## _data, "main", PARAMCOUNT, sizeof(PUSHCONST), s_ ## WG_DENOMS, s_ ## WARPTILE, 1); \ + if (device->mul_mat ## ID ## _l) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_l, #NAMELC #F16ACC "_aligned_l", NAMELC ## _aligned ## F16ACC ## _len, NAMELC ## _aligned ## F16ACC ## _data, "main", PARAMCOUNT, sizeof(PUSHCONST), l_ ## WG_DENOMS, l_ ## WARPTILE, l_align); \ + if (device->mul_mat ## ID ## _m) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_m, #NAMELC #F16ACC "_aligned_m", NAMELC ## _aligned ## F16ACC ## _len, NAMELC ## _aligned ## F16ACC ## _data, "main", PARAMCOUNT, sizeof(PUSHCONST), m_ ## WG_DENOMS, m_ ## WARPTILE, m_align); \ + if (device->mul_mat ## ID ## _s) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_s, #NAMELC #F16ACC "_aligned_s", NAMELC ## _aligned ## F16ACC ## _len, NAMELC ## _aligned ## F16ACC ## _data, "main", PARAMCOUNT, sizeof(PUSHCONST), s_ ## WG_DENOMS, s_ ## WARPTILE, s_align); \ + + CREATE_MM(pipeline_matmul_f32, matmul_f32_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_matmul_f32_f16, matmul_f32_f16, , wg_denoms, warptile, vk_mat_mat_push_constants, 3, ); + CREATE_MM2(pipeline_matmul_f16, matmul_f16, wg_denoms, warptile, vk_mat_mat_push_constants, 3, ); + CREATE_MM2(pipeline_matmul_f16_f32, matmul_f16_f32, wg_denoms, warptile, vk_mat_mat_push_constants, 3, ); + + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0].f16acc, matmul_q4_0_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1].f16acc, matmul_q4_1_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0].f16acc, matmul_q5_0_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1].f16acc, matmul_q5_1_f32, _f16acc, mmq_wg_denoms, warptile_mmq, 
vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0].f16acc, matmul_q8_0_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q2_K].f16acc, matmul_q2_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q3_K].f16acc, matmul_q3_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_K].f16acc, matmul_q4_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_K].f16acc, matmul_q5_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q6_K].f16acc, matmul_q6_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_IQ4_NL].f16acc, matmul_iq4_nl_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); // If there's not enough shared memory for row_ids and the result tile, don't create these pipelines. - if (device->properties.limits.maxComputeSharedMemorySize >= 32768) { - CREATE_MM(pipeline_matmul_id_f32, matmul_id_f32_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 4); - CREATE_MM(pipeline_matmul_id_f16, matmul_id_f16, , wg_denoms, warptile, vk_mat_mat_push_constants, 4); - CREATE_MM(pipeline_matmul_id_f16_f32, matmul_id_f16_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 4); - - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0], matmul_id_q4_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1], matmul_id_q4_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0], matmul_id_q5_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1], matmul_id_q5_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0], matmul_id_q8_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K], matmul_id_q2_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K], matmul_id_q3_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K], matmul_id_q4_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K], matmul_id_q5_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K], matmul_id_q6_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); - CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL], matmul_id_iq4_nl_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4); + if (device->mul_mat_id_s || device->mul_mat_id_m || device->mul_mat_id_l) { + CREATE_MM(pipeline_matmul_id_f32, matmul_id_f32_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 4, _id); + CREATE_MM2(pipeline_matmul_id_f16, matmul_id_f16, wg_denoms, warptile, vk_mat_mat_push_constants, 4, _id); + 
CREATE_MM2(pipeline_matmul_id_f16_f32, matmul_id_f16_f32, wg_denoms, warptile, vk_mat_mat_push_constants, 4, _id); + + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0].f16acc, matmul_id_q4_0_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1].f16acc, matmul_id_q4_1_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0].f16acc, matmul_id_q5_0_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1].f16acc, matmul_id_q5_1_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0].f16acc, matmul_id_q8_0_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K].f16acc, matmul_id_q2_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K].f16acc, matmul_id_q3_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K].f16acc, matmul_id_q4_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K].f16acc, matmul_id_q5_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K].f16acc, matmul_id_q6_k_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL].f16acc, matmul_id_iq4_nl_f32, _f16acc, mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); } +#undef CREATE_MM + } else { + // Create 6 variants, {s,m,l}x{unaligned,aligned} +#define CREATE_MM(PIPELINE_NAME, NAMELC, F16ACC, WG_DENOMS, WARPTILE, PUSHCONST, PARAMCOUNT, ID) \ + if (device->mul_mat ## ID ## _l) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->l, #NAMELC #F16ACC "_l", NAMELC ## F16ACC ## _fp32_len, NAMELC ## F16ACC ## _fp32_data, "main", PARAMCOUNT, sizeof(PUSHCONST), l_ ## WG_DENOMS, l_ ## WARPTILE, 1); \ + if (device->mul_mat ## ID ## _m) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->m, #NAMELC #F16ACC "_m", NAMELC ## F16ACC ## _fp32_len, NAMELC ## F16ACC ## _fp32_data, "main", PARAMCOUNT, sizeof(PUSHCONST), m_ ## WG_DENOMS, m_ ## WARPTILE, 1); \ + if (device->mul_mat ## ID ## _s) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->s, #NAMELC #F16ACC "_s", NAMELC ## F16ACC ## _fp32_len, NAMELC ## F16ACC ## _fp32_data, "main", PARAMCOUNT, sizeof(PUSHCONST), s_ ## WG_DENOMS, s_ ## WARPTILE, 1); \ + if (device->mul_mat ## ID ## _l) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_l, #NAMELC #F16ACC "_aligned_l", NAMELC ## _aligned ## F16ACC ## _fp32_len, NAMELC ## _aligned ## F16ACC ## _fp32_data, "main", PARAMCOUNT, sizeof(PUSHCONST), l_ ## WG_DENOMS, l_ ## WARPTILE, l_align); \ + if (device->mul_mat ## ID ## _m) \ + ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_m, #NAMELC #F16ACC "_aligned_m", NAMELC ## _aligned ## F16ACC ## _fp32_len, NAMELC ## _aligned ## F16ACC ## _fp32_data, "main", PARAMCOUNT, sizeof(PUSHCONST), m_ ## WG_DENOMS, m_ ## WARPTILE, m_align); \ + if (device->mul_mat ## ID ## _s) \ + 
ggml_vk_create_pipeline(device, device-> PIPELINE_NAME ->a_s, #NAMELC #F16ACC "_aligned_s", NAMELC ## _aligned ## F16ACC ## _fp32_len, NAMELC ## _aligned ## F16ACC ## _fp32_data, "main", PARAMCOUNT, sizeof(PUSHCONST), s_ ## WG_DENOMS, s_ ## WARPTILE, s_align); \ + + CREATE_MM(pipeline_matmul_f32, matmul_f32_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_matmul_f32_f16, matmul_f32_f16, , wg_denoms, warptile, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_matmul_f16.f32acc, matmul_f16, , wg_denoms, warptile, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_matmul_f16_f32.f32acc, matmul_f16_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 3, ); + + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0].f32acc, matmul_q4_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1].f32acc, matmul_q4_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0].f32acc, matmul_q5_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1].f32acc, matmul_q5_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0].f32acc, matmul_q8_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q2_K].f32acc, matmul_q2_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q3_K].f32acc, matmul_q3_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_K].f32acc, matmul_q4_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_K].f32acc, matmul_q5_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_Q6_K].f32acc, matmul_q6_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + CREATE_MM(pipeline_dequant_mul_mat_mat[GGML_TYPE_IQ4_NL].f32acc, matmul_iq4_nl_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_push_constants, 3, ); + + // If there's not enough shared memory for row_ids and the result tile, don't create these pipelines. 
+ if (device->mul_mat_id_s || device->mul_mat_id_m || device->mul_mat_id_l) { + CREATE_MM(pipeline_matmul_id_f32, matmul_id_f32_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 4, _id); + CREATE_MM(pipeline_matmul_id_f16.f32acc, matmul_id_f16, , wg_denoms, warptile, vk_mat_mat_push_constants, 4, _id); + CREATE_MM(pipeline_matmul_id_f16_f32.f32acc, matmul_id_f16_f32, , wg_denoms, warptile, vk_mat_mat_push_constants, 4, _id); + + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_0].f32acc, matmul_id_q4_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_1].f32acc, matmul_id_q4_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_0].f32acc, matmul_id_q5_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_1].f32acc, matmul_id_q5_1_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q8_0].f32acc, matmul_id_q8_0_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q2_K].f32acc, matmul_id_q2_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q3_K].f32acc, matmul_id_q3_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q4_K].f32acc, matmul_id_q4_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q5_K].f32acc, matmul_id_q5_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_Q6_K].f32acc, matmul_id_q6_k_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + CREATE_MM(pipeline_dequant_mul_mat_mat_id[GGML_TYPE_IQ4_NL].f32acc, matmul_id_iq4_nl_f32, , mmq_wg_denoms, warptile_mmq, vk_mat_mat_id_push_constants, 4, _id); + } +#undef CREATE_MM2 #undef CREATE_MM } @@ -1427,11 +1787,11 @@ static void ggml_vk_load_shaders(vk_device& device) { ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q5_0], "mul_mat_vec_q5_0_f32_f32", mul_mat_vec_q5_0_f32_f32_len, mul_mat_vec_q5_0_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size, 2}, 1, true); ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q5_1], "mul_mat_vec_q5_1_f32_f32", mul_mat_vec_q5_1_f32_f32_len, mul_mat_vec_q5_1_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size, 2}, 1, true); ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q8_0], "mul_mat_vec_q8_0_f32_f32", mul_mat_vec_q8_0_f32_f32_len, mul_mat_vec_q8_0_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {device->subgroup_size, 1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q2_K], "mul_mat_vec_q2_k_f32_f32", mul_mat_vec_q2_k_f32_f32_len, mul_mat_vec_q2_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q3_K], "mul_mat_vec_q3_k_f32_f32", 
mul_mat_vec_q3_k_f32_f32_len, mul_mat_vec_q3_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q4_K], "mul_mat_vec_q4_k_f32_f32", mul_mat_vec_q4_k_f32_f32_len, mul_mat_vec_q4_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q5_K], "mul_mat_vec_q5_k_f32_f32", mul_mat_vec_q5_k_f32_f32_len, mul_mat_vec_q5_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q6_K], "mul_mat_vec_q6_k_f32_f32", mul_mat_vec_q6_k_f32_f32_len, mul_mat_vec_q6_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q2_K], "mul_mat_vec_q2_k_f32_f32", mul_mat_vec_q2_k_f32_f32_len, mul_mat_vec_q2_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q3_K], "mul_mat_vec_q3_k_f32_f32", mul_mat_vec_q3_k_f32_f32_len, mul_mat_vec_q3_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q4_K], "mul_mat_vec_q4_k_f32_f32", mul_mat_vec_q4_k_f32_f32_len, mul_mat_vec_q4_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q5_K], "mul_mat_vec_q5_k_f32_f32", mul_mat_vec_q5_k_f32_f32_len, mul_mat_vec_q5_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_Q6_K], "mul_mat_vec_q6_k_f32_f32", mul_mat_vec_q6_k_f32_f32_len, mul_mat_vec_q6_k_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f32_f32[GGML_TYPE_IQ4_NL], "mul_mat_vec_iq4_nl_f32_f32", mul_mat_vec_iq4_nl_f32_f32_len, mul_mat_vec_iq4_nl_f32_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size, 2}, 1, true); ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_F32 ], "mul_mat_vec_f32_f16_f32", mul_mat_vec_f32_f16_f32_len, mul_mat_vec_f32_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size, 2}, 1); @@ -1441,11 +1801,11 @@ static void ggml_vk_load_shaders(vk_device& device) { ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q5_0], "mul_mat_vec_q5_0_f16_f32", mul_mat_vec_q5_0_f16_f32_len, mul_mat_vec_q5_0_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size, 2}, 1, true); ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q5_1], "mul_mat_vec_q5_1_f16_f32", mul_mat_vec_q5_1_f16_f32_len, mul_mat_vec_q5_1_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size, 2}, 1, true); 
ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q8_0], "mul_mat_vec_q8_0_f16_f32", mul_mat_vec_q8_0_f16_f32_len, mul_mat_vec_q8_0_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {device->subgroup_size, 1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q2_K], "mul_mat_vec_q2_k_f16_f32", mul_mat_vec_q2_k_f16_f32_len, mul_mat_vec_q2_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q3_K], "mul_mat_vec_q3_k_f16_f32", mul_mat_vec_q3_k_f16_f32_len, mul_mat_vec_q3_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q4_K], "mul_mat_vec_q4_k_f16_f32", mul_mat_vec_q4_k_f16_f32_len, mul_mat_vec_q4_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q5_K], "mul_mat_vec_q5_k_f16_f32", mul_mat_vec_q5_k_f16_f32_len, mul_mat_vec_q5_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q6_K], "mul_mat_vec_q6_k_f16_f32", mul_mat_vec_q6_k_f16_f32_len, mul_mat_vec_q6_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q2_K], "mul_mat_vec_q2_k_f16_f32", mul_mat_vec_q2_k_f16_f32_len, mul_mat_vec_q2_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q3_K], "mul_mat_vec_q3_k_f16_f32", mul_mat_vec_q3_k_f16_f32_len, mul_mat_vec_q3_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q4_K], "mul_mat_vec_q4_k_f16_f32", mul_mat_vec_q4_k_f16_f32_len, mul_mat_vec_q4_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q5_K], "mul_mat_vec_q5_k_f16_f32", mul_mat_vec_q5_k_f16_f32_len, mul_mat_vec_q5_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_Q6_K], "mul_mat_vec_q6_k_f16_f32", mul_mat_vec_q6_k_f16_f32_len, mul_mat_vec_q6_k_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_f16_f32[GGML_TYPE_IQ4_NL], "mul_mat_vec_iq4_nl_f16_f32", mul_mat_vec_iq4_nl_f16_f32_len, mul_mat_vec_iq4_nl_f16_f32_data, "main", 3, sizeof(vk_mat_vec_push_constants), {2, 1, 1}, {device->subgroup_size}, 1, true); ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_F32 ], "mul_mat_vec_id_f32_f32", mul_mat_vec_id_f32_f32_len, mul_mat_vec_id_f32_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {2, 1, 1}, 
{device->subgroup_size, 2}, 1); @@ -1455,11 +1815,11 @@ static void ggml_vk_load_shaders(vk_device& device) { ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q5_0], "mul_mat_vec_id_q5_0_f32", mul_mat_vec_id_q5_0_f32_len, mul_mat_vec_id_q5_0_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {2, 1, 1}, {device->subgroup_size, 2}, 1, true); ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q5_1], "mul_mat_vec_id_q5_1_f32", mul_mat_vec_id_q5_1_f32_len, mul_mat_vec_id_q5_1_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {2, 1, 1}, {device->subgroup_size, 2}, 1, true); ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q8_0], "mul_mat_vec_id_q8_0_f32", mul_mat_vec_id_q8_0_f32_len, mul_mat_vec_id_q8_0_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {1, 1, 1}, {device->subgroup_size, 1}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q2_K], "mul_mat_vec_id_q2_k_f32", mul_mat_vec_id_q2_k_f32_len, mul_mat_vec_id_q2_k_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q3_K], "mul_mat_vec_id_q3_k_f32", mul_mat_vec_id_q3_k_f32_len, mul_mat_vec_id_q3_k_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q4_K], "mul_mat_vec_id_q4_k_f32", mul_mat_vec_id_q4_k_f32_len, mul_mat_vec_id_q4_k_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q5_K], "mul_mat_vec_id_q5_k_f32", mul_mat_vec_id_q5_k_f32_len, mul_mat_vec_id_q5_k_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); - ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q6_K], "mul_mat_vec_id_q6_k_f32", mul_mat_vec_id_q6_k_f32_len, mul_mat_vec_id_q6_k_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {1, 1, 1}, {device->subgroup_size}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q2_K], "mul_mat_vec_id_q2_k_f32", mul_mat_vec_id_q2_k_f32_len, mul_mat_vec_id_q2_k_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q3_K], "mul_mat_vec_id_q3_k_f32", mul_mat_vec_id_q3_k_f32_len, mul_mat_vec_id_q3_k_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q4_K], "mul_mat_vec_id_q4_k_f32", mul_mat_vec_id_q4_k_f32_len, mul_mat_vec_id_q4_k_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q5_K], "mul_mat_vec_id_q5_k_f32", mul_mat_vec_id_q5_k_f32_len, mul_mat_vec_id_q5_k_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); + ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_Q6_K], "mul_mat_vec_id_q6_k_f32", 
mul_mat_vec_id_q6_k_f32_len, mul_mat_vec_id_q6_k_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {1, 1, 1}, {subgroup_size_16}, 1, true); ggml_vk_create_pipeline(device, device->pipeline_dequant_mul_mat_vec_id_f32[GGML_TYPE_IQ4_NL], "mul_mat_vec_id_iq4_nl_f32", mul_mat_vec_id_iq4_nl_f32_len, mul_mat_vec_id_iq4_nl_f32_data, "main", 4, sizeof(vk_mat_vec_id_push_constants), {2, 1, 1}, {device->subgroup_size, 2}, 1, true); // dequant shaders @@ -1495,7 +1855,7 @@ static void ggml_vk_load_shaders(vk_device& device) { ggml_vk_create_pipeline(device, device->pipeline_get_rows_f32[GGML_TYPE_Q8_0], "get_rows_q8_0_f32", get_rows_q8_0_f32_len, get_rows_q8_0_f32_data, "main", 3, sizeof(vk_op_binary_push_constants), {1024, 1, 1}, {}, 1); ggml_vk_create_pipeline(device, device->pipeline_get_rows_f32[GGML_TYPE_IQ4_NL], "get_rows_iq4_nl_f32", get_rows_iq4_nl_f32_len, get_rows_iq4_nl_f32_data, "main", 3, sizeof(vk_op_binary_push_constants), {1024, 1, 1}, {}, 1); - ggml_vk_create_pipeline(device, device->pipeline_matmul_split_k_reduce, "split_k_reduce", split_k_reduce_len, split_k_reduce_data, "main", 2, 2 * sizeof(uint32_t), {256, 1, 1}, {}, 1); + ggml_vk_create_pipeline(device, device->pipeline_matmul_split_k_reduce, "split_k_reduce", split_k_reduce_len, split_k_reduce_data, "main", 2, 2 * sizeof(uint32_t), {256 * 4, 1, 1}, {}, 1); ggml_vk_create_pipeline(device, device->pipeline_mul_mat_vec_p021_f16_f32, "mul_mat_vec_p021_f16_f32", mul_mat_vec_p021_f16_f32_len, mul_mat_vec_p021_f16_f32_data, "main", 3, 6 * sizeof(uint32_t), {1, 1, 1}, {}, 1); ggml_vk_create_pipeline(device, device->pipeline_mul_mat_vec_nc_f16_f32, "mul_mat_vec_nc_f16_f32", mul_mat_vec_nc_f16_f32_len, mul_mat_vec_nc_f16_f32_data, "main", 3, 7 * sizeof(uint32_t), {1, 1, 1}, {}, 1); @@ -1557,17 +1917,26 @@ static void ggml_vk_load_shaders(vk_device& device) { ggml_vk_create_pipeline(device, device->pipeline_soft_max_f32_f16_wg512, "soft_max_f32_f16_wg512", soft_max_f32_f16_len, soft_max_f32_f16_data, "main", 3, sizeof(vk_op_soft_max_push_constants), {1, 1, 1}, { 512 }, 1); ggml_vk_create_pipeline(device, device->pipeline_rope_norm_f32, "rope_norm_f32", rope_norm_f32_len, rope_norm_f32_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); - ggml_vk_create_pipeline(device, device->pipeline_rope_norm_f16, "rope_norm_f16", rope_norm_f16_len, rope_norm_f16_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); - ggml_vk_create_pipeline(device, device->pipeline_rope_neox_f32, "rope_neox_f32", rope_neox_f32_len, rope_neox_f32_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); - ggml_vk_create_pipeline(device, device->pipeline_rope_neox_f16, "rope_neox_f16", rope_neox_f16_len, rope_neox_f16_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + + if (device->float_controls_rte_fp16) { + ggml_vk_create_pipeline(device, device->pipeline_rope_norm_f16, "rope_norm_f16", rope_norm_f16_rte_len, rope_norm_f16_rte_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(device, device->pipeline_rope_neox_f16, "rope_neox_f16", rope_neox_f16_rte_len, rope_neox_f16_rte_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + } else { + ggml_vk_create_pipeline(device, device->pipeline_rope_norm_f16, "rope_norm_f16", rope_norm_f16_len, rope_norm_f16_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(device, device->pipeline_rope_neox_f16, "rope_neox_f16", 
rope_neox_f16_len, rope_neox_f16_data, "main", 4, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1);
+    }
     ggml_vk_create_pipeline(device, device->pipeline_argsort_f32, "argsort_f32", argsort_f32_len, argsort_f32_data, "main", 2, sizeof(vk_op_argsort_push_constants), {1024, 1, 1}, {}, 1);
     ggml_vk_create_pipeline(device, device->pipeline_sum_rows_f32, "sum_rows_f32", sum_rows_f32_len, sum_rows_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 1}, { device->subgroup_size }, 1);
     ggml_vk_create_pipeline(device, device->pipeline_im2col_f32, "im2col_f32", im2col_f32_len, im2col_f32_data, "main", 2, sizeof(vk_op_im2col_push_constants), {256, 1, 1}, {}, 1);
-    ggml_vk_create_pipeline(device, device->pipeline_im2col_f32_f16, "im2col_f32_f16", im2col_f32_f16_len, im2col_f32_f16_data, "main", 2, sizeof(vk_op_im2col_push_constants), {256, 1, 1}, {}, 1);
+    if (device->float_controls_rte_fp16) {
+        ggml_vk_create_pipeline(device, device->pipeline_im2col_f32_f16, "im2col_f32_f16", im2col_f32_f16_rte_len, im2col_f32_f16_rte_data, "main", 2, sizeof(vk_op_im2col_push_constants), {256, 1, 1}, {}, 1);
+    } else {
+        ggml_vk_create_pipeline(device, device->pipeline_im2col_f32_f16, "im2col_f32_f16", im2col_f32_f16_len, im2col_f32_f16_data, "main", 2, sizeof(vk_op_im2col_push_constants), {256, 1, 1}, {}, 1);
+    }
     ggml_vk_create_pipeline(device, device->pipeline_timestep_embedding_f32, "timestep_embedding_f32", timestep_embedding_f32_len, timestep_embedding_f32_data, "main", 2, sizeof(vk_op_timestep_embedding_push_constants), {256, 1, 1}, {}, 1);
@@ -1606,12 +1975,38 @@ static vk_device ggml_vk_get_device(size_t idx) {
     device->physical_device = physical_devices[dev_num];
     const std::vector<vk::ExtensionProperties> ext_props = device->physical_device.enumerateDeviceExtensionProperties();
+    bool fp16_storage = false;
+    bool fp16_compute = false;
     bool maintenance4_support = false;
+    bool sm_builtins = false;
+    bool amd_shader_core_properties2 = false;
+    bool pipeline_robustness = false;
+    bool coopmat2_support = false;
+    device->coopmat_support = false;
     // Check if maintenance4 is supported
     for (const auto& properties : ext_props) {
         if (strcmp("VK_KHR_maintenance4", properties.extensionName) == 0) {
             maintenance4_support = true;
+        } else if (strcmp("VK_KHR_16bit_storage", properties.extensionName) == 0) {
+            fp16_storage = true;
+        } else if (strcmp("VK_KHR_shader_float16_int8", properties.extensionName) == 0) {
+            fp16_compute = true;
+        } else if (strcmp("VK_NV_shader_sm_builtins", properties.extensionName) == 0) {
+            sm_builtins = true;
+        } else if (strcmp("VK_AMD_shader_core_properties2", properties.extensionName) == 0) {
+            amd_shader_core_properties2 = true;
+        } else if (strcmp("VK_EXT_pipeline_robustness", properties.extensionName) == 0) {
+            pipeline_robustness = true;
+        } else if (strcmp("VK_KHR_cooperative_matrix", properties.extensionName) == 0 &&
+                   !getenv("GGML_VK_DISABLE_COOPMAT")) {
+            device->coopmat_support = true;
+            device->coopmat_m = 0;
+            device->coopmat_n = 0;
+            device->coopmat_k = 0;
+        } else if (strcmp("VK_NV_cooperative_matrix2", properties.extensionName) == 0 &&
+                   !getenv("GGML_VK_DISABLE_COOPMAT2")) {
+            coopmat2_support = true;
         }
     }
@@ -1619,18 +2014,45 @@
     vk::PhysicalDeviceMaintenance3Properties props3;
     vk::PhysicalDeviceMaintenance4Properties props4;
     vk::PhysicalDeviceSubgroupProperties subgroup_props;
+    vk::PhysicalDeviceDriverProperties driver_props;
+    vk::PhysicalDeviceShaderSMBuiltinsPropertiesNV sm_props;
+    vk::PhysicalDeviceShaderCoreProperties2AMD amd_shader_core_properties2_props;
+    vk::PhysicalDeviceVulkan12Properties vk12_props;
     props2.pNext = &props3;
     props3.pNext = &subgroup_props;
+    subgroup_props.pNext = &driver_props;
+    driver_props.pNext = &vk12_props;
+
+    VkBaseOutStructure * last_struct = (VkBaseOutStructure *)&vk12_props;
+
     if (maintenance4_support) {
-        subgroup_props.pNext = &props4;
+        last_struct->pNext = (VkBaseOutStructure *)&props4;
+        last_struct = (VkBaseOutStructure *)&props4;
+    }
+    if (sm_builtins) {
+        last_struct->pNext = (VkBaseOutStructure *)&sm_props;
+        last_struct = (VkBaseOutStructure *)&sm_props;
     }
+    if (amd_shader_core_properties2) {
+        last_struct->pNext = (VkBaseOutStructure *)&amd_shader_core_properties2_props;
+        last_struct = (VkBaseOutStructure *)&amd_shader_core_properties2_props;
+    }
+
+#if defined(VK_NV_cooperative_matrix2)
+    vk::PhysicalDeviceCooperativeMatrix2PropertiesNV coopmat2_props;
+    if (coopmat2_support) {
+        last_struct->pNext = (VkBaseOutStructure *)&coopmat2_props;
+        last_struct = (VkBaseOutStructure *)&coopmat2_props;
+    }
+#endif
+
     device->physical_device.getProperties2(&props2);
     device->properties = props2.properties;
     const char* GGML_VK_FORCE_MAX_ALLOCATION_SIZE = getenv("GGML_VK_FORCE_MAX_ALLOCATION_SIZE");
     if (GGML_VK_FORCE_MAX_ALLOCATION_SIZE != nullptr) {
-        device->max_memory_allocation_size = std::stoi(GGML_VK_FORCE_MAX_ALLOCATION_SIZE);
+        device->max_memory_allocation_size = std::stoul(GGML_VK_FORCE_MAX_ALLOCATION_SIZE);
     } else if (maintenance4_support) {
         device->max_memory_allocation_size = std::min(props3.maxMemoryAllocationSize, props4.maxBufferSize);
     } else {
@@ -1640,26 +2062,25 @@
     device->vendor_id = device->properties.vendorID;
     device->subgroup_size = subgroup_props.subgroupSize;
     device->uma = device->properties.deviceType == vk::PhysicalDeviceType::eIntegratedGpu;
-
-    bool fp16_storage = false;
-    bool fp16_compute = false;
-    bool pipeline_robustness = false;
-
-    for (const auto& properties : ext_props) {
-        if (strcmp("VK_KHR_16bit_storage", properties.extensionName) == 0) {
-            fp16_storage = true;
-        } else if (strcmp("VK_KHR_shader_float16_int8", properties.extensionName) == 0) {
-            fp16_compute = true;
-        } else if (strcmp("VK_EXT_pipeline_robustness", properties.extensionName) == 0) {
-            pipeline_robustness = true;
-        }
+    if (sm_builtins) {
+        device->shader_core_count = sm_props.shaderSMCount;
+    } else if (amd_shader_core_properties2) {
+        device->shader_core_count = amd_shader_core_properties2_props.activeComputeUnitCount;
+    } else {
+        device->shader_core_count = 0;
     }
+    device->float_controls_rte_fp16 = vk12_props.shaderRoundingModeRTEFloat16;
-    const char* GGML_VK_DISABLE_F16 = getenv("GGML_VK_DISABLE_F16");
-    const bool force_disable_f16 = GGML_VK_DISABLE_F16 != nullptr;
+    const bool force_disable_f16 = getenv("GGML_VK_DISABLE_F16") != nullptr;
     device->fp16 = !force_disable_f16 && fp16_storage && fp16_compute;
+    if (device->vendor_id == VK_VENDOR_ID_INTEL || (props2.properties.vendorID == VK_VENDOR_ID_AMD && driver_props.driverID == vk::DriverId::eAmdProprietary)) {
+        // Intel drivers don't support coopmat properly yet
+        // Only RADV supports coopmat properly on AMD
+        device->coopmat_support = false;
+    }
+
     std::vector<vk::QueueFamilyProperties> queue_family_props = device->physical_device.getQueueFamilyProperties();
     // Try to find a non-graphics compute queue and transfer-focused queues
@@ -1697,22 +2118,124 @@
     vk12_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
     vk11_features.pNext = &vk12_features;
+
+    last_struct = (VkBaseOutStructure *)&vk12_features;
+
     VkPhysicalDevicePipelineRobustnessFeaturesEXT pl_robustness_features;
     pl_robustness_features.pNext = nullptr;
     pl_robustness_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT;
     pl_robustness_features.pipelineRobustness = VK_FALSE;
     if (pipeline_robustness) {
-        vk12_features.pNext = &pl_robustness_features;
+        last_struct->pNext = (VkBaseOutStructure *)&pl_robustness_features;
+        last_struct = (VkBaseOutStructure *)&pl_robustness_features;
         device_extensions.push_back("VK_EXT_pipeline_robustness");
     }
+    VkPhysicalDeviceCooperativeMatrixFeaturesKHR coopmat_features;
+    coopmat_features.pNext = nullptr;
+    coopmat_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR;
+    coopmat_features.cooperativeMatrix = VK_FALSE;
+
+    if (device->coopmat_support) {
+        last_struct->pNext = (VkBaseOutStructure *)&coopmat_features;
+        last_struct = (VkBaseOutStructure *)&coopmat_features;
+    }
+
+#if defined(VK_NV_cooperative_matrix2)
+    VkPhysicalDeviceCooperativeMatrix2FeaturesNV coopmat2_features {};
+    coopmat2_features.pNext = nullptr;
+    coopmat2_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV;
+    if (coopmat2_support) {
+        last_struct->pNext = (VkBaseOutStructure *)&coopmat2_features;
+        last_struct = (VkBaseOutStructure *)&coopmat2_features;
+        device_extensions.push_back("VK_NV_cooperative_matrix2");
+    }
+#endif
+
     vkGetPhysicalDeviceFeatures2(device->physical_device, &device_features2);
     device->fp16 = device->fp16 && vk12_features.shaderFloat16;
     device->pipeline_robustness = pl_robustness_features.pipelineRobustness;
+    device->coopmat_support = device->coopmat_support && coopmat_features.cooperativeMatrix;
+
+    if (coopmat2_support) {
+#if defined(VK_NV_cooperative_matrix2) && defined(GGML_VULKAN_COOPMAT2_GLSLC_SUPPORT)
+        if (coopmat2_features.cooperativeMatrixWorkgroupScope &&
+            coopmat2_features.cooperativeMatrixFlexibleDimensions &&
+            coopmat2_features.cooperativeMatrixReductions &&
+            coopmat2_features.cooperativeMatrixConversions &&
+            coopmat2_features.cooperativeMatrixPerElementOperations &&
+            coopmat2_features.cooperativeMatrixTensorAddressing &&
+            coopmat2_features.cooperativeMatrixBlockLoads &&
+            vk12_features.bufferDeviceAddress) {
+
+            std::vector<VkCooperativeMatrixFlexibleDimensionsPropertiesNV> flexible_dimensions;
+            uint32_t count = 0;
+
+            PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV
+                _vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV =
+                    (PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV)
+                    vk_instance.instance.getProcAddr("vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV");
+
+            _vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV(device->physical_device, &count, nullptr);
+
+            VkCooperativeMatrixFlexibleDimensionsPropertiesNV empty_prop {};
+            empty_prop.sType = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV;
+            flexible_dimensions.resize(count, empty_prop);
+
+            _vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV(device->physical_device, &count, flexible_dimensions.data());
+
+            bool found_fp16_128 = false,
+                 found_fp16_256 = false,
+                 found_fp32_128 = false,
+                 found_fp32_256 = false;
+            // need to support fp16*fp16 with fp16/fp32 accumulator, for workgroupsize 128
+            // with 32x16x16 and 256 with 32x32x16.
+            for (auto &prop : flexible_dimensions) {
+                if (prop.saturatingAccumulation == VK_FALSE &&
+                    prop.scope == VK_SCOPE_WORKGROUP_KHR &&
+                    prop.AType == VK_COMPONENT_TYPE_FLOAT16_KHR &&
+                    prop.BType == VK_COMPONENT_TYPE_FLOAT16_KHR) {
+
+                    if (prop.workgroupInvocations == 128 &&
+                        prop.MGranularity <= 32 &&
+                        prop.NGranularity <= 16 &&
+                        prop.KGranularity <= 16) {
+                        if (prop.CType == VK_COMPONENT_TYPE_FLOAT16_KHR &&
+                            prop.ResultType == VK_COMPONENT_TYPE_FLOAT16_KHR) {
+                            found_fp16_128 = true;
+                        }
+                        if (prop.CType == VK_COMPONENT_TYPE_FLOAT32_KHR &&
+                            prop.ResultType == VK_COMPONENT_TYPE_FLOAT32_KHR) {
+                            found_fp32_128 = true;
+                        }
+                    }
+                    if (prop.workgroupInvocations == 256 &&
+                        prop.MGranularity <= 32 &&
+                        prop.NGranularity <= 32 &&
+                        prop.KGranularity <= 16) {
+                        if (prop.CType == VK_COMPONENT_TYPE_FLOAT16_KHR &&
+                            prop.ResultType == VK_COMPONENT_TYPE_FLOAT16_KHR) {
+                            found_fp16_256 = true;
+                        }
+                        if (prop.CType == VK_COMPONENT_TYPE_FLOAT32_KHR &&
+                            prop.ResultType == VK_COMPONENT_TYPE_FLOAT32_KHR) {
+                            found_fp32_256 = true;
+                        }
+                    }
+                }
+            }
+            if (found_fp16_128 && found_fp16_256 &&
+                found_fp32_128 && found_fp32_256 &&
+                coopmat2_props.cooperativeMatrixFlexibleDimensionsMaxDimension >= 512) {
+                device->coopmat2 = true;
+            }
+        }
+#endif
+    }
+
     if (!vk11_features.storageBuffer16BitAccess) {
         std::cerr << "ggml_vulkan: device " << GGML_VK_NAME << idx << " does not support 16-bit storage." << std::endl;
         throw std::runtime_error("Unsupported device");
@@ -1727,6 +2250,74 @@
     if (device->fp16) {
         device_extensions.push_back("VK_KHR_shader_float16_int8");
     }
+
+    if (device->coopmat_support) {
+        // Query supported shapes
+        std::vector<VkCooperativeMatrixPropertiesKHR> cm_props;
+
+        PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR pfn_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR =
+            (PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR)vkGetInstanceProcAddr(vk_instance.instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR");
+
+        uint32_t cm_props_num;
+
+        pfn_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(device->physical_device, &cm_props_num, nullptr);
+
+        cm_props.resize(cm_props_num);
+
+        for (auto& prop : cm_props) {
+            prop.sType = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR;
+        }
+
+        pfn_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(device->physical_device, &cm_props_num, cm_props.data());
+
+        VK_LOG_DEBUG("ggml_vulkan: Cooperative Matrix Shapes: " << cm_props.size());
+
+        for (auto& prop : cm_props) {
+            VK_LOG_DEBUG("ggml_vulkan: M: " << prop.MSize << " N: " << prop.NSize << " K: " << prop.KSize << " A: " << vk::to_string((vk::ComponentTypeKHR)prop.AType) << " B: " << vk::to_string((vk::ComponentTypeKHR)prop.BType) << " C: " << vk::to_string((vk::ComponentTypeKHR)prop.CType) << " Result: " << vk::to_string((vk::ComponentTypeKHR)prop.ResultType) << " saturatingAccumulation: " << prop.saturatingAccumulation << " scope: " << vk::to_string((vk::ScopeKHR)prop.scope));
+
+            if ((vk::ComponentTypeKHR)prop.AType == vk::ComponentTypeKHR::eFloat16 &&
+                (vk::ComponentTypeKHR)prop.BType == vk::ComponentTypeKHR::eFloat16 &&
+                (vk::ScopeKHR)prop.scope == vk::ScopeKHR::eSubgroup
+            ) {
+                if ((vk::ComponentTypeKHR)prop.CType == vk::ComponentTypeKHR::eFloat32 &&
+                    (vk::ComponentTypeKHR)prop.ResultType == vk::ComponentTypeKHR::eFloat32) {
+                    // coopmat sizes not set yet
+                    if (device->coopmat_m == 0) {
+                        device->coopmat_acc_f32_support = true;
+                        device->coopmat_m = prop.MSize;
+                        device->coopmat_n = prop.NSize;
+                        device->coopmat_k = prop.KSize;
+                    } else if (device->coopmat_m ==
prop.MSize && device->coopmat_n == prop.NSize && device->coopmat_k == prop.KSize) { + // Only enable if shape is identical + device->coopmat_acc_f32_support = true; + } + } else if ((vk::ComponentTypeKHR)prop.CType == vk::ComponentTypeKHR::eFloat16 && + (vk::ComponentTypeKHR)prop.ResultType == vk::ComponentTypeKHR::eFloat16) { + // coopmat sizes not set yet + if (device->coopmat_m == 0) { + device->coopmat_acc_f16_support = true; + device->coopmat_m = prop.MSize; + device->coopmat_n = prop.NSize; + device->coopmat_k = prop.KSize; + } else if (device->coopmat_m == prop.MSize && device->coopmat_n == prop.NSize && device->coopmat_k == prop.KSize) { + // Only enable if shape is identical + device->coopmat_acc_f16_support = true; + } + } + } + } + + if (device->coopmat_m == 0) { + // No suitable matmul mode found + GGML_LOG_DEBUG("ggml_vulkan: WARNING: No suitable matrix core mode found. Disabling matrix cores.\n"); + device->coopmat_support = false; + } + } + + if (device->coopmat_support) { + device_extensions.push_back("VK_KHR_cooperative_matrix"); + } + device->name = GGML_VK_NAME + std::to_string(idx); device_create_info = { @@ -1742,6 +2333,37 @@ static vk_device ggml_vk_get_device(size_t idx) { ggml_vk_create_queue(device, device->compute_queue, compute_queue_family_index, 0, { vk::PipelineStageFlagBits::eComputeShader | vk::PipelineStageFlagBits::eTransfer }, false); // Shaders + // Disable matmul tile sizes early if performance low or not supported + switch (device->vendor_id) { +#ifndef GGML_VULKAN_RUN_TESTS + case VK_VENDOR_ID_AMD: + case VK_VENDOR_ID_INTEL: + device->mul_mat_l = false; + device->mul_mat_m = true; + device->mul_mat_s = true; + device->mul_mat_id_l = false; + device->mul_mat_id_m = true; + device->mul_mat_id_s = true; + break; + case VK_VENDOR_ID_APPLE: + device->mul_mat_l = false; + device->mul_mat_m = true; + device->mul_mat_s = false; + device->mul_mat_id_l = false; + device->mul_mat_id_m = true; + device->mul_mat_id_s = false; + break; +#endif + default: + device->mul_mat_l = true; + device->mul_mat_m = true; + device->mul_mat_s = true; + device->mul_mat_id_l = true; + device->mul_mat_id_m = true; + device->mul_mat_id_s = true; + break; + } + ggml_vk_load_shaders(device); if (!device->single_queue) { @@ -1799,15 +2421,31 @@ static void ggml_vk_print_gpu_info(size_t idx) { bool fp16_storage = false; bool fp16_compute = false; + bool coopmat_support = false; + bool coopmat2_support = false; for (auto properties : ext_props) { if (strcmp("VK_KHR_16bit_storage", properties.extensionName) == 0) { fp16_storage = true; } else if (strcmp("VK_KHR_shader_float16_int8", properties.extensionName) == 0) { fp16_compute = true; + } else if (strcmp("VK_KHR_cooperative_matrix", properties.extensionName) == 0 && + !getenv("GGML_VK_DISABLE_COOPMAT")) { + coopmat_support = true; +#if defined(GGML_VULKAN_COOPMAT2_GLSLC_SUPPORT) + } else if (strcmp("VK_NV_cooperative_matrix2", properties.extensionName) == 0 && + !getenv("GGML_VK_DISABLE_COOPMAT2")) { + coopmat2_support = true; +#endif } } + if (props2.properties.vendorID == VK_VENDOR_ID_INTEL || (props2.properties.vendorID == VK_VENDOR_ID_AMD && driver_props.driverID == vk::DriverId::eAmdProprietary)) { + // Intel drivers don't support coopmat properly yet + // Only RADV supports coopmat properly on AMD + coopmat_support = false; + } + const char* GGML_VK_DISABLE_F16 = getenv("GGML_VK_DISABLE_F16"); bool force_disable_f16 = GGML_VK_DISABLE_F16 != nullptr; @@ -1830,13 +2468,30 @@ static void ggml_vk_print_gpu_info(size_t idx) { 
vk12_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES; vk11_features.pNext = &vk12_features; + // Pointer to the last chain element + VkBaseOutStructure * last_struct = (VkBaseOutStructure *)&vk12_features; + + VkPhysicalDeviceCooperativeMatrixFeaturesKHR coopmat_features; + coopmat_features.pNext = nullptr; + coopmat_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR; + coopmat_features.cooperativeMatrix = VK_FALSE; + + if (coopmat_support) { + last_struct->pNext = (VkBaseOutStructure *)&coopmat_features; + last_struct = (VkBaseOutStructure *)&coopmat_features; + } + vkGetPhysicalDeviceFeatures2(physical_device, &device_features2); fp16 = fp16 && vk12_features.shaderFloat16; + coopmat_support = coopmat_support && coopmat_features.cooperativeMatrix; + + std::string matrix_cores = coopmat2_support ? "NV_coopmat2" : coopmat_support ? "KHR_coopmat" : "none"; + std::string device_name = props2.properties.deviceName.data(); - GGML_LOG_DEBUG("ggml_vulkan: %zu = %s (%s) | uma: %d | fp16: %d | warp size: %zu\n", - idx, device_name.c_str(), driver_props.driverName.data(), uma, fp16, subgroup_size); + GGML_LOG_DEBUG("ggml_vulkan: %zu = %s (%s) | uma: %d | fp16: %d | warp size: %zu | matrix cores: %s\n", + idx, device_name.c_str(), driver_props.driverName.data(), uma, fp16, subgroup_size, matrix_cores.c_str()); if (props2.properties.deviceType == vk::PhysicalDeviceType::eCpu) { GGML_LOG_DEBUG("ggml_vulkan: Warning: Device type is CPU. This is probably not the device you want.\n"); @@ -2064,7 +2719,7 @@ static vk_pipeline ggml_vk_get_to_fp16(ggml_backend_vk_context * ctx, ggml_type return ctx->device->pipeline_dequant[type]; } -static vk_matmul_pipeline ggml_vk_get_mul_mat_mat_pipeline(ggml_backend_vk_context * ctx, ggml_type src0_type, ggml_type src1_type) { +static vk_matmul_pipeline ggml_vk_get_mul_mat_mat_pipeline(ggml_backend_vk_context * ctx, ggml_type src0_type, ggml_type src1_type, ggml_prec prec) { VK_LOG_DEBUG("ggml_vk_get_mul_mat_mat_pipeline(" << ggml_type_name(src0_type) << ", " << ggml_type_name(src1_type) << ")"); if (src0_type == GGML_TYPE_F32 && src1_type == GGML_TYPE_F32) { return ctx->device->pipeline_matmul_f32; @@ -2072,14 +2727,23 @@ static vk_matmul_pipeline ggml_vk_get_mul_mat_mat_pipeline(ggml_backend_vk_conte if (src0_type == GGML_TYPE_F32 && src1_type == GGML_TYPE_F16) { return ctx->device->pipeline_matmul_f32_f16; } - if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F32) { - return ctx->device->pipeline_matmul_f16_f32.f32acc; - } - if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F16) { - return ctx->device->pipeline_matmul_f16.f32acc; + if (prec == GGML_PREC_DEFAULT && ctx->device->fp16) { + if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F32) { + return ctx->device->pipeline_matmul_f16_f32.f16acc; + } + if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F16) { + return ctx->device->pipeline_matmul_f16.f16acc; + } + } else { + if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F32) { + return ctx->device->pipeline_matmul_f16_f32.f32acc; + } + if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F16) { + return ctx->device->pipeline_matmul_f16.f32acc; + } } - if (src1_type != GGML_TYPE_F32) { + if (src1_type != GGML_TYPE_F32 && !ctx->device->coopmat2) { return nullptr; } @@ -2100,7 +2764,11 @@ static vk_matmul_pipeline ggml_vk_get_mul_mat_mat_pipeline(ggml_backend_vk_conte return nullptr; } - return ctx->device->pipeline_dequant_mul_mat_mat[src0_type].f32acc; + if 
(ctx->device->coopmat2) { + assert(src1_type == GGML_TYPE_F16); + return ctx->device->pipeline_dequant_mul_mat_mat_f16[src0_type].f16acc; + } + return ctx->device->fp16 ? ctx->device->pipeline_dequant_mul_mat_mat[src0_type].f16acc : ctx->device->pipeline_dequant_mul_mat_mat[src0_type].f32acc; } static vk_pipeline ggml_vk_get_dequantize_mul_mat_vec(ggml_backend_vk_context * ctx, ggml_type a_type, ggml_type b_type) { @@ -2129,16 +2797,25 @@ static vk_pipeline ggml_vk_get_dequantize_mul_mat_vec(ggml_backend_vk_context * return b_type == GGML_TYPE_F32 ? ctx->device->pipeline_dequant_mul_mat_vec_f32_f32[a_type] : ctx->device->pipeline_dequant_mul_mat_vec_f16_f32[a_type]; } -static vk_matmul_pipeline ggml_vk_get_mul_mat_mat_id_pipeline(ggml_backend_vk_context * ctx, ggml_type src0_type, ggml_type src1_type) { +static vk_matmul_pipeline ggml_vk_get_mul_mat_mat_id_pipeline(ggml_backend_vk_context * ctx, ggml_type src0_type, ggml_type src1_type, ggml_prec prec) { VK_LOG_DEBUG("ggml_vk_get_mul_mat_mat_id_pipeline()"); if (src0_type == GGML_TYPE_F32 && src1_type == GGML_TYPE_F32) { return ctx->device->pipeline_matmul_id_f32; } - if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F32) { - return ctx->device->pipeline_matmul_id_f16_f32; - } - if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F16) { - return ctx->device->pipeline_matmul_id_f16; + if (prec == GGML_PREC_DEFAULT && ctx->device->fp16) { + if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F32) { + return ctx->device->pipeline_matmul_id_f16_f32.f16acc; + } + if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F16) { + return ctx->device->pipeline_matmul_id_f16.f16acc; + } + } else { + if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F32) { + return ctx->device->pipeline_matmul_id_f16_f32.f32acc; + } + if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F16) { + return ctx->device->pipeline_matmul_id_f16.f32acc; + } } GGML_ASSERT(src1_type == GGML_TYPE_F32); @@ -2160,7 +2837,7 @@ static vk_matmul_pipeline ggml_vk_get_mul_mat_mat_id_pipeline(ggml_backend_vk_co return nullptr; } - return ctx->device->pipeline_dequant_mul_mat_mat_id[src0_type]; + return ctx->device->fp16 ? ctx->device->pipeline_dequant_mul_mat_mat_id[src0_type].f16acc : ctx->device->pipeline_dequant_mul_mat_mat_id[src0_type].f32acc; } static vk_pipeline ggml_vk_get_dequantize_mul_mat_vec_id(ggml_backend_vk_context * ctx, ggml_type a_type, ggml_type b_type) { @@ -2729,55 +3406,44 @@ static void ggml_vk_buffer_memset(vk_buffer& dst, size_t offset, uint32_t c, siz dst->device->device.resetFences({ dst->device->fence }); } -static uint32_t ggml_vk_guess_split_k(int m, int n, int k) { +static uint32_t ggml_vk_guess_split_k(ggml_backend_vk_context * ctx, int m, int n, int k, const vk_pipeline& pipeline) { VK_LOG_DEBUG("ggml_vk_guess_split_k(" << m << ", " << n << ", " << k << ")"); - // if (k > 128 && (m < 128 || n < 128) && m > 2 && n > 2) { - // return 4; - // } - - return 1; - GGML_UNUSED(m); GGML_UNUSED(n); GGML_UNUSED(k); -} - -static vk_pipeline ggml_vk_guess_matmul_pipeline_amd(ggml_backend_vk_context * ctx, vk_matmul_pipeline& mmp, int m, int n, bool aligned) { - if (m <= 32 || n <= 32) { - return aligned ? mmp->a_s : mmp->s; + uint32_t split_k = 1; + if (ctx->device->shader_core_count != 0 && m >= (int)pipeline->wg_denoms[0] && n >= (int)pipeline->wg_denoms[1]) { + // If k is 'large' and the SMs will fill less than halfway, use split_k. 
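+        // Worked example (illustrative numbers): with 96 shader cores and 128x128 output tiles,
+        // an m=512, n=512, k=4096 matmul yields 4*4 = 16 tiles, which is below 96/2 = 48, so
+        // split_k = min(96/16, 4) = 4 and the K dimension is split into four partial results
+        // that the split_k reduce pass sums afterwards.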
+ uint32_t m_tiles = CEIL_DIV(m, pipeline->wg_denoms[0]); + uint32_t n_tiles = CEIL_DIV(n, pipeline->wg_denoms[1]); + if (k >= 2048 && m_tiles * n_tiles < ctx->device->shader_core_count / 2) { + split_k = ctx->device->shader_core_count / (m_tiles * n_tiles); + // Clamp to 2 or 4 + split_k = std::min(split_k, 4u); + if (split_k == 3) { + split_k = 2; + } + } } - return aligned ? mmp->a_m : mmp->m; - - GGML_UNUSED(ctx); -} - -static vk_pipeline ggml_vk_guess_matmul_pipeline_apple(ggml_backend_vk_context * ctx, vk_matmul_pipeline& mmp, bool aligned) { - return aligned ? mmp->a_m : mmp->m; - - GGML_UNUSED(ctx); -} -static vk_pipeline ggml_vk_guess_matmul_pipeline_intel(ggml_backend_vk_context * ctx, vk_matmul_pipeline& mmp, bool aligned) { - return aligned ? mmp->a_s : mmp->s; - - GGML_UNUSED(ctx); + return split_k; } static vk_pipeline ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, vk_matmul_pipeline& mmp, int m, int n, bool aligned) { VK_LOG_DEBUG("ggml_vk_guess_matmul_pipeline(" << m << ", " << n << ", " << aligned << ")"); - switch (ctx->device->vendor_id) { - case VK_VENDOR_ID_AMD: - return ggml_vk_guess_matmul_pipeline_amd(ctx, mmp, m, n, aligned); - case VK_VENDOR_ID_APPLE: - return ggml_vk_guess_matmul_pipeline_apple(ctx, mmp, aligned); - case VK_VENDOR_ID_INTEL: - return ggml_vk_guess_matmul_pipeline_intel(ctx, mmp, aligned); - default: - break; + + if (ctx->device->coopmat2) { + if ((ctx->device->mul_mat_l && (m % mmp->l->wg_denoms[0]) == 0 && (n % mmp->l->wg_denoms[1]) == 0) || (!ctx->device->mul_mat_m && !ctx->device->mul_mat_s)) { + return aligned ? mmp->a_l : mmp->l; + } + if ((ctx->device->mul_mat_m && (m % mmp->m->wg_denoms[0]) == 0 && (n % mmp->m->wg_denoms[1]) == 0) || !ctx->device->mul_mat_s) { + return aligned ? mmp->a_m : mmp->m; + } + return aligned ? mmp->a_s : mmp->s; } - if (m <= 32 || n <= 32) { + if ((ctx->device->mul_mat_s && (m <= 32 || n <= 32)) || (!ctx->device->mul_mat_m && !ctx->device->mul_mat_l)) { return aligned ? mmp->a_s : mmp->s; } - if (m <= 64 || n <= 64) { + if ((ctx->device->mul_mat_m && (m <= 64 || n <= 64)) || !ctx->device->mul_mat_l) { return aligned ? mmp->a_m : mmp->m; } return aligned ? mmp->a_l : mmp->l; @@ -2812,6 +3478,33 @@ static void ggml_vk_matmul( ggml_vk_dispatch_pipeline(ctx, subctx, ctx->device->pipeline_matmul_split_k_reduce, { split_k_buffer, d }, pc2.size() * sizeof(uint32_t), pc2.data(), { m * n * batch, 1, 1 }); } +static vk_pipeline ggml_vk_guess_matmul_id_pipeline(ggml_backend_vk_context * ctx, vk_matmul_pipeline& mmp, int m, int n, bool aligned) { + VK_LOG_DEBUG("ggml_vk_guess_matmul_pipeline(" << m << ", " << n << ", " << aligned << ")"); + + if (ctx->device->coopmat2) { + if ((ctx->device->mul_mat_id_l && (m % mmp->l->wg_denoms[0]) == 0 && (n % mmp->l->wg_denoms[1]) == 0) || (!ctx->device->mul_mat_id_m && !ctx->device->mul_mat_id_s)) { + return aligned ? mmp->a_l : mmp->l; + } + if ((ctx->device->mul_mat_id_m && (m % mmp->m->wg_denoms[0]) == 0 && (n % mmp->m->wg_denoms[1]) == 0) || !ctx->device->mul_mat_id_s) { + return aligned ? mmp->a_m : mmp->m; + } + return aligned ? mmp->a_s : mmp->s; + } + + if ((ctx->device->mul_mat_id_s && (m <= 32 || n <= 32)) || (!ctx->device->mul_mat_id_m && !ctx->device->mul_mat_id_l)) { + return aligned ? mmp->a_s : mmp->s; + } + if ((ctx->device->mul_mat_id_m && (m <= 64 || n <= 64)) || !ctx->device->mul_mat_id_l) { + return aligned ? mmp->a_m : mmp->m; + } + return aligned ? 
mmp->a_l : mmp->l; +} + +static uint32_t ggml_vk_guess_matmul_id_pipeline_align(ggml_backend_vk_context * ctx, vk_matmul_pipeline& mmp, int m, int n) { + VK_LOG_DEBUG("ggml_vk_guess_matmul_pipeline_align(" << m << ", " << n << ")"); + return ggml_vk_guess_matmul_id_pipeline(ctx, mmp, m, n, true)->align; +} + static void ggml_vk_matmul_id( ggml_backend_vk_context * ctx, vk_context& subctx, vk_pipeline& pipeline, vk_subbuffer&& a, vk_subbuffer&& b, vk_subbuffer&& d, vk_subbuffer&& ids, @@ -2882,13 +3575,15 @@ static void ggml_vk_cpy_to_contiguous(ggml_backend_vk_context * ctx, vk_context& elements = { ne, 1, 1 }; } - const vk_op_unary_push_constants pc = { + vk_op_unary_push_constants pc = { (uint32_t)ne, (uint32_t)tensor->ne[0], (uint32_t)tensor->ne[1], (uint32_t)tensor->ne[2], (uint32_t)tensor->ne[3], (uint32_t)tensor->nb[0] / tensor_type_size, (uint32_t)tensor->nb[1] / tensor_type_size, (uint32_t)tensor->nb[2] / tensor_type_size, (uint32_t)tensor->nb[3] / tensor_type_size, (uint32_t)tensor->ne[0], (uint32_t)tensor->ne[1], (uint32_t)tensor->ne[2], (uint32_t)tensor->ne[3], 1 , (uint32_t)tensor->ne[0] , (uint32_t)(tensor->ne[0] * tensor->ne[1]) , (uint32_t)(tensor->ne[0] * tensor->ne[1] * tensor->ne[2]), 0, 0.0f, 0.0f, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }; + init_pushconst_fastdiv(pc); ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { in, out }, sizeof(vk_op_unary_push_constants), &pc, elements); } @@ -2937,18 +3632,20 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context& sub } const bool x_non_contig = !ggml_vk_dim01_contiguous(src0); - const bool y_non_contig = !ggml_vk_dim01_contiguous(src1); + // Reformat and convert to fp16 if src1 is non-contiguous, or for coopmat2 for better perf + const bool y_non_contig = (ctx->device->coopmat2 && src1->type == GGML_TYPE_F32) || + !ggml_vk_dim01_contiguous(src1); const bool y_f32_kernel = src1->type == GGML_TYPE_F32 && !y_non_contig; - vk_matmul_pipeline mmp = ggml_vk_get_mul_mat_mat_pipeline(ctx, src0->type, y_non_contig ? GGML_TYPE_F16 : src1->type); + vk_matmul_pipeline mmp = ggml_vk_get_mul_mat_mat_pipeline(ctx, src0->type, y_non_contig ? GGML_TYPE_F16 : src1->type, (ggml_prec)dst->op_params[0]); const bool qx_needs_dequant = mmp == nullptr || x_non_contig; const bool qy_needs_dequant = (src1->type != GGML_TYPE_F16 && !y_f32_kernel) || y_non_contig; if (qx_needs_dequant) { // Fall back to dequant + f16 mulmat - mmp = ggml_vk_get_mul_mat_mat_pipeline(ctx, GGML_TYPE_F16, y_f32_kernel ? GGML_TYPE_F32 : GGML_TYPE_F16); + mmp = ggml_vk_get_mul_mat_mat_pipeline(ctx, GGML_TYPE_F16, y_f32_kernel ? GGML_TYPE_F32 : GGML_TYPE_F16, (ggml_prec)dst->op_params[0]); } // Not implemented @@ -2961,10 +3658,10 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context& sub const uint32_t kpad = ggml_vk_align_size(ne10, ggml_vk_guess_matmul_pipeline_align(ctx, mmp, ne01, ne11)); const bool aligned = ne10 == kpad && ne01 > 8 && ne11 > 8; - const uint32_t split_k = ggml_vk_guess_split_k(ne01, ne11, ne10); - vk_pipeline pipeline = ggml_vk_guess_matmul_pipeline(ctx, mmp, ne01, ne11, aligned); + const uint32_t split_k = ggml_vk_guess_split_k(ctx, ne01, ne11, ne10, pipeline); + const uint64_t qx_sz = ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type); const uint64_t qy_sz = ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type); const uint64_t x_sz = !qx_needs_dequant ? 
qx_sz : sizeof(ggml_fp16_t) * x_ne; @@ -2990,7 +3687,7 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context& sub if (dryrun) { const uint64_t x_sz_upd = x_sz * ne02 * ne03; const uint64_t y_sz_upd = y_sz * ne12 * ne13; - const uint64_t split_k_size = split_k > 1 ? d_sz * ne12 * ne13 * 4 : 0; + const uint64_t split_k_size = split_k > 1 ? d_sz * ne12 * ne13 * split_k : 0; if ( (qx_needs_dequant && x_sz_upd > ctx->device->max_memory_allocation_size) || (qy_needs_dequant && y_sz_upd > ctx->device->max_memory_allocation_size) || @@ -3512,7 +4209,7 @@ static void ggml_vk_mul_mat_id_q_f16(ggml_backend_vk_context * ctx, vk_context& const bool y_f32_kernel = src1->type == GGML_TYPE_F32 && !y_non_contig; - vk_matmul_pipeline mmp = ggml_vk_get_mul_mat_mat_id_pipeline(ctx, src0->type, y_non_contig ? GGML_TYPE_F16 : src1->type); + vk_matmul_pipeline mmp = ggml_vk_get_mul_mat_mat_id_pipeline(ctx, src0->type, y_non_contig ? GGML_TYPE_F16 : src1->type, (ggml_prec)dst->op_params[0]); const bool qx_needs_dequant = mmp == nullptr || x_non_contig; const bool qy_needs_dequant = (src1->type != GGML_TYPE_F16 && !y_f32_kernel) || y_non_contig; @@ -3528,10 +4225,10 @@ static void ggml_vk_mul_mat_id_q_f16(ggml_backend_vk_context * ctx, vk_context& const uint64_t y_ne = ne11 * ne10; const uint64_t d_ne = ne21 * ne20; - const uint32_t kpad = ggml_vk_align_size(ne10, ggml_vk_guess_matmul_pipeline_align(ctx, mmp, ne01, nei1)); + const uint32_t kpad = ggml_vk_align_size(ne10, ggml_vk_guess_matmul_id_pipeline_align(ctx, mmp, ne01, nei1)); const bool aligned = ne10 == kpad && ne01 > 8 && nei1 > 8; - vk_pipeline pipeline = ggml_vk_guess_matmul_pipeline(ctx, mmp, ne01, nei1, aligned); + vk_pipeline pipeline = ggml_vk_guess_matmul_id_pipeline(ctx, mmp, ne01, nei1, aligned); const uint64_t qx_sz = ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type); const uint64_t qy_sz = ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type); @@ -3859,6 +4556,167 @@ static void ggml_vk_mul_mat_id(ggml_backend_vk_context * ctx, vk_context& subctx } } +static void ggml_vk_flash_attn(ggml_backend_vk_context * ctx, vk_context& subctx, const ggml_tensor * q, const ggml_tensor * k, const ggml_tensor * v, const ggml_tensor * mask, ggml_tensor * dst, bool dryrun = false) { + VK_LOG_DEBUG("ggml_vk_flash_attn((" << q << ", name=" << q->name << ", type=" << q->type << ", ne0=" << q->ne[0] << ", ne1=" << q->ne[1] << ", ne2=" << q->ne[2] << ", ne3=" << q->ne[3] << ", nb0=" << q->nb[0] << ", nb1=" << q->nb[1] << ", nb2=" << q->nb[2] << ", nb3=" << q->nb[3]; + std::cerr << "), (" << k << ", name=" << k->name << ", type=" << k->type << ", ne0=" << k->ne[0] << ", ne1=" << k->ne[1] << ", ne2=" << k->ne[2] << ", ne3=" << k->ne[3] << ", nb0=" << k->nb[0] << ", nb1=" << k->nb[1] << ", nb2=" << k->nb[2] << ", nb3=" << k->nb[3]; + std::cerr << "), (" << v << ", name=" << v->name << ", type=" << v->type << ", ne0=" << v->ne[0] << ", ne1=" << v->ne[1] << ", ne2=" << v->ne[2] << ", ne3=" << v->ne[3] << ", nb0=" << v->nb[0] << ", nb1=" << v->nb[1] << ", nb2=" << v->nb[2] << ", nb3=" << v->nb[3]; + std::cerr << "), (" << dst << ", name=" << dst->name << ", type=" << dst->type << ", ne0=" << dst->ne[0] << ", ne1=" << dst->ne[1] << ", ne2=" << dst->ne[2] << ", ne3=" << dst->ne[3] << ", nb0=" << dst->nb[0] << ", nb1=" << dst->nb[1] << ", nb2=" << dst->nb[2] << ", nb3=" << dst->nb[3]; + std::cerr << "), " << (dryrun ? 
"dryrun" : "") << ")"); + + GGML_TENSOR_LOCALS(int64_t, neq, q, ne) + GGML_TENSOR_LOCALS(size_t, nbq, q, nb) + GGML_TENSOR_LOCALS(int64_t, nek, k, ne) + GGML_TENSOR_LOCALS(size_t, nbk, k, nb) + GGML_TENSOR_LOCALS(int64_t, nev, v, ne) + GGML_TENSOR_LOCALS(size_t, nbv, v, nb) + GGML_TENSOR_LOCALS(int64_t, ne, dst, ne) + GGML_TENSOR_LOCALS(size_t, nb, dst, nb) + + const uint32_t nem1 = mask ? mask->ne[1] : 0; + const uint32_t nbm1 = mask ? mask->nb[1] : 0; + + const uint32_t D = neq0; + const uint32_t N = neq1; + const uint32_t KV = nek1; + + GGML_ASSERT(ne0 == D); + GGML_ASSERT(ne2 == N); + + // input tensor rows must be contiguous + GGML_ASSERT(nbq0 == ggml_type_size(q->type)); + GGML_ASSERT(nbk0 == ggml_type_size(k->type)); + GGML_ASSERT(nbv0 == ggml_type_size(v->type)); + + GGML_ASSERT(neq0 == D); + GGML_ASSERT(nek0 == D); + GGML_ASSERT(nev0 == D); + + GGML_ASSERT(neq1 == N); + GGML_ASSERT(nev0 == D); + + GGML_ASSERT(nev1 == nek1); + + // dst cannot be transposed or permuted + GGML_ASSERT(nb0 == sizeof(float)); + GGML_ASSERT(nb0 <= nb1); + GGML_ASSERT(nb1 <= nb2); + GGML_ASSERT(nb2 <= nb3); + + assert(dst->type == GGML_TYPE_F32); + assert(q->type == GGML_TYPE_F32); + assert(k->type == v->type); + + vk_pipeline *pipelines; + // XXX TODO other backends may be changing accumulator precision to default to f32 soon + bool f32acc = dst->op_params[3] == GGML_PREC_F32; + bool small_rows = N <= flash_attention_num_small_rows; + switch (D) { + case 64: pipelines = &ctx->device->pipeline_flash_attn_f32_f16_D64[k->type][f32acc][small_rows][0]; break; + case 80: pipelines = &ctx->device->pipeline_flash_attn_f32_f16_D80[k->type][f32acc][small_rows][0]; break; + case 96: pipelines = &ctx->device->pipeline_flash_attn_f32_f16_D96[k->type][f32acc][small_rows][0]; break; + case 112: pipelines = &ctx->device->pipeline_flash_attn_f32_f16_D112[k->type][f32acc][small_rows][0]; break; + case 128: pipelines = &ctx->device->pipeline_flash_attn_f32_f16_D128[k->type][f32acc][small_rows][0]; break; + case 256: pipelines = &ctx->device->pipeline_flash_attn_f32_f16_D256[k->type][f32acc][small_rows][0]; break; + default: + assert(!"unsupported D value"); + return; + } + assert(pipelines); + + bool aligned = (KV % pipelines[1]->align) == 0; + vk_pipeline pipeline = pipelines[aligned]; + assert(pipeline); + + if (dryrun) { + // Request descriptor sets + ggml_pipeline_request_descriptor_sets(ctx->device, pipeline, 1); + return; + } + + float scale = 1.0f; + float max_bias = 0.0f; + float logit_softcap = 0.0f; + + memcpy(&scale, (const float *) dst->op_params + 0, sizeof(float)); + memcpy(&max_bias, (const float *) dst->op_params + 1, sizeof(float)); + memcpy(&logit_softcap, (const float *) dst->op_params + 2, sizeof(float)); + + if (logit_softcap != 0) { + scale /= logit_softcap; + } + + const uint32_t n_head_kv = neq2; + const uint32_t n_head_log2 = 1u << (uint32_t) floorf(log2f((float) n_head_kv)); + const float m0 = powf(2.0f, -(max_bias ) / n_head_log2); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); + + ggml_vk_sync_buffers(subctx); + + vk_buffer d_Q, d_K, d_V, d_D, d_M; + uint64_t q_buf_offset, k_buf_offset, v_buf_offset, d_buf_offset, m_buf_offset; + + bool Q_uma = false, K_uma = false, V_uma = false, D_uma = false, M_uma = false; + + if (ctx->device->uma) { + ggml_vk_host_get(ctx->device, q->data, d_Q, q_buf_offset); + ggml_vk_host_get(ctx->device, k->data, d_K, q_buf_offset); + ggml_vk_host_get(ctx->device, v->data, d_V, q_buf_offset); + ggml_vk_host_get(ctx->device, dst->data, d_D, 
q_buf_offset); + Q_uma = d_Q != nullptr; + K_uma = d_K != nullptr; + V_uma = d_V != nullptr; + D_uma = d_D != nullptr; + if (mask) { + ggml_vk_host_get(ctx->device, mask->data, d_M, q_buf_offset); + M_uma = d_M != nullptr; + } + } + + + ggml_backend_vk_buffer_context * d_buf_ctx = (ggml_backend_vk_buffer_context *)dst->buffer->context; + ggml_backend_vk_buffer_context * q_buf_ctx = (ggml_backend_vk_buffer_context *)q->buffer->context; + ggml_backend_vk_buffer_context * k_buf_ctx = (ggml_backend_vk_buffer_context *)k->buffer->context; + ggml_backend_vk_buffer_context * v_buf_ctx = (ggml_backend_vk_buffer_context *)v->buffer->context; + + if (!Q_uma) { + d_Q = q_buf_ctx->dev_buffer; + q_buf_offset = vk_tensor_offset(q) + q->view_offs; + } + if (!K_uma) { + d_K = k_buf_ctx->dev_buffer; + k_buf_offset = vk_tensor_offset(k) + k->view_offs; + } + if (!V_uma) { + d_V = v_buf_ctx->dev_buffer; + v_buf_offset = vk_tensor_offset(v) + v->view_offs; + } + if (!D_uma) { + d_D = d_buf_ctx->dev_buffer; + d_buf_offset = vk_tensor_offset(dst) + dst->view_offs; + } + + if (!M_uma) { + d_M = d_Q; + m_buf_offset = q_buf_offset; + if (mask) { + ggml_backend_vk_buffer_context * m_buf_ctx = (ggml_backend_vk_buffer_context*)mask->buffer->context; + d_M = m_buf_ctx->dev_buffer; + m_buf_offset = vk_tensor_offset(mask) + mask->view_offs; + } + } + + const vk_flash_attn_push_constants pc = { N, KV, (uint32_t)ne1, (uint32_t)ne2, (uint32_t)ne3, (uint32_t)neq2, (uint32_t)neq3, (uint32_t)nek2, (uint32_t)nek3, (uint32_t)nev2, (uint32_t)nev3, nem1, (uint32_t)nbq2, (uint32_t)nbq3, (uint32_t)nbk2, (uint32_t)nbk3, (uint32_t)nbv2, (uint32_t)nbv3, nbm1, scale, max_bias, logit_softcap, mask != nullptr, n_head_log2, m0, m1 }; + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, + { + vk_subbuffer{d_Q, q_buf_offset, VK_WHOLE_SIZE}, + vk_subbuffer{d_K, k_buf_offset, VK_WHOLE_SIZE}, + vk_subbuffer{d_V, v_buf_offset, VK_WHOLE_SIZE}, + vk_subbuffer{d_M, m_buf_offset, VK_WHOLE_SIZE}, + vk_subbuffer{d_D, d_buf_offset, VK_WHOLE_SIZE}, + }, + sizeof(vk_flash_attn_push_constants), &pc, { (uint32_t)neq1, (uint32_t)neq2, (uint32_t)neq3 }); +} + static vk_pipeline ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, const ggml_tensor * src2, ggml_tensor * dst, ggml_op op) { switch (op) { case GGML_OP_GET_ROWS: @@ -4093,7 +4951,7 @@ static bool ggml_vk_op_supports_incontiguous(ggml_op op) { } template -static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context& subctx, const ggml_tensor * src0, const ggml_tensor * src1, const ggml_tensor * src2, ggml_tensor * dst, ggml_op op, const PC&& pc, bool dryrun = false) { +static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context& subctx, const ggml_tensor * src0, const ggml_tensor * src1, const ggml_tensor * src2, ggml_tensor * dst, ggml_op op, PC&& pc, bool dryrun = false) { VK_LOG_DEBUG("ggml_vk_op_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; if (src1 != nullptr) { std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; @@ -4133,6 +4991,8 @@ static void 
ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context& subctx, co const uint64_t ned3 = dst->ne[3]; const uint64_t ned = ned0 * ned1; + init_pushconst_fastdiv(pc); + vk_pipeline pipeline = ggml_vk_op_get_pipeline(ctx, src0, src1, src2, dst, op); if (pipeline == nullptr) { @@ -4502,7 +5362,8 @@ static void ggml_vk_scale(ggml_backend_vk_context * ctx, vk_context& subctx, con (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], (uint32_t)src0->ne[2], (uint32_t)src0->ne[3], (uint32_t)src0->nb[0] / src0_type_size, (uint32_t)src0->nb[1] / src0_type_size, (uint32_t)src0->nb[2] / src0_type_size, (uint32_t)src0->nb[3] / src0_type_size, (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2], (uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, 0, - op_params[0], 0.0f + op_params[0], 0.0f, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }, dryrun); } @@ -4516,6 +5377,7 @@ static void ggml_vk_sqr(ggml_backend_vk_context * ctx, vk_context& subctx, const (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2], (uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, 0, 0.0f, 0.0f, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }, dryrun); } @@ -4529,6 +5391,7 @@ static void ggml_vk_sin(ggml_backend_vk_context * ctx, vk_context& subctx, const (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2], (uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, 0, 0.0f, 0.0f, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }, dryrun); } @@ -4542,6 +5405,7 @@ static void ggml_vk_cos(ggml_backend_vk_context * ctx, vk_context& subctx, const (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2], (uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, 0, 0.0f, 0.0f, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }, dryrun); } @@ -4556,6 +5420,7 @@ static void ggml_vk_clamp(ggml_backend_vk_context * ctx, vk_context& subctx, con (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2], (uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, 0, op_params[0], op_params[1], + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }, dryrun); } @@ -4569,6 +5434,7 @@ static void ggml_vk_pad(ggml_backend_vk_context * ctx, vk_context& subctx, const (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2], (uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, 0, 0.0f, 0.0f, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }, dryrun); } @@ -4582,6 +5448,7 @@ static void ggml_vk_repeat(ggml_backend_vk_context * ctx, vk_context& subctx, co (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2], (uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, 0, 0.0f, 0.0f, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }, dryrun); } @@ -4596,6 +5463,7 @@ static void 
ggml_vk_cpy(ggml_backend_vk_context * ctx, vk_context& subctx, const (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2], (uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, d_offset, 0.0f, 0.0f, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }, dryrun); } @@ -4949,19 +5817,27 @@ static void ggml_vk_test_matmul(ggml_backend_vk_context * ctx, size_t m, size_t for (size_t i = 0; i < x_ne; i++) { if (std::is_same()) { x[i] = (rand() / (float)RAND_MAX) * 2.0f - 1.0f; + // x[i] = 1.0f; + // x[i] = i + 1; + // x[i] = (i % k == i / k) ? 1.0f : 0.0f; } else if (std::is_same()) { x[i] = ggml_fp32_to_fp16((rand() / (float)RAND_MAX) * 2.0f - 1.0f); + // x[i] = ggml_fp32_to_fp16(1.0f); + // x[i] = ggml_fp32_to_fp16(i + 1); + // x[i] = ggml_fp32_to_fp16((i % k == i / k) ? 1.0f : 0.0f); } else { GGML_ABORT("fatal error"); } } for (size_t i = 0; i < y_ne; i++) { if (std::is_same()) { - // y[i] = (rand() / (float)RAND_MAX) * 2.0f - 1.0f; - y[i] = (i % k == i / k) ? 1.0f : 0.0f; + y[i] = (rand() / (float)RAND_MAX) * 2.0f - 1.0f; + // y[i] = (i % k == i / k) ? 1.0f : 0.0f; + // y[i] = i + 1; } else if (std::is_same()) { - // y[i] = ggml_fp32_to_fp16((rand() / (float)RAND_MAX) * 2.0f - 1.0f); - y[i] = ggml_fp32_to_fp16((i % k == i / k) ? 1.0f : 0.0f); + y[i] = ggml_fp32_to_fp16((rand() / (float)RAND_MAX) * 2.0f - 1.0f); + // y[i] = ggml_fp32_to_fp16((i % k == i / k) ? 1.0f : 0.0f); + // y[i] = ggml_fp32_to_fp16(i + 1); } else { GGML_ABORT("fatal error"); } @@ -4971,16 +5847,16 @@ static void ggml_vk_test_matmul(ggml_backend_vk_context * ctx, size_t m, size_t ggml_vk_buffer_write(d_Y, 0, y, sizeof(Y_TYPE) * k * n * batch); vk_context subctx = ggml_vk_create_context(ctx, ctx->device->compute_queue); + ggml_vk_ctx_begin(ctx->device, subctx); for (size_t i = 0; i < num_it; i++) { - ggml_vk_ctx_begin(ctx->device, subctx); ggml_vk_matmul( ctx, subctx, p, ggml_vk_subbuffer(d_X), ggml_vk_subbuffer(d_Y), ggml_vk_subbuffer(d_D), ggml_vk_subbuffer(ctx->prealloc_split_k), m, n, k, k, k, m, k*m, k*n, m*n, split_k, batch, batch, batch, 1, 1 ); - ggml_vk_ctx_end(subctx); } + ggml_vk_ctx_end(subctx); auto begin = std::chrono::high_resolution_clock::now(); ggml_vk_submit(subctx, ctx->fence); @@ -5045,7 +5921,7 @@ static void ggml_vk_test_matmul(ggml_backend_vk_context * ctx, size_t m, size_t double err = std::fabs(d[i] - d_chk[i]); avg_err += err; - if (err > 0.05f && first_err_n == -1) { + if ((err > 0.05f || std::isnan(err)) && first_err_n == -1) { first_err_b = i / (m * n); first_err_n = (i % (m * n)) / m; first_err_m = (i % (m * n)) % m; @@ -5058,12 +5934,10 @@ static void ggml_vk_test_matmul(ggml_backend_vk_context * ctx, size_t m, size_t std::cerr << "TEST " << shname << " m=" << m << " n=" << n << " k=" << k << " batch=" << batch << " split_k=" << split_k << " matmul " << time / num_it << "ms " << tflops << " TFLOPS avg_err=" << avg_err << std::endl; - if (avg_err > 0.1) { + if (avg_err > 0.1 || std::isnan(avg_err)) { std::cerr << "m = " << first_err_m << " n = " << first_err_n << " b = " << first_err_b << std::endl; std::cerr << "Actual result: " << std::endl << std::endl; ggml_vk_print_matrix_area(d, GGML_TYPE_F32, m, n, first_err_m, first_err_n, first_err_b); - std::cerr << std::endl; - ggml_vk_print_matrix_area(d, GGML_TYPE_F32, m, n, first_err_m, first_err_n + 15, first_err_b); std::cerr << "Expected result: " << std::endl << std::endl; ggml_vk_print_matrix_area(d_chk, 
GGML_TYPE_F32, m, n, first_err_m, first_err_n, first_err_b); @@ -5246,13 +6120,13 @@ static void ggml_vk_test_dequant_matmul(ggml_backend_vk_context * ctx, size_t m, vk_pipeline p; std::string shname; if (shader_size == 0) { - p = ctx->device->pipeline_dequant_mul_mat_mat[quant].f32acc->a_s; + p = ctx->device->fp16 ? ctx->device->pipeline_dequant_mul_mat_mat[quant].f16acc->a_s : ctx->device->pipeline_dequant_mul_mat_mat[quant].f32acc->a_s; shname = std::string(ggml_type_name(quant)) + "_ALIGNED_S"; } else if (shader_size == 1) { - p = ctx->device->pipeline_dequant_mul_mat_mat[quant].f32acc->a_m; + p = ctx->device->fp16 ? ctx->device->pipeline_dequant_mul_mat_mat[quant].f16acc->a_m : ctx->device->pipeline_dequant_mul_mat_mat[quant].f32acc->a_m; shname = std::string(ggml_type_name(quant)) + "_ALIGNED_M"; } else if (shader_size == 2) { - p = ctx->device->pipeline_dequant_mul_mat_mat[quant].f32acc->a_l; + p = ctx->device->fp16 ? ctx->device->pipeline_dequant_mul_mat_mat[quant].f16acc->a_l : ctx->device->pipeline_dequant_mul_mat_mat[quant].f32acc->a_l; shname = std::string(ggml_type_name(quant)) + "_ALIGNED_L"; } else { GGML_ASSERT(0); @@ -5262,13 +6136,13 @@ static void ggml_vk_test_dequant_matmul(ggml_backend_vk_context * ctx, size_t m, if (k != kpad) { if (shader_size == 0) { - p = ctx->device->pipeline_dequant_mul_mat_mat[quant].f32acc->s; + p = ctx->device->fp16 ? ctx->device->pipeline_dequant_mul_mat_mat[quant].f16acc->s : ctx->device->pipeline_dequant_mul_mat_mat[quant].f32acc->s; shname = std::string(ggml_type_name(quant)) + "_S"; } else if (shader_size == 1) { - p = ctx->device->pipeline_dequant_mul_mat_mat[quant].f32acc->m; + p = ctx->device->fp16 ? ctx->device->pipeline_dequant_mul_mat_mat[quant].f16acc->m : ctx->device->pipeline_dequant_mul_mat_mat[quant].f32acc->m; shname = std::string(ggml_type_name(quant)) + "_M"; } else if (shader_size == 2) { - p = ctx->device->pipeline_dequant_mul_mat_mat[quant].f32acc->l; + p = ctx->device->fp16 ? 
ctx->device->pipeline_dequant_mul_mat_mat[quant].f16acc->l : ctx->device->pipeline_dequant_mul_mat_mat[quant].f32acc->l; shname = std::string(ggml_type_name(quant)) + "_L"; } else { GGML_ASSERT(0); @@ -5318,16 +6192,16 @@ static void ggml_vk_test_dequant_matmul(ggml_backend_vk_context * ctx, size_t m, ggml_vk_buffer_write(y_buf, 0, y, y_sz); vk_context subctx = ggml_vk_create_context(ctx, ctx->device->compute_queue); + ggml_vk_ctx_begin(ctx->device, subctx); for (size_t i = 0; i < num_it; i++) { - ggml_vk_ctx_begin(ctx->device, subctx); ggml_vk_matmul( ctx, subctx, p, ggml_vk_subbuffer(qx_buf), ggml_vk_subbuffer(y_buf), ggml_vk_subbuffer(d_buf), ggml_vk_subbuffer(ctx->prealloc_split_k), m, n, k, k, k, m, k*m, k*n, m*n, split_k, batch, batch, batch, 1, 1 ); - ggml_vk_ctx_end(subctx); } + ggml_vk_ctx_end(subctx); auto begin = std::chrono::high_resolution_clock::now(); @@ -5427,105 +6301,13 @@ static void ggml_vk_test_dequant_matmul(ggml_backend_vk_context * ctx, size_t m, static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) { #if defined(GGML_VULKAN_RUN_TESTS) - ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_F32); - ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q4_0); - ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q4_1); - ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q5_0); - ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q5_1); - ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q8_0); - ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q2_K); - ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q3_K); - ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q4_K); - ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q5_K); - ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q6_K); - ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_IQ4_NL); - - ggml_vk_test_matmul(ctx, 512, 512, 100, 32, 100, 1, 2); - - ggml_vk_test_matmul(ctx, 128, 512, 512, 2, 100, 1, 0); - ggml_vk_test_matmul(ctx, 128, 512, 512, 2, 100, 1, 1); - ggml_vk_test_matmul(ctx, 128, 512, 512, 2, 100, 1, 2); - // ggml_vk_test_matmul(ctx, 128, 512, 512, 2, 100, 4, 0); - // ggml_vk_test_matmul(ctx, 128, 512, 512, 2, 100, 4, 1); - // ggml_vk_test_matmul(ctx, 128, 512, 512, 2, 100, 4, 2); - - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q4_0); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q4_0); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q4_0); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q4_0); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q4_0); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q4_0); - - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q4_1); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q4_1); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q4_1); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q4_1); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q4_1); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q4_1); - - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q5_0); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q5_0); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q5_0); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q5_0); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q5_0); - // 
ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q5_0); - - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q5_1); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q5_1); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q5_1); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q5_1); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q5_1); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q5_1); - - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q8_0); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q8_0); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q8_0); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q8_0); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q8_0); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q8_0); - - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q2_K); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q2_K); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q2_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q2_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q2_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q2_K); - - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q3_K); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q3_K); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q3_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q3_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q3_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q3_K); - - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q4_K); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q4_K); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q4_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q4_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q4_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q4_K); - - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q5_K); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q5_K); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q5_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q5_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q5_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q5_K); - - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q6_K); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q6_K); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q6_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q6_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q6_K); - // ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, 
GGML_TYPE_Q6_K); - - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_IQ4_NL); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_IQ4_NL); - ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_IQ4_NL); - - std::cerr << std::endl; - const std::vector vals { + 512, 512, 128, + 128, 512, 512, + 4096, 512, 4096, + 11008, 512, 4096, + 4096, 512, 11008, + 32000, 512, 4096, 8, 8, 8, 100, 46, 576, 623, 111, 128, @@ -5538,25 +6320,52 @@ static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) { 49, 49, 128, 128, 49, 49, 4096, 49, 4096, - 11008, 49, 4096, - 4096, 49, 11008, - 32000, 49, 4096, - 512, 512, 128, - 128, 512, 512, - 4096, 512, 4096, - 11008, 512, 4096, - 4096, 512, 11008, - 32000, 512, 4096, }; - const size_t num_it = 1; + const size_t num_it = 100; + for (size_t i = 0; i < vals.size(); i += 3) { ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 0); ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 1); ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 2); - // ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 0); - // ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 1); - // ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 2); - std::cerr << std::endl; + std::cerr << '\n'; + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 2, 0); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 2, 1); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 2, 2); + std::cerr << '\n'; + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 0); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 1); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 2); + std::cerr << '\n' << std::endl; + + if (vals[i + 2] % 32 == 0) { + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 0, GGML_TYPE_Q4_0); + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 1, GGML_TYPE_Q4_0); + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 2, GGML_TYPE_Q4_0); + std::cerr << '\n'; + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 2, 0, GGML_TYPE_Q4_0); + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 2, 1, GGML_TYPE_Q4_0); + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 2, 2, GGML_TYPE_Q4_0); + std::cerr << '\n'; + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 0, GGML_TYPE_Q4_0); + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 1, GGML_TYPE_Q4_0); + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 2, GGML_TYPE_Q4_0); + std::cerr << '\n' << std::endl; + } + + if (vals[i + 2] % 256 == 0) { + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 0, GGML_TYPE_Q4_K); + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 1, GGML_TYPE_Q4_K); + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 2, GGML_TYPE_Q4_K); + std::cerr << '\n'; + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 2, 0, GGML_TYPE_Q4_K); + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, 
num_it, 2, 1, GGML_TYPE_Q4_K); + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 2, 2, GGML_TYPE_Q4_K); + std::cerr << '\n'; + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 0, GGML_TYPE_Q4_K); + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 1, GGML_TYPE_Q4_K); + ggml_vk_test_dequant_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 2, GGML_TYPE_Q4_K); + std::cerr << '\n' << std::endl; + } } GGML_ABORT("fatal error"); @@ -5603,6 +6412,7 @@ static bool ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * nod const ggml_tensor * src0 = node->src[0]; const ggml_tensor * src1 = node->src[1]; const ggml_tensor * src2 = node->src[2]; + const ggml_tensor * src3 = node->src[3]; switch (node->op) { // Return on empty ops to avoid generating a compute_ctx and setting exit_tensor @@ -5655,6 +6465,7 @@ static bool ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * nod case GGML_OP_TIMESTEP_EMBEDDING: case GGML_OP_POOL_2D: case GGML_OP_LEAKY_RELU: + case GGML_OP_FLASH_ATTN_EXT: break; default: std::cerr << "ggml_vulkan: Error: Missing op: " << ggml_op_name(node->op) << std::endl; @@ -5847,6 +6658,11 @@ static bool ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * nod case GGML_OP_MUL_MAT_ID: ggml_vk_mul_mat_id(ctx, compute_ctx, src0, src1, src2, node, dryrun); + break; + + case GGML_OP_FLASH_ATTN_EXT: + ggml_vk_flash_attn(ctx, compute_ctx, src0, src1, src2, src3, node, dryrun); + break; default: return false; @@ -5947,6 +6763,7 @@ static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_tensor * break; case GGML_OP_MUL_MAT: case GGML_OP_MUL_MAT_ID: + case GGML_OP_FLASH_ATTN_EXT: buf = tensor->buffer; break; @@ -6636,8 +7453,8 @@ static bool ggml_backend_vk_device_supports_op(ggml_backend_dev_t dev, const ggm case GGML_OP_MUL_MAT_ID: { ggml_backend_vk_device_context * ctx = (ggml_backend_vk_device_context *)dev->context; - if (op->op == GGML_OP_MUL_MAT_ID && - ggml_vk_get_device(ctx->device)->properties.limits.maxComputeSharedMemorySize < 32768) { + const vk_device& device = ggml_vk_get_device(ctx->device); + if (op->op == GGML_OP_MUL_MAT_ID && !device->mul_mat_id_s && !device->mul_mat_id_m && !device->mul_mat_id_l) { // If there's not enough shared memory for row_ids and the result tile, fallback to CPU return false; } @@ -6678,6 +7495,57 @@ static bool ggml_backend_vk_device_supports_op(ggml_backend_dev_t dev, const ggm return true; } break; + case GGML_OP_FLASH_ATTN_EXT: + { + ggml_backend_vk_device_context * ctx = (ggml_backend_vk_device_context *)dev->context; + if (!ggml_vk_get_device(ctx->device)->coopmat2) { + return false; + } + switch (op->src[0]->ne[0]) { + case 64: + case 80: + case 96: + case 112: + case 128: + case 256: + break; + default: + return false; + } + if (op->src[0]->type != GGML_TYPE_F32) { + return false; + } + if (op->type != GGML_TYPE_F32) { + return false; + } + if (op->src[3] && op->src[3]->type != GGML_TYPE_F16) { + return false; + } + // It's straightforward to support different K/V dequant, but would + // significantly increase the number of pipelines + if (op->src[1]->type != op->src[2]->type) { + return false; + } + switch (op->src[1]->type) { + case GGML_TYPE_F16: + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + case GGML_TYPE_Q5_0: + case GGML_TYPE_Q5_1: + case GGML_TYPE_Q8_0: + // K dequants currently disabled because D dimension is rounded up to 256 and runs inefficiently + //case GGML_TYPE_Q2_K: + 
//case GGML_TYPE_Q3_K: + //case GGML_TYPE_Q4_K: + //case GGML_TYPE_Q5_K: + //case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ4_NL: + break; + default: + return false; + } + return true; + } case GGML_OP_GET_ROWS: { switch (op->src[0]->type) { @@ -6992,6 +7860,7 @@ static void ggml_vk_check_results_0(ggml_tensor * tensor) { ggml_tensor * src0 = tensor->src[0]; ggml_tensor * src1 = tensor->src[1]; ggml_tensor * src2 = tensor->src[2]; + ggml_tensor * src3 = tensor->src[3]; struct ggml_init_params iparams = { /*.mem_size =*/ 2ul*1024ul*1024ul*1024ul, @@ -7004,15 +7873,18 @@ static void ggml_vk_check_results_0(ggml_tensor * tensor) { struct ggml_tensor * src0_clone = nullptr; struct ggml_tensor * src1_clone = nullptr; struct ggml_tensor * src2_clone = nullptr; + struct ggml_tensor * src3_clone = nullptr; struct ggml_tensor * tensor_clone = nullptr; size_t src0_size; size_t src1_size; size_t src2_size; + size_t src3_size; void * src0_buffer = nullptr; void * src1_buffer = nullptr; void * src2_buffer = nullptr; + void * src3_buffer = nullptr; if (src0 != nullptr) { src0_clone = ggml_dup_tensor(ggml_ctx, src0); @@ -7140,8 +8012,53 @@ static void ggml_vk_check_results_0(ggml_tensor * tensor) { ggml_vk_print_tensor(src2, "src2"); } } + if (src3 != nullptr) { + src3_clone = ggml_dup_tensor(ggml_ctx, src3); + + src3_size = ggml_nbytes(src3); + + src3_buffer = malloc(src3_size); + src3_clone->data = src3_buffer; + if (ggml_backend_buffer_is_host(src3->buffer)) { + memcpy(src3_clone->data, src3->data, src3_size); + memcpy(src3_clone->nb, src3->nb, sizeof(size_t) * GGML_MAX_DIMS); + } else if (ggml_backend_buffer_is_vk(src3->buffer)) { + ggml_backend_vk_buffer_context * buf_ctx = (ggml_backend_vk_buffer_context *)src3->buffer->context; + vk_buffer& buffer_gpu = buf_ctx->dev_buffer; + uint64_t offset = vk_tensor_offset(src3) + src3->view_offs; + if (!ggml_is_contiguous(src3) && ggml_vk_dim01_contiguous(src3)) { + for (int i3 = 0; i3 < src3->ne[3]; i3++) { + for (int i2 = 0; i2 < src3->ne[2]; i2++) { + const int idx = i3*src3->ne[2] + i2; + ggml_vk_buffer_read(buffer_gpu, offset + idx * src3->nb[2], ((char *)src3_clone->data + idx * src3_clone->nb[2]), src3->ne[1] * src3->nb[1]); + } + } + + src3_clone->nb[0] = src3->nb[0]; + src3_clone->nb[1] = src3->nb[1]; + for (int i = 2; i < GGML_MAX_DIMS; i++) { + src3_clone->nb[i] = src3_clone->nb[i - 1]*src3_clone->ne[i - 1]; + } + } else { + if (offset + src3_size >= buffer_gpu->size) { + src3_size = buffer_gpu->size - offset; + } + ggml_vk_buffer_read(buffer_gpu, offset, src3_clone->data, src3_size); + memcpy(src3_clone->nb, src3->nb, sizeof(size_t) * GGML_MAX_DIMS); + } + } else { + GGML_ABORT("fatal error"); + } + + if (vk_output_tensor > 0 && vk_output_tensor == check_counter) { + ggml_vk_print_tensor(src3, "src3"); + } + } - if (tensor->op == GGML_OP_MUL_MAT) { + if (tensor->op == GGML_OP_FLASH_ATTN_EXT) { + const float *params = (const float *)tensor->op_params; + tensor_clone = ggml_flash_attn_ext(ggml_ctx, src0_clone, src1_clone, src2_clone, src3_clone, params[0], params[1], params[2]); + } else if (tensor->op == GGML_OP_MUL_MAT) { tensor_clone = ggml_mul_mat(ggml_ctx, src0_clone, src1_clone); } else if (tensor->op == GGML_OP_MUL_MAT_ID) { tensor_clone = ggml_mul_mat_id(ggml_ctx, src0_clone, src1_clone, src2_clone); diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/CMakeLists.txt b/ggml/src/ggml-vulkan/vulkan-shaders/CMakeLists.txt index 10075db337737..bd0c74cb1c770 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/CMakeLists.txt +++ 
b/ggml/src/ggml-vulkan/vulkan-shaders/CMakeLists.txt @@ -1,7 +1,9 @@ find_package (Threads REQUIRED) +find_package(Vulkan COMPONENTS glslc REQUIRED) set(TARGET vulkan-shaders-gen) add_executable(${TARGET} vulkan-shaders-gen.cpp) install(TARGETS ${TARGET} RUNTIME) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) target_link_libraries(vulkan-shaders-gen PUBLIC Threads::Threads) +target_link_libraries(vulkan-shaders-gen PRIVATE Vulkan::Vulkan) diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/dequant_funcs_cm2.comp b/ggml/src/ggml-vulkan/vulkan-shaders/dequant_funcs_cm2.comp new file mode 100644 index 0000000000000..a8707b621e7e2 --- /dev/null +++ b/ggml/src/ggml-vulkan/vulkan-shaders/dequant_funcs_cm2.comp @@ -0,0 +1,305 @@ + +#include "types.comp" + +layout(buffer_reference, std430, buffer_reference_align = 2) buffer decodeBufQ4_0 { + block_q4_0_packed16 block; +}; + +float16_t dequantFuncQ4_0(const in decodeBufQ4_0 bl, const in uint blockCoords[2], const in uint coordInBlock[2]) +{ + const float16_t d = bl.block.d; + const uint idx = coordInBlock[1]; + const uint shift = (idx & 0x10) >> 2; + uint32_t qs = unpack8(uint32_t(bl.block.qs[(idx & 0xE) >> 1]))[idx & 1]; + qs >>= shift; + qs &= 0xF; + float16_t ret = (float16_t(qs) - float16_t(8)) * d; + return ret; +} + +layout(buffer_reference, std430, buffer_reference_align = 4) buffer decodeBufQ4_1 { + block_q4_1 block; +}; + +float16_t dequantFuncQ4_1(const in decodeBufQ4_1 bl, const in uint blockCoords[2], const in uint coordInBlock[2]) +{ + const float16_t d = bl.block.d; + const float16_t m = bl.block.m; + const uint idx = coordInBlock[1]; + const uint iqs = idx & 0xF; + const uint shift = (idx & 0x10) >> 2; + uint32_t qs = bl.block.qs[iqs]; + qs >>= shift; + qs &= 0xF; + float16_t ret = float16_t(qs) * d + m; + return ret; +} + +layout(buffer_reference, std430, buffer_reference_align = 2) buffer decodeBufQ5_0 { + block_q5_0 block; +}; + +float16_t dequantFuncQ5_0(const in decodeBufQ5_0 bl, const in uint blockCoords[2], const in uint coordInBlock[2]) +{ + const float16_t d = bl.block.d; + const uint idx = coordInBlock[1]; + const uint iqs = idx & 0xF; + + const uint uint_qh = uint(bl.block.qh[1]) << 16 | bl.block.qh[0]; + const uint qh = ((uint_qh >> idx) << 4) & 0x10; + + const uint shift = (idx & 0x10) >> 2; + uint32_t qs = bl.block.qs[iqs]; + qs >>= shift; + qs &= 0xF; + + float16_t ret = (float16_t(qs | qh) - float16_t(16)) * d; + return ret; +} + +layout(buffer_reference, std430, buffer_reference_align = 8) buffer decodeBufQ5_1 { + block_q5_1 block; +}; + +float16_t dequantFuncQ5_1(const in decodeBufQ5_1 bl, const in uint blockCoords[2], const in uint coordInBlock[2]) +{ + const float16_t d = bl.block.d; + const float16_t m = bl.block.m; + const uint idx = coordInBlock[1]; + const uint iqs = idx & 0xF; + + const uint uint_qh = bl.block.qh; + const uint qh = ((uint_qh >> idx) << 4) & 0x10; + + const uint shift = (idx & 0x10) >> 2; + uint32_t qs = bl.block.qs[iqs]; + qs >>= shift; + qs &= 0xF; + + float16_t ret = float16_t(qs | qh) * d + m; + return ret; +} + +layout(buffer_reference, std430, buffer_reference_align = 2) buffer decodeBufQ8_0 { + block_q8_0_packed16 block; +}; + +float16_t dequantFuncQ8_0(const in decodeBufQ8_0 bl, const in uint blockCoords[2], const in uint coordInBlock[2]) +{ + const float16_t d = bl.block.d; + const uint idx = coordInBlock[1]; + const uint iqs = idx; + + // Load 16b and select the byte for this element + int32_t qs = 
unpack8(int32_t(bl.block.qs[(iqs & 0x1E) >> 1]))[iqs & 1]; + float16_t ret = float16_t(qs) * d; + return ret; +} + +layout(buffer_reference, std430, buffer_reference_align = 4) buffer decodeBufQ2_K { + block_q2_K block; +}; + +float16_t dequantFuncQ2_K(const in decodeBufQ2_K bl, const in uint blockCoords[2], const in uint coordInBlock[2]) +{ + const f16vec2 d = bl.block.d; + const uint idx = coordInBlock[1]; + const uint iqs = idx; + + const uint qsi = (iqs / 128) * 32 + (iqs % 32); // 0..31 + const uint scalesi = iqs / 16; // 0..15 + const uint qsshift = ((iqs % 128) / 32) * 2; // 0,2,4,6 + + uint32_t qs = bl.block.qs[qsi]; + const uint scales = bl.block.scales[scalesi]; + float16_t ret = d.x * float16_t(scales & 0xF) * float16_t((qs >> qsshift) & 3) - d.y * float16_t(scales >> 4); + return ret; +} + +layout(buffer_reference, std430, buffer_reference_align = 2) buffer decodeBufQ3_K { + block_q3_K block; +}; + +float16_t dequantFuncQ3_K(const in decodeBufQ3_K bl, const in uint blockCoords[2], const in uint coordInBlock[2]) +{ + const uint idx = coordInBlock[1]; + const uint iqs = idx; + + const uint n = iqs / 128; // 0,1 + const uint qsi = n * 32 + (iqs % 32); // 0..63 + const uint hmi = (iqs % 32); // 0..31 + const uint j = (iqs % 128) / 8; // 0..15 + const uint is = iqs / 16; // 0..15 + const uint halfsplit = ((iqs % 128) / 32); // 0,1,2,3 + const uint qsshift = halfsplit * 2; // 0,2,4,6 + const uint m = 1 << (4 * n + halfsplit); // 1,2,4,8,16,32,64,128 + + uint32_t scaleidx0 = (is < 8) ? is : (is-8); + uint32_t scaleidx0shift = (is < 8) ? 0 : 4; + uint32_t scaleidx1 = is + 8 - (is/4)*4; + uint32_t scaleidx1shift = (is/4)*2; + + const int8_t us = int8_t(((bl.block.scales[scaleidx0] >> scaleidx0shift) & 0xF) | (((bl.block.scales[scaleidx1] >> scaleidx1shift) & 3) << 4)); + + const float16_t dl = bl.block.d * float16_t(us - 32); + + float16_t ret = dl * float16_t(int8_t((bl.block.qs[qsi ] >> qsshift) & 3) - (((bl.block.hmask[hmi ] & m) != 0) ? 0 : 4)); + + return ret; +} + +layout(buffer_reference, std430, buffer_reference_align = 16) buffer decodeBufQ4_K { + block_q4_K block; +}; + +float16_t dequantFuncQ4_K(const in decodeBufQ4_K bl, const in uint blockCoords[2], const in uint coordInBlock[2]) +{ + const uint idx = coordInBlock[1]; + const uint iqs = idx; + + const uint n = iqs / 64; // 0,1,2,3 + const uint b = (iqs % 64) / 32; // 0,1 + const uint is = (idx & 0xE0) >> 5; // 0..7 + const uint qsi = n * 32 + (iqs % 32); // 0..127 + + const f16vec2 loadd = bl.block.d; + + uint32_t sc; + uint32_t mbyte; + + uint32_t scidx0 = (is < 4) ? is : (is + 4); + uint32_t scidx1 = (is < 4) ? is : (is - 4); + uint32_t scidxmask1 = (is < 4) ? 0x30 : 0xC0; + uint32_t scidxshift1 = (is < 4) ? 0 : 2; + uint32_t mbidx0 = is + 4; + uint32_t mbidx1 = (is < 4) ? is + 4 : is; + uint32_t mbidxmask0 = (is < 4) ? 0xF : 0xF0; + uint32_t mbidxshift0 = (is < 4) ? 0 : 4; + uint32_t mbidxmask1 = (is < 4) ? 0x30 : 0xC0; + uint32_t mbidxshift1 = (is < 4) ? 
0 : 2; + + sc = uint8_t((bl.block.scales[scidx0] & 0xF) | ((bl.block.scales[scidx1] & scidxmask1) >> scidxshift1)); + mbyte = uint8_t(((bl.block.scales[mbidx0] & mbidxmask0) >> mbidxshift0) | ((bl.block.scales[mbidx1] & mbidxmask1) >> mbidxshift1)); + + const float16_t d = loadd.x * float16_t(sc); + const float16_t m = loadd.y * float16_t(mbyte); + + uint32_t dmask = 0xF << (b * 4); + + float16_t ret = d * float16_t((bl.block.qs[qsi ] & dmask) >> (b * 4)) - m; + + return ret; +} + +layout(buffer_reference, std430, buffer_reference_align = 16) buffer decodeBufQ5_K { + block_q5_K block; +}; + +float16_t dequantFuncQ5_K(const in decodeBufQ5_K bl, const in uint blockCoords[2], const in uint coordInBlock[2]) +{ + const uint idx = coordInBlock[1]; + const uint iqs = idx; + + const uint n = iqs / 64; // 0,1,2,3 + const uint b = (iqs % 64) / 32; // 0,1 + const uint is = (idx & 0xE0) >> 5; // 0..7 + const uint qsi = n * 32 + (iqs % 32); // 0..127 + const uint qhi = (iqs % 32); // 0..31 + + const uint8_t hm = uint8_t(1 << (iqs / 32)); + + const f16vec2 loadd = bl.block.d; + + uint32_t sc; + uint32_t mbyte; + + uint32_t scidx0 = (is < 4) ? is : (is + 4); + uint32_t scidx1 = (is < 4) ? is : (is - 4); + uint32_t scidxmask1 = (is < 4) ? 0x30 : 0xC0; + uint32_t scidxshift1 = (is < 4) ? 0 : 2; + uint32_t mbidx0 = is + 4; + uint32_t mbidx1 = (is < 4) ? is + 4 : is; + uint32_t mbidxmask0 = (is < 4) ? 0xF : 0xF0; + uint32_t mbidxshift0 = (is < 4) ? 0 : 4; + uint32_t mbidxmask1 = (is < 4) ? 0x30 : 0xC0; + uint32_t mbidxshift1 = (is < 4) ? 0 : 2; + + sc = uint8_t((bl.block.scales[scidx0] & 0xF) | ((bl.block.scales[scidx1] & scidxmask1) >> scidxshift1)); + mbyte = uint8_t(((bl.block.scales[mbidx0] & mbidxmask0) >> mbidxshift0) | ((bl.block.scales[mbidx1] & mbidxmask1) >> mbidxshift1)); + + const float16_t d = loadd.x * float16_t(sc); + const float16_t m = loadd.y * float16_t(mbyte); + + uint32_t dmask = 0xF << (b * 4); + + float16_t ret = d * (float16_t((bl.block.qs[qsi ] & dmask) >> (b * 4)) + float16_t((bl.block.qh[qhi ] & hm) != 0 ? 
16 : 0)) - m; + + return ret; +} + +layout(buffer_reference, std430, buffer_reference_align = 2) buffer decodeBufQ6_K { + block_q6_K block; +}; + +float16_t dequantFuncQ6_K(const in decodeBufQ6_K bl, const in uint blockCoords[2], const in uint coordInBlock[2]) +{ + const uint idx = coordInBlock[1]; + const uint iqs = idx; + + const uint n = iqs / 128; // 0,1 + const uint b = (iqs % 128) / 64; // 0,1 + const uint is_b = (iqs % 32) / 16; // 0,1 + const uint qhshift = ((iqs % 128) / 32) * 2;// 0,2,4,6 + const uint is = 8 * n + qhshift + is_b; // 0..15 + const uint qsi = n * 64 + (iqs % 64); // 0..127 + const uint qhi = n * 32 + (iqs % 32); // 0..63 + + const float16_t dscale = bl.block.d * float16_t(bl.block.scales[is]); + + float16_t ret = dscale * float16_t(int8_t(((bl.block.ql[qsi ] >> (b * 4)) & 0xF) | (((bl.block.qh[qhi ] >> qhshift) & 3) << 4)) - 32); + + return ret; +} + +#if defined(DATA_A_IQ4_NL) +layout(buffer_reference, std430, buffer_reference_align = 2) buffer decodeBufIQ4_NL { + block_iq4_nl block; +}; + +float16_t dequantFuncIQ4_NL(const in decodeBufIQ4_NL bl, const in uint blockCoords[2], const in uint coordInBlock[2]) +{ + const float16_t d = bl.block.d; + const uint idx = coordInBlock[1]; + const uint iqs = idx & 0xF; + const uint shift = (idx & 0x10) >> 2; + uint32_t qs = bl.block.qs[iqs]; + qs >>= shift; + qs &= 0xF; + float16_t ret = float16_t(kvalues_iq4nl[qs]) * d; + return ret; +} +#endif + +#if defined(DATA_A_Q4_0) +#define dequantFuncA dequantFuncQ4_0 +#elif defined(DATA_A_Q4_1) +#define dequantFuncA dequantFuncQ4_1 +#elif defined(DATA_A_Q5_0) +#define dequantFuncA dequantFuncQ5_0 +#elif defined(DATA_A_Q5_1) +#define dequantFuncA dequantFuncQ5_1 +#elif defined(DATA_A_Q8_0) +#define dequantFuncA dequantFuncQ8_0 +#elif defined(DATA_A_Q2_K) +#define dequantFuncA dequantFuncQ2_K +#elif defined(DATA_A_Q3_K) +#define dequantFuncA dequantFuncQ3_K +#elif defined(DATA_A_Q4_K) +#define dequantFuncA dequantFuncQ4_K +#elif defined(DATA_A_Q5_K) +#define dequantFuncA dequantFuncQ5_K +#elif defined(DATA_A_Q6_K) +#define dequantFuncA dequantFuncQ6_K +#elif defined(DATA_A_IQ4_NL) +#define dequantFuncA dequantFuncIQ4_NL +#endif diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm2.comp b/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm2.comp new file mode 100644 index 0000000000000..c5be8131b30db --- /dev/null +++ b/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm2.comp @@ -0,0 +1,289 @@ +#version 450 + +#extension GL_EXT_control_flow_attributes : enable +#extension GL_EXT_shader_16bit_storage : require + +#extension GL_EXT_shader_explicit_arithmetic_types_float16 : require +#extension GL_EXT_shader_explicit_arithmetic_types_int8 : require +#extension GL_EXT_shader_explicit_arithmetic_types_int32 : require +#extension GL_EXT_shader_explicit_arithmetic_types_int16 : require + +#extension GL_KHR_memory_scope_semantics : enable +#extension GL_KHR_cooperative_matrix : enable +#extension GL_NV_cooperative_matrix2 : enable +#extension GL_EXT_buffer_reference : enable +#extension GL_KHR_shader_subgroup_ballot : enable +#extension GL_KHR_shader_subgroup_vote : enable +#extension GL_EXT_null_initializer : enable + +#include "types.comp" +#include "dequant_funcs_cm2.comp" + +layout(local_size_x_id = 0, local_size_y = 1, local_size_z = 1) in; + +layout (constant_id = 1) const uint32_t Br = 32; +layout (constant_id = 2) const uint32_t Bc = 32; +layout (constant_id = 3) const uint32_t D = 32; +layout (constant_id = 4) const uint32_t Clamp = gl_CooperativeMatrixClampModeConstantNV; + 
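The dequant callbacks above all follow the same pattern: given a block and an element index, pick the right packed nibble/byte, apply the block scale (and minimum, for the *_1 and K-quant formats), and return a single float16. As a concrete reference, here is a CPU-side C++ sketch of the Q4_0 arithmetic that dequantFuncQ4_0 performs; the struct layout is assumed to mirror ggml's block_q4_0 (fp16 scale plus 32 packed 4-bit quants), and it is illustration only, not code from the patch.

#include <cstdint>
#include <cstdio>

struct BlockQ4_0 {       // assumed to mirror ggml's block_q4_0
    float   d;           // block scale (fp16 in the real struct, float here)
    uint8_t qs[16];      // 32 x 4-bit quants, two per byte
};

// Element i (0..31): low nibble of byte (i & 15) for i < 16, high nibble otherwise,
// dequantized as (q - 8) * d, matching the shader's shift/mask selection.
static float dequant_q4_0(const BlockQ4_0 &b, int i) {
    const int shift = (i & 16) ? 4 : 0;
    const int q     = (b.qs[i & 15] >> shift) & 0xF;
    return float(q - 8) * b.d;
}

int main() {
    BlockQ4_0 b = {0.5f, {}};
    b.qs[3] = 0xA7;  // element 3 -> quant 7, element 19 -> quant 10
    std::printf("%f %f\n", dequant_q4_0(b, 3), dequant_q4_0(b, 19));  // -0.5 1.0
}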
+layout (push_constant) uniform parameter { + uint32_t N; + uint32_t KV; + + uint32_t ne1; + uint32_t ne2; + uint32_t ne3; + + uint32_t neq2; + uint32_t neq3; + uint32_t nek2; + uint32_t nek3; + uint32_t nev2; + uint32_t nev3; + uint32_t nem1; + + uint32_t nb02; + uint32_t nb03; + uint32_t nb12; + uint32_t nb13; + uint32_t nb22; + uint32_t nb23; + uint32_t nb31; + + float scale; + float max_bias; + float logit_softcap; + + uint32_t mask; + uint32_t n_head_log2; + float m0; + float m1; +} p; + +layout (binding = 0) readonly buffer Q {uint8_t data_q[];}; +layout (binding = 1) readonly buffer K {uint8_t data_k[];}; +layout (binding = 2) readonly buffer V {uint8_t data_v[];}; +layout (binding = 3) readonly buffer M {uint8_t data_m[];}; +layout (binding = 4) writeonly buffer O {D_TYPE data_o[];}; + +#define CEIL_DIV(a, b) (((a) + (b) - 1) / (b)) + +ACC_TYPE maxReduce(const in ACC_TYPE x, const in ACC_TYPE y) { + return max(x, y); +} + +ACC_TYPE smearReduce(const in ACC_TYPE x, const in ACC_TYPE y) { + return x; +} + +// Replace matrix elements >= numRows or numCols with 'replace' +ACC_TYPE replacePadding(const in uint32_t row, const in uint32_t col, const in ACC_TYPE elem, const in ACC_TYPE replace, const in uint32_t numRows, const in uint32_t numCols) { + if (row >= numRows || col >= numCols) { + return replace; + } + return elem; +} + +ACC_TYPE Exp(const in uint32_t row, const in uint32_t col, const in ACC_TYPE elem) +{ + return exp(elem); +} + +ACC_TYPE Max(const in uint32_t row, const in uint32_t col, const in ACC_TYPE elem0, const in ACC_TYPE elem1) +{ + return max(elem0, elem1); +} + +#if defined(BLOCK_SIZE) +#define DECODEFUNC , DEQUANTFUNC +#else +#define DECODEFUNC +#endif + +void main() { +#if defined(DATA_A_IQ4_NL) + init_iq4nl_shmem(); +#endif + + const uint32_t N = p.N; + const uint32_t KV = p.KV; + + const uint32_t Tr = CEIL_DIV(N, Br); + const uint32_t Tc = CEIL_DIV(KV, Bc); + + const uint32_t i = gl_WorkGroupID.x; + + const uint32_t iq2 = gl_WorkGroupID.y; + const uint32_t iq3 = gl_WorkGroupID.z; + + // broadcast factors + const uint32_t rk2 = p.neq2/p.nek2; + const uint32_t rk3 = p.neq3/p.nek3; + + const uint32_t rv2 = p.neq2/p.nev2; + const uint32_t rv3 = p.neq3/p.nev3; + + // k indices + const uint32_t ik3 = iq3 / rk3; + const uint32_t ik2 = iq2 / rk2; + + // v indices + const uint32_t iv3 = iq3 / rv3; + const uint32_t iv2 = iq2 / rv2; + + tensorLayoutNV<2, gl_CooperativeMatrixClampModeConstantNV> tensorLayoutQ = createTensorLayoutNV(2, gl_CooperativeMatrixClampModeConstantNV); + tensorLayoutNV<2, Clamp> tensorLayoutK = createTensorLayoutNV(2, Clamp); + tensorLayoutNV<2, Clamp> tensorLayoutV = createTensorLayoutNV(2, Clamp); + + tensorViewNV<2, false, 1, 0> tensorViewTranspose = createTensorViewNV(2, false, 1, 0); + +#if defined(BLOCK_SIZE) + tensorLayoutK = setTensorLayoutBlockSizeNV(tensorLayoutK, 1, BLOCK_SIZE); + tensorLayoutV = setTensorLayoutBlockSizeNV(tensorLayoutV, 1, BLOCK_SIZE); +#endif + + tensorLayoutQ = setTensorLayoutDimensionNV(tensorLayoutQ, N, D); + tensorLayoutK = setTensorLayoutDimensionNV(tensorLayoutK, KV, D); + tensorLayoutV = setTensorLayoutDimensionNV(tensorLayoutV, KV, D); + + coopmat Q; + coopmat Qf16; + + uint32_t q_offset = iq2*p.nb02+iq3*p.nb03; + coopMatLoadTensorNV(Q, data_q, q_offset, sliceTensorLayoutNV(tensorLayoutQ, i * Br, Br, 0, D)); + + Qf16 = coopmat(Q); + Qf16 *= float16_t(p.scale); + + coopmat O = coopmat(0); + + coopmat L, M; + + L = coopmat(0); + M = coopmat(-1.0/0.0); + + ACC_TYPE slope = ACC_TYPE(1.0); + + // ALiBi + if 
(p.max_bias > 0.0f) { + const uint32_t h = iq2; + + const ACC_TYPE base = ACC_TYPE(h < p.n_head_log2 ? p.m0 : p.m1); + const int exph = int(h < p.n_head_log2 ? h + 1 : 2*(h - p.n_head_log2) + 1); + + slope = pow(base, ACC_TYPE(exph)); + } + + [[dont_unroll]] + for (uint32_t j = 0; j < Tc; ++j) { + + coopmat S = coopmat(0); + + coopmat K_T; + + uint32_t k_offset = ik2*p.nb12 + ik3*p.nb13; + coopMatLoadTensorNV(K_T, data_k, k_offset, sliceTensorLayoutNV(tensorLayoutK, j * Bc, Bc, 0, D), tensorViewTranspose DECODEFUNC); + S = coopMatMulAdd(Qf16, K_T, S); + + if (p.logit_softcap != 0.0f) { + [[unroll]] + for (int k = 0; k < S.length(); ++k) { + S[k] = ACC_TYPE(p.logit_softcap)*tanh(S[k]); + } + } + + if (p.mask != 0) { + tensorLayoutNV<2, gl_CooperativeMatrixClampModeConstantNV> tensorLayoutM = createTensorLayoutNV(2, gl_CooperativeMatrixClampModeConstantNV); + tensorLayoutM = setTensorLayoutDimensionNV(tensorLayoutM, p.nem1, KV); + + coopmat mv; + + coopMatLoadTensorNV(mv, data_m, 0, sliceTensorLayoutNV(tensorLayoutM, i * Br, Br, j * Bc, Bc)); + + S += slope*coopmat(mv); + } + + // Clear padding elements to -inf, so they don't contribute to rowmax + if (Clamp != 0 && + ((j + 1) * Bc > KV || + (i + 1) * Br > N)) { + + uint R = ((i + 1) * Br > N) ? (N % Br) : Br; + uint C = ((j + 1) * Bc > KV) ? (KV % Bc) : Bc; + + coopMatPerElementNV(S, S, replacePadding, ACC_TYPE(-1.0/0.0), R, C); + } + + coopmat rowmax, P, rowsum, eM; + + coopMatReduceNV(rowmax, S, gl_CooperativeMatrixReduceRowNV, maxReduce); + + coopmat Mold = M; + + // M = max(rowmax, Mold) + // P = e^(S - M) + // eM = e^(Mold - M) + coopMatPerElementNV(M, rowmax, Max, Mold); + coopMatPerElementNV(P, S - M, Exp); + coopMatPerElementNV(eM, Mold - M, Exp); + + // Clear padding elements to 0, so they don't contribute to rowsum + if (Clamp != 0 && + ((j + 1) * Bc > KV || + (i + 1) * Br > N)) { + + uint R = ((i + 1) * Br > N) ? (N % Br) : Br; + uint C = ((j + 1) * Bc > KV) ? (KV % Bc) : Bc; + + coopMatPerElementNV(P, P, replacePadding, ACC_TYPE(0.0), R, C); + } + + coopmat P_A = coopmat(P); + + // compute rowsum by multiplying by matrix of all ones. 
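The per-tile update in the loop above is the streaming ("online") softmax: each new Bc-wide tile of scores may raise the running row maximum M, in which case the running denominator L and accumulator O are rescaled by exp(M_old - M_new) before the tile's contribution is added. The scalar C++ sketch below shows the same recurrence for a single query row with one-dimensional values; it is illustrative only and processes one score per step rather than a matrix tile.

#include <cmath>
#include <cstdio>
#include <vector>

int main() {
    const std::vector<float> scores = {0.1f, 2.0f, -1.0f, 3.5f, 0.7f};  // stand-in for scaled Q*K^T
    const std::vector<float> values = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f};   // stand-in for V

    float M = -INFINITY, L = 0.0f, O = 0.0f;
    for (size_t j = 0; j < scores.size(); ++j) {
        const float Mold = M;
        M = std::max(M, scores[j]);                            // M = max(rowmax, Mold)
        const float eM = std::isinf(Mold) ? 0.0f : std::exp(Mold - M);  // eM = e^(Mold - M)
        const float P  = std::exp(scores[j] - M);              // P  = e^(S - M)
        L = eM * L + P;                                        // rowsum update
        O = eM * O + P * values[j];                            // rescale accumulator, add P*V
    }
    std::printf("softmax(S) . V = %f\n", O / L);               // equals a direct softmax dot product
}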
+ coopmat One = coopmat(1.0); + + rowsum = coopmat(0.0); + rowsum = coopMatMulAdd(P_A, One, rowsum); + + coopmat V; + uint32_t v_offset = iv2*p.nb22 + iv3*p.nb23; + coopMatLoadTensorNV(V, data_v, v_offset, sliceTensorLayoutNV(tensorLayoutV, j * Bc, Bc, 0, D) DECODEFUNC); + + L = eM*L + rowsum; + + // This is the "diagonal" matrix in the paper, but since we do componentwise + // multiply rather than matrix multiply it has the diagonal element smeared + // across the row + coopmat eMdiag; + + // resize eM by using smear/reduce + coopMatReduceNV(eMdiag, eM, gl_CooperativeMatrixReduceRowNV, smearReduce); + + O = eMdiag * O; + + O = coopMatMulAdd(P_A, V, O); + } + + coopmat Ldiag; + + // resize L by using smear/reduce + coopMatReduceNV(Ldiag, L, gl_CooperativeMatrixReduceRowNV, smearReduce); + + [[unroll]] + for (int k = 0; k < Ldiag.length(); ++k) { + Ldiag[k] = ACC_TYPE(1.0) / Ldiag[k]; + } + + O = Ldiag*O; + + tensorLayoutNV<3, gl_CooperativeMatrixClampModeConstantNV> tensorLayoutD = createTensorLayoutNV(3, gl_CooperativeMatrixClampModeConstantNV); + tensorLayoutD = setTensorLayoutDimensionNV(tensorLayoutD, p.ne2, p.ne1, D); + + // permute dimensions + tensorViewNV<3, false, 1, 0, 2> tensorViewPermute = createTensorViewNV(3, false, 1, 0, 2); + uint32_t o_offset = iq3*p.ne2*p.ne1; + + coopmat O_D = coopmat(O); + coopMatStoreTensorNV(O_D, data_o, o_offset, sliceTensorLayoutNV(tensorLayoutD, i * Br, Br, iq2, 1, 0, D), tensorViewPermute); +} diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/generic_unary_head.comp b/ggml/src/ggml-vulkan/vulkan-shaders/generic_unary_head.comp index 4e1fa3af3ad62..ab7c9d7eb1b09 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/generic_unary_head.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/generic_unary_head.comp @@ -8,6 +8,13 @@ layout (push_constant) uniform parameter uint ne10; uint ne11; uint ne12; uint ne13; uint nb10; uint nb11; uint nb12; uint nb13; uint d_offset; float param1; float param2; + + uint ne0_012mp; uint ne0_012L; + uint ne0_01mp; uint ne0_01L; + uint ne0_0mp; uint ne0_0L; + uint ne1_012mp; uint ne1_012L; + uint ne1_01mp; uint ne1_01L; + uint ne1_0mp; uint ne1_0L; } p; layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; @@ -17,22 +24,30 @@ uint get_idx() { return gl_GlobalInvocationID.z * 262144 + gl_GlobalInvocationID.y * 512 + gl_GlobalInvocationID.x; } +// see init_fastdiv_values in ggml-vulkan.cpp +uint fastdiv(uint n, uint mp, uint L) { + uint msbs, lsbs; + // msbs = mulhi(n, mp) + umulExtended(n, mp, msbs, lsbs); + return (msbs + n) >> L; +} + uint src0_idx(uint idx) { - const uint i03 = idx / (p.ne02*p.ne01*p.ne00); + const uint i03 = fastdiv(idx, p.ne0_012mp, p.ne0_012L); const uint i03_offset = i03 * p.ne02*p.ne01*p.ne00; - const uint i02 = (idx - i03_offset) / (p.ne01*p.ne00); + const uint i02 = fastdiv(idx - i03_offset, p.ne0_01mp, p.ne0_01L); const uint i02_offset = i02*p.ne01*p.ne00; - const uint i01 = (idx - i03_offset - i02_offset) / p.ne00; + const uint i01 = fastdiv(idx - i03_offset - i02_offset, p.ne0_0mp, p.ne0_0L); const uint i00 = idx - i03_offset - i02_offset - i01*p.ne00; return i03*p.nb03 + i02*p.nb02 + i01*p.nb01 + i00*p.nb00; } uint dst_idx(uint idx) { - const uint i13 = idx / (p.ne12*p.ne11*p.ne10); + const uint i13 = fastdiv(idx, p.ne1_012mp, p.ne1_012L); const uint i13_offset = i13 * p.ne12*p.ne11*p.ne10; - const uint i12 = (idx - i13_offset) / (p.ne11*p.ne10); + const uint i12 = fastdiv(idx - i13_offset, p.ne1_01mp, p.ne1_01L); const uint i12_offset = i12*p.ne11*p.ne10; - const uint i11 = (idx - i13_offset - 
i12_offset) / p.ne10; + const uint i11 = fastdiv(idx - i13_offset - i12_offset, p.ne1_0mp, p.ne1_0L); const uint i10 = idx - i13_offset - i12_offset - i11*p.ne10; return i13*p.nb13 + i12*p.nb12 + i11*p.nb11 + i10*p.nb10; } diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/im2col.comp b/ggml/src/ggml-vulkan/vulkan-shaders/im2col.comp index 4d48610a3adcb..966fedf8fa7a0 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/im2col.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/im2col.comp @@ -1,6 +1,11 @@ #version 450 #extension GL_EXT_shader_16bit_storage : require +#extension GL_EXT_spirv_intrinsics: enable + +#if RTE16 +spirv_execution_mode(capabilities = [4467], 4462, 16); // RoundingModeRTE, 16 bits +#endif layout (push_constant) uniform parameter { diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_split_k_reduce.comp b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_split_k_reduce.comp index 825b91031f569..4c64fd47af718 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_split_k_reduce.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_split_k_reduce.comp @@ -5,7 +5,9 @@ layout(local_size_x = 256, local_size_y = 1, local_size_z = 1) in; layout (binding = 0) readonly buffer A {float data_a[];}; +layout (binding = 0) readonly buffer A4 {vec4 data_a4[];}; layout (binding = 1) writeonly buffer D {float data_d[];}; +layout (binding = 1) writeonly buffer D4 {vec4 data_d4[];}; layout (push_constant) uniform parameter { uint ne; @@ -13,17 +15,34 @@ layout (push_constant) uniform parameter { } p; void main() { - const uint idx = gl_GlobalInvocationID.x; + // Each invocation handles four consecutive components + const uint idx = gl_GlobalInvocationID.x * 4; if (idx >= p.ne) { return; } - float result = 0.0f; + // Check if all four components are in bounds and aligned, + // then use vector loads + if (idx + 3 < p.ne && (p.ne % 4) == 0) { + vec4 result = vec4(0.0f); - [[unroll]] for (uint i = 0; i < p.k_num; i++) { - result += data_a[i * p.ne + idx]; - } + [[unroll]] for (uint i = 0; i < p.k_num; i++) { + result += data_a4[(i * p.ne + idx) / 4]; + } + + data_d4[idx / 4] = result; + } else { + [[unroll]] for (uint j = 0; j < 4; ++j) { + if (idx + j < p.ne) { + float result = 0.0f; - data_d[idx] = result; + [[unroll]] for (uint i = 0; i < p.k_num; i++) { + result += data_a[i * p.ne + idx + j]; + } + + data_d[idx + j] = result; + } + } + } } diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_base.comp b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_base.comp index 2ec1af5c75542..3894fca8260a5 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_base.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_base.comp @@ -2,8 +2,6 @@ #extension GL_EXT_shader_16bit_storage : require #extension GL_EXT_shader_8bit_storage : require -#define K_QUANTS_PER_ITERATION 2 - #ifdef MUL_MAT_ID #define EXPERT_COUNT 8 #endif diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q2_k.comp b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q2_k.comp index fcf02210ea6c7..1a5350d99ea14 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q2_k.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q2_k.comp @@ -3,9 +3,11 @@ #include "mul_mat_vec_base.comp" -layout(local_size_x = 32, local_size_y = 1, local_size_z = 1) in; +layout(local_size_x_id = 0, local_size_y = 1, local_size_z = 1) in; -shared FLOAT_TYPE tmp[32]; +layout (constant_id = 0) const uint BLOCK_SIZE = 32; + +shared FLOAT_TYPE tmp[BLOCK_SIZE]; void main() { const uint row = gl_WorkGroupID.x + gl_NumWorkGroups.x * 
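The fastdiv() helper above turns integer division by a shader-invariant divisor into a multiply-high, an add and a shift, using a magic multiplier mp and shift L that the host precomputes (the "see init_fastdiv_values in ggml-vulkan.cpp" comment). The C++ sketch below reproduces both sides and checks them against plain division for the modest index ranges these shaders deal with; the exact host-side precomputation in the patch may differ in detail, so treat this as an illustration of the scheme rather than a copy of it.

#include <cassert>
#include <cstdint>
#include <cstdio>

// Precompute (mp, L) for divisor d so that n / d == (mulhi(n, mp) + n) >> L.
static void init_fastdiv_values(uint32_t d, uint32_t &mp, uint32_t &L) {
    L = 0;                                               // L = ceil(log2(d))
    while (L < 32 && (uint32_t(1) << L) < d) L++;
    mp = uint32_t(((uint64_t(1) << 32) * ((uint64_t(1) << L) - d)) / d + 1);
}

// Mirrors the GLSL fastdiv(): umulExtended's high word, plus n, shifted right by L.
static uint32_t fastdiv(uint32_t n, uint32_t mp, uint32_t L) {
    const uint32_t msbs = uint32_t((uint64_t(n) * mp) >> 32);
    return (msbs + n) >> L;
}

int main() {
    for (uint32_t d : {3u, 7u, 96u, 4096u}) {
        uint32_t mp, L;
        init_fastdiv_values(d, mp, L);
        for (uint32_t n = 0; n < 1000000; n += 17) {
            assert(fastdiv(n, mp, L) == n / d);          // holds for the index ranges used here
        }
    }
    std::puts("fastdiv matches integer division");
}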
gl_WorkGroupID.z; @@ -20,22 +22,25 @@ void main() { const uint num_blocks_per_row = p.ncols / QUANT_K; const uint ib0 = a_offset / QUANT_K + row*num_blocks_per_row; - const uint tid = gl_LocalInvocationID.x/K_QUANTS_PER_ITERATION; // 0...31 or 0...16 - const uint ix = gl_LocalInvocationID.x%K_QUANTS_PER_ITERATION; // 0 or 0, 1 + // 16 threads are used to process each block + const uint it_size = gl_WorkGroupSize.x/16; + const uint tid = gl_LocalInvocationID.x; + const uint itid = tid%16; // 0...16 + const uint ix = tid/16; - const uint step = 16/K_QUANTS_PER_ITERATION; // 16 or 8 + const uint step = 8; - const uint v_im = tid/step; // 0 or 1. 0 computes 0..., 1 computes 128... - const uint v_in = tid - step*v_im; // 0...15 or 0...7 + const uint v_im = itid/step; // 0 or 1. 0 computes 0..., 1 computes 128... + const uint v_in = itid - step*v_im; // 0...15 or 0...7 - const uint l0 = K_QUANTS_PER_ITERATION*v_in; // 0...15 + const uint l0 = 2*v_in; // 0...15 const uint q_offset = 32*v_im + l0; const uint s_offset = 8*v_im; const uint y_offset = 128*v_im + l0; FLOAT_TYPE temp = FLOAT_TYPE(0.0); // partial sum for thread in warp - [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += K_QUANTS_PER_ITERATION) { + [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += it_size) { const uint y_idx = i * QUANT_K + y_offset; f16vec2 d = data_a[ib0 + i].d; @@ -71,7 +76,7 @@ void main() { FLOAT_TYPE sum1 = FLOAT_TYPE(0.0); FLOAT_TYPE sum2 = FLOAT_TYPE(0.0); - [[unroll]] for (int l = 0; l < K_QUANTS_PER_ITERATION; ++l) { + [[unroll]] for (int l = 0; l < 2; ++l) { sum1 = fma(FLOAT_TYPE(b0[l]), FLOAT_TYPE(s0_lo4[0]) * FLOAT_TYPE((qs0[l] >> 0) & 3), fma(FLOAT_TYPE(b16[l]), FLOAT_TYPE(s0_lo4[1]) * FLOAT_TYPE((qs16[l] >> 0) & 3), fma(FLOAT_TYPE(b32[l]), FLOAT_TYPE(s0_lo4[2]) * FLOAT_TYPE((qs0[l] >> 2) & 3), @@ -96,7 +101,7 @@ void main() { // sum up partial sums and write back result barrier(); - [[unroll]] for (uint s = 16; s > 0; s >>= 1) { + [[unroll]] for (uint s = gl_WorkGroupSize.x/2; s > 0; s >>= 1) { if (tid < s) { tmp[tid] += tmp[tid + s]; } diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q3_k.comp b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q3_k.comp index 723fadde0d52b..b19c3811136bb 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q3_k.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q3_k.comp @@ -3,9 +3,11 @@ #include "mul_mat_vec_base.comp" -layout(local_size_x = 32, local_size_y = 1, local_size_z = 1) in; +layout(local_size_x_id = 0, local_size_y = 1, local_size_z = 1) in; -shared FLOAT_TYPE tmp[32]; +layout (constant_id = 0) const uint BLOCK_SIZE = 32; + +shared FLOAT_TYPE tmp[BLOCK_SIZE]; void main() { const uint row = gl_WorkGroupID.x + gl_NumWorkGroups.x * gl_WorkGroupID.z; @@ -20,17 +22,20 @@ void main() { const uint num_blocks_per_row = p.ncols / QUANT_K; const uint ib0 = a_offset / QUANT_K + row*num_blocks_per_row; - const uint tid = gl_LocalInvocationID.x/K_QUANTS_PER_ITERATION; // 0...31 or 0...16 - const uint ix = gl_LocalInvocationID.x%K_QUANTS_PER_ITERATION; // 0 or 0, 1 + // 16 threads are used to process each block + const uint it_size = gl_WorkGroupSize.x/16; + const uint tid = gl_LocalInvocationID.x; + const uint itid = tid%16; // 0...16 + const uint ix = tid/16; - const uint step = 16/K_QUANTS_PER_ITERATION; // 16 or 8 + const uint step = 8; - const uint v_im = tid/step; // 0 or 1. 0 computes 0..., 1 computes 128... - const uint v_in = tid - step*v_im; // 0...15 or 0...7 + const uint v_im = itid/step; // 0 or 1. 
0 computes 0..., 1 computes 128... + const uint v_in = itid - step*v_im; // 0...15 or 0...7 const uint8_t m = uint8_t(1 << (4 * v_im)); - const uint l0 = K_QUANTS_PER_ITERATION*v_in; // 0...15 + const uint l0 = 2*v_in; // 0...15 const uint q_offset = 32*v_im + l0; const uint y_offset = 128*v_im + l0; @@ -38,7 +43,7 @@ void main() { const uint s_shift = 4 * v_im; - [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += K_QUANTS_PER_ITERATION) { + [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += it_size) { const uint y_idx = i * QUANT_K + y_offset; const FLOAT_TYPE d = FLOAT_TYPE(data_a[ib0 + i].d); @@ -66,7 +71,7 @@ void main() { u8vec2 s10 = unpack8(s10_16); FLOAT_TYPE sum = FLOAT_TYPE(0.0); - for (int l = 0; l < K_QUANTS_PER_ITERATION; ++l) { + [[unroll]] for (int l = 0; l < 2; ++l) { sum = fma(FLOAT_TYPE(b0[l]) * FLOAT_TYPE(int8_t(((s0[0] >> s_shift) & 0xF) | ((s8[0] >> (s_shift + 0) & 0x3) << 4)) - 32), FLOAT_TYPE(((data_a[ib0 + i].qs[q_offset + l ] ) & 3) - (((data_a[ib0 + i].hmask[l0 + l ] & (m << 0)) != 0) ? 0 : 4)), fma(FLOAT_TYPE(b32[l]) * FLOAT_TYPE(int8_t(((s2[0] >> s_shift) & 0xF) | ((s10[0] >> (s_shift + 0) & 0x3) << 4)) - 32), FLOAT_TYPE(((data_a[ib0 + i].qs[q_offset + l ] >> 2) & 3) - (((data_a[ib0 + i].hmask[l0 + l ] & (m << 1)) != 0) ? 0 : 4)), fma(FLOAT_TYPE(b64[l]) * FLOAT_TYPE(int8_t(((s4[0] >> s_shift) & 0xF) | ((s8[0] >> (s_shift + 2) & 0x3) << 4)) - 32), FLOAT_TYPE(((data_a[ib0 + i].qs[q_offset + l ] >> 4) & 3) - (((data_a[ib0 + i].hmask[l0 + l ] & (m << 2)) != 0) ? 0 : 4)), @@ -83,7 +88,7 @@ void main() { // sum up partial sums and write back result barrier(); - [[unroll]] for (uint s = 16; s > 0; s >>= 1) { + [[unroll]] for (uint s = gl_WorkGroupSize.x/2; s > 0; s >>= 1) { if (tid < s) { tmp[tid] += tmp[tid + s]; } diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q4_k.comp b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q4_k.comp index 5846f2e86f3e3..b86d28589c64c 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q4_k.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q4_k.comp @@ -4,11 +4,12 @@ #include "mul_mat_vec_base.comp" -layout(local_size_x = 32, local_size_y = 1, local_size_z = 1) in; +layout(local_size_x_id = 0, local_size_y = 1, local_size_z = 1) in; -shared FLOAT_TYPE tmp[32]; +layout (constant_id = 0) const uint BLOCK_SIZE = 32; + +shared FLOAT_TYPE tmp[BLOCK_SIZE]; -// This shader assumes K_QUANTS_PER_ITERATION == 2 for alignment of loads void main() { const uint row = gl_WorkGroupID.x + gl_NumWorkGroups.x * gl_WorkGroupID.z; @@ -22,14 +23,17 @@ void main() { const uint num_blocks_per_row = p.ncols / QUANT_K; const uint ib0 = a_offset / QUANT_K + row*num_blocks_per_row; - const uint tid = gl_LocalInvocationID.x/K_QUANTS_PER_ITERATION; // 0...31 or 0...16 - const uint ix = gl_LocalInvocationID.x%K_QUANTS_PER_ITERATION; // 0 or 0, 1 + // 16 threads are used to process each block + const uint it_size = gl_WorkGroupSize.x/16; + const uint tid = gl_LocalInvocationID.x; + const uint itid = tid%16; // 0...16 + const uint ix = tid/16; - const uint step = 8/K_QUANTS_PER_ITERATION; // 8 or 4 + const uint step = 4; - const uint il = tid/step; // 0...3 - const uint ir = tid - step*il; // 0...7 or 0...3 - const uint n = 2 * K_QUANTS_PER_ITERATION; // 2 or 4 + const uint il = itid/step; // 0...3 + const uint ir = itid - step*il; // 0...7 or 0...3 + const uint n = 4; const uint v_im = il / 2; // 0 or 1. 
0 computes 0,32 + 128,160, 1 computes 64,96 + 192,224 const uint v_in = il % 2; @@ -40,7 +44,7 @@ void main() { FLOAT_TYPE temp = FLOAT_TYPE(0.0); // partial sum for thread in warp - [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += K_QUANTS_PER_ITERATION) { + [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += it_size) { const uint y1_idx = i * QUANT_K + y_offset; const uint y2_idx = y1_idx + 128; @@ -115,7 +119,7 @@ void main() { // sum up partial sums and write back result barrier(); - [[unroll]] for (uint s = 16; s > 0; s >>= 1) { + [[unroll]] for (uint s = gl_WorkGroupSize.x/2; s > 0; s >>= 1) { if (tid < s) { tmp[tid] += tmp[tid + s]; } diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q5_k.comp b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q5_k.comp index b455cbd31ec91..fd243cf9161fc 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q5_k.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q5_k.comp @@ -4,9 +4,11 @@ #include "mul_mat_vec_base.comp" -layout(local_size_x = 32, local_size_y = 1, local_size_z = 1) in; +layout(local_size_x_id = 0, local_size_y = 1, local_size_z = 1) in; -shared FLOAT_TYPE tmp[32]; +layout (constant_id = 0) const uint BLOCK_SIZE = 32; + +shared FLOAT_TYPE tmp[BLOCK_SIZE]; void main() { const uint row = gl_WorkGroupID.x + gl_NumWorkGroups.x * gl_WorkGroupID.z; @@ -21,11 +23,14 @@ void main() { const uint num_blocks_per_row = p.ncols / QUANT_K; const uint ib0 = a_offset / QUANT_K + row*num_blocks_per_row; - const uint tid = gl_LocalInvocationID.x/2; // 0...31 or 0...16 - const uint ix = gl_LocalInvocationID.x%2; // 0 or 0, 1 + // 16 threads are used to process each block + const uint it_size = gl_WorkGroupSize.x/16; + const uint tid = gl_LocalInvocationID.x; + const uint itid = tid%16; // 0...16 + const uint ix = tid/16; - const uint il = tid/4; // 0...3 - const uint ir = tid - 4*il; // 0...7 or 0...3 + const uint il = itid/4; // 0...3 + const uint ir = itid - 4*il; // 0...7 or 0...3 const uint v_im = il / 2; // 0 or 1. 
0 computes 0,32 + 128,160, 1 computes 64,96 + 192,224 const uint v_in = il % 2; @@ -36,7 +41,7 @@ void main() { FLOAT_TYPE temp = FLOAT_TYPE(0.0); // partial sum for thread in warp - [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += 2) { + [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += it_size) { const uint y1_idx = i * QUANT_K + y_offset; const uint y2_idx = y1_idx + 128; @@ -143,7 +148,7 @@ void main() { // sum up partial sums and write back result barrier(); - [[unroll]] for (uint s = 16; s > 0; s >>= 1) { + [[unroll]] for (uint s = gl_WorkGroupSize.x/2; s > 0; s >>= 1) { if (tid < s) { tmp[tid] += tmp[tid + s]; } diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q6_k.comp b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q6_k.comp index 0b392d68d04d0..760aff85499f4 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q6_k.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q6_k.comp @@ -4,9 +4,11 @@ #include "mul_mat_vec_base.comp" -layout(local_size_x = 32, local_size_y = 1, local_size_z = 1) in; +layout(local_size_x_id = 0, local_size_y = 1, local_size_z = 1) in; -shared FLOAT_TYPE tmp[32]; +layout (constant_id = 0) const uint BLOCK_SIZE = 32; + +shared FLOAT_TYPE tmp[BLOCK_SIZE]; void main() { const uint row = gl_WorkGroupID.x + gl_NumWorkGroups.x * gl_WorkGroupID.z; @@ -21,21 +23,19 @@ void main() { const uint num_blocks_per_row = p.ncols / QUANT_K; const uint ib0 = a_offset / QUANT_K + row*num_blocks_per_row; - const uint tid = gl_LocalInvocationID.x/K_QUANTS_PER_ITERATION; // 0...31 or 0...16 - const uint ix = gl_LocalInvocationID.x%K_QUANTS_PER_ITERATION; // 0 or 0, 1 + // 16 threads are used to process each block + const uint it_size = gl_WorkGroupSize.x/16; + const uint tid = gl_LocalInvocationID.x; + const uint itid = tid%16; // 0...16 + const uint ix = tid/16; - const uint step = 16/K_QUANTS_PER_ITERATION; // 16 or 8 + const uint step = 8; - const uint v_im = tid/step; // 0 or 1. 0 computes 0..., 1 computes 128... - const uint v_in = tid - step*v_im; // 0...15 or 0...7 + const uint v_im = itid/step; // 0 or 1. 0 computes 0..., 1 computes 128... 
+ const uint v_in = itid - step*v_im; // 0...15 or 0...7 -#if K_QUANTS_PER_ITERATION == 1 - const uint l0 = v_in; // 0...15 - const uint is = 0; -#else const uint l0 = 4 * v_in; // 0, 4, 8, ..., 28 const uint is = v_in / 4; -#endif const uint ql_offset = 64*v_im + l0; const uint qh_offset = 32*v_im + l0; @@ -44,7 +44,7 @@ void main() { FLOAT_TYPE temp = FLOAT_TYPE(0.0); // partial sum for thread in warp - [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += K_QUANTS_PER_ITERATION) { + [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += it_size) { const uint y_idx = i * QUANT_K + y_offset; const FLOAT_TYPE d = FLOAT_TYPE(data_a[ib0 + i].d); @@ -95,10 +95,10 @@ void main() { } tmp[gl_LocalInvocationID.x] = temp; - // sum up partial sums and write back result + barrier(); - [[unroll]] for (uint s = 16; s > 0; s >>= 1) { + [[unroll]] for (uint s = gl_WorkGroupSize.x/2; s > 0; s >>= 1) { if (tid < s) { tmp[tid] += tmp[tid + s]; } diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm.comp b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm.comp index 2ff5c430519bc..48122cbef906e 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm.comp @@ -7,6 +7,12 @@ #extension GL_EXT_shader_explicit_arithmetic_types_float16 : require #endif +#ifdef COOPMAT +#extension GL_KHR_cooperative_matrix : enable +#extension GL_KHR_memory_scope_semantics : enable +#extension GL_KHR_shader_subgroup_basic : enable +#endif + #ifdef MUL_MAT_ID #extension GL_EXT_shader_explicit_arithmetic_types_int16 : require #endif @@ -57,6 +63,7 @@ layout (push_constant) uniform parameter #endif } p; +layout (constant_id = 0) const uint BLOCK_SIZE = 64; layout (constant_id = 1) const uint BM = 64; layout (constant_id = 2) const uint BN = 64; layout (constant_id = 3) const uint BK = 16; // Assumed to be 32 if working with a quant @@ -65,13 +72,26 @@ layout (constant_id = 5) const uint WN = 32; layout (constant_id = 6) const uint WMITER = 2; layout (constant_id = 7) const uint TM = 4; layout (constant_id = 8) const uint TN = 2; -layout (constant_id = 9) const uint WARP = 32; +layout (constant_id = 9) const uint TK = 1; // Only needed for coopmat +layout (constant_id = 10) const uint WARP = 32; + +#ifdef COOPMAT +#define SHMEM_STRIDE (BK + 8) +#else +#define SHMEM_STRIDE (BK + 1) +#endif -shared FLOAT_TYPE buf_a[BM * (BK+1)]; -shared FLOAT_TYPE buf_b[BN * (BK+1)]; +shared FLOAT_TYPE buf_a[BM * SHMEM_STRIDE]; +shared FLOAT_TYPE buf_b[BN * SHMEM_STRIDE]; #ifdef MUL_MAT_ID shared u16vec2 row_ids[3072]; +#endif // MUL_MAT_ID + +#define NUM_WARPS (BLOCK_SIZE / WARP) + +#ifdef COOPMAT +shared ACC_TYPE coopmat_stage[TM * TN * NUM_WARPS]; #endif void main() { @@ -98,17 +118,32 @@ void main() { const uint ik = gl_WorkGroupID.x / blocks_m; const uint ic = gl_WorkGroupID.y; - const uint warp_i = gl_LocalInvocationID.x / WARP; - const uint warp_r = warp_i % (BM / WM); - const uint warp_c = warp_i / (BM / WM); - const uint WNITER = (WM * WN) / (WARP * TM * TN * WMITER); const uint WSUBM = WM / WMITER; const uint WSUBN = WN / WNITER; +#ifdef COOPMAT + const uint warp_i = gl_SubgroupID; + + const uint tiw = gl_SubgroupInvocationID; + + const uint cms_per_row = WM / TM; + const uint cms_per_col = WN / TN; + + const uint storestride = WARP / TM; + const uint store_r = tiw % TM; + const uint store_c = tiw / TM; +#else + const uint warp_i = gl_LocalInvocationID.x / WARP; + const uint tiw = gl_LocalInvocationID.x % WARP; + const uint tiwr = tiw % (WSUBM / TM); const uint tiwc = tiw / (WSUBM / TM); 
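All of the mul_mat_vec_* shaders above now take their workgroup size from spec constant 0, dedicate 16 threads to each block, and finish with the same shared-memory tree reduction, folding the tmp[] array in log2(BLOCK_SIZE) passes. The small C++ sketch below simulates that reduction on the host for illustration (the GPU version synchronizes with barrier() between passes); BLOCK_SIZE is assumed to be a power of two, as it is for the sizes the backend selects.

#include <cstdio>
#include <vector>

int main() {
    const unsigned BLOCK_SIZE = 32;                       // matches the default spec constant
    std::vector<float> tmp(BLOCK_SIZE);
    for (unsigned tid = 0; tid < BLOCK_SIZE; ++tid) {
        tmp[tid] = float(tid);                            // stand-in for each thread's partial sum
    }
    for (unsigned s = BLOCK_SIZE / 2; s > 0; s >>= 1) {   // same loop shape as the shaders
        for (unsigned tid = 0; tid < s; ++tid) {
            tmp[tid] += tmp[tid + s];
        }
    }
    std::printf("reduced sum = %f (expected %f)\n",
                tmp[0], float(BLOCK_SIZE * (BLOCK_SIZE - 1) / 2));
}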
+#endif + + const uint warp_r = warp_i % (BM / WM); + const uint warp_c = warp_i / (BM / WM); const uint loadr_a = gl_LocalInvocationID.x % (BK / LOAD_VEC_A); const uint loadc_a = gl_LocalInvocationID.x / (BK / LOAD_VEC_A); @@ -156,21 +191,31 @@ void main() { uint pos_b = (batch_idx * p.batch_stride_b + ic * BN * p.stride_b + start_k) / LOAD_VEC_B; #endif - float sums[WMITER * TM * WNITER * TN]; +#ifdef COOPMAT + coopmat cache_a; + coopmat cache_b; + coopmat sums[cms_per_row * cms_per_col]; + + [[unroll]] for (uint i = 0; i < cms_per_row * cms_per_col; i++) { + sums[i] = coopmat(0.0f); + } +#else + ACC_TYPE sums[WMITER * TM * WNITER * TN]; FLOAT_TYPE cache_a[WMITER * TM]; FLOAT_TYPE cache_b[WNITER * TN]; [[unroll]] for (uint i = 0; i < WMITER*TM*WNITER*TN; i++) { - sums[i] = 0.0f; + sums[i] = ACC_TYPE(0.0f); } +#endif - [[unroll]] for (uint block = start_k; block < end_k; block += BK) { + for (uint block = start_k; block < end_k; block += BK) { [[unroll]] for (uint l = 0; l < BM; l += loadstride_a) { #if defined(DATA_A_F32) || defined(DATA_A_F16) #if LOAD_VEC_A == 8 const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a * LOAD_VEC_A; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a * LOAD_VEC_A; buf_a[buf_idx ] = FLOAT_TYPE(data_a[idx][0].x); buf_a[buf_idx + 1] = FLOAT_TYPE(data_a[idx][0].y); buf_a[buf_idx + 2] = FLOAT_TYPE(data_a[idx][0].z); @@ -181,21 +226,21 @@ void main() { buf_a[buf_idx + 7] = FLOAT_TYPE(data_a[idx][1].w); #elif LOAD_VEC_A == 4 const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a * LOAD_VEC_A; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a * LOAD_VEC_A; buf_a[buf_idx ] = FLOAT_TYPE(data_a[idx].x); buf_a[buf_idx + 1] = FLOAT_TYPE(data_a[idx].y); buf_a[buf_idx + 2] = FLOAT_TYPE(data_a[idx].z); buf_a[buf_idx + 3] = FLOAT_TYPE(data_a[idx].w); #else if (ir * BM + loadc_a + l < p.M && block + loadr_a < end_k) { - buf_a[(loadc_a + l) * (BK+1) + loadr_a] = FLOAT_TYPE(data_a[pos_a + (loadc_a + l) * p.stride_a + loadr_a]); + buf_a[(loadc_a + l) * SHMEM_STRIDE + loadr_a] = FLOAT_TYPE(data_a[pos_a + (loadc_a + l) * p.stride_a + loadr_a]); } else { - buf_a[(loadc_a + l) * (BK+1) + loadr_a] = FLOAT_TYPE(0.0f); + buf_a[(loadc_a + l) * SHMEM_STRIDE + loadr_a] = FLOAT_TYPE(0.0f); } #endif #elif defined(DATA_A_Q4_0) const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a; const uint ib = idx / 16; const uint iqs = idx & 0xF; @@ -208,7 +253,7 @@ void main() { buf_a[buf_idx + 16] = FLOAT_TYPE(v.y); #elif defined(DATA_A_Q4_1) const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a; const uint ib = idx / 16; const uint iqs = idx & 0xF; @@ -222,7 +267,7 @@ void main() { buf_a[buf_idx + 16] = FLOAT_TYPE(v.y); #elif defined(DATA_A_Q5_0) const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a; const uint ib = idx / 16; const uint iqs = idx & 0xF; @@ -237,7 +282,7 @@ void main() { buf_a[buf_idx + 16] = FLOAT_TYPE(v.y); #elif defined(DATA_A_Q5_1) const uint idx = pos_a + (loadc_a + l) * p.stride_a / 
LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a; const uint ib = idx / 16; const uint iqs = idx & 0xF; @@ -253,7 +298,7 @@ void main() { buf_a[buf_idx + 16] = FLOAT_TYPE(v.y); #elif defined(DATA_A_Q8_0) const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a * LOAD_VEC_A; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a * LOAD_VEC_A; const uint ib = idx / 16; const uint iqs = (idx & 0xF) * 2; @@ -265,7 +310,7 @@ void main() { buf_a[buf_idx + 1] = FLOAT_TYPE(v.y); #elif defined(DATA_A_Q2_K) const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a * LOAD_VEC_A; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a * LOAD_VEC_A; const uint ib = idx / 128; // 2 values per idx const uint iqs = idx % 128; // 0..127 @@ -284,7 +329,7 @@ void main() { buf_a[buf_idx + 1] = FLOAT_TYPE(v.y); #elif defined(DATA_A_Q3_K) const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a * LOAD_VEC_A; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a * LOAD_VEC_A; const uint ib = idx / 128; // 2 values per idx const uint iqs = idx % 128; // 0..127 @@ -308,7 +353,7 @@ void main() { buf_a[buf_idx + 1] = FLOAT_TYPE(dl * float(int8_t((data_a[ib].qs[qsi + 1] >> qsshift) & 3) - (((data_a[ib].hmask[hmi + 1] & m) != 0) ? 0 : 4))); #elif defined(DATA_A_Q4_K) const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a * LOAD_VEC_A; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a * LOAD_VEC_A; const uint ib = idx / 128; // 2 values per idx const uint iqs = idx % 128; // 0..127 @@ -320,15 +365,20 @@ void main() { const vec2 loadd = vec2(data_a[ib].d); - uint8_t sc; - uint8_t mbyte; - if (is < 4) { - sc = uint8_t(data_a[ib].scales[is ] & 63); - mbyte = uint8_t(data_a[ib].scales[is + 4] & 63); - } else { - sc = uint8_t((data_a[ib].scales[is + 4] & 0xF) | ((data_a[ib].scales[is - 4] >> 6) << 4)); - mbyte = uint8_t((data_a[ib].scales[is + 4] >> 4) | ((data_a[ib].scales[is ] >> 6) << 4)); - } + const uint scidx0 = (is < 4) ? is : (is + 4); + const uint scidx1 = (is < 4) ? is : (is - 4); + const uint scidxmask1 = (is < 4) ? 0x30 : 0xC0; + const uint scidxshift1 = (is < 4) ? 0 : 2; + const uint mbidx0 = is + 4; + const uint mbidx1 = (is < 4) ? is + 4 : is; + const uint mbidxmask0 = (is < 4) ? 0xF : 0xF0; + const uint mbidxshift0 = (is < 4) ? 0 : 4; + const uint mbidxmask1 = (is < 4) ? 0x30 : 0xC0; + const uint mbidxshift1 = (is < 4) ? 
0 : 2; + + const uint8_t sc = uint8_t((data_a[ib].scales[scidx0] & 0xF) | ((data_a[ib].scales[scidx1] & scidxmask1) >> scidxshift1)); + const uint8_t mbyte = uint8_t((data_a[ib].scales[mbidx0] & mbidxmask0) >> mbidxshift0 | ((data_a[ib].scales[mbidx1] & mbidxmask1) >> mbidxshift1)); + const float d = loadd.x * sc; const float m = -loadd.y * mbyte; @@ -336,7 +386,7 @@ void main() { buf_a[buf_idx + 1] = FLOAT_TYPE(fma(d, float((data_a[ib].qs[qsi + 1] >> (b * 4)) & 0xF), m)); #elif defined(DATA_A_Q5_K) const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a * LOAD_VEC_A; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a * LOAD_VEC_A; const uint ib = idx / 128; // 2 values per idx const uint iqs = idx % 128; // 0..127 @@ -351,15 +401,20 @@ void main() { const vec2 loadd = vec2(data_a[ib].d); - uint8_t sc; - uint8_t mbyte; - if (is < 4) { - sc = uint8_t(data_a[ib].scales[is ] & 63); - mbyte = uint8_t(data_a[ib].scales[is + 4] & 63); - } else { - sc = uint8_t((data_a[ib].scales[is + 4] & 0xF) | ((data_a[ib].scales[is - 4] >> 6) << 4)); - mbyte = uint8_t((data_a[ib].scales[is + 4] >> 4) | ((data_a[ib].scales[is ] >> 6) << 4)); - } + const uint scidx0 = (is < 4) ? is : (is + 4); + const uint scidx1 = (is < 4) ? is : (is - 4); + const uint scidxmask1 = (is < 4) ? 0x30 : 0xC0; + const uint scidxshift1 = (is < 4) ? 0 : 2; + const uint mbidx0 = is + 4; + const uint mbidx1 = (is < 4) ? is + 4 : is; + const uint mbidxmask0 = (is < 4) ? 0xF : 0xF0; + const uint mbidxshift0 = (is < 4) ? 0 : 4; + const uint mbidxmask1 = (is < 4) ? 0x30 : 0xC0; + const uint mbidxshift1 = (is < 4) ? 0 : 2; + + const uint8_t sc = uint8_t((data_a[ib].scales[scidx0] & 0xF) | ((data_a[ib].scales[scidx1] & scidxmask1) >> scidxshift1)); + const uint8_t mbyte = uint8_t(((data_a[ib].scales[mbidx0] & mbidxmask0) >> mbidxshift0) | ((data_a[ib].scales[mbidx1] & mbidxmask1) >> mbidxshift1)); + const float d = loadd.x * sc; const float m = -loadd.y * mbyte; @@ -367,7 +422,7 @@ void main() { buf_a[buf_idx + 1] = FLOAT_TYPE(fma(d, float((data_a[ib].qs[qsi + 1] >> (b * 4)) & 0xF) + float((data_a[ib].qh[qhi + 1] & hm) != 0 ? 
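The rewritten Q4_K/Q5_K scale handling above replaces the old if (is < 4) branch with indices, masks and shifts chosen by ternaries, which maps better onto the GPU. The standalone C++ check below (illustration only, using the same expressions as the diff) confirms that the branch-free form decodes exactly the same 6-bit scale and min from the 12-byte K-quant scales array as the branchy form it replaces.

#include <cassert>
#include <cstdint>
#include <cstdio>
#include <random>

int main() {
    std::mt19937 rng(42);
    for (int iter = 0; iter < 1000; ++iter) {
        uint8_t scales[12];
        for (auto &b : scales) b = uint8_t(rng());
        for (uint32_t is = 0; is < 8; ++is) {
            // old, branchy form
            uint8_t sc_old, mb_old;
            if (is < 4) {
                sc_old = scales[is] & 63;
                mb_old = scales[is + 4] & 63;
            } else {
                sc_old = (scales[is + 4] & 0xF) | ((scales[is - 4] >> 6) << 4);
                mb_old = (scales[is + 4] >> 4) | ((scales[is] >> 6) << 4);
            }
            // new, branch-free form (mirrors the shader)
            const uint32_t scidx0      = (is < 4) ? is : (is + 4);
            const uint32_t scidx1      = (is < 4) ? is : (is - 4);
            const uint32_t scidxmask1  = (is < 4) ? 0x30 : 0xC0;
            const uint32_t scidxshift1 = (is < 4) ? 0 : 2;
            const uint32_t mbidx0      = is + 4;
            const uint32_t mbidx1      = (is < 4) ? is + 4 : is;
            const uint32_t mbidxmask0  = (is < 4) ? 0xF : 0xF0;
            const uint32_t mbidxshift0 = (is < 4) ? 0 : 4;
            const uint32_t mbidxmask1  = (is < 4) ? 0x30 : 0xC0;
            const uint32_t mbidxshift1 = (is < 4) ? 0 : 2;
            const uint8_t sc_new = uint8_t((scales[scidx0] & 0xF) | ((scales[scidx1] & scidxmask1) >> scidxshift1));
            const uint8_t mb_new = uint8_t(((scales[mbidx0] & mbidxmask0) >> mbidxshift0) | ((scales[mbidx1] & mbidxmask1) >> mbidxshift1));
            assert(sc_new == sc_old && mb_new == mb_old);
        }
    }
    std::puts("branch-free K-quant scale unpacking matches the branchy version");
}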
16 : 0), m)); #elif defined(DATA_A_Q6_K) const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a * LOAD_VEC_A; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a * LOAD_VEC_A; const uint ib = idx / 128; // 2 values per idx const uint iqs = idx % 128; // 0..127 @@ -386,7 +441,7 @@ void main() { buf_a[buf_idx + 1] = FLOAT_TYPE(dscale * float(int8_t(((data_a[ib].ql[qsi + 1] >> (b * 4)) & 0xF) | (((data_a[ib].qh[qhi + 1] >> qhshift) & 3) << 4)) - 32)); #elif defined(DATA_A_IQ4_NL) const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; - const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a; + const uint buf_idx = (loadc_a + l) * SHMEM_STRIDE + loadr_a; const uint ib = idx / 16; const uint iqs = idx & 0xF; @@ -407,7 +462,7 @@ void main() { #else const uint idx = pos_b + (loadc_b + l) * p.stride_b / LOAD_VEC_B + loadr_b; #endif - const uint buf_idx = (loadc_b + l) * (BK+1) + loadr_b * LOAD_VEC_B; + const uint buf_idx = (loadc_b + l) * SHMEM_STRIDE + loadr_b * LOAD_VEC_B; buf_b[buf_idx + 0] = FLOAT_TYPE(data_b[idx][0].x); buf_b[buf_idx + 1] = FLOAT_TYPE(data_b[idx][0].y); buf_b[buf_idx + 2] = FLOAT_TYPE(data_b[idx][0].z); @@ -423,24 +478,24 @@ void main() { #else const uint idx = pos_b + (loadc_b + l) * p.stride_b / LOAD_VEC_B + loadr_b; #endif - const uint buf_idx = (loadc_b + l) * (BK+1) + loadr_b * LOAD_VEC_B; + const uint buf_idx = (loadc_b + l) * SHMEM_STRIDE + loadr_b * LOAD_VEC_B; buf_b[buf_idx + 0] = FLOAT_TYPE(data_b[idx].x); buf_b[buf_idx + 1] = FLOAT_TYPE(data_b[idx].y); buf_b[buf_idx + 2] = FLOAT_TYPE(data_b[idx].z); buf_b[buf_idx + 3] = FLOAT_TYPE(data_b[idx].w); #elif !MUL_MAT_ID if (ic * BN + loadc_b + l < p.N && block + loadr_b < end_k) { - buf_b[(loadc_b + l) * (BK+1) + loadr_b] = FLOAT_TYPE(data_b[pos_b + (loadc_b + l) * p.stride_b + loadr_b]); + buf_b[(loadc_b + l) * SHMEM_STRIDE + loadr_b] = FLOAT_TYPE(data_b[pos_b + (loadc_b + l) * p.stride_b + loadr_b]); } else { - buf_b[(loadc_b + l) * (BK+1) + loadr_b] = FLOAT_TYPE(0.0f); + buf_b[(loadc_b + l) * SHMEM_STRIDE + loadr_b] = FLOAT_TYPE(0.0f); } #else const uint row_i = ic * BN + loadc_b + l; if (row_i < _ne1) { const u16vec2 row_idx = row_ids[row_i]; - buf_b[(loadc_b + l) * (BK+1) + loadr_b] = FLOAT_TYPE(data_b[pos_b + row_idx.y * p.batch_stride_b + (row_idx.x % p.ne11) * p.stride_b + loadr_b]); + buf_b[(loadc_b + l) * SHMEM_STRIDE + loadr_b] = FLOAT_TYPE(data_b[pos_b + row_idx.y * p.batch_stride_b + (row_idx.x % p.ne11) * p.stride_b + loadr_b]); } else { - buf_b[(loadc_b + l) * (BK+1) + loadr_b] = FLOAT_TYPE(0.0f); + buf_b[(loadc_b + l) * SHMEM_STRIDE + loadr_b] = FLOAT_TYPE(0.0f); } #endif } @@ -450,16 +505,30 @@ void main() { pos_a += BK / LOAD_VEC_A; pos_b += BK / LOAD_VEC_B; - for (uint i = 0; i < BK; i++) { +#ifdef COOPMAT + [[unroll]] for (uint i = 0; i < BK; i += TK) { + [[unroll]] for (uint cm_row = 0; cm_row < cms_per_row; cm_row++) { + // Load from shared into cache + coopMatLoad(cache_a, buf_a, (warp_r * WM + cm_row * TM) * SHMEM_STRIDE + i, SHMEM_STRIDE, gl_CooperativeMatrixLayoutRowMajor); + + [[unroll]] for (uint cm_col = 0; cm_col < cms_per_col; cm_col++) { + coopMatLoad(cache_b, buf_b, (warp_c * WN + cm_col * TN) * SHMEM_STRIDE + i, SHMEM_STRIDE, gl_CooperativeMatrixLayoutColumnMajor); + + sums[cm_col * cms_per_row + cm_row] = coopMatMulAdd(cache_a, cache_b, sums[cm_col * cms_per_row + cm_row]); + } + } + } +#else + [[unroll]] for (uint i = 0; i < BK; i++) { // Load from shared into cache [[unroll]] for 
(uint wsir = 0; wsir < WMITER; wsir++) { [[unroll]] for (uint j = 0; j < TM; j++) { - cache_a[wsir * TM + j] = buf_a[(warp_r * WM + wsir * WSUBM + tiwr * TM + j) * (BK+1) + i]; + cache_a[wsir * TM + j] = buf_a[(warp_r * WM + wsir * WSUBM + tiwr * TM + j) * SHMEM_STRIDE + i]; } } [[unroll]] for (uint wsic = 0; wsic < WNITER; wsic++) { [[unroll]] for (uint j = 0; j < TN; j++) { - cache_b[wsic * TN + j] = buf_b[(warp_c * WN + wsic * WSUBN + tiwc * TN + j) * (BK+1) + i]; + cache_b[wsic * TN + j] = buf_b[(warp_c * WN + wsic * WSUBN + tiwc * TN + j) * SHMEM_STRIDE + i]; } } @@ -468,12 +537,13 @@ void main() { [[unroll]] for (uint cc = 0; cc < TN; cc++) { [[unroll]] for (uint cr = 0; cr < TM; cr++) { const uint sums_idx = (wsic * TN + cc) * (WMITER * TM) + wsir * TM + cr; - sums[sums_idx] = fma(float(cache_a[wsir * TM + cr]), float(cache_b[wsic * TN + cc]), sums[sums_idx]); + sums[sums_idx] = fma(ACC_TYPE(cache_a[wsir * TM + cr]), ACC_TYPE(cache_b[wsic * TN + cc]), sums[sums_idx]); } } } } } +#endif barrier(); } @@ -485,6 +555,54 @@ void main() { const uint offsets = batch_idx * p.batch_stride_d + ik * p.batch_stride_d * gl_NumWorkGroups.z; #endif +#ifdef COOPMAT +#ifdef MUL_MAT_ID + [[unroll]] for (uint cm_row = 0; cm_row < cms_per_row; cm_row++) { + [[unroll]] for (uint cm_col = 0; cm_col < cms_per_col; cm_col++) { + coopMatStore(sums[cm_col * cms_per_row + cm_row], coopmat_stage, warp_i * TM * TN, TM, gl_CooperativeMatrixLayoutColumnMajor); + + [[unroll]] for (uint col = 0; col < BN; col += storestride) { + const uint row_i = dc + cm_col * TN + col + store_c; + if (row_i >= _ne1) break; + + const u16vec2 row_idx = row_ids[row_i]; + + data_d[row_idx.y * p.batch_stride_d + row_idx.x * p.stride_d + dr + cm_row * TM + store_r] = D_TYPE(coopmat_stage[warp_i * TM * TN + (col + store_c) * TM + store_r]); + } + } + } +#else + const bool is_aligned = p.stride_d % 4 == 0; // Assumption: D_TYPE == float + + [[unroll]] for (uint cm_row = 0; cm_row < cms_per_row; cm_row++) { + [[unroll]] for (uint cm_col = 0; cm_col < cms_per_col; cm_col++) { + const bool is_in_bounds = dr + (cm_row + 1) * TM <= p.M && dc + (cm_col + 1) * TN <= p.N; + + if (is_aligned && is_in_bounds) { + // Full coopMat is within bounds and stride_d is aligned with 16B + coopmat cm_dtype = coopmat(sums[cm_col * cms_per_row + cm_row]); + coopMatStore(cm_dtype, data_d, offsets + (dc + cm_col * TN) * p.stride_d + dr + cm_row * TM, p.stride_d, gl_CooperativeMatrixLayoutColumnMajor); + } else if (is_in_bounds) { + // Full coopMat is within bounds, but stride_d is not aligned + coopMatStore(sums[cm_col * cms_per_row + cm_row], coopmat_stage, warp_i * TM * TN, TM, gl_CooperativeMatrixLayoutColumnMajor); + + [[unroll]] for (uint col = 0; col < TN; col += storestride) { + data_d[offsets + (dc + cm_col * TN + col + store_c) * p.stride_d + dr + cm_row * TM + store_r] = D_TYPE(coopmat_stage[warp_i * TM * TN + (col + store_c) * TM + store_r]); + } + } else if (dr + cm_row * TM < p.M && dc + cm_col * TN < p.N) { + // Partial coopMat is within bounds + coopMatStore(sums[cm_col * cms_per_row + cm_row], coopmat_stage, warp_i * TM * TN, TM, gl_CooperativeMatrixLayoutColumnMajor); + + [[unroll]] for (uint col = 0; col < TN; col += storestride) { + if (dr + cm_row * TM + store_r < p.M && dc + cm_col * TN + col + store_c < p.N) { + data_d[offsets + (dc + cm_col * TN + col + store_c) * p.stride_d + dr + cm_row * TM + store_r] = D_TYPE(coopmat_stage[warp_i * TM * TN + (col + store_c) * TM + store_r]); + } + } + } + } + } +#endif // MUL_MAT_ID +#else [[unroll]] 
for (uint wsic = 0; wsic < WNITER; wsic++) { [[unroll]] for (uint wsir = 0; wsir < WMITER; wsir++) { @@ -496,7 +614,7 @@ void main() { if (row_i >= _ne1) break; const u16vec2 row_idx = row_ids[row_i]; -#endif +#endif // MUL_MAT_ID [[unroll]] for (uint cr = 0; cr < TM; cr++) { #ifdef MUL_MAT_ID data_d[row_idx.y * p.batch_stride_d + row_idx.x * p.stride_d + dr_warp + cr] = D_TYPE(sums[(wsic * TN + cc) * (WMITER * TM) + wsir * TM + cr]); @@ -504,9 +622,10 @@ void main() { if (dr_warp + cr < p.M && dc_warp + cc < p.N) { data_d[offsets + (dc_warp + cc) * p.stride_d + dr_warp + cr] = D_TYPE(sums[(wsic * TN + cc) * (WMITER * TM) + wsir * TM + cr]); } -#endif +#endif // MUL_MAT_ID } } } } +#endif // COOPMAT } diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm_cm2.comp b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm_cm2.comp new file mode 100644 index 0000000000000..cbfa5dce1b091 --- /dev/null +++ b/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm_cm2.comp @@ -0,0 +1,328 @@ +#version 450 + +#extension GL_EXT_control_flow_attributes : enable +#extension GL_EXT_shader_16bit_storage : require + +#extension GL_EXT_shader_explicit_arithmetic_types_float16 : require +#extension GL_EXT_shader_explicit_arithmetic_types_int8 : require +#extension GL_EXT_shader_explicit_arithmetic_types_int32 : require +#extension GL_EXT_shader_explicit_arithmetic_types_int16 : require + +#extension GL_KHR_memory_scope_semantics : enable +#extension GL_KHR_cooperative_matrix : enable +#extension GL_NV_cooperative_matrix2 : enable +#extension GL_EXT_buffer_reference : enable +#extension GL_KHR_shader_subgroup_ballot : enable +#extension GL_KHR_shader_subgroup_vote : enable + +#include "types.comp" + +layout(local_size_x_id = 0, local_size_y = 1, local_size_z = 1) in; + +layout (constant_id = 1) const uint BM = 64; +layout (constant_id = 2) const uint BN = 64; +layout (constant_id = 3) const uint BK = 16; // Assumed to be 32 if working with a quant + +layout (push_constant) uniform parameter +{ + uint M; + uint N; + uint K; + uint stride_a; + uint stride_b; + uint stride_d; + + uint batch_stride_a; + uint batch_stride_b; + uint batch_stride_d; + +#ifdef MUL_MAT_ID + uint nei0; + uint nei1; + uint nbi1; + uint ne11; +#else + uint k_split; + uint ne02; + uint ne12; + uint broadcast2; + uint broadcast3; +#endif +} p; + + +layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; +layout (binding = 1) readonly buffer B {B_TYPE data_b[];}; +layout (binding = 2) writeonly buffer D {D_TYPE data_d[];}; + +#if QUANT_K > 1 +#define DECODEFUNCA , dequantFuncA +#define MAT_A_TYPE float16_t + +#include "dequant_funcs_cm2.comp" + +#else +#define DECODEFUNCA +#define MAT_A_TYPE A_TYPE +#endif + +#define MAT_B_TYPE B_TYPE + +#ifdef MUL_MAT_ID +layout (binding = 3) readonly buffer IDS {int data_ids[];}; + +shared u16vec4 row_ids[3072]; + +layout(buffer_reference, std430, buffer_reference_align = 2) buffer decodeBufB { + B_TYPE b[]; +}; + +uint _ne1; +shared uint _ne1_sh; + +B_TYPE decodeFuncB(const in decodeBufB bl, const in uint blockCoords[2], const in uint coordInBlock[2]) +{ + const uint row_i = blockCoords[0]; + + if (row_i >= _ne1) { + return B_TYPE(0.0); + } + + const u16vec4 row_idx = row_ids[row_i]; + B_TYPE ret = data_b[row_idx.y * p.batch_stride_b + row_idx.x * p.stride_b + blockCoords[1]]; + + return ret; +} + +D_TYPE perElemOpD(const in uint32_t r, const in uint32_t c, const in D_TYPE elem, const in uint32_t ir, const in uint32_t ic) +{ + uint dr = ir * BM + r; + uint dc = ic * BN + c; + + if (dr < p.M && dc < _ne1) { + uint row_i = 
dc; + const u16vec4 row_idx = row_ids[row_i]; + data_d[row_idx.y * p.batch_stride_d + row_idx.z * p.stride_d + dr] = elem; + } + return elem; +} + +#endif + +void main() { +#if defined(DATA_A_IQ4_NL) + init_iq4nl_shmem(); +#endif + +#ifdef MUL_MAT_ID + const uint expert_idx = gl_GlobalInvocationID.z; +#else + const uint batch_idx = gl_GlobalInvocationID.z; + + const uint i13 = batch_idx / p.ne12; + const uint i12 = batch_idx % p.ne12; + + const uint i03 = i13 / p.broadcast3; + const uint i02 = i12 / p.broadcast2; + + const uint batch_idx_a = i03 * p.ne02 + i02; +#endif + + const uint blocks_m = (p.M + BM - 1) / BM; + const uint ir = gl_WorkGroupID.x % blocks_m; + const uint ik = gl_WorkGroupID.x / blocks_m; + const uint ic = gl_WorkGroupID.y; + +#ifdef MUL_MAT_ID + // Spread the search across all elements in the first subgroup + if (gl_SubgroupID == 0) { + _ne1 = 0; + uint num_elements = p.nei1 * p.nei0; + + for (uint i = gl_SubgroupInvocationID; subgroupAny(i < num_elements); i += gl_SubgroupSize) { + bool in_range = i < num_elements; + uint ii0 = i % p.nei0; + uint ii1 = i / p.nei0; + uint id = in_range ? data_ids[ii1*p.nbi1 + ii0] : 0; + uvec4 ballot = subgroupBallot(in_range && id == expert_idx); + uint idx = subgroupBallotExclusiveBitCount(ballot); + if (in_range && id == expert_idx) { + row_ids[_ne1 + idx] = u16vec4(ii0 % p.ne11, ii1, ii0, 0); + } + _ne1 += subgroupBallotBitCount(ballot); + } + _ne1_sh = _ne1; + } + + barrier(); + + _ne1 = _ne1_sh; + + // Workgroup has no work + if (ic * BN >= _ne1) return; +#endif + +#ifdef MUL_MAT_ID + uint start_k = 0; + const uint end_k = p.K; +#else + uint start_k = ik * p.k_split; + const uint end_k = min(p.K, (ik + 1) * p.k_split); +#endif + + coopmat sum; + sum = coopmat(0.0); + +#ifdef MUL_MAT_ID + uint pos_a = (expert_idx * p.batch_stride_a) / QUANT_K; + uint pos_b = 0; +#else + uint pos_a = (batch_idx_a * p.batch_stride_a) / QUANT_K; + uint pos_b = batch_idx * p.batch_stride_b; +#endif + + uint stride_a = p.stride_a / QUANT_K; + uint stride_b = p.stride_b; + + // Hint to the compiler that values are aligned (want 16B alignment). + // Quants are always block-aligned, no alignment needed. 
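The MUL_MAT_ID gather above is a subgroup stream compaction: every (ii1, ii0) pair routed to the current expert gets a slot in row_ids, with subgroupBallotExclusiveBitCount() assigning the slot within each pass and subgroupBallotBitCount() advancing _ne1. A scalar C++ model of what ends up in row_ids (hypothetical helper, for illustration only, not code from the repo):

    #include <cstdint>
    #include <vector>

    struct RowId { uint16_t x, y, z, w; };  // mirrors the shader's u16vec4 layout

    // Scalar model of the ballot-based compaction: each candidate (ii1, ii0) whose
    // routed expert id matches expert_idx is appended, in flattened-index order,
    // to row_ids. In the shader, subgroupBallotExclusiveBitCount() gives a matching
    // lane its slot within the current pass and subgroupBallotBitCount() bumps _ne1.
    static uint32_t gather_expert_rows(const std::vector<int32_t> & ids, // data_ids
                                       uint32_t nei0, uint32_t nei1, uint32_t nbi1,
                                       uint32_t ne11, uint32_t expert_idx,
                                       std::vector<RowId> & row_ids) {
        uint32_t ne1 = 0;
        for (uint32_t ii1 = 0; ii1 < nei1; ++ii1) {
            for (uint32_t ii0 = 0; ii0 < nei0; ++ii0) {
                if ((uint32_t) ids[ii1 * nbi1 + ii0] == expert_idx) {
                    row_ids.push_back({ (uint16_t)(ii0 % ne11), (uint16_t) ii1, (uint16_t) ii0, 0 });
                    ne1++;  // corresponds to _ne1 after the subgroup loop finishes
                }
            }
        }
        return ne1; // number of rows this expert processes (_ne1)
    }

In the shader the same result is produced cooperatively by one subgroup, so the compaction costs only a few ballot operations per gl_SubgroupSize candidates.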
+#if ALIGNED +#if QUANT_K == 1 + stride_a &= ~7; +#endif + stride_b &= ~7; +#endif + + // Create layouts for both clamped and unclamped accesses + tensorLayoutNV<2> tensorLayoutA = createTensorLayoutNV(2); + tensorLayoutNV<2, gl_CooperativeMatrixClampModeConstantNV> tensorLayoutAClamp = createTensorLayoutNV(2, gl_CooperativeMatrixClampModeConstantNV); + tensorLayoutNV<2> tensorLayoutB = createTensorLayoutNV(2); + tensorLayoutNV<2, gl_CooperativeMatrixClampModeConstantNV> tensorLayoutBClamp = createTensorLayoutNV(2, gl_CooperativeMatrixClampModeConstantNV); + tensorLayoutNV<2, gl_CooperativeMatrixClampModeConstantNV> tensorLayoutD = createTensorLayoutNV(2, gl_CooperativeMatrixClampModeConstantNV); + +#if QUANT_K > 1 + tensorLayoutA = setTensorLayoutBlockSizeNV(tensorLayoutA, 1, QUANT_K); + tensorLayoutAClamp = setTensorLayoutBlockSizeNV(tensorLayoutAClamp, 1, QUANT_K); +#endif + + // Use end_k rather than p.K as the dimension because that's what + // we need to bound check against when using split_k + tensorLayoutA = setTensorLayoutDimensionNV(tensorLayoutA, p.M, end_k); + tensorLayoutB = setTensorLayoutDimensionNV(tensorLayoutB, p.N, end_k); + tensorLayoutD = setTensorLayoutDimensionNV(tensorLayoutD, p.N, p.M); + tensorLayoutAClamp = setTensorLayoutDimensionNV(tensorLayoutAClamp, p.M, end_k); + tensorLayoutBClamp = setTensorLayoutDimensionNV(tensorLayoutBClamp, p.N, end_k); + + tensorViewNV<2, false, 1, 0> tensorViewTranspose = createTensorViewNV(2, false, 1, 0); + +#if !defined(MUL_MAT_ID) + // Detect a fast path where all loads are entirely in bounds and no clamping is required + if ((ir + 1) * BM <= p.M && (ic + 1) * BN <= p.N && (start_k % BK) == 0 && (end_k % BK) == 0 && +#if QUANT_K == 1 + (stride_a % 8) == 0 && +#endif + (stride_b % 8) == 0 && (start_k % 8) == 0) { + // Hint to the compiler that values are aligned (want 16B alignment) + start_k &= ~7; + stride_b &= ~7; +#if QUANT_K == 1 + stride_a &= ~7; +#endif + + tensorLayoutA = setTensorLayoutStrideNV(tensorLayoutA, stride_a, 1); + tensorLayoutB = setTensorLayoutStrideNV(tensorLayoutB, stride_b, 1); + + uint k_iters = (end_k - start_k + BK - 1) / BK; + + for (uint block_k = start_k, i = 0; i < k_iters; block_k += BK, ++i) { + + coopmat mat_a; + coopmat mat_b; + + coopMatLoadTensorNV(mat_a, data_a, pos_a, sliceTensorLayoutNV(tensorLayoutA, ir * BM, BM, block_k, BK) DECODEFUNCA); + coopmat mat_a_ft = coopmat(mat_a); + + coopMatLoadTensorNV(mat_b, data_b, pos_b, sliceTensorLayoutNV(tensorLayoutB, ic * BN, BN, block_k, BK), tensorViewTranspose); + coopmat mat_b_ft = coopmat(mat_b); + + sum = coopMatMulAdd(mat_a_ft, mat_b_ft, sum); + } + } else +#endif // !defined(MUL_MAT_ID) + { + tensorLayoutA = setTensorLayoutStrideNV(tensorLayoutA, stride_a, 1); + + tensorLayoutAClamp = setTensorLayoutStrideNV(tensorLayoutAClamp, stride_a, 1); + + tensorLayoutB = setTensorLayoutStrideNV(tensorLayoutB, stride_b, 1); + + tensorLayoutBClamp = setTensorLayoutStrideNV(tensorLayoutBClamp, stride_b, 1); + + [[dont_unroll]] + for (uint block_k = start_k; block_k < end_k; block_k += BK) { + + coopmat mat_a; + coopmat mat_b; + coopmat mat_a_ft; + coopmat mat_b_ft; + + // Clamping is expensive, so detect different code paths for each combination + // of A and B needing clamping. 
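For reference, a small host-side sketch of the split-k slicing and the k_iters ceil-division used by the fast path above (assumed semantics for illustration, not code from the repo):

    #include <algorithm>
    #include <cstdint>

    // Workgroup ik covers the K-range [ik*k_split, min(K, (ik+1)*k_split)), and the
    // aligned fast path walks it in steps of BK, hence the ceil-div for k_iters.
    // Example (K = 1000, k_split = 256, BK = 16): ik = 3 covers [768, 1000) with
    // k_iters = 15 -- which is why bounds are checked against end_k rather than p.K.
    struct KSlice { uint32_t start_k, end_k, k_iters; };

    static KSlice split_k_slice(uint32_t K, uint32_t k_split, uint32_t BK, uint32_t ik) {
        const uint32_t start_k = ik * k_split;
        const uint32_t end_k   = std::min(K, (ik + 1) * k_split);
        const uint32_t k_iters = (end_k - start_k + BK - 1) / BK;
        return { start_k, end_k, k_iters };
    }
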
+ bool unclampedA = (ir + 1) * BM <= p.M && block_k + BK <= end_k && (block_k % 8) == 0; +#ifdef MUL_MAT_ID + bool unclampedB = true; +#else + bool unclampedB = (ic + 1) * BN <= p.N && block_k + BK <= end_k && (block_k % 8) == 0; +#endif + if (unclampedA && unclampedB) { + coopMatLoadTensorNV(mat_a, data_a, pos_a, sliceTensorLayoutNV(tensorLayoutA, ir * BM, BM, (block_k & ~7), BK) DECODEFUNCA); +#ifdef MUL_MAT_ID + coopMatLoadTensorNV(mat_b, data_b, pos_b, sliceTensorLayoutNV(tensorLayoutB, ic * BN, BN, block_k, BK), tensorViewTranspose, decodeFuncB); +#else + coopMatLoadTensorNV(mat_b, data_b, pos_b, sliceTensorLayoutNV(tensorLayoutB, ic * BN, BN, (block_k & ~7), BK), tensorViewTranspose); +#endif + mat_a_ft = coopmat(mat_a); + mat_b_ft = coopmat(mat_b); + sum = coopMatMulAdd(mat_a_ft, mat_b_ft, sum); + } else if (unclampedA && !unclampedB) { + coopMatLoadTensorNV(mat_a, data_a, pos_a, sliceTensorLayoutNV(tensorLayoutA, ir * BM, BM, (block_k & ~7), BK) DECODEFUNCA); + coopMatLoadTensorNV(mat_b, data_b, pos_b, sliceTensorLayoutNV(tensorLayoutBClamp, ic * BN, BN, block_k, BK), tensorViewTranspose); + + mat_a_ft = coopmat(mat_a); + mat_b_ft = coopmat(mat_b); + sum = coopMatMulAdd(mat_a_ft, mat_b_ft, sum); + } else if (!unclampedA && unclampedB) { + coopMatLoadTensorNV(mat_a, data_a, pos_a, sliceTensorLayoutNV(tensorLayoutAClamp, ir * BM, BM, block_k, BK) DECODEFUNCA); +#ifdef MUL_MAT_ID + coopMatLoadTensorNV(mat_b, data_b, pos_b, sliceTensorLayoutNV(tensorLayoutB, ic * BN, BN, block_k, BK), tensorViewTranspose, decodeFuncB); +#else + coopMatLoadTensorNV(mat_b, data_b, pos_b, sliceTensorLayoutNV(tensorLayoutB, ic * BN, BN, (block_k & ~7), BK), tensorViewTranspose); +#endif + mat_a_ft = coopmat(mat_a); + mat_b_ft = coopmat(mat_b); + sum = coopMatMulAdd(mat_a_ft, mat_b_ft, sum); + } else if (!unclampedA && !unclampedB) { + coopMatLoadTensorNV(mat_a, data_a, pos_a, sliceTensorLayoutNV(tensorLayoutAClamp, ir * BM, BM, block_k, BK) DECODEFUNCA); + coopMatLoadTensorNV(mat_b, data_b, pos_b, sliceTensorLayoutNV(tensorLayoutBClamp, ic * BN, BN, block_k, BK), tensorViewTranspose); + + mat_a_ft = coopmat(mat_a); + mat_b_ft = coopmat(mat_b); + sum = coopMatMulAdd(mat_a_ft, mat_b_ft, sum); + } + } + } + + // Convert from ACC_TYPE to D_TYPE + coopmat mat_d; + mat_d = coopmat(sum); + +#ifdef MUL_MAT_ID + // Call callback to store each element, remapping row through shared memory + coopMatPerElementNV(mat_d, mat_d, perElemOpD, ir, ic); +#else + tensorLayoutD = setTensorLayoutStrideNV(tensorLayoutD, p.stride_d, 1); + + uint pos_d = batch_idx * p.batch_stride_d + ik * p.batch_stride_d * gl_NumWorkGroups.z; + coopMatStoreTensorNV(mat_d, data_d, pos_d, sliceTensorLayoutNV(tensorLayoutD, ic * BN, BN, ir * BM, BM), tensorViewTranspose); +#endif +} diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/rope_head.comp b/ggml/src/ggml-vulkan/vulkan-shaders/rope_head.comp index ea89542261cc4..574b51ca55389 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/rope_head.comp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/rope_head.comp @@ -1,6 +1,11 @@ #include "types.comp" #extension GL_EXT_shader_16bit_storage : require +#extension GL_EXT_spirv_intrinsics: enable + +#if RTE16 +spirv_execution_mode(capabilities = [4467], 4462, 16); // RoundingModeRTE, 16 bits +#endif layout(local_size_x = 1, local_size_y = 256, local_size_z = 1) in; diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/tanh.comp b/ggml/src/ggml-vulkan/vulkan-shaders/tanh.comp index 74630dc7fef12..495f966bdc6e5 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/tanh.comp +++ 
b/ggml/src/ggml-vulkan/vulkan-shaders/tanh.comp @@ -16,6 +16,5 @@ void main() { if (i >= p.KX) { return; } - - data_d[i] = D_TYPE(tanh(data_a[i])); + data_d[i] = D_TYPE(1. - 2. / (exp(2.*data_a[i]) + 1.)); } diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/test_coopmat2_support.comp b/ggml/src/ggml-vulkan/vulkan-shaders/test_coopmat2_support.comp new file mode 100644 index 0000000000000..28eb24e11f871 --- /dev/null +++ b/ggml/src/ggml-vulkan/vulkan-shaders/test_coopmat2_support.comp @@ -0,0 +1,7 @@ +#version 460 + +#extension GL_NV_cooperative_matrix2 : require + +void main() +{ +} diff --git a/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp b/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp index 5c317b68b7ea0..c48a228aef65d 100644 --- a/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp +++ b/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp @@ -30,6 +30,8 @@ #include #endif +#include + #define ASYNCIO_CONCURRENCY 64 std::mutex lock; @@ -58,6 +60,7 @@ const std::vector type_names = { "iq4_nl" }; +namespace { void execute_command(const std::string& command, std::string& stdout_str, std::string& stderr_str) { #ifdef _WIN32 HANDLE stdout_read, stdout_write; @@ -196,15 +199,20 @@ static uint32_t compile_count = 0; static std::mutex compile_count_mutex; static std::condition_variable compile_count_cond; -void string_to_spv_func(const std::string& _name, const std::string& in_fname, const std::map& defines, bool fp16 = true) { - std::string name = _name + (fp16 ? "" : "_fp32"); +void string_to_spv_func(const std::string& _name, const std::string& in_fname, const std::map& defines, bool fp16 = true, bool coopmat = false, bool coopmat2 = false, bool f16acc = false) { + std::string name = _name + (f16acc ? "_f16acc" : "") + (coopmat ? "_coopmat" : "") + (coopmat2 ? "_cm2" : (fp16 ? "" : "_fp32")); std::string out_fname = join_paths(output_dir, name + ".spv"); std::string in_path = join_paths(input_dir, in_fname); + std::string target_env = (name.find("_cm2") != std::string::npos) ? "--target-env=vulkan1.3" : "--target-env=vulkan1.2"; + + // disable spirv-opt for coopmat shaders for https://github.com/ggerganov/llama.cpp/issues/10734 + std::string opt_level = coopmat ? "" : "-O"; + #ifdef _WIN32 - std::vector cmd = {GLSLC, "-fshader-stage=compute", "--target-env=vulkan1.2", "-O", "\"" + in_path + "\"", "-o", "\"" + out_fname + "\""}; + std::vector cmd = {GLSLC, "-fshader-stage=compute", target_env, opt_level, "\"" + in_path + "\"", "-o", "\"" + out_fname + "\""}; #else - std::vector cmd = {GLSLC, "-fshader-stage=compute", "--target-env=vulkan1.2", "-O", in_path, "-o", out_fname}; + std::vector cmd = {GLSLC, "-fshader-stage=compute", target_env, opt_level, in_path, "-o", out_fname}; #endif #ifdef GGML_VULKAN_SHADER_DEBUG_INFO @@ -254,7 +262,7 @@ std::map merge_maps(const std::map> compiles; -void string_to_spv(const std::string& _name, const std::string& in_fname, const std::map& defines, bool fp16 = true) { +void string_to_spv(const std::string& _name, const std::string& in_fname, const std::map& defines, bool fp16 = true, bool coopmat = false, bool coopmat2 = false, bool f16acc = false) { { // wait until fewer than N compiles are in progress. // 16 is an arbitrary limit, the goal is to avoid "failed to create pipe" errors. 
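The compile_count / compile_count_mutex / compile_count_cond trio above caps how many glslc invocations run at once. A self-contained sketch of that throttling pattern (names here are illustrative, not the generator's actual helpers):

    #include <condition_variable>
    #include <cstdint>
    #include <future>
    #include <mutex>
    #include <vector>

    // At most `limit` jobs run concurrently; each worker releases its slot when done.
    static uint32_t                job_count = 0;
    static std::mutex              job_mutex;
    static std::condition_variable job_cond;

    static void run_job_throttled(std::vector<std::future<void>> & jobs, uint32_t limit) {
        {
            std::unique_lock<std::mutex> lock(job_mutex);
            job_cond.wait(lock, [&] { return job_count < limit; }); // wait for a free slot
            job_count++;
        }
        jobs.push_back(std::async(std::launch::async, [] {
            // ... do the work (e.g. spawn the compiler and wait for it) ...
            {
                std::lock_guard<std::mutex> lock(job_mutex);
                job_count--;
            }
            job_cond.notify_one(); // hand the slot to a waiting caller
        }));
    }

Each worker frees its slot and notifies a waiting caller, so the producer loop never has more than `limit` compiles in flight, which is what keeps the pipe count bounded.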
@@ -265,15 +273,15 @@ void string_to_spv(const std::string& _name, const std::string& in_fname, const } compile_count++; } - compiles.push_back(std::async(string_to_spv_func, _name, in_fname, defines, fp16)); + compiles.push_back(std::async(string_to_spv_func, _name, in_fname, defines, fp16, coopmat, coopmat2, f16acc)); } -void matmul_shaders(bool fp16, bool matmul_id) { - std::string load_vec = fp16 ? "8" : "4"; - std::string aligned_b_type_f32 = fp16 ? "mat2x4" : "vec4"; - std::string aligned_b_type_f16 = fp16 ? "f16mat2x4" : "f16vec4"; +void matmul_shaders(bool fp16, bool matmul_id, bool coopmat, bool coopmat2, bool f16acc) { + std::string load_vec = coopmat2 ? "1" : fp16 ? "8" : "4"; + std::string aligned_b_type_f32 = coopmat2 ? "float" : fp16 ? "mat2x4" : "vec4"; + std::string aligned_b_type_f16 = coopmat2 ? "float16_t" : fp16 ? "f16mat2x4" : "f16vec4"; - std::map base_dict = {{"FLOAT_TYPE", fp16 ? "float16_t" : "float"}}; + std::map base_dict = {{"FLOAT_TYPE", (coopmat2 || fp16) ? "float16_t" : "float"}}; std::string shader_name = "matmul"; if (matmul_id) { @@ -285,21 +293,37 @@ void matmul_shaders(bool fp16, bool matmul_id) { base_dict["FLOAT16"] = "1"; } + base_dict["ACC_TYPE"] = f16acc ? "float16_t" : "float"; + + if (coopmat) { + base_dict["COOPMAT"] = "1"; + } + + base_dict["ACC_TYPE"] = f16acc ? "float16_t" : "float"; + + std::string source_name = coopmat2 ? "mul_mm_cm2.comp" : "mul_mm.comp"; + // Shaders with f16 B_TYPE - string_to_spv(shader_name + "_f32_f16", "mul_mm.comp", merge_maps(base_dict, {{"DATA_A_F32", "1"}, {"B_TYPE", "float16_t"}, {"D_TYPE", "float"}}), fp16); - string_to_spv(shader_name + "_f32_f16_aligned", "mul_mm.comp", merge_maps(base_dict, {{"DATA_A_F32", "1"}, {"LOAD_VEC_A", load_vec}, {"LOAD_VEC_B", load_vec}, {"B_TYPE", aligned_b_type_f16}, {"D_TYPE", "float"}}), fp16); + string_to_spv(shader_name + "_f32_f16", source_name, merge_maps(base_dict, {{"DATA_A_F32", "1"}, {"B_TYPE", "float16_t"}, {"D_TYPE", "float"}, }), fp16, coopmat, coopmat2, f16acc); + string_to_spv(shader_name + "_f32_f16_aligned", source_name, merge_maps(base_dict, {{"DATA_A_F32", "1"}, {"LOAD_VEC_A", load_vec}, {"LOAD_VEC_B", load_vec}, {"B_TYPE", aligned_b_type_f16}, {"D_TYPE", "float"}, {"ALIGNED", "1"}}), fp16, coopmat, coopmat2, f16acc); - string_to_spv(shader_name + "_f16", "mul_mm.comp", merge_maps(base_dict, {{"DATA_A_F16", "1"}, {"B_TYPE", "float16_t"}, {"D_TYPE", "float"}}), fp16); - string_to_spv(shader_name + "_f16_aligned", "mul_mm.comp", merge_maps(base_dict, {{"DATA_A_F16", "1"}, {"LOAD_VEC_A", load_vec}, {"LOAD_VEC_B", load_vec}, {"B_TYPE", aligned_b_type_f16}, {"D_TYPE", "float"}}), fp16); + string_to_spv(shader_name + "_f16_aligned", source_name, merge_maps(base_dict, {{"DATA_A_F16", "1"}, {"LOAD_VEC_A", load_vec}, {"LOAD_VEC_B", load_vec}, {"B_TYPE", aligned_b_type_f16}, {"D_TYPE", "float"}, {"ALIGNED", "1"}}), fp16, coopmat, coopmat2, f16acc); + string_to_spv(shader_name + "_f16", source_name, merge_maps(base_dict, {{"DATA_A_F16", "1"}, {"B_TYPE", "float16_t"}, {"D_TYPE", "float"}}), fp16, coopmat, coopmat2, f16acc); for (const auto& tname : type_names) { std::string data_a_key = "DATA_A_" + to_uppercase(tname); // For unaligned, load one at a time for f32/f16, or two at a time for quants - std::string load_vec_a_unaligned = (tname == "f32" || tname == "f16") ? "1" : "2"; + std::string load_vec_a_unaligned = (coopmat2 || tname == "f32" || tname == "f16") ? "1" : "2"; // For aligned matmul loads - std::string load_vec_a = (tname == "f32" || tname == "f16") ? 
load_vec : "2"; - string_to_spv(shader_name + "_" + tname + "_f32", "mul_mm.comp", merge_maps(base_dict, {{data_a_key, "1"}, {"LOAD_VEC_A", load_vec_a_unaligned}, {"B_TYPE", "float"}, {"D_TYPE", "float"}}), fp16); - string_to_spv(shader_name + "_" + tname + "_f32_aligned", "mul_mm.comp", merge_maps(base_dict, {{data_a_key, "1"}, {"LOAD_VEC_A", load_vec_a}, {"LOAD_VEC_B", load_vec}, {"B_TYPE", aligned_b_type_f32}, {"D_TYPE", "float"}}), fp16); + std::string load_vec_a = (coopmat2 || tname == "f32" || tname == "f16") ? load_vec : "2"; + + string_to_spv(shader_name + "_" + tname + "_f32", source_name, merge_maps(base_dict, {{data_a_key, "1"}, {"LOAD_VEC_A", load_vec_a_unaligned}, {"B_TYPE", "float"}, {"D_TYPE", "float"}, {"B_IS_FLOAT", "1"}}), fp16, coopmat, coopmat2, f16acc); + string_to_spv(shader_name + "_" + tname + "_f32_aligned", source_name, merge_maps(base_dict, {{data_a_key, "1"}, {"LOAD_VEC_A", load_vec_a}, {"LOAD_VEC_B", load_vec}, {"B_TYPE", aligned_b_type_f32}, {"D_TYPE", "float"}, {"B_IS_FLOAT", "1"}, {"ALIGNED", "1"}}), fp16, coopmat, coopmat2, f16acc); + + if (tname != "f16" && tname != "f32") { + string_to_spv(shader_name + "_" + tname + "_f16", source_name, merge_maps(base_dict, {{data_a_key, "1"}, {"LOAD_VEC_A", load_vec_a_unaligned}, {"B_TYPE", "float16_t"}, {"D_TYPE", "float"}, {"B_IS_FLOAT", "1"}}), fp16, coopmat, coopmat2, f16acc); + string_to_spv(shader_name + "_" + tname + "_f16_aligned", source_name, merge_maps(base_dict, {{data_a_key, "1"}, {"LOAD_VEC_A", load_vec_a}, {"LOAD_VEC_B", load_vec}, {"B_TYPE", aligned_b_type_f16}, {"D_TYPE", "float"}, {"B_IS_FLOAT", "1"}, {"ALIGNED", "1"}}), fp16, coopmat, coopmat2, f16acc); + } } } @@ -307,11 +331,49 @@ void process_shaders() { std::cout << "ggml_vulkan: Generating and compiling shaders to SPIR-V" << std::endl; std::map base_dict = {{"FLOAT_TYPE", "float"}}; - for (const auto& fp16 : {false, true}) { - matmul_shaders(fp16, false); - matmul_shaders(fp16, true); + // matmul + for (const auto& matmul_id : {false, true}) { + // No coopmats + // fp32 + matmul_shaders(false, matmul_id, false, false, false); + + // fp16, fp32acc and fp16acc + matmul_shaders(true, matmul_id, false, false, false); + matmul_shaders(true, matmul_id, false, false, true); + + // Coopmat, fp32acc and fp16acc + matmul_shaders(true, matmul_id, true, false, false); + matmul_shaders(true, matmul_id, true, false, true); + +#if defined(GGML_VULKAN_COOPMAT2_GLSLC_SUPPORT) + // Coopmat2, fp32acc and fp16acc + matmul_shaders(true, matmul_id, false, true, false); + matmul_shaders(true, matmul_id, false, true, true); +#endif } +#if defined(GGML_VULKAN_COOPMAT2_GLSLC_SUPPORT) + // flash attention + for (const auto& f16acc : {false, true}) { + std::string acctype = f16acc ? 
"float16_t" : "float"; + + for (const auto& tname : type_names) { + if (tname == "f32") { + continue; + } + + if (tname == "f16") { + string_to_spv("flash_attn_f32_f16_" + tname, "flash_attn_cm2.comp", + merge_maps(base_dict, {{"Q_TYPE", "float"}, {"D_TYPE", "float"}, {"ACC_TYPE", acctype}}), true, false, true, f16acc); + } else { + std::string data_a_key = "DATA_A_" + to_uppercase(tname); + string_to_spv("flash_attn_f32_f16_" + tname, "flash_attn_cm2.comp", + merge_maps(base_dict, {{data_a_key, "1"}, {"Q_TYPE", "float"}, {"D_TYPE", "float"}, {"ACC_TYPE", acctype}, {"DEQUANTFUNC", "dequantFunc"+to_uppercase(tname) }, {"BLOCK_SIZE", "QUANT_K_"+to_uppercase(tname) }}), true, false, true, f16acc); + } + } + } +#endif + for (const auto& tname : type_names) { // mul mat vec std::string data_a_key = "DATA_A_" + to_uppercase(tname); @@ -399,9 +461,11 @@ void process_shaders() { string_to_spv("rope_norm_f32", "rope_norm.comp", {{"A_TYPE", "float"}, {"D_TYPE", "float"}}); string_to_spv("rope_norm_f16", "rope_norm.comp", {{"A_TYPE", "float16_t"}, {"D_TYPE", "float16_t"}}); + string_to_spv("rope_norm_f16_rte", "rope_norm.comp", {{"A_TYPE", "float16_t"}, {"D_TYPE", "float16_t"}, {"RTE16", "1"}}); string_to_spv("rope_neox_f32", "rope_neox.comp", {{"A_TYPE", "float"}, {"D_TYPE", "float"}}); string_to_spv("rope_neox_f16", "rope_neox.comp", {{"A_TYPE", "float16_t"}, {"D_TYPE", "float16_t"}}); + string_to_spv("rope_neox_f16_rte", "rope_neox.comp", {{"A_TYPE", "float16_t"}, {"D_TYPE", "float16_t"}, {"RTE16", "1"}}); string_to_spv("argsort_f32", "argsort.comp", {{"A_TYPE", "float"}}); @@ -409,6 +473,7 @@ void process_shaders() { string_to_spv("im2col_f32", "im2col.comp", merge_maps(base_dict, {{"A_TYPE", "float"}, {"D_TYPE", "float"}})); string_to_spv("im2col_f32_f16", "im2col.comp", merge_maps(base_dict, {{"A_TYPE", "float"}, {"D_TYPE", "float16_t"}})); + string_to_spv("im2col_f32_f16_rte", "im2col.comp", merge_maps(base_dict, {{"A_TYPE", "float"}, {"D_TYPE", "float16_t"}, {"RTE16", "1"}})); string_to_spv("timestep_embedding_f32", "timestep_embedding.comp", merge_maps(base_dict, {{"A_TYPE", "float"}, {"D_TYPE", "float"}})); @@ -471,6 +536,7 @@ void write_output_files() { fclose(hdr); fclose(src); } +} int main(int argc, char** argv) { std::map args; diff --git a/ggml/src/ggml.c b/ggml/src/ggml.c index 26428fc5bcf50..df854f902a3aa 100644 --- a/ggml/src/ggml.c +++ b/ggml/src/ggml.c @@ -8,7 +8,10 @@ // FIXME: required here for quantization functions #include "ggml-quants.h" -#include "ggml-aarch64.h" + +#ifdef GGML_USE_CPU_HBM +#include +#endif #if defined(_MSC_VER) || defined(__MINGW32__) #include // using malloc.h with MSC/MINGW @@ -788,32 +791,23 @@ static const struct ggml_type_traits type_traits[GGML_TYPE_COUNT] = { .to_float = (ggml_to_float_t) ggml_bf16_to_fp32_row, .from_float_ref = (ggml_from_float_t) ggml_fp32_to_bf16_row_ref, }, - [GGML_TYPE_Q4_0_4_4] = { - .type_name = "q4_0_4x4", - .blck_size = QK4_0, - .blck_size_interleave = 4, - .type_size = sizeof(block_q4_0), - .is_quantized = true, - .to_float = NULL, - .from_float_ref = NULL, + [31] = { // GGML_TYPE_Q4_0_4_4 + .type_name = "TYPE_Q4_0_4_4 REMOVED, use Q4_0 with runtime repacking", + .blck_size = 0, + .type_size = 0, + .is_quantized = false, }, - [GGML_TYPE_Q4_0_4_8] = { - .type_name = "q4_0_4x8", - .blck_size = QK4_0, - .blck_size_interleave = 8, - .type_size = sizeof(block_q4_0), - .is_quantized = true, - .to_float = NULL, - .from_float_ref = NULL, + [32] = { // GGML_TYPE_Q4_0_4_8 + .type_name = "TYPE_Q4_0_4_8 REMOVED, use Q4_0 with runtime 
repacking", + .blck_size = 0, + .type_size = 0, + .is_quantized = false, }, - [GGML_TYPE_Q4_0_8_8] = { - .type_name = "q4_0_8x8", - .blck_size = QK4_0, - .blck_size_interleave = 8, - .type_size = sizeof(block_q4_0), - .is_quantized = true, - .to_float = NULL, - .from_float_ref = NULL, + [33] = { // GGML_TYPE_Q4_0_8_8 + .type_name = "TYPE_Q4_0_8_8 REMOVED, use Q4_0 with runtime repacking", + .blck_size = 0, + .type_size = 0, + .is_quantized = false, }, [GGML_TYPE_TQ1_0] = { .type_name = "tq1_0", @@ -831,14 +825,23 @@ static const struct ggml_type_traits type_traits[GGML_TYPE_COUNT] = { .to_float = (ggml_to_float_t) dequantize_row_tq2_0, .from_float_ref = (ggml_from_float_t) quantize_row_tq2_0_ref, }, - [GGML_TYPE_IQ4_NL_4_4] = { - .type_name = "iq4_nl_4x4", - .blck_size = QK4_NL, - .blck_size_interleave = 4, - .type_size = sizeof(block_iq4_nl), - .is_quantized = true, - .to_float = NULL, - .from_float_ref = NULL, + [36] = { // GGML_TYPE_IQ4_NL_4_4 + .type_name = "TYPE_IQ4_NL_4_4 REMOVED, use IQ4_NL with runtime repacking", + .blck_size = 0, + .type_size = 0, + .is_quantized = false, + }, + [37] = { // GGML_TYPE_IQ4_NL_4_8 + .type_name = "TYPE_IQ4_NL_4_8 REMOVED, use IQ4_NL with runtime repacking", + .blck_size = 0, + .type_size = 0, + .is_quantized = false, + }, + [38] = { // GGML_TYPE_IQ4_NL_8_8 + .type_name = "TYPE_IQ4_NL_8_8 REMOVED, use IQ4_NL with runtime repacking", + .blck_size = 0, + .type_size = 0, + .is_quantized = false, }, }; @@ -950,6 +953,7 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "POOL_2D_BACK", "UPSCALE", "PAD", + "PAD_REFLECT_1D", "ARANGE", "TIMESTEP_EMBEDDING", "ARGSORT", @@ -983,7 +987,7 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "OPT_STEP_ADAMW", }; -static_assert(GGML_OP_COUNT == 81, "GGML_OP_COUNT != 81"); +static_assert(GGML_OP_COUNT == 82, "GGML_OP_COUNT != 82"); static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "none", @@ -1045,6 +1049,7 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "pool_2d_back(x)", "upscale(x)", "pad(x)", + "pad_reflect_1d(x)", "arange(start, stop, step)", "timestep_embedding(timesteps, dim, max_period)", "argsort(x)", @@ -1078,7 +1083,7 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "adamw(x)", }; -static_assert(GGML_OP_COUNT == 81, "GGML_OP_COUNT != 81"); +static_assert(GGML_OP_COUNT == 82, "GGML_OP_COUNT != 82"); static_assert(GGML_OP_POOL_COUNT == 2, "GGML_OP_POOL_COUNT != 2"); @@ -1268,9 +1273,6 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_IQ4_XS: wtype = GGML_TYPE_IQ4_XS; break; case GGML_FTYPE_MOSTLY_IQ3_S: wtype = GGML_TYPE_IQ3_S; break; case GGML_FTYPE_MOSTLY_IQ2_S: wtype = GGML_TYPE_IQ2_S; break; - case GGML_FTYPE_MOSTLY_Q4_0_4_4: wtype = GGML_TYPE_Q4_0_4_4; break; - case GGML_FTYPE_MOSTLY_Q4_0_4_8: wtype = GGML_TYPE_Q4_0_4_8; break; - case GGML_FTYPE_MOSTLY_Q4_0_8_8: wtype = GGML_TYPE_Q4_0_8_8; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; } @@ -4147,6 +4149,37 @@ struct ggml_tensor * ggml_pad( return result; } +// ggml_pad_reflect_1d + +struct ggml_tensor * ggml_pad_reflect_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int p0, + int p1) { + GGML_ASSERT(p0 >= 0); + GGML_ASSERT(p1 >= 0); + + GGML_ASSERT(p0 < a->ne[0]); // padding length on each size must be less than the + GGML_ASSERT(p1 < a->ne[0]); // existing length of the dimension being padded + + GGML_ASSERT(ggml_is_contiguous(a)); + GGML_ASSERT(a->type == GGML_TYPE_F32); + + struct 
ggml_tensor * result = ggml_new_tensor_4d(ctx, a->type, + a->ne[0] + p0 + p1, + a->ne[1], + a->ne[2], + a->ne[3]); + + int32_t params[] = { p0, p1 }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_PAD_REFLECT_1D; + result->src[0] = a; + + return result; +} + // ggml_arange struct ggml_tensor * ggml_arange( @@ -6321,9 +6354,6 @@ size_t ggml_quantize_chunk( case GGML_TYPE_IQ1_M: result = quantize_iq1_m (src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; case GGML_TYPE_IQ4_NL: result = quantize_iq4_nl (src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; case GGML_TYPE_IQ4_XS: result = quantize_iq4_xs (src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; - case GGML_TYPE_Q4_0_4_4: result = quantize_q4_0_4x4(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; - case GGML_TYPE_Q4_0_4_8: result = quantize_q4_0_4x8(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; - case GGML_TYPE_Q4_0_8_8: result = quantize_q4_0_8x8(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; case GGML_TYPE_F16: { size_t elemsize = sizeof(ggml_fp16_t); @@ -6855,7 +6885,16 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p (int64_t) info->ne[2] * (int64_t) info->ne[3]; - if (ggml_blck_size(info->type) == 0 || ne % ggml_blck_size(info->type) != 0) { + if (ggml_blck_size(info->type) == 0 ) { + // this tensor type support have been removed: + fprintf(stderr, "%s: tensor '%s' of type %d: %s\n", + __func__, info->name.data, (int) info->type, ggml_type_name(info->type)); + fclose(file); + gguf_free(ctx); + return NULL; + } + + if (ne % ggml_blck_size(info->type) != 0) { fprintf(stderr, "%s: tensor '%s' of type %d (%s) number of elements (%" PRId64 ") is not a multiple of block size (%" PRId64 ")\n", __func__, info->name.data, (int) info->type, ggml_type_name(info->type), ne, ggml_blck_size(info->type)); fclose(file); diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 450bd97167678..0cf9394291654 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -764,6 +764,7 @@ class MODEL_TENSOR(IntEnum): MODEL_TENSOR.TOKEN_EMBD, MODEL_TENSOR.OUTPUT_NORM, MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ROPE_FREQS, MODEL_TENSOR.ATTN_NORM, MODEL_TENSOR.ATTN_Q, MODEL_TENSOR.ATTN_K, @@ -913,6 +914,8 @@ class MODEL_TENSOR(IntEnum): MODEL_TENSOR.OUTPUT, MODEL_TENSOR.OUTPUT_NORM, MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ROPE_FACTORS_LONG, + MODEL_TENSOR.ROPE_FACTORS_SHORT, MODEL_TENSOR.ATTN_NORM, MODEL_TENSOR.ATTN_Q, MODEL_TENSOR.ATTN_K, @@ -1405,9 +1408,10 @@ class TokenType(IntEnum): class RopeScalingType(Enum): - NONE = 'none' - LINEAR = 'linear' - YARN = 'yarn' + NONE = 'none' + LINEAR = 'linear' + YARN = 'yarn' + LONGROPE = 'longrope' class PoolingType(IntEnum): @@ -1446,9 +1450,6 @@ class GGMLQuantizationType(IntEnum): F64 = 28 IQ1_M = 29 BF16 = 30 - Q4_0_4_4 = 31 - Q4_0_4_8 = 32 - Q4_0_8_8 = 33 TQ1_0 = 34 TQ2_0 = 35 @@ -1492,9 +1493,9 @@ class LlamaFileType(IntEnum): MOSTLY_IQ4_XS = 30 # except 1d tensors MOSTLY_IQ1_M = 31 # except 1d tensors MOSTLY_BF16 = 32 # except 1d tensors - MOSTLY_Q4_0_4_4 = 33 # except 1d tensors - MOSTLY_Q4_0_4_8 = 34 # except 1d tensors - MOSTLY_Q4_0_8_8 = 35 # except 1d tensors + # MOSTLY_Q4_0_4_4 = 33 # removed from gguf files, use Q4_0 and runtime repack + # MOSTLY_Q4_0_4_8 = 34 # removed from gguf files, use Q4_0 and 
runtime repack + # MOSTLY_Q4_0_8_8 = 35 # removed from gguf files, use Q4_0 and runtime repack MOSTLY_TQ1_0 = 36 # except 1d tensors MOSTLY_TQ2_0 = 37 # except 1d tensors @@ -1570,9 +1571,6 @@ def get_type(val: Any) -> GGUFValueType: GGMLQuantizationType.F64: (1, 8), GGMLQuantizationType.IQ1_M: (256, QK_K // 8 + QK_K // 16 + QK_K // 32), GGMLQuantizationType.BF16: (1, 2), - GGMLQuantizationType.Q4_0_4_4:(32, 2 + 16), - GGMLQuantizationType.Q4_0_4_8:(32, 2 + 16), - GGMLQuantizationType.Q4_0_8_8:(32, 2 + 16), GGMLQuantizationType.TQ1_0: (256, 2 + 4 * 13), GGMLQuantizationType.TQ2_0: (256, 2 + 64), } diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 1b6a3f4add875..f0a7b6478508e 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -146,6 +146,7 @@ class TensorNameMap: # Attention query MODEL_TENSOR.ATTN_Q: ( "model.layers.{bid}.self_attn.q_proj", # llama-hf nemotron olmoe olmo2 + "model.layers.{bid}.self_attn.q_proj_no_perm", # llama-custom "layers.{bid}.attention.wq", # llama-pth "encoder.layer.{bid}.attention.self.query", # bert "transformer.h.{bid}.attn.q_proj", # gpt-j @@ -158,6 +159,7 @@ class TensorNameMap: # Attention key MODEL_TENSOR.ATTN_K: ( "model.layers.{bid}.self_attn.k_proj", # llama-hf nemotron olmoe olmo2 + "model.layers.{bid}.self_attn.k_proj_no_perm", # llama-custom "layers.{bid}.attention.wk", # llama-pth "encoder.layer.{bid}.attention.self.key", # bert "transformer.h.{bid}.attn.k_proj", # gpt-j diff --git a/grammars/README.md b/grammars/README.md index 4e57bca5f300b..9769540919f98 100644 --- a/grammars/README.md +++ b/grammars/README.md @@ -46,7 +46,7 @@ Terminals support the full range of Unicode. Unicode characters can be specified Character ranges can be negated with `^`: ``` -single-line ::= [^\n]+ "\n"` +single-line ::= [^\n]+ "\n" ``` ## Sequences and Alternatives diff --git a/grammars/english.gbnf b/grammars/english.gbnf new file mode 100644 index 0000000000000..2e53686c82151 --- /dev/null +++ b/grammars/english.gbnf @@ -0,0 +1,6 @@ +# note: this might be incomplete, mostly an example +root ::= en-char+ ([ \t\n] en-char+)* +en-char ::= letter | digit | punctuation +letter ::= [a-zA-Z] +digit ::= [0-9] +punctuation ::= [!"#$%&'()*+,-./:;<=>?@[\\\]^_`{|}~] diff --git a/include/llama.h b/include/llama.h index 5f181d4c7e96f..c5c7413af2aba 100644 --- a/include/llama.h +++ b/include/llama.h @@ -104,6 +104,7 @@ extern "C" { LLAMA_VOCAB_PRE_TYPE_GPT3_FINNISH = 24, LLAMA_VOCAB_PRE_TYPE_EXAONE = 25, LLAMA_VOCAB_PRE_TYPE_CHAMELEON = 26, + LLAMA_VOCAB_PRE_TYPE_MINERVA = 27, }; enum llama_rope_type { @@ -173,9 +174,9 @@ extern "C" { LLAMA_FTYPE_MOSTLY_IQ4_XS = 30, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ1_M = 31, // except 1d tensors LLAMA_FTYPE_MOSTLY_BF16 = 32, // except 1d tensors - LLAMA_FTYPE_MOSTLY_Q4_0_4_4 = 33, // except 1d tensors - LLAMA_FTYPE_MOSTLY_Q4_0_4_8 = 34, // except 1d tensors - LLAMA_FTYPE_MOSTLY_Q4_0_8_8 = 35, // except 1d tensors + //LLAMA_FTYPE_MOSTLY_Q4_0_4_4 = 33, // removed from gguf files, use Q4_0 and runtime repack + //LLAMA_FTYPE_MOSTLY_Q4_0_4_8 = 34, // removed from gguf files, use Q4_0 and runtime repack + //LLAMA_FTYPE_MOSTLY_Q4_0_8_8 = 35, // removed from gguf files, use Q4_0 and runtime repack LLAMA_FTYPE_MOSTLY_TQ1_0 = 36, // except 1d tensors LLAMA_FTYPE_MOSTLY_TQ2_0 = 37, // except 1d tensors @@ -187,7 +188,8 @@ extern "C" { LLAMA_ROPE_SCALING_TYPE_NONE = 0, LLAMA_ROPE_SCALING_TYPE_LINEAR = 1, LLAMA_ROPE_SCALING_TYPE_YARN = 2, - LLAMA_ROPE_SCALING_TYPE_MAX_VALUE = 
LLAMA_ROPE_SCALING_TYPE_YARN, + LLAMA_ROPE_SCALING_TYPE_LONGROPE = 3, + LLAMA_ROPE_SCALING_TYPE_MAX_VALUE = LLAMA_ROPE_SCALING_TYPE_LONGROPE, }; enum llama_pooling_type { @@ -992,6 +994,9 @@ extern "C" { char * buf, int32_t length); + // Get list of built-in chat templates + LLAMA_API int32_t llama_chat_builtin_templates(const char ** output, size_t len); + // // Sampling API // diff --git a/models/ggml-vocab-roberta-bpe.gguf.inp b/models/ggml-vocab-roberta-bpe.gguf.inp new file mode 100644 index 0000000000000..9baf7d77ae6b5 --- /dev/null +++ b/models/ggml-vocab-roberta-bpe.gguf.inp @@ -0,0 +1,112 @@ +ied 4 ½ months +__ggml_vocab_test__ +Führer +__ggml_vocab_test__ + +__ggml_vocab_test__ + +__ggml_vocab_test__ + +__ggml_vocab_test__ + +__ggml_vocab_test__ + +__ggml_vocab_test__ + + +__ggml_vocab_test__ + + + +__ggml_vocab_test__ + + + + +__ggml_vocab_test__ + + +__ggml_vocab_test__ +Hello world +__ggml_vocab_test__ + Hello world +__ggml_vocab_test__ +Hello World +__ggml_vocab_test__ + Hello World +__ggml_vocab_test__ + Hello World! +__ggml_vocab_test__ +Hello, world! +__ggml_vocab_test__ + Hello, world! +__ggml_vocab_test__ + this is 🦙.cpp +__ggml_vocab_test__ +w048 7tuijk dsdfhu +__ggml_vocab_test__ +нещо на Български +__ggml_vocab_test__ +កាន់តែពិសេសអាចខលចេញ +__ggml_vocab_test__ +🚀 (normal) 😶‍🌫️ (multiple emojis concatenated) ✅ (only emoji that has its own token) +__ggml_vocab_test__ +Hello +__ggml_vocab_test__ + Hello +__ggml_vocab_test__ + Hello +__ggml_vocab_test__ + Hello +__ggml_vocab_test__ + Hello +__ggml_vocab_test__ + Hello + Hello +__ggml_vocab_test__ + ( +__ggml_vocab_test__ + + = +__ggml_vocab_test__ +' era +__ggml_vocab_test__ +Hello, y'all! How are you 😁 ?我想在apple工作1314151天~ +__ggml_vocab_test__ +!!!!!! +__ggml_vocab_test__ +3 +__ggml_vocab_test__ +33 +__ggml_vocab_test__ +333 +__ggml_vocab_test__ +3333 +__ggml_vocab_test__ +33333 +__ggml_vocab_test__ +333333 +__ggml_vocab_test__ +3333333 +__ggml_vocab_test__ +33333333 +__ggml_vocab_test__ +333333333 +__ggml_vocab_test__ +Cửa Việt +__ggml_vocab_test__ + discards +__ggml_vocab_test__ + + + + + + + + + + + +🚀 (normal) 😶‍🌫️ (multiple emojis concatenated) ✅ 🦙🦙 3 33 333 3333 33333 333333 3333333 33333333 3.3 3..3 3...3 កាន់តែពិសេសអាច😁 ?我想在apple工作1314151天~ ------======= нещо на Български ''''''```````""""......!!!!!!?????? I've been 'told he's there, 'RE you sure? 'M not sure I'll make it, 'D you like some tea? 
We'Ve a'lL +__ggml_vocab_test__ diff --git a/models/ggml-vocab-roberta-bpe.gguf.out b/models/ggml-vocab-roberta-bpe.gguf.out new file mode 100644 index 0000000000000..f181ac3dcc5bc --- /dev/null +++ b/models/ggml-vocab-roberta-bpe.gguf.out @@ -0,0 +1,46 @@ + 2550 204 18430 377 + 597 2768 298 8564 + + 1437 + 1437 1437 + 1437 1437 1437 + 50117 + 50118 + 50140 + 50140 50118 + 50117 50118 + 31414 232 + 20920 232 + 31414 623 + 20920 623 + 20920 623 328 + 31414 6 232 328 + 20920 6 232 328 + 42 16 8103 18164 27 4 49317 + 605 40976 262 10109 18474 385 29 36807 6455 + 36765 25482 22063 23171 34251 18697 10809 26161 18697 3602 22063 27969 40966 25417 15264 26161 24269 36709 41171 35328 + 1376 17772 7471 1376 17772 19002 1376 17772 9085 1376 4333 13859 1376 17772 9357 1376 4333 9264 1376 17772 25448 1376 17772 18400 1376 17772 4333 1376 4333 10172 1376 17772 4333 1376 17772 7258 1376 17772 19002 1376 17772 5782 1376 17772 10172 1376 17772 3726 1376 17772 5782 1376 4333 10172 1376 17772 23171 + 6569 15113 7471 36 21113 43 17841 19002 17 8384 6569 14285 4958 12605 36 34654 2841 4203 354 10146 26511 1070 43 36174 5782 36 8338 21554 14 34 63 308 19233 43 + 31414 + 20920 + 1437 20920 + 1437 1437 20920 + 1437 1437 1437 20920 + 1437 1437 1437 20920 50118 1437 1437 1437 20920 + 36 + 50118 5457 + 108 3567 + 31414 6 1423 108 1250 328 1336 32 47 17841 10172 17487 47876 3602 48617 15264 46537 11423 27326 48494 8210 49233 1558 1570 27761 49429 43251 10809 17772 + 32376 12846 + 246 + 3103 + 25631 + 46152 + 3103 25631 + 46152 3103 + 46152 25631 + 46152 46152 + 46152 3103 25631 + 347 1376 2023 12410 102 16376 1376 2023 6382 90 + 9553 5954 + 50118 1437 50140 1437 50140 50118 1437 50117 1437 50117 50117 1437 50117 50118 1437 1437 50118 1437 1437 1437 50118 1437 1437 1437 1437 50118 1437 1437 1437 1437 1437 50118 6569 15113 7471 36 21113 43 17841 19002 17 8384 6569 14285 4958 12605 36 34654 2841 4203 354 10146 26511 1070 43 36174 5782 8103 18164 27 6569 18164 27 155 2357 30242 155 25631 30242 3103 30242 25631 30242 46152 30242 3103 25631 155 4 246 155 7586 246 155 734 246 25974 17772 7471 1376 17772 19002 1376 17772 9085 1376 4333 13859 1376 17772 9357 1376 4333 9264 1376 17772 25448 1376 17772 18400 1376 17772 4333 1376 4333 10172 1376 17772 4333 1376 17772 7258 1376 17772 19002 1376 17772 5782 18636 10172 17487 47876 3602 48617 15264 46537 11423 27326 48494 8210 49233 1558 1570 27761 49429 43251 10809 17772 36738 48332 47463 18697 10809 25482 22063 23171 34251 18697 10809 26161 18697 3602 22063 27969 40966 25417 15264 26161 24269 36709 41171 35328 128 49690 108 49972 49519 12905 48149 48149 43796 32376 12846 27282 28749 38 348 57 128 41042 37 18 89 6 128 4629 47 686 116 128 448 45 686 38 581 146 24 6 128 495 47 101 103 6845 116 166 108 30660 10 108 462 574 diff --git a/pocs/vdot/CMakeLists.txt b/pocs/vdot/CMakeLists.txt index d5405ad2991d1..6235aec1fdade 100644 --- a/pocs/vdot/CMakeLists.txt +++ b/pocs/vdot/CMakeLists.txt @@ -1,9 +1,9 @@ set(TARGET llama-vdot) add_executable(${TARGET} vdot.cpp) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) set(TARGET llama-q8dot) add_executable(${TARGET} q8dot.cpp) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) +target_compile_features(${TARGET} PRIVATE cxx_std_17) diff --git a/scripts/compare-commits.sh b/scripts/compare-commits.sh index 
8b9b1ad39f384..143d987290daf 100755 --- a/scripts/compare-commits.sh +++ b/scripts/compare-commits.sh @@ -16,15 +16,21 @@ bench_args="${@:3}" rm -f llama-bench.sqlite > /dev/null # to test a backend, call the script with the corresponding environment variable (e.g. GGML_CUDA=1 ./scripts/compare-commits.sh ...) +if [ -n "$GGML_CUDA" ]; then + cmake_opts="-DGGML_CUDA=ON" +fi + +function run { + rm -fr build > /dev/null + cmake -B build -S . $cmake_opts > /dev/null + cmake --build build -t llama-bench > /dev/null + build/bin/llama-bench -o sql -oe md $bench_args | sqlite3 llama-bench.sqlite +} git checkout $1 > /dev/null -make clean > /dev/null -make -j$(nproc) $make_opts llama-bench > /dev/null -./llama-bench -o sql -oe md $bench_args | sqlite3 llama-bench.sqlite +run git checkout $2 > /dev/null -make clean > /dev/null -make -j$(nproc) $make_opts llama-bench > /dev/null -./llama-bench -o sql -oe md $bench_args | sqlite3 llama-bench.sqlite +run ./scripts/compare-llama-bench.py -b $1 -c $2 diff --git a/scripts/pod-llama.sh b/scripts/pod-llama.sh deleted file mode 100644 index 6e56e1ed0908c..0000000000000 --- a/scripts/pod-llama.sh +++ /dev/null @@ -1,212 +0,0 @@ -#!/bin/bash -# -# Use this script only on fresh pods (runpod.io)! -# Otherwise, it can break your environment! -# - -if [ -z "$1" ]; then - echo "Usage: $0 " - echo " 0: no models" - echo " 1: tinyllama-1b" - echo " 2: codellama-7b" - echo " 3: codellama-13b" - echo " 4: codellama-34b" - echo " 5: codellama-7b-instruct" - echo " 6: codellama-13b-instruct" - echo " 7: codellama-34b-instruct" - - exit 1 -fi - -set -x - -# setup deps -apt-get update -apt-get install -y git-lfs cmake cmake-curses-gui vim ruby -git-lfs install - -if [ ! -d "/workspace" ]; then - ln -sfn $(pwd) /workspace -fi - -# download data -cd /workspace - -# this is useful to git clone repos without doubling the disk size due to .git -git clone https://github.com/iboB/git-lfs-download -ln -sfn /workspace/git-lfs-download/git-lfs-download /usr/local/bin/git-lfs-download - -# llama.cpp -cd /workspace -git clone https://github.com/ggerganov/llama.cpp - -cd llama.cpp - -GGML_CUDA=1 make -j - -ln -sfn /workspace/TinyLlama-1.1B-Chat-v0.3 ./models/tinyllama-1b -ln -sfn /workspace/CodeLlama-7b-hf ./models/codellama-7b -ln -sfn /workspace/CodeLlama-13b-hf ./models/codellama-13b -ln -sfn /workspace/CodeLlama-34b-hf ./models/codellama-34b -ln -sfn /workspace/CodeLlama-7b-Instruct-hf ./models/codellama-7b-instruct -ln -sfn /workspace/CodeLlama-13b-Instruct-hf ./models/codellama-13b-instruct -ln -sfn /workspace/CodeLlama-34b-Instruct-hf ./models/codellama-34b-instruct - -pip install -r requirements.txt - -# cmake -cd /workspace/llama.cpp - -mkdir build-cublas -cd build-cublas - -cmake -DGGML_CUDA=1 ../ -make -j - -if [ "$1" -eq "0" ]; then - exit 0 -fi - -# more models -if [ "$1" -eq "1" ]; then - cd /workspace - - git-lfs-download https://huggingface.co/PY007/TinyLlama-1.1B-Chat-v0.3 - - cd /workspace/llama.cpp - - python3 examples/convert_legacy_llama.py ./models/tinyllama-1b --outfile ./models/tinyllama-1b/ggml-model-f16.gguf --outtype f16 - - ./llama-quantize ./models/tinyllama-1b/ggml-model-f16.gguf ./models/tinyllama-1b/ggml-model-q4_0.gguf q4_0 - ./llama-quantize ./models/tinyllama-1b/ggml-model-f16.gguf ./models/tinyllama-1b/ggml-model-q4_k.gguf q4_k - ./llama-quantize ./models/tinyllama-1b/ggml-model-f16.gguf ./models/tinyllama-1b/ggml-model-q8_0.gguf q8_0 -fi - -if [ "$1" -eq "2" ]; then - cd /workspace - - git-lfs-download 
https://huggingface.co/codellama/CodeLlama-7b-hf --without *safetensors* - rm -v ./CodeLlama-7b-hf/*safetensors* - - cd /workspace/llama.cpp - - python3 examples/convert_legacy_llama.py ./models/codellama-7b --outfile ./models/codellama-7b/ggml-model-f16.gguf --outtype f16 - - ./llama-quantize ./models/codellama-7b/ggml-model-f16.gguf ./models/codellama-7b/ggml-model-q4_0.gguf q4_0 - ./llama-quantize ./models/codellama-7b/ggml-model-f16.gguf ./models/codellama-7b/ggml-model-q4_k.gguf q4_k - ./llama-quantize ./models/codellama-7b/ggml-model-f16.gguf ./models/codellama-7b/ggml-model-q8_0.gguf q8_0 -fi - -if [ "$1" -eq "3" ]; then - cd /workspace - - git-lfs-download https://huggingface.co/codellama/CodeLlama-13b-hf --without *safetensors* - rm -v ./CodeLlama-13b-hf/*safetensors* - - cd /workspace/llama.cpp - - python3 examples/convert_legacy_llama.py ./models/codellama-13b --outfile ./models/codellama-13b/ggml-model-f16.gguf --outtype f16 - - ./llama-quantize ./models/codellama-13b/ggml-model-f16.gguf ./models/codellama-13b/ggml-model-q4_0.gguf q4_0 - ./llama-quantize ./models/codellama-13b/ggml-model-f16.gguf ./models/codellama-13b/ggml-model-q4_k.gguf q4_k - ./llama-quantize ./models/codellama-13b/ggml-model-f16.gguf ./models/codellama-13b/ggml-model-q8_0.gguf q8_0 -fi - -if [ "$1" -eq "4" ]; then - cd /workspace - - git-lfs-download https://huggingface.co/codellama/CodeLlama-34b-hf --without *safetensors* - rm -v ./CodeLlama-34b-hf/*safetensors* - - cd /workspace/llama.cpp - - python3 examples/convert_legacy_llama.py ./models/codellama-34b --outfile ./models/codellama-34b/ggml-model-f16.gguf --outtype f16 - - ./llama-quantize ./models/codellama-34b/ggml-model-f16.gguf ./models/codellama-34b/ggml-model-q4_0.gguf q4_0 - ./llama-quantize ./models/codellama-34b/ggml-model-f16.gguf ./models/codellama-34b/ggml-model-q4_k.gguf q4_k - ./llama-quantize ./models/codellama-34b/ggml-model-f16.gguf ./models/codellama-34b/ggml-model-q8_0.gguf q8_0 -fi - -if [ "$1" -eq "5" ]; then - cd /workspace - - git-lfs-download https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf --without *safetensors* - rm -v ./CodeLlama-7b-Instruct-hf/*safetensors* - - cd /workspace/llama.cpp - - python3 examples/convert_legacy_llama.py ./models/codellama-7b-instruct --outfile ./models/codellama-7b-instruct/ggml-model-f16.gguf --outtype f16 - - ./llama-quantize ./models/codellama-7b-instruct/ggml-model-f16.gguf ./models/codellama-7b-instruct/ggml-model-q4_0.gguf q4_0 - ./llama-quantize ./models/codellama-7b-instruct/ggml-model-f16.gguf ./models/codellama-7b-instruct/ggml-model-q4_k.gguf q4_k - ./llama-quantize ./models/codellama-7b-instruct/ggml-model-f16.gguf ./models/codellama-7b-instruct/ggml-model-q8_0.gguf q8_0 -fi - -if [ "$1" -eq "6" ]; then - cd /workspace - - git-lfs-download https://huggingface.co/codellama/CodeLlama-13b-Instruct-hf --without *safetensors* - rm -v ./CodeLlama-13b-Instruct-hf/*safetensors* - - cd /workspace/llama.cpp - - python3 examples/convert_legacy_llama.py ./models/codellama-13b-instruct --outfile ./models/codellama-13b-instruct/ggml-model-f16.gguf --outtype f16 - - ./llama-quantize ./models/codellama-13b-instruct/ggml-model-f16.gguf ./models/codellama-13b-instruct/ggml-model-q4_0.gguf q4_0 - ./llama-quantize ./models/codellama-13b-instruct/ggml-model-f16.gguf ./models/codellama-13b-instruct/ggml-model-q4_k.gguf q4_k - ./llama-quantize ./models/codellama-13b-instruct/ggml-model-f16.gguf ./models/codellama-13b-instruct/ggml-model-q8_0.gguf q8_0 -fi - -if [ "$1" -eq "7" ]; then - cd /workspace 
- - git-lfs-download https://huggingface.co/codellama/CodeLlama-34b-Instruct-hf --without *safetensors* - rm -v ./CodeLlama-34b-Instruct-hf/*safetensors* - - cd /workspace/llama.cpp - - python3 examples/convert_legacy_llama.py ./models/codellama-34b-instruct --outfile ./models/codellama-34b-instruct/ggml-model-f16.gguf --outtype f16 - - ./llama-quantize ./models/codellama-34b-instruct/ggml-model-f16.gguf ./models/codellama-34b-instruct/ggml-model-q4_0.gguf q4_0 - ./llama-quantize ./models/codellama-34b-instruct/ggml-model-f16.gguf ./models/codellama-34b-instruct/ggml-model-q4_k.gguf q4_k - ./llama-quantize ./models/codellama-34b-instruct/ggml-model-f16.gguf ./models/codellama-34b-instruct/ggml-model-q8_0.gguf q8_0 -fi - -if [ "$1" -eq "1" ]; then - # perf + perplexity - cd /workspace/llama.cpp/build-cublas - - make -j && ../scripts/run-all-perf.sh tinyllama-1b "f16" "-ngl 99 -t 1 -p 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,32,64,128,256,512,1024,2048 -n 128" - - ../scripts/get-wikitext-2.sh - unzip wikitext-2-raw-v1.zip - - make -j && ./bin/llama-perplexity -m ../models/tinyllama-1b/ggml-model-f16.gguf -f ./wikitext-2-raw/wiki.test.raw -ngl 100 --chunks 32 - - # batched - cd /workspace/llama.cpp - - GGML_CUDA=1 make -j && ./llama-batched ./models/tinyllama-1b/ggml-model-f16.gguf "Hello, my name is" 8 128 999 - - # batched-bench - cd /workspace/llama.cpp - - GGML_CUDA=1 make -j && ./llama-batched-bench ./models/tinyllama-1b/ggml-model-f16.gguf 4608 1 99 0 512 128 1,2,3,4,5,6,7,8,16,32 - - # parallel - cd /workspace/llama.cpp - - GGML_CUDA=1 make -j && ./llama-parallel -m ./models/tinyllama-1b/ggml-model-f16.gguf -t 1 -ngl 100 -c 4096 -b 512 -s 1 -np 8 -ns 128 -n 100 -cb - -fi - -# speculative -#if [ "$1" -eq "7" ]; then -# cd /workspace/llama.cpp -# -# GGML_CUDA=1 make -j && ./llama-speculative -m ./models/codellama-34b-instruct/ggml-model-f16.gguf -md ./models/codellama-7b-instruct/ggml-model-q4_0.gguf -p "# Dijkstra's shortest path algorithm in Python (4 spaces indentation) + complexity analysis:\n\n" -e -ngl 999 -ngld 999 -t 4 -n 512 -c 4096 -s 21 --draft 16 -np 1 --temp 0.0 -#fi - -# more benches -#GGML_CUDA=1 make -j && ./llama-batched-bench ./models/codellama-7b/ggml-model-q4_k.gguf 4096 1 99 1 512,3200 128,128,800 1 -#GGML_CUDA=1 make -j && ./llama-batched-bench ./models/codellama-13b/ggml-model-q4_k.gguf 4096 1 99 1 512,3200 128,128,800 1 diff --git a/scripts/server-llm.sh b/scripts/server-llm.sh deleted file mode 100644 index 802592a3e0d3b..0000000000000 --- a/scripts/server-llm.sh +++ /dev/null @@ -1,418 +0,0 @@ -#!/bin/bash -# -# Helper script for deploying llama.cpp server with a single Bash command -# -# - Works on Linux and macOS -# - Supports: CPU, CUDA, Metal -# - Can run all GGUF models from HuggingFace -# - Can serve requests in parallel -# - Always builds latest llama.cpp from GitHub -# -# Limitations -# -# - Chat templates are poorly supported (base models recommended) -# - Might be unstable! 
-# -# Usage: -# ./server-llm.sh [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose] [-non-interactive] -# -# --port: port number, default is 8888 -# --repo: path to a repo containing GGUF model files -# --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input -# --backend: cpu, cuda, metal, depends on the OS -# --gpu-id: gpu id, default is 0 -# --n-parallel: number of parallel requests, default is 8 -# --n-kv: KV cache size, default is 4096 -# --verbose: verbose output -# --non-interactive: run without asking a permission to run -# -# Example: -# -# bash -c "$(curl -s https://ggml.ai/server-llm.sh)" -# - -set -e - -# required utils: curl, git, make -if ! command -v curl &> /dev/null; then - printf "[-] curl not found\n" - exit 1 -fi -if ! command -v git &> /dev/null; then - printf "[-] git not found\n" - exit 1 -fi -if ! command -v make &> /dev/null; then - printf "[-] make not found\n" - exit 1 -fi - -# parse arguments -is_interactive=1 -port=8888 -repo="" -wtype="" -backend="cpu" - -# if macOS, use metal backend by default -if [[ "$OSTYPE" == "darwin"* ]]; then - backend="metal" -elif command -v nvcc &> /dev/null; then - backend="cuda" -fi - -gpu_id=0 -n_parallel=8 -n_kv=4096 -verbose=0 - -function print_usage { - printf "Usage:\n" - printf " ./server-llm.sh [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose] [-non-interactive]\n\n" - printf " --port: port number, default is 8888\n" - printf " --repo: path to a repo containing GGUF model files\n" - printf " --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input\n" - printf " --backend: cpu, cuda, metal, depends on the OS\n" - printf " --gpu-id: gpu id, default is 0\n" - printf " --n-parallel: number of parallel requests, default is 8\n" - printf " --n-kv: KV cache size, default is 4096\n" - printf " --verbose: verbose output\n\n" - printf " --non-interactive: run without asking a permission to run\n" - printf "Example:\n\n" - printf ' bash -c "$(curl -s https://ggml.ai/server-llm.sh)"\n\n' -} - -while [[ $# -gt 0 ]]; do - key="$1" - case $key in - --non-interactive) - is_interactive=0 - shift - ;; - --port) - port="$2" - shift - shift - ;; - --repo) - repo="$2" - shift - shift - ;; - --wtype) - wtype="$2" - shift - shift - ;; - --backend) - backend="$2" - shift - shift - ;; - --gpu-id) - gpu_id="$2" - shift - shift - ;; - --n-parallel) - n_parallel="$2" - shift - shift - ;; - --n-kv) - n_kv="$2" - shift - shift - ;; - --verbose) - verbose=1 - shift - ;; - --help) - print_usage - exit 0 - ;; - *) - echo "Unknown argument: $key" - print_usage - exit 1 - ;; - esac -done - -# available weights types -wtypes=("F16" "Q8_0" "Q4_0" "Q4_1" "Q5_0" "Q5_1" "Q6_K" "Q5_K_M" "Q5_K_S" "Q4_K_M" "Q4_K_S" "Q3_K_L" "Q3_K_M" "Q3_K_S" "Q2_K") - -wfiles=() -for wt in "${wtypes[@]}"; do - wfiles+=("") -done - -# map wtype input to index -if [[ ! 
-z "$wtype" ]]; then - iw=-1 - is=0 - for wt in "${wtypes[@]}"; do - # uppercase - uwt=$(echo "$wt" | tr '[:lower:]' '[:upper:]') - if [[ "$uwt" == "$wtype" ]]; then - iw=$is - break - fi - is=$((is+1)) - done - - if [[ $iw -eq -1 ]]; then - printf "[-] Invalid weight type: %s\n" "$wtype" - exit 1 - fi - - wtype="$iw" -fi - -# sample repos -repos=( - "https://huggingface.co/TheBloke/Llama-2-7B-GGUF" - "https://huggingface.co/TheBloke/Llama-2-13B-GGUF" - "https://huggingface.co/TheBloke/Llama-2-70B-GGUF" - "https://huggingface.co/TheBloke/CodeLlama-7B-GGUF" - "https://huggingface.co/TheBloke/CodeLlama-13B-GGUF" - "https://huggingface.co/TheBloke/CodeLlama-34B-GGUF" - "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF" - "https://huggingface.co/TheBloke/zephyr-7B-beta-GGUF" - "https://huggingface.co/TheBloke/OpenHermes-2-Mistral-7B-GGUF" - "https://huggingface.co/TheBloke/CausalLM-7B-GGUF" -) -if [ $is_interactive -eq 1 ]; then - printf "\n" - printf "[I] This is a helper script for deploying llama.cpp's server on this machine.\n\n" - printf " Based on the options that follow, the script might download a model file\n" - printf " from the internet, which can be a few GBs in size. The script will also\n" - printf " build the latest llama.cpp source code from GitHub, which can be unstable.\n" - printf "\n" - printf " Upon success, an HTTP server will be started and it will serve the selected\n" - printf " model using llama.cpp for demonstration purposes.\n" - printf "\n" - printf " Please note:\n" - printf "\n" - printf " - All new data will be stored in the current folder\n" - printf " - The server will be listening on all network interfaces\n" - printf " - The server will run with default settings which are not always optimal\n" - printf " - Do not judge the quality of a model based on the results from this script\n" - printf " - Do not use this script to benchmark llama.cpp\n" - printf " - Do not use this script in production\n" - printf " - This script is only for demonstration purposes\n" - printf "\n" - printf " If you don't know what you are doing, please press Ctrl-C to abort now\n" - printf "\n" - printf " Press Enter to continue ...\n\n" - - read -fi - -if [[ -z "$repo" ]]; then - printf "[+] No repo provided from the command line\n" - printf " Please select a number from the list below or enter an URL:\n\n" - - is=0 - for r in "${repos[@]}"; do - printf " %2d) %s\n" $is "$r" - is=$((is+1)) - done - - # ask for repo until index of sample repo is provided or an URL - while [[ -z "$repo" ]]; do - printf "\n Or choose one from: https://huggingface.co/models?sort=trending&search=gguf\n\n" - read -p "[+] Select repo: " repo - - # check if the input is a number - if [[ "$repo" =~ ^[0-9]+$ ]]; then - if [[ "$repo" -ge 0 && "$repo" -lt ${#repos[@]} ]]; then - repo="${repos[$repo]}" - else - printf "[-] Invalid repo index: %s\n" "$repo" - repo="" - fi - elif [[ "$repo" =~ ^https?:// ]]; then - repo="$repo" - else - printf "[-] Invalid repo URL: %s\n" "$repo" - repo="" - fi - done -fi - -# remove suffix -repo=$(echo "$repo" | sed -E 's/\/tree\/main$//g') - -printf "[+] Checking for GGUF model files in %s\n" "$repo" - -# find GGUF files in the source -# TODO: better logic -model_tree="${repo%/}/tree/main" -model_files=$(curl -s "$model_tree" | grep -i "\\.gguf" | sed -E 's/.*(.*)<\/span><\/a>/\1/g') - -# list all files in the provided git repo -printf "[+] Model files:\n\n" -for file in $model_files; do - # determine iw by grepping the filename with wtypes - iw=-1 - is=0 - for wt in 
"${wtypes[@]}"; do - # uppercase - ufile=$(echo "$file" | tr '[:lower:]' '[:upper:]') - if [[ "$ufile" =~ "$wt" ]]; then - iw=$is - break - fi - is=$((is+1)) - done - - if [[ $iw -eq -1 ]]; then - continue - fi - - wfiles[$iw]="$file" - - have=" " - if [[ -f "$file" ]]; then - have="*" - fi - - printf " %2d) %s %s\n" $iw "$have" "$file" -done - -wfile="${wfiles[$wtype]}" - -# ask for weights type until provided and available -while [[ -z "$wfile" ]]; do - printf "\n" - read -p "[+] Select weight type: " wtype - wfile="${wfiles[$wtype]}" - - if [[ -z "$wfile" ]]; then - printf "[-] Invalid weight type: %s\n" "$wtype" - wtype="" - fi -done - -printf "[+] Selected weight type: %s (%s)\n" "$wtype" "$wfile" - -url="${repo%/}/resolve/main/$wfile" - -# check file if the model has been downloaded before -chk="$wfile.chk" - -# check if we should download the file -# - if $wfile does not exist -# - if $wfile exists but $chk does not exist -# - if $wfile exists and $chk exists but $wfile is newer than $chk -# TODO: better logic using git lfs info - -do_download=0 - -if [[ ! -f "$wfile" ]]; then - do_download=1 -elif [[ ! -f "$chk" ]]; then - do_download=1 -elif [[ "$wfile" -nt "$chk" ]]; then - do_download=1 -fi - -if [[ $do_download -eq 1 ]]; then - printf "[+] Downloading weights from %s\n" "$url" - - # download the weights file - curl -o "$wfile" -# -L "$url" - - # create a check file if successful - if [[ $? -eq 0 ]]; then - printf "[+] Creating check file %s\n" "$chk" - touch "$chk" - fi -else - printf "[+] Using cached weights %s\n" "$wfile" -fi - -# get latest llama.cpp and build - -printf "[+] Downloading latest llama.cpp\n" - -llama_cpp_dir="__llama_cpp_port_${port}__" - -if [[ -d "$llama_cpp_dir" && ! -f "$llama_cpp_dir/__ggml_script__" ]]; then - # if the dir exists and there isn't a file "__ggml_script__" in it, abort - printf "[-] Directory %s already exists\n" "$llama_cpp_dir" - printf "[-] Please remove it and try again\n" - exit 1 -elif [[ -d "$llama_cpp_dir" ]]; then - printf "[+] Directory %s already exists\n" "$llama_cpp_dir" - printf "[+] Using cached llama.cpp\n" - - cd "$llama_cpp_dir" - git reset --hard - git fetch - git checkout origin/master - - cd .. 
-else - printf "[+] Cloning llama.cpp\n" - - git clone https://github.com/ggerganov/llama.cpp "$llama_cpp_dir" -fi - -# mark that that the directory is made by this script -touch "$llama_cpp_dir/__ggml_script__" - -if [[ $verbose -eq 1 ]]; then - set -x -fi - -# build -cd "$llama_cpp_dir" - -make clean - -log="--silent" -if [[ $verbose -eq 1 ]]; then - log="" -fi - -if [[ "$backend" == "cuda" ]]; then - printf "[+] Building with CUDA backend\n" - GGML_CUDA=1 make -j llama-server $log -elif [[ "$backend" == "cpu" ]]; then - printf "[+] Building with CPU backend\n" - make -j llama-server $log -elif [[ "$backend" == "metal" ]]; then - printf "[+] Building with Metal backend\n" - make -j llama-server $log -else - printf "[-] Unknown backend: %s\n" "$backend" - exit 1 -fi - -# run the server - -printf "[+] Running server\n" - -args="" -if [[ "$backend" == "cuda" ]]; then - export CUDA_VISIBLE_DEVICES=$gpu_id - args="-ngl 999" -elif [[ "$backend" == "cpu" ]]; then - args="-ngl 0" -elif [[ "$backend" == "metal" ]]; then - args="-ngl 999" -else - printf "[-] Unknown backend: %s\n" "$backend" - exit 1 -fi - -if [[ $verbose -eq 1 ]]; then - args="$args --verbose" -fi - -./llama-server -m "../$wfile" --host 0.0.0.0 --port "$port" -c $n_kv -np "$n_parallel" $args - -exit 0 diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh index d0815cf89e2f5..8cf25b77f98ea 100755 --- a/scripts/sync-ggml-am.sh +++ b/scripts/sync-ggml-am.sh @@ -73,7 +73,6 @@ while read c; do src/ggml*.h \ src/ggml*.c \ src/ggml*.cpp \ - src/ggml-amx/* \ src/ggml-blas/* \ src/ggml-cann/* \ src/ggml-cpu/* \ @@ -124,7 +123,6 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then # src/ggml*.c -> ggml/src/ggml*.c # src/ggml*.cpp -> ggml/src/ggml*.cpp # src/ggml*.h -> ggml/src/ggml*.h - # src/ggml-amx/* -> ggml/src/ggml-amx/* # src/ggml-blas/* -> ggml/src/ggml-blas/* # src/ggml-cann/* -> ggml/src/ggml-cann/* # src/ggml-cpu/* -> ggml/src/ggml-cpu/* @@ -151,7 +149,6 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then -e 's/([[:space:]]|[ab]\/)src\/ggml(.*)\.c/\1ggml\/src\/ggml\2.c/g' \ -e 's/([[:space:]]|[ab]\/)src\/ggml(.*)\.cpp/\1ggml\/src\/ggml\2.cpp/g' \ -e 's/([[:space:]]|[ab]\/)src\/ggml(.*)\.h/\1ggml\/src\/ggml\2.h/g' \ - -e 's/([[:space:]]|[ab]\/)src\/ggml-amx\//\1ggml\/src\/ggml-amx\//g' \ -e 's/([[:space:]]|[ab]\/)src\/ggml-blas\//\1ggml\/src\/ggml-blas\//g' \ -e 's/([[:space:]]|[ab]\/)src\/ggml-cann\//\1ggml\/src\/ggml-cann\//g' \ -e 's/([[:space:]]|[ab]\/)src\/ggml-cpu\//\1ggml\/src\/ggml-cpu\//g' \ diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index fd71a0a46fa8e..47eae44f7d7b6 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -c598cbe30621251e80acbcf3b601589a37c17f4d +74d66b63eaf207a24f3e93bb922aba131cbf2906 diff --git a/scripts/sync-ggml.sh b/scripts/sync-ggml.sh index 000270afbfd1e..f81615bb6b76d 100755 --- a/scripts/sync-ggml.sh +++ b/scripts/sync-ggml.sh @@ -7,7 +7,6 @@ cp -rpv ../ggml/cmake/FindSIMD.cmake ./ggml/cmake/FindSIMD.cmake cp -rpv ../ggml/src/ggml*.c ./ggml/src/ cp -rpv ../ggml/src/ggml*.cpp ./ggml/src/ cp -rpv ../ggml/src/ggml*.h ./ggml/src/ -cp -rpv ../ggml/src/ggml-amx/* ./ggml/src/ggml-amx/ cp -rpv ../ggml/src/ggml-blas/* ./ggml/src/ggml-blas/ cp -rpv ../ggml/src/ggml-cann/* ./ggml/src/ggml-cann/ cp -rpv ../ggml/src/ggml-cpu/* ./ggml/src/ggml-cpu/ diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 2f581b9218728..f3b3908b11222 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -25,7 +25,7 @@ add_library(llama ) target_include_directories(llama PUBLIC . 
../include) -target_compile_features (llama PUBLIC cxx_std_11) # don't bump +target_compile_features (llama PUBLIC cxx_std_17) # don't bump target_link_libraries(llama PUBLIC ggml) diff --git a/src/llama-vocab.cpp b/src/llama-vocab.cpp index d1dc96276c2a2..8c9aaf5a0c85d 100644 --- a/src/llama-vocab.cpp +++ b/src/llama-vocab.cpp @@ -418,6 +418,7 @@ struct llm_tokenizer_bpe : llm_tokenizer { case LLAMA_VOCAB_PRE_TYPE_SMOLLM: case LLAMA_VOCAB_PRE_TYPE_CODESHELL: case LLAMA_VOCAB_PRE_TYPE_EXAONE: + case LLAMA_VOCAB_PRE_TYPE_MINERVA: regex_exprs = { "\\p{N}", "'s|'t|'re|'ve|'m|'ll|'d| ?\\p{L}+| ?\\p{N}+| ?[^\\s\\p{L}\\p{N}]+|\\s+(?!\\S)", diff --git a/src/llama.cpp b/src/llama.cpp index 31eb0a634c6c5..83ed32800bccd 100644 --- a/src/llama.cpp +++ b/src/llama.cpp @@ -1057,6 +1057,8 @@ static const std::map> LLM_TENSOR_N { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, { LLM_TENSOR_OUTPUT, "output" }, { LLM_TENSOR_ROPE_FREQS, "rope_freqs" }, + { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" }, + { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" }, { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, @@ -1570,6 +1572,67 @@ static const std::map> LLM_TENSOR_N }, }; +enum llm_chat_template { + LLM_CHAT_TEMPLATE_CHATML, + LLM_CHAT_TEMPLATE_LLAMA_2, + LLM_CHAT_TEMPLATE_LLAMA_2_SYS, + LLM_CHAT_TEMPLATE_LLAMA_2_SYS_BOS, + LLM_CHAT_TEMPLATE_LLAMA_2_SYS_STRIP, + LLM_CHAT_TEMPLATE_MISTRAL_V1, + LLM_CHAT_TEMPLATE_MISTRAL_V3, + LLM_CHAT_TEMPLATE_MISTRAL_V3_TEKKEN, + LLM_CHAT_TEMPLATE_MISTRAL_V7, + LLM_CHAT_TEMPLATE_PHI_3, + LLM_CHAT_TEMPLATE_ZEPHYR, + LLM_CHAT_TEMPLATE_MONARCH, + LLM_CHAT_TEMPLATE_GEMMA, + LLM_CHAT_TEMPLATE_ORION, + LLM_CHAT_TEMPLATE_OPENCHAT, + LLM_CHAT_TEMPLATE_VICUNA, + LLM_CHAT_TEMPLATE_VICUNA_ORCA, + LLM_CHAT_TEMPLATE_DEEPSEEK, + LLM_CHAT_TEMPLATE_DEEPSEEK_2, + LLM_CHAT_TEMPLATE_COMMAND_R, + LLM_CHAT_TEMPLATE_LLAMA_3, + LLM_CHAT_TEMPLATE_CHATGML_3, + LLM_CHAT_TEMPLATE_CHATGML_4, + LLM_CHAT_TEMPLATE_MINICPM, + LLM_CHAT_TEMPLATE_EXAONE_3, + LLM_CHAT_TEMPLATE_RWKV_WORLD, + LLM_CHAT_TEMPLATE_GRANITE, + LLM_CHAT_TEMPLATE_UNKNOWN, +}; + +static const std::map LLM_CHAT_TEMPLATES = { + { "chatml", LLM_CHAT_TEMPLATE_CHATML }, + { "llama2", LLM_CHAT_TEMPLATE_LLAMA_2 }, + { "llama2-sys", LLM_CHAT_TEMPLATE_LLAMA_2_SYS }, + { "llama2-sys-bos", LLM_CHAT_TEMPLATE_LLAMA_2_SYS_BOS }, + { "llama2-sys-strip", LLM_CHAT_TEMPLATE_LLAMA_2_SYS_STRIP }, + { "mistral-v1", LLM_CHAT_TEMPLATE_MISTRAL_V1 }, + { "mistral-v3", LLM_CHAT_TEMPLATE_MISTRAL_V3 }, + { "mistral-v3-tekken", LLM_CHAT_TEMPLATE_MISTRAL_V3_TEKKEN }, + { "mistral-v7", LLM_CHAT_TEMPLATE_MISTRAL_V7 }, + { "phi3", LLM_CHAT_TEMPLATE_PHI_3 }, + { "zephyr", LLM_CHAT_TEMPLATE_ZEPHYR }, + { "monarch", LLM_CHAT_TEMPLATE_MONARCH }, + { "gemma", LLM_CHAT_TEMPLATE_GEMMA }, + { "orion", LLM_CHAT_TEMPLATE_ORION }, + { "openchat", LLM_CHAT_TEMPLATE_OPENCHAT }, + { "vicuna", LLM_CHAT_TEMPLATE_VICUNA }, + { "vicuna-orca", LLM_CHAT_TEMPLATE_VICUNA_ORCA }, + { "deepseek", LLM_CHAT_TEMPLATE_DEEPSEEK }, + { "deepseek2", LLM_CHAT_TEMPLATE_DEEPSEEK_2 }, + { "command-r", LLM_CHAT_TEMPLATE_COMMAND_R }, + { "llama3", LLM_CHAT_TEMPLATE_LLAMA_3 }, + { "chatglm3", LLM_CHAT_TEMPLATE_CHATGML_3 }, + { "chatglm4", LLM_CHAT_TEMPLATE_CHATGML_4 }, + { "minicpm", LLM_CHAT_TEMPLATE_MINICPM }, + { "exaone3", LLM_CHAT_TEMPLATE_EXAONE_3 }, + { "rwkv-world", LLM_CHAT_TEMPLATE_RWKV_WORLD }, + { "granite", LLM_CHAT_TEMPLATE_GRANITE }, +}; + static llm_arch llm_arch_from_string(const std::string & name) { for (const 
auto & kv : LLM_ARCH_NAMES) { // NOLINT if (kv.second == name) { @@ -1643,9 +1706,10 @@ struct LLM_TN { // static const std::map LLAMA_ROPE_SCALING_TYPES = { - { LLAMA_ROPE_SCALING_TYPE_NONE, "none" }, - { LLAMA_ROPE_SCALING_TYPE_LINEAR, "linear" }, - { LLAMA_ROPE_SCALING_TYPE_YARN, "yarn" }, + { LLAMA_ROPE_SCALING_TYPE_NONE, "none" }, + { LLAMA_ROPE_SCALING_TYPE_LINEAR, "linear" }, + { LLAMA_ROPE_SCALING_TYPE_YARN, "yarn" }, + { LLAMA_ROPE_SCALING_TYPE_LONGROPE, "longrope" }, }; static llama_rope_scaling_type llama_rope_scaling_type_from_string(const std::string & name) { @@ -4544,9 +4608,6 @@ struct llama_model_loader { case GGML_TYPE_IQ4_NL: ftype = LLAMA_FTYPE_MOSTLY_IQ4_NL; break; case GGML_TYPE_IQ4_XS: ftype = LLAMA_FTYPE_MOSTLY_IQ4_XS; break; case GGML_TYPE_IQ3_S: ftype = LLAMA_FTYPE_MOSTLY_IQ3_S; break; - case GGML_TYPE_Q4_0_4_4: ftype = LLAMA_FTYPE_MOSTLY_Q4_0_4_4; break; - case GGML_TYPE_Q4_0_4_8: ftype = LLAMA_FTYPE_MOSTLY_Q4_0_4_8; break; - case GGML_TYPE_Q4_0_8_8: ftype = LLAMA_FTYPE_MOSTLY_Q4_0_8_8; break; default: { LLAMA_LOG_WARN("%s: unknown type %s\n", __func__, ggml_type_name(type_max)); @@ -5310,9 +5371,6 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_IQ4_XS: return "IQ4_XS - 4.25 bpw"; case LLAMA_FTYPE_MOSTLY_IQ3_S: return "IQ3_S - 3.4375 bpw"; case LLAMA_FTYPE_MOSTLY_IQ3_M: return "IQ3_S mix - 3.66 bpw"; - case LLAMA_FTYPE_MOSTLY_Q4_0_4_4: return "Q4_0_4_4"; - case LLAMA_FTYPE_MOSTLY_Q4_0_4_8: return "Q4_0_4_8"; - case LLAMA_FTYPE_MOSTLY_Q4_0_8_8: return "Q4_0_8_8"; default: return "unknown, may not work"; } @@ -5549,8 +5607,12 @@ static void llm_load_hparams( case LLM_ARCH_MINICPM: { ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); + ml.get_key(LLM_KV_EMBEDDING_SCALE, hparams.f_embedding_scale); + ml.get_key(LLM_KV_RESIDUAL_SCALE, hparams.f_residual_scale); + ml.get_key(LLM_KV_LOGIT_SCALE, hparams.f_logit_scale); switch (hparams.n_layer) { + case 52: model.type = e_model::MODEL_1B; break; case 40: model.type = e_model::MODEL_2B; break; default: model.type = e_model::MODEL_UNKNOWN; } @@ -6447,6 +6509,9 @@ static void llm_load_vocab( vocab.type_pre = LLAMA_VOCAB_PRE_TYPE_CHAMELEON; vocab.tokenizer_add_bos = true; vocab.tokenizer_clean_spaces = false; + } else if ( + tokenizer_pre == "minerva-7b") { + vocab.type_pre = LLAMA_VOCAB_PRE_TYPE_MINERVA; } else { throw std::runtime_error(format("unknown pre-tokenizer type: '%s'", tokenizer_pre.c_str())); } @@ -7040,7 +7105,7 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: n_ff_shexp = %d\n", __func__, hparams.n_ff_shexp); } - if (model.arch == LLM_ARCH_GRANITE || model.arch == LLM_ARCH_GRANITE_MOE) { + if (model.arch == LLM_ARCH_MINICPM || model.arch == LLM_ARCH_GRANITE || model.arch == LLM_ARCH_GRANITE_MOE) { LLAMA_LOG_INFO("%s: f_embedding_scale = %f\n", __func__, hparams.f_embedding_scale); LLAMA_LOG_INFO("%s: f_residual_scale = %f\n", __func__, hparams.f_residual_scale); LLAMA_LOG_INFO("%s: f_attention_scale = %f\n", __func__, hparams.f_attention_scale); @@ -7665,7 +7730,13 @@ static bool llm_load_tensors( layer.ffn_norm = create_tensor(tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, 0); - layer.rope_freqs = create_tensor(tn(LLM_TENSOR_ROPE_FREQS, "weight", i), {n_rot/2}, llama_model_loader::TENSOR_NOT_REQUIRED | (i != 0 ? 
llama_model_loader::TENSOR_DUPLICATED : 0)); + if (hparams.rope_scaling_type_train == LLAMA_ROPE_SCALING_TYPE_LONGROPE) { + layer.rope_long = create_tensor(tn(LLM_TENSOR_ROPE_FACTORS_LONG, "weight", i), {n_rot/2}, llama_model_loader::TENSOR_NOT_REQUIRED | (i != 0 ? llama_model_loader::TENSOR_DUPLICATED : 0)); + layer.rope_short = create_tensor(tn(LLM_TENSOR_ROPE_FACTORS_SHORT, "weight", i), {n_rot/2}, llama_model_loader::TENSOR_NOT_REQUIRED | (i != 0 ? llama_model_loader::TENSOR_DUPLICATED : 0)); + } + else { + layer.rope_freqs = create_tensor(tn(LLM_TENSOR_ROPE_FREQS, "weight", i), {n_rot/2}, llama_model_loader::TENSOR_NOT_REQUIRED | (i != 0 ? llama_model_loader::TENSOR_DUPLICATED : 0)); + } if (n_expert == 0) { layer.ffn_gate = create_tensor(tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, 0); @@ -13591,153 +13662,6 @@ struct llm_build_context { return gf; } - // ref: https://arxiv.org/abs/2203.03466 - // https://github.com/ggerganov/llama.cpp/issues/5276#issuecomment-1925774738 - // based on the original build_llama() function - struct ggml_cgraph * build_minicpm() { - struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, llama_model_max_nodes(model), false); - - const int64_t n_embd_head = hparams.n_embd_head_v; - GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_head == hparams.n_rot); - - const int64_t n_embd = hparams.n_embd; - //TODO: if the model varies, these parameters need to be read from the model - const int64_t n_embd_base = 256; - const float scale_embd = 12.0f; - const float scale_depth = 1.4f; - - struct ggml_tensor * cur; - struct ggml_tensor * inpL; - - inpL = llm_build_inp_embd(ctx0, lctx, hparams, ubatch, model.tok_embd, cb); - - // scale the input embeddings - inpL = ggml_scale(ctx0, inpL, scale_embd); - cb(inpL, "inp_scaled", -1); - - // inp_pos - contains the positions - struct ggml_tensor * inp_pos = build_inp_pos(); - - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask = build_inp_KQ_mask(); - - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * inpSA = inpL; - - // norm - cur = llm_build_norm(ctx0, inpL, hparams, - model.layers[il].attn_norm, NULL, - LLM_NORM_RMS, cb, il); - cb(cur, "attn_norm", il); - - // self-attention - { - // compute Q and K and RoPE them - struct ggml_tensor * Qcur = llm_build_lora_mm(lctx, ctx0, model.layers[il].wq, cur); - cb(Qcur, "Qcur", il); - if (model.layers[il].bq) { - Qcur = ggml_add(ctx0, Qcur, model.layers[il].bq); - cb(Qcur, "Qcur", il); - } - - struct ggml_tensor * Kcur = llm_build_lora_mm(lctx, ctx0, model.layers[il].wk, cur); - cb(Kcur, "Kcur", il); - if (model.layers[il].bk) { - Kcur = ggml_add(ctx0, Kcur, model.layers[il].bk); - cb(Kcur, "Kcur", il); - } - - struct ggml_tensor * Vcur = llm_build_lora_mm(lctx, ctx0, model.layers[il].wv, cur); - cb(Vcur, "Vcur", il); - if (model.layers[il].bv) { - Vcur = ggml_add(ctx0, Vcur, model.layers[il].bv); - cb(Vcur, "Vcur", il); - } - - Qcur = ggml_rope_ext( - ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, nullptr, - n_rot, rope_type, n_ctx_orig, freq_base, freq_scale, - ext_factor, attn_factor, beta_fast, beta_slow - ); - cb(Qcur, "Qcur", il); - - Kcur = ggml_rope_ext( - ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, nullptr, - n_rot, rope_type, n_ctx_orig, freq_base, freq_scale, - ext_factor, attn_factor, beta_fast, beta_slow - ); - cb(Kcur, "Kcur", il); - - cur = llm_build_kv(ctx0, lctx, kv_self, gf, - model.layers[il].wo, 
model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); - } - - if (il == n_layer - 1) { - // skip computing output for unused tokens - struct ggml_tensor * inp_out_ids = build_inp_out_ids(); - cur = ggml_get_rows(ctx0, cur, inp_out_ids); - inpSA = ggml_get_rows(ctx0, inpSA, inp_out_ids); - } - - // scale_res - scale the hidden states for residual connection - const float scale_res = scale_depth/sqrtf(float(n_layer)); - cur = ggml_scale(ctx0, cur, scale_res); - cb(cur, "hidden_scaled", -1); - - struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); - cb(ffn_inp, "ffn_inp", il); - - // feed-forward network - { - cur = llm_build_norm(ctx0, ffn_inp, hparams, - model.layers[il].ffn_norm, NULL, - LLM_NORM_RMS, cb, il); - cb(cur, "ffn_norm", il); - - cur = llm_build_ffn(ctx0, lctx, cur, - model.layers[il].ffn_up, NULL, NULL, - model.layers[il].ffn_gate, NULL, NULL, - model.layers[il].ffn_down, NULL, NULL, - NULL, - LLM_FFN_SILU, LLM_FFN_PAR, cb, il); - cb(cur, "ffn_out", il); - } - - // scale the hidden states for residual connection - cur = ggml_scale(ctx0, cur, scale_res); - cb(cur, "hidden_scaled_ffn", -1); - - cur = ggml_add(ctx0, cur, ffn_inp); - cur = lctx.cvec.apply_to(ctx0, cur, il); - cb(cur, "l_out", il); - - // input for next layer - inpL = cur; - } - - cur = inpL; - - cur = llm_build_norm(ctx0, cur, hparams, - model.output_norm, NULL, - LLM_NORM_RMS, cb, -1); - cb(cur, "result_norm", -1); - - // lm_head scaling - const float scale_lmhead = float(n_embd_base)/float(n_embd); - cur = ggml_scale(ctx0, cur, scale_lmhead); - cb(cur, "lmhead_scaling", -1); - - // lm_head - cur = llm_build_lora_mm(lctx, ctx0, model.output, cur); - cb(cur, "result_output", -1); - - ggml_build_forward_expand(gf, cur); - - return gf; - } - struct ggml_cgraph * build_minicpm3() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, llama_model_max_nodes(model), false); @@ -16836,6 +16760,7 @@ static struct ggml_cgraph * llama_build_graph( switch (model.arch) { case LLM_ARCH_LLAMA: + case LLM_ARCH_MINICPM: case LLM_ARCH_GRANITE: case LLM_ARCH_GRANITE_MOE: { @@ -16924,10 +16849,6 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_internlm2(); } break; - case LLM_ARCH_MINICPM: - { - result = llm.build_minicpm(); - } break; case LLM_ARCH_MINICPM3: { result = llm.build_minicpm3(); @@ -18600,10 +18521,6 @@ static ggml_type llama_tensor_get_type(quantize_state_internal & qs, ggml_type n else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) { new_type = GGML_TYPE_IQ3_S; } - else if (new_type == GGML_TYPE_Q4_0_4_4 || new_type == GGML_TYPE_Q4_0_4_8 || - new_type == GGML_TYPE_Q4_0_8_8) { - new_type = GGML_TYPE_Q4_0; - } else if (ftype == LLAMA_FTYPE_MOSTLY_TQ1_0 || ftype == LLAMA_FTYPE_MOSTLY_TQ2_0) { new_type = GGML_TYPE_Q4_K; } @@ -18926,9 +18843,6 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_IQ4_XS: default_type = GGML_TYPE_IQ4_XS; break; case LLAMA_FTYPE_MOSTLY_IQ3_S: default_type = GGML_TYPE_IQ3_S; break; case LLAMA_FTYPE_MOSTLY_IQ3_M: default_type = GGML_TYPE_IQ3_S; break; - case LLAMA_FTYPE_MOSTLY_Q4_0_4_4: default_type = GGML_TYPE_Q4_0_4_4; break; - case LLAMA_FTYPE_MOSTLY_Q4_0_4_8: default_type = GGML_TYPE_Q4_0_4_8; break; - case LLAMA_FTYPE_MOSTLY_Q4_0_8_8: default_type = GGML_TYPE_Q4_0_8_8; break; default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); } @@ -19267,14 +19181,6 @@ static void llama_model_quantize_internal(const std::string & 
fname_inp, const s f32_data = (float *) f32_conv_buf.data(); } - int chunk_size_multiplier = 1; - if (new_type == GGML_TYPE_Q4_0_4_4 || new_type == GGML_TYPE_Q4_0_4_8 || new_type == GGML_TYPE_Q4_0_8_8) { - if ((new_type == GGML_TYPE_Q4_0_8_8) && (tensor->ne[1] % 8 != 0)) new_type = GGML_TYPE_Q4_0; - else if (tensor->ne[1] % 4 != 0) new_type = GGML_TYPE_Q4_0; - if (new_type == GGML_TYPE_Q4_0_8_8) chunk_size_multiplier = 8; - else if (new_type == GGML_TYPE_Q4_0_4_4 || new_type == GGML_TYPE_Q4_0_4_8) chunk_size_multiplier = 4; - } - LLAMA_LOG_INFO("converting to %s .. ", ggml_type_name(new_type)); fflush(stdout); @@ -19287,8 +19193,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s const int64_t nrows = tensor->ne[1]; static const int64_t min_chunk_size = 32 * 512; - const int64_t chunk_size = (n_per_row >= min_chunk_size ? n_per_row : n_per_row * ((min_chunk_size + n_per_row - 1)/n_per_row)) * - chunk_size_multiplier; + const int64_t chunk_size = (n_per_row >= min_chunk_size ? n_per_row : n_per_row * ((min_chunk_size + n_per_row - 1)/n_per_row)); const int64_t nelements_matrix = tensor->ne[0] * tensor->ne[1]; const int64_t nchunk = (nelements_matrix + chunk_size - 1)/chunk_size; @@ -22006,18 +21911,109 @@ int32_t llama_detokenize( // chat templates // +static llm_chat_template llama_chat_detect_template(const std::string & tmpl) { + if (LLM_CHAT_TEMPLATES.find(tmpl) != LLM_CHAT_TEMPLATES.end()) { + return LLM_CHAT_TEMPLATES.at(tmpl); + } + auto tmpl_contains = [&tmpl](const char * haystack) -> bool { + return tmpl.find(haystack) != std::string::npos; + }; + if (tmpl_contains("<|im_start|>")) { + return LLM_CHAT_TEMPLATE_CHATML; + } else if (tmpl.find("mistral") == 0 || tmpl_contains("[INST]")) { + if (tmpl_contains("[SYSTEM_PROMPT]")) { + return LLM_CHAT_TEMPLATE_MISTRAL_V7; + } else if ( + // catches official 'v1' template + tmpl_contains("' [INST] ' + system_message") + // catches official 'v3' and 'v3-tekken' templates + || tmpl_contains("[AVAILABLE_TOOLS]") + ) { + // Official mistral 'v1', 'v3' and 'v3-tekken' templates + // See: https://github.com/mistralai/cookbook/blob/main/concept-deep-dive/tokenization/chat_templates.md + // See: https://github.com/mistralai/cookbook/blob/main/concept-deep-dive/tokenization/templates.md + if (tmpl_contains(" [INST]")) { + return LLM_CHAT_TEMPLATE_MISTRAL_V1; + } else if (tmpl_contains("\"[INST]\"")) { + return LLM_CHAT_TEMPLATE_MISTRAL_V3_TEKKEN; + } + return LLM_CHAT_TEMPLATE_MISTRAL_V3; + } else { + // llama2 template and its variants + // [variant] support system message + // See: https://huggingface.co/blog/llama2#how-to-prompt-llama-2 + bool support_system_message = tmpl_contains("<>"); + bool add_bos_inside_history = tmpl_contains("bos_token + '[INST]"); + bool strip_message = tmpl_contains("content.strip()"); + if (strip_message) { + return LLM_CHAT_TEMPLATE_LLAMA_2_SYS_STRIP; + } else if (add_bos_inside_history) { + return LLM_CHAT_TEMPLATE_LLAMA_2_SYS_BOS; + } else if (support_system_message) { + return LLM_CHAT_TEMPLATE_LLAMA_2_SYS; + } else { + return LLM_CHAT_TEMPLATE_LLAMA_2; + } + } + } else if (tmpl_contains("<|assistant|>") && tmpl_contains("<|end|>")) { + return LLM_CHAT_TEMPLATE_PHI_3; + } else if (tmpl_contains("<|user|>") && tmpl_contains("<|endoftext|>")) { + return LLM_CHAT_TEMPLATE_ZEPHYR; + } else if (tmpl_contains("bos_token + message['role']")) { + return LLM_CHAT_TEMPLATE_MONARCH; + } else if (tmpl_contains("")) { + return LLM_CHAT_TEMPLATE_GEMMA; + } else if 
(tmpl_contains("'\\n\\nAssistant: ' + eos_token")) { + // OrionStarAI/Orion-14B-Chat + return LLM_CHAT_TEMPLATE_ORION; + } else if (tmpl_contains("GPT4 Correct ")) { + // openchat/openchat-3.5-0106 + return LLM_CHAT_TEMPLATE_OPENCHAT; + } else if (tmpl_contains("USER: ") && tmpl_contains("ASSISTANT: ")) { + // eachadea/vicuna-13b-1.1 (and Orca variant) + if (tmpl_contains("SYSTEM: ")) { + return LLM_CHAT_TEMPLATE_VICUNA_ORCA; + } + return LLM_CHAT_TEMPLATE_VICUNA; + } else if (tmpl_contains("### Instruction:") && tmpl_contains("<|EOT|>")) { + // deepseek-ai/deepseek-coder-33b-instruct + return LLM_CHAT_TEMPLATE_DEEPSEEK; + } else if (tmpl_contains("<|START_OF_TURN_TOKEN|>") && tmpl_contains("<|USER_TOKEN|>")) { + // CohereForAI/c4ai-command-r-plus + return LLM_CHAT_TEMPLATE_COMMAND_R; + } else if (tmpl_contains("<|start_header_id|>") && tmpl_contains("<|end_header_id|>")) { + return LLM_CHAT_TEMPLATE_LLAMA_3; + } else if (tmpl_contains("[gMASK]sop")) { + // chatglm3-6b + return LLM_CHAT_TEMPLATE_CHATGML_3; + } else if (tmpl_contains("[gMASK]")) { + return LLM_CHAT_TEMPLATE_CHATGML_4; + } else if (tmpl_contains(LU8("<用户>"))) { + // MiniCPM-3B-OpenHermes-2.5-v2-GGUF + return LLM_CHAT_TEMPLATE_MINICPM; + } else if (tmpl_contains("'Assistant: ' + message['content'] + eos_token")) { + return LLM_CHAT_TEMPLATE_DEEPSEEK_2; + } else if (tmpl_contains("[|system|]") && tmpl_contains("[|assistant|]") && tmpl_contains("[|endofturn|]")) { + // ref: https://huggingface.co/LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct/discussions/8#66bae61b1893d14ee8ed85bb + // EXAONE-3.0-7.8B-Instruct + return LLM_CHAT_TEMPLATE_EXAONE_3; + } else if (tmpl_contains("rwkv-world")) { + return LLM_CHAT_TEMPLATE_RWKV_WORLD; + } else if (tmpl_contains("<|start_of_role|>")) { + return LLM_CHAT_TEMPLATE_GRANITE; + } + return LLM_CHAT_TEMPLATE_UNKNOWN; +} + // Simple version of "llama_apply_chat_template" that only works with strings // This function uses heuristic checks to determine commonly used template. It is not a jinja parser. 
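The hunk above splits template handling into two stages: llama_chat_detect_template() maps either a registered name from LLM_CHAT_TEMPLATES (e.g. "mistral-v7") or a raw Jinja template string onto the llm_chat_template enum, and llama_chat_apply_template_internal() below then switches on that enum instead of repeating substring checks. A minimal caller-side sketch of the resulting flow, using the public llama_chat_apply_template() API with the same call shape as the updated tests/test-chat-template.cpp; the conversation contents and the buffer handling are illustrative assumptions, not part of this diff:

```cpp
// Sketch only: formats a short conversation with one of the built-in templates.
// Mirrors the call pattern used in tests/test-chat-template.cpp; the messages
// and the initial 1024-byte buffer are assumptions made for this example.
#include <cstdio>
#include <string>
#include <vector>

#include "llama.h"

static std::string format_chat_example(const char * tmpl) {
    std::vector<llama_chat_message> chat = {
        { "system", "You are a helpful assistant" },
        { "user",   "Hello"                       },
    };

    std::vector<char> buf(1024);
    // llama_chat_apply_template() now runs llama_chat_detect_template() first and
    // returns a negative value when the template cannot be mapped onto the enum.
    int32_t res = llama_chat_apply_template(nullptr, tmpl, chat.data(), chat.size(),
                                            /*add_ass=*/true, buf.data(), (int32_t) buf.size());
    if (res < 0) {
        fprintf(stderr, "unknown chat template: %s\n", tmpl);
        return "";
    }
    if ((size_t) res > buf.size()) {
        buf.resize(res);            // the return value is the required length
        res = llama_chat_apply_template(nullptr, tmpl, chat.data(), chat.size(),
                                        /*add_ass=*/true, buf.data(), (int32_t) buf.size());
    }
    return std::string(buf.data(), res);
}

// e.g. format_chat_example("mistral-v7") or format_chat_example("chatml")
```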
static int32_t llama_chat_apply_template_internal( - const std::string & tmpl, + const llm_chat_template tmpl, const std::vector & chat, std::string & dest, bool add_ass) { // Taken from the research: https://github.com/ggerganov/llama.cpp/issues/5527 std::stringstream ss; - auto tmpl_contains = [&tmpl](std::string haystack) -> bool { - return tmpl.find(haystack) != std::string::npos; - }; - if (tmpl == "chatml" || tmpl_contains("<|im_start|>")) { + if (tmpl == LLM_CHAT_TEMPLATE_CHATML) { // chatml template for (auto message : chat) { ss << "<|im_start|>" << message->role << "\n" << message->content << "<|im_end|>\n"; @@ -22025,16 +22021,59 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<|im_start|>assistant\n"; } - } else if (tmpl == "llama2" || tmpl == "mistral" || tmpl_contains("[INST]")) { + } else if (tmpl == LLM_CHAT_TEMPLATE_MISTRAL_V7) { + // Official mistral 'v7' template + // See: https://huggingface.co/mistralai/Mistral-Large-Instruct-2411#basic-instruct-template-v7 + for (auto message : chat) { + std::string role(message->role); + std::string content(message->content); + if (role == "system") { + ss << "[SYSTEM_PROMPT] " << content << "[/SYSTEM_PROMPT]"; + } else if (role == "user") { + ss << "[INST] " << content << "[/INST]"; + } + else { + ss << " " << content << ""; + } + } + } else if (tmpl == LLM_CHAT_TEMPLATE_MISTRAL_V1 + || tmpl == LLM_CHAT_TEMPLATE_MISTRAL_V3 + || tmpl == LLM_CHAT_TEMPLATE_MISTRAL_V3_TEKKEN) { + // See: https://github.com/mistralai/cookbook/blob/main/concept-deep-dive/tokenization/chat_templates.md + // See: https://github.com/mistralai/cookbook/blob/main/concept-deep-dive/tokenization/templates.md + std::string leading_space = tmpl == LLM_CHAT_TEMPLATE_MISTRAL_V1 ? " " : ""; + std::string trailing_space = tmpl == LLM_CHAT_TEMPLATE_MISTRAL_V3_TEKKEN ? "" : " "; + bool trim_assistant_message = tmpl == LLM_CHAT_TEMPLATE_MISTRAL_V3; + bool is_inside_turn = false; + for (auto message : chat) { + if (!is_inside_turn) { + ss << leading_space << "[INST]" << trailing_space; + is_inside_turn = true; + } + std::string role(message->role); + std::string content(message->content); + if (role == "system") { + ss << content << "\n\n"; + } else if (role == "user") { + ss << content << leading_space << "[/INST]"; + } else { + ss << trailing_space << (trim_assistant_message ? 
trim(content) : content) << ""; + is_inside_turn = false; + } + } + } else if ( + tmpl == LLM_CHAT_TEMPLATE_LLAMA_2 + || tmpl == LLM_CHAT_TEMPLATE_LLAMA_2_SYS + || tmpl == LLM_CHAT_TEMPLATE_LLAMA_2_SYS_BOS + || tmpl == LLM_CHAT_TEMPLATE_LLAMA_2_SYS_STRIP) { // llama2 template and its variants // [variant] support system message - bool support_system_message = tmpl_contains("<>") || tmpl == "mistral"; - // [variant] space before + after response - bool space_around_response = tmpl_contains("' ' + eos_token"); + // See: https://huggingface.co/blog/llama2#how-to-prompt-llama-2 + bool support_system_message = tmpl != LLM_CHAT_TEMPLATE_LLAMA_2; // [variant] add BOS inside history - bool add_bos_inside_history = tmpl_contains("bos_token + '[INST]"); + bool add_bos_inside_history = tmpl == LLM_CHAT_TEMPLATE_LLAMA_2_SYS_BOS; // [variant] trim spaces from the input message - bool strip_message = tmpl_contains("content.strip()"); + bool strip_message = tmpl == LLM_CHAT_TEMPLATE_LLAMA_2_SYS_STRIP; // construct the prompt bool is_inside_turn = true; // skip BOS at the beginning ss << "[INST] "; @@ -22055,12 +22094,11 @@ static int32_t llama_chat_apply_template_internal( } else if (role == "user") { ss << content << " [/INST]"; } else { - ss << (space_around_response ? " " : "") << content << (space_around_response ? " " : "") << ""; + ss << content << ""; is_inside_turn = false; } } - // llama2 templates seem to not care about "add_generation_prompt" - } else if (tmpl == "phi3" || (tmpl_contains("<|assistant|>") && tmpl_contains("<|end|>"))) { + } else if (tmpl == LLM_CHAT_TEMPLATE_PHI_3) { // Phi 3 for (auto message : chat) { std::string role(message->role); @@ -22069,7 +22107,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<|assistant|>\n"; } - } else if (tmpl == "zephyr" || tmpl_contains("<|user|>")) { + } else if (tmpl == LLM_CHAT_TEMPLATE_ZEPHYR) { // zephyr template for (auto message : chat) { ss << "<|" << message->role << "|>" << "\n" << message->content << "<|endoftext|>\n"; @@ -22077,7 +22115,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<|assistant|>\n"; } - } else if (tmpl == "monarch" || tmpl_contains("bos_token + message['role']")) { + } else if (tmpl == LLM_CHAT_TEMPLATE_MONARCH) { // mlabonne/AlphaMonarch-7B template (the is included inside history) for (auto message : chat) { std::string bos = (message == chat.front()) ? 
"" : ""; // skip BOS for first message @@ -22086,7 +22124,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "assistant\n"; } - } else if (tmpl == "gemma" || tmpl == "gemma2" || tmpl_contains("")) { + } else if (tmpl == LLM_CHAT_TEMPLATE_GEMMA) { // google/gemma-7b-it std::string system_prompt = ""; for (auto message : chat) { @@ -22108,7 +22146,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "model\n"; } - } else if (tmpl == "orion" || tmpl_contains("'\\n\\nAssistant: ' + eos_token")) { + } else if (tmpl == LLM_CHAT_TEMPLATE_ORION) { // OrionStarAI/Orion-14B-Chat std::string system_prompt = ""; for (auto message : chat) { @@ -22128,7 +22166,7 @@ static int32_t llama_chat_apply_template_internal( ss << message->content << ""; } } - } else if (tmpl == "openchat" || tmpl_contains("GPT4 Correct ")) { + } else if (tmpl == LLM_CHAT_TEMPLATE_OPENCHAT) { // openchat/openchat-3.5-0106, for (auto message : chat) { std::string role(message->role); @@ -22142,13 +22180,13 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "GPT4 Correct Assistant:"; } - } else if (tmpl == "vicuna" || tmpl == "vicuna-orca" || (tmpl_contains("USER: ") && tmpl_contains("ASSISTANT: "))) { + } else if (tmpl == LLM_CHAT_TEMPLATE_VICUNA || tmpl == LLM_CHAT_TEMPLATE_VICUNA_ORCA) { // eachadea/vicuna-13b-1.1 (and Orca variant) for (auto message : chat) { std::string role(message->role); if (role == "system") { // Orca-Vicuna variant uses a system prefix - if (tmpl == "vicuna-orca" || tmpl_contains("SYSTEM: ")) { + if (tmpl == LLM_CHAT_TEMPLATE_VICUNA_ORCA) { ss << "SYSTEM: " << message->content << "\n"; } else { ss << message->content << "\n\n"; @@ -22162,7 +22200,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "ASSISTANT:"; } - } else if (tmpl == "deepseek" || (tmpl_contains("### Instruction:") && tmpl_contains("<|EOT|>"))) { + } else if (tmpl == LLM_CHAT_TEMPLATE_DEEPSEEK) { // deepseek-ai/deepseek-coder-33b-instruct for (auto message : chat) { std::string role(message->role); @@ -22177,7 +22215,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "### Response:\n"; } - } else if (tmpl == "command-r" || (tmpl_contains("<|START_OF_TURN_TOKEN|>") && tmpl_contains("<|USER_TOKEN|>"))) { + } else if (tmpl == LLM_CHAT_TEMPLATE_COMMAND_R) { // CohereForAI/c4ai-command-r-plus for (auto message : chat) { std::string role(message->role); @@ -22192,7 +22230,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>"; } - } else if (tmpl == "llama3" || (tmpl_contains("<|start_header_id|>") && tmpl_contains("<|end_header_id|>"))) { + } else if (tmpl == LLM_CHAT_TEMPLATE_LLAMA_3) { // Llama 3 for (auto message : chat) { std::string role(message->role); @@ -22201,7 +22239,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<|start_header_id|>assistant<|end_header_id|>\n\n"; } - } else if (tmpl == "chatglm3" || tmpl_contains("[gMASK]sop")) { + } else if (tmpl == LLM_CHAT_TEMPLATE_CHATGML_3) { // chatglm3-6b ss << "[gMASK]" << "sop"; for (auto message : chat) { @@ -22211,7 +22249,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<|assistant|>"; } - } else if (tmpl == "chatglm4" || tmpl_contains("[gMASK]")) { + } else if (tmpl == LLM_CHAT_TEMPLATE_CHATGML_4) { ss << "[gMASK]" << ""; for (auto message : chat) { std::string role(message->role); @@ -22220,7 +22258,7 @@ static int32_t 
llama_chat_apply_template_internal( if (add_ass) { ss << "<|assistant|>"; } - } else if (tmpl == "minicpm" || tmpl_contains(LU8("<用户>"))) { + } else if (tmpl == LLM_CHAT_TEMPLATE_MINICPM) { // MiniCPM-3B-OpenHermes-2.5-v2-GGUF for (auto message : chat) { std::string role(message->role); @@ -22232,7 +22270,7 @@ static int32_t llama_chat_apply_template_internal( ss << trim(message->content); } } - } else if (tmpl == "deepseek2" || tmpl_contains("'Assistant: ' + message['content'] + eos_token")) { + } else if (tmpl == LLM_CHAT_TEMPLATE_DEEPSEEK_2) { // DeepSeek-V2 for (auto message : chat) { std::string role(message->role); @@ -22247,7 +22285,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "Assistant:"; } - } else if (tmpl == "exaone3" || (tmpl_contains("[|system|]") && tmpl_contains("[|assistant|]") && tmpl_contains("[|endofturn|]"))) { + } else if (tmpl == LLM_CHAT_TEMPLATE_EXAONE_3) { // ref: https://huggingface.co/LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct/discussions/8#66bae61b1893d14ee8ed85bb // EXAONE-3.0-7.8B-Instruct for (auto message : chat) { @@ -22263,7 +22301,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "[|assistant|]"; } - } else if (tmpl == "rwkv-world" || tmpl_contains("rwkv-world")) { + } else if (tmpl == LLM_CHAT_TEMPLATE_RWKV_WORLD) { // this template requires the model to have "\n\n" as EOT token for (auto message : chat) { std::string role(message->role); @@ -22273,7 +22311,7 @@ static int32_t llama_chat_apply_template_internal( ss << message->content << "\n\n"; } } - } else if (tmpl == "granite" || tmpl_contains("<|start_of_role|>")) { + } else if (tmpl == LLM_CHAT_TEMPLATE_GRANITE) { // IBM Granite template for (const auto & message : chat) { std::string role(message->role); @@ -22325,7 +22363,11 @@ int32_t llama_chat_apply_template( } std::string formatted_chat; - int32_t res = llama_chat_apply_template_internal(curr_tmpl, chat_vec, formatted_chat, add_ass); + llm_chat_template detected_tmpl = llama_chat_detect_template(curr_tmpl); + if (detected_tmpl == LLM_CHAT_TEMPLATE_UNKNOWN) { + return -1; + } + int32_t res = llama_chat_apply_template_internal(detected_tmpl, chat_vec, formatted_chat, add_ass); if (res < 0) { return res; } @@ -22335,6 +22377,15 @@ int32_t llama_chat_apply_template( return res; } +int32_t llama_chat_builtin_templates(const char ** output, size_t len) { + auto it = LLM_CHAT_TEMPLATES.begin(); + for (size_t i = 0; i < std::min(len, LLM_CHAT_TEMPLATES.size()); i++) { + output[i] = it->first.c_str(); + std::advance(it, 1); + } + return (int32_t) LLM_CHAT_TEMPLATES.size(); +} + // // sampling // diff --git a/src/unicode.cpp b/src/unicode.cpp index 50b35bbbc918c..3d459263525dc 100644 --- a/src/unicode.cpp +++ b/src/unicode.cpp @@ -201,7 +201,18 @@ static std::unordered_map unicode_utf8_to_byte_map() { } static inline std::wstring unicode_wstring_from_utf8(const std::string & s) { +#if defined(__clang__) + // disable C++17 deprecation warning for std::codecvt_utf8 +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wdeprecated-declarations" +#endif + std::wstring_convert> conv; + +#if defined(__clang__) +# pragma clang diagnostic pop +#endif + return conv.from_bytes(s); } diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 3106887cd68d4..b454cd917dfef 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -2719,6 +2719,33 @@ struct test_pad : public test_case { } }; +// GGML_OP_PAD_REFLECT_1D +struct test_pad_reflect_1d : public test_case { 
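// The members below parameterize the new GGML_OP_PAD_REFLECT_1D test: `type` and
// `ne_a` describe the source tensor (built as 2-D from the first two entries of
// ne_a, i.e. 512 x 34 by default), and `pad_0`/`pad_1` are the amounts of padding
// added at the start and end of dim 0 by ggml_pad_reflect_1d(ctx, a, pad_0, pad_1).
// Assuming the op follows the usual reflection-padding convention (as in
// torch.nn.ReflectionPad1d), the result's first dimension grows to
// ne_a[0] + pad_0 + pad_1 while the remaining dimensions stay unchanged.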
+ const ggml_type type; + const std::array ne_a; + const int pad_0; + const int pad_1; + + std::string vars() override { + return VARS_TO_STR4(type, ne_a, pad_0, pad_1); + } + + test_pad_reflect_1d(ggml_type type = GGML_TYPE_F32, + std::array ne_a = {512, 34, 2, 1}, + int pad_0 = 10, int pad_1 = 9) + : type(type), ne_a(ne_a), pad_0(pad_0), pad_1(pad_1) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 2, ne_a.data()); + ggml_set_name(a, "a"); + + ggml_tensor * out = ggml_pad_reflect_1d(ctx, a, pad_0, pad_1); + ggml_set_name(out, "out"); + + return out; + } +}; + // GGML_OP_ARANGE struct test_arange : public test_case { const ggml_type type; @@ -3356,7 +3383,9 @@ static const ggml_type all_types[] = { static const ggml_type base_types[] = { GGML_TYPE_F32, GGML_TYPE_F16, + GGML_TYPE_Q8_0, // for I8MM tests GGML_TYPE_Q4_0, + GGML_TYPE_Q4_1, // for I8MM tests GGML_TYPE_Q4_K, GGML_TYPE_IQ2_XXS }; @@ -3480,13 +3509,14 @@ static std::vector> make_test_cases_eval() { test_cases.emplace_back(new test_conv_transpose_1d({3,2,1,1}, {3,1,2,1}, 1, 0, 1)); test_cases.emplace_back(new test_conv_transpose_1d({2,1,1,1}, {3,1,1,1}, 1, 0, 1)); - test_cases.emplace_back(new test_argmax()); - test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {32, 1, 1, 1})); - test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {100, 10, 1, 1})); + test_cases.emplace_back(new test_count_equal()); + + test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {32, 1, 1, 1})); + test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {100, 10, 1, 1})); test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {1024, 10, 1, 1})); + test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {1024, 12, 1, 1})); test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {2000, 10, 1, 1})); - - test_cases.emplace_back(new test_count_equal()); + test_cases.emplace_back(new test_argmax(GGML_TYPE_F32, {5438, 3, 1, 1})); for (int ne3 : {1, 3}) { // CUDA backward pass only supports ne3 == 1 test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 5, 4, ne3}, {1, 1, 1, 1})); @@ -3513,6 +3543,10 @@ static std::vector> make_test_cases_eval() { test_cases.emplace_back(new test_set(GGML_TYPE_F32, GGML_TYPE_F32, {6, 5, 4, 3}, dim)); } + for (int dim = 1; dim < GGML_MAX_DIMS; ++dim) { + test_cases.emplace_back(new test_set(GGML_TYPE_I32, GGML_TYPE_I32, {6, 5, 4, 3}, dim)); + } + for (ggml_type type_src : {GGML_TYPE_F16, GGML_TYPE_F32}) { for (ggml_type type_dst : all_types) { test_cases.emplace_back(new test_cpy(type_src, type_dst, {256, 4, 4, 4})); @@ -3592,6 +3626,19 @@ static std::vector> make_test_cases_eval() { test_cases.emplace_back(new test_rwkv_wkv6(GGML_TYPE_F32, 32, 64, 32, 4)); test_cases.emplace_back(new test_rwkv_wkv6(GGML_TYPE_F32, 32, 64, 128, 4)); + for (int i = 1; i < 9; ++i) { + test_cases.emplace_back(new test_mul_mat(GGML_TYPE_F16, GGML_TYPE_F32, 16, i, 256, { 1, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(GGML_TYPE_Q4_0, GGML_TYPE_F32, 16, i, 256, { 1, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(GGML_TYPE_Q4_1, GGML_TYPE_F32, 16, i, 256, { 1, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(GGML_TYPE_Q5_0, GGML_TYPE_F32, 16, i, 256, { 1, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(GGML_TYPE_Q5_1, GGML_TYPE_F32, 16, i, 256, { 1, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(GGML_TYPE_Q8_0, GGML_TYPE_F32, 16, i, 256, { 1, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(GGML_TYPE_Q4_K, GGML_TYPE_F32, 16, i, 
256, { 1, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(GGML_TYPE_Q5_K, GGML_TYPE_F32, 16, i, 256, { 1, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(GGML_TYPE_Q6_K, GGML_TYPE_F32, 16, i, 256, { 1, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(GGML_TYPE_IQ4_NL, GGML_TYPE_F32, 16, i, 256, { 1, 1}, {1, 1})); + } + #if 1 for (ggml_type type_a : base_types) { for (ggml_type type_b : {GGML_TYPE_F32, GGML_TYPE_F16}) { @@ -3828,6 +3875,7 @@ static std::vector> make_test_cases_eval() { test_cases.emplace_back(new test_group_norm(GGML_TYPE_F32, {9, 9, 1280, 1})); test_cases.emplace_back(new test_acc()); test_cases.emplace_back(new test_pad()); + test_cases.emplace_back(new test_pad_reflect_1d()); test_cases.emplace_back(new test_arange()); test_cases.emplace_back(new test_timestep_embedding()); test_cases.emplace_back(new test_leaky_relu()); @@ -3874,6 +3922,8 @@ static std::vector> make_test_cases_perf() { test_cases.emplace_back(new test_bin_bcast(ggml_add, GGML_TYPE_F32, {4096, 1, 1, 1}, {1, 512, 1, 1})); test_cases.emplace_back(new test_cpy(GGML_TYPE_F32, GGML_TYPE_F16, {512, 3072, 1, 1})); + test_cases.emplace_back(new test_cpy(GGML_TYPE_F32, GGML_TYPE_F32, {8192, 512, 2, 1}, {0, 2, 1, 3})); + test_cases.emplace_back(new test_cpy(GGML_TYPE_F32, GGML_TYPE_F32, {3072, 512, 2, 1}, {0, 2, 1, 3})); test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {4096, 4096, 5, 1}, false, 1.0f, 0.0f)); test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {77, 4096, 5, 1}, false, 1.0f, 0.0f)); diff --git a/tests/test-chat-template.cpp b/tests/test-chat-template.cpp index 03e897e66dca4..aa140b5696f74 100644 --- a/tests/test-chat-template.cpp +++ b/tests/test-chat-template.cpp @@ -21,7 +21,7 @@ int main(void) { std::vector templates = { // teknium/OpenHermes-2.5-Mistral-7B "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\\n' + message['content'] + '<|im_end|>' + '\\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\\n' }}{% endif %}", - // mistralai/Mistral-7B-Instruct-v0.2 + // mistralai/Mistral-7B-Instruct-v0.2 (NOTE: Old pre-v1 without a system prompt) "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}", // TheBloke/FusionNet_34Bx2_MoE-AWQ "{%- for idx in range(0, messages|length) -%}\\n{%- if messages[idx]['role'] == 'user' -%}\\n{%- if idx > 1 -%}\\n{{- bos_token + '[INST] ' + messages[idx]['content'] + ' [/INST]' -}}\\n{%- else -%}\\n{{- messages[idx]['content'] + ' [/INST]' -}}\\n{%- endif -%}\\n{% elif messages[idx]['role'] == 'system' %}\\n{{- '[INST] <>\\\\n' + messages[idx]['content'] + '\\\\n<>\\\\n\\\\n' -}}\\n{%- elif messages[idx]['role'] == 'assistant' -%}\\n{{- ' ' + messages[idx]['content'] + ' ' + eos_token -}}\\n{% endif %}\\n{% endfor %}", @@ -67,16 +67,24 @@ int main(void) { "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{{ bos_token }}{% for message in messages %}{% if message['role'] == 'user' %}{{ 'User: ' + message['content'] + '\n\n' }}{% elif message['role'] == 'assistant' %}{{ 'Assistant: ' + message['content'] + eos_token }}{% elif 
message['role'] == 'system' %}{{ message['content'] + '\n\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ 'Assistant:' }}{% endif %}", // ibm-granite/granite-3.0-8b-instruct "{%- if tools %}\n {{- '<|start_of_role|>available_tools<|end_of_role|>\n' }}\n {%- for tool in tools %}\n {{- tool | tojson(indent=4) }}\n {%- if not loop.last %}\n {{- '\n\n' }}\n {%- endif %}\n {%- endfor %}\n {{- '<|end_of_text|>\n' }}\n{%- endif %}\n{%- for message in messages %}\n {%- if message['role'] == 'system' %}\n {{- '<|start_of_role|>system<|end_of_role|>' + message['content'] + '<|end_of_text|>\n' }}\n {%- elif message['role'] == 'user' %}\n {{- '<|start_of_role|>user<|end_of_role|>' + message['content'] + '<|end_of_text|>\n' }}\n {%- elif message['role'] == 'assistant' %}\n {{- '<|start_of_role|>assistant<|end_of_role|>' + message['content'] + '<|end_of_text|>\n' }}\n {%- elif message['role'] == 'assistant_tool_call' %}\n {{- '<|start_of_role|>assistant<|end_of_role|><|tool_call|>' + message['content'] + '<|end_of_text|>\n' }}\n {%- elif message['role'] == 'tool_response' %}\n {{- '<|start_of_role|>tool_response<|end_of_role|>' + message['content'] + '<|end_of_text|>\n' }}\n {%- endif %}\n {%- if loop.last and add_generation_prompt %}\n {{- '<|start_of_role|>assistant<|end_of_role|>' }}\n {%- endif %}\n{%- endfor %}", + // mistralai/Mistral-7B-Instruct-v0.2 (mistralai 'v1' template with a system prompt) + "{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content'] %}\n {%- set loop_messages = messages[1:] %}\n{%- else %}\n {%- set loop_messages = messages %}\n{%- endif %}\n\n{{- bos_token }}\n{%- for message in loop_messages %}\n {%- if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}\n {{- raise_exception('After the optional system message, conversation roles must alternate user/assistant/user/assistant/...') }}\n {%- endif %}\n {%- if message['role'] == 'user' %}\n {%- if loop.first and system_message is defined %}\n {{- ' [INST] ' + system_message + '\\n\\n' + message['content'] + ' [/INST]' }}\n {%- else %}\n {{- ' [INST] ' + message['content'] + ' [/INST]' }}\n {%- endif %}\n {%- elif message['role'] == 'assistant' %}\n {{- ' ' + message['content'] + eos_token}}\n {%- else %}\n {{- raise_exception('Only user and assistant roles are supported, with the exception of an initial optional system message!') }}\n {%- endif %}\n{%- endfor %}\n", + // Mistral-Large-Instruct-2407 (mistralai 'v3' template) + "{%- if messages[0][\"role\"] == \"system\" %}\n {%- set system_message = messages[0][\"content\"] %}\n {%- set loop_messages = messages[1:] %}\n{%- else %}\n {%- set loop_messages = messages %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n{%- set user_messages = loop_messages | selectattr(\"role\", \"equalto\", \"user\") | list %}\n\n{#- This block checks for alternating user/assistant messages, skipping tool calling messages #}\n{%- set ns = namespace() %}\n{%- set ns.index = 0 %}\n{%- for message in loop_messages %}\n {%- if not (message.role == \"tool\" or message.role == \"tool_results\" or (message.tool_calls is defined and message.tool_calls is not none)) %}\n {%- if (message[\"role\"] == \"user\") != (ns.index % 2 == 0) %}\n {{- raise_exception(\"After the optional system message, conversation roles must alternate user/assistant/user/assistant/...\") }}\n {%- endif %}\n {%- set ns.index = ns.index + 1 %}\n {%- endif %}\n{%- endfor %}\n\n{{- bos_token }}\n{%- for message in loop_messages %}\n {%- if 
message[\"role\"] == \"user\" %}\n {%- if tools is not none and (message == user_messages[-1]) %}\n {{- \"[AVAILABLE_TOOLS] [\" }}\n {%- for tool in tools %}\n {%- set tool = tool.function %}\n {{- '{\"type\": \"function\", \"function\": {' }}\n {%- for key, val in tool.items() if key != \"return\" %}\n {%- if val is string %}\n {{- '\"' + key + '\": \"' + val + '\"' }}\n {%- else %}\n {{- '\"' + key + '\": ' + val|tojson }}\n {%- endif %}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \"}}\" }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- else %}\n {{- \"]\" }}\n {%- endif %}\n {%- endfor %}\n {{- \"[/AVAILABLE_TOOLS]\" }}\n {%- endif %}\n {%- if loop.last and system_message is defined %}\n {{- \"[INST] \" + system_message + \"\\n\\n\" + message[\"content\"] + \"[/INST]\" }}\n {%- else %}\n {{- \"[INST] \" + message[\"content\"] + \"[/INST]\" }}\n {%- endif %}\n {%- elif message.tool_calls is defined and message.tool_calls is not none %}\n {{- \"[TOOL_CALLS] [\" }}\n {%- for tool_call in message.tool_calls %}\n {%- set out = tool_call.function|tojson %}\n {{- out[:-1] }}\n {%- if not tool_call.id is defined or tool_call.id|length != 9 %}\n {{- raise_exception(\"Tool call IDs should be alphanumeric strings with length 9!\") }}\n {%- endif %}\n {{- ', \"id\": \"' + tool_call.id + '\"}' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- else %}\n {{- \"]\" + eos_token }}\n {%- endif %}\n {%- endfor %}\n {%- elif message[\"role\"] == \"assistant\" %}\n {{- \" \" + message[\"content\"]|trim + eos_token}}\n {%- elif message[\"role\"] == \"tool_results\" or message[\"role\"] == \"tool\" %}\n {%- if message.content is defined and message.content.content is defined %}\n {%- set content = message.content.content %}\n {%- else %}\n {%- set content = message.content %}\n {%- endif %}\n {{- '[TOOL_RESULTS] {\"content\": ' + content|string + \", \" }}\n {%- if not message.tool_call_id is defined or message.tool_call_id|length != 9 %}\n {{- raise_exception(\"Tool call IDs should be alphanumeric strings with length 9!\") }}\n {%- endif %}\n {{- '\"call_id\": \"' + message.tool_call_id + '\"}[/TOOL_RESULTS]' }}\n {%- else %}\n {{- raise_exception(\"Only user and assistant roles are supported, with the exception of an initial optional system message!\") }}\n {%- endif %}\n{%- endfor %}\n", + // Mistral-Nemo-Instruct-2407 (mistralai 'v3-tekken' template) + "{%- if messages[0][\"role\"] == \"system\" %}\n {%- set system_message = messages[0][\"content\"] %}\n {%- set loop_messages = messages[1:] %}\n{%- else %}\n {%- set loop_messages = messages %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n{%- set user_messages = loop_messages | selectattr(\"role\", \"equalto\", \"user\") | list %}\n\n{#- This block checks for alternating user/assistant messages, skipping tool calling messages #}\n{%- set ns = namespace() %}\n{%- set ns.index = 0 %}\n{%- for message in loop_messages %}\n {%- if not (message.role == \"tool\" or message.role == \"tool_results\" or (message.tool_calls is defined and message.tool_calls is not none)) %}\n {%- if (message[\"role\"] == \"user\") != (ns.index % 2 == 0) %}\n {{- raise_exception(\"After the optional system message, conversation roles must alternate user/assistant/user/assistant/...\") }}\n {%- endif %}\n {%- set ns.index = ns.index + 1 %}\n {%- endif %}\n{%- endfor %}\n\n{{- bos_token }}\n{%- for message in loop_messages %}\n {%- if message[\"role\"] == \"user\" %}\n {%- if tools is not none and (message 
== user_messages[-1]) %}\n {{- \"[AVAILABLE_TOOLS][\" }}\n {%- for tool in tools %}\n {%- set tool = tool.function %}\n {{- '{\"type\": \"function\", \"function\": {' }}\n {%- for key, val in tool.items() if key != \"return\" %}\n {%- if val is string %}\n {{- '\"' + key + '\": \"' + val + '\"' }}\n {%- else %}\n {{- '\"' + key + '\": ' + val|tojson }}\n {%- endif %}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \"}}\" }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- else %}\n {{- \"]\" }}\n {%- endif %}\n {%- endfor %}\n {{- \"[/AVAILABLE_TOOLS]\" }}\n {%- endif %}\n {%- if loop.last and system_message is defined %}\n {{- \"[INST]\" + system_message + \"\\n\\n\" + message[\"content\"] + \"[/INST]\" }}\n {%- else %}\n {{- \"[INST]\" + message[\"content\"] + \"[/INST]\" }}\n {%- endif %}\n {%- elif (message.tool_calls is defined and message.tool_calls is not none) %}\n {{- \"[TOOL_CALLS][\" }}\n {%- for tool_call in message.tool_calls %}\n {%- set out = tool_call.function|tojson %}\n {{- out[:-1] }}\n {%- if not tool_call.id is defined or tool_call.id|length != 9 %}\n {{- raise_exception(\"Tool call IDs should be alphanumeric strings with length 9!\") }}\n {%- endif %}\n {{- ', \"id\": \"' + tool_call.id + '\"}' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- else %}\n {{- \"]\" + eos_token }}\n {%- endif %}\n {%- endfor %}\n {%- elif message[\"role\"] == \"assistant\" %}\n {{- message[\"content\"] + eos_token}}\n {%- elif message[\"role\"] == \"tool_results\" or message[\"role\"] == \"tool\" %}\n {%- if message.content is defined and message.content.content is defined %}\n {%- set content = message.content.content %}\n {%- else %}\n {%- set content = message.content %}\n {%- endif %}\n {{- '[TOOL_RESULTS]{\"content\": ' + content|string + \", \" }}\n {%- if not message.tool_call_id is defined or message.tool_call_id|length != 9 %}\n {{- raise_exception(\"Tool call IDs should be alphanumeric strings with length 9!\") }}\n {%- endif %}\n {{- '\"call_id\": \"' + message.tool_call_id + '\"}[/TOOL_RESULTS]' }}\n {%- else %}\n {{- raise_exception(\"Only user and assistant roles are supported, with the exception of an initial optional system message!\") }}\n {%- endif %}\n{%- endfor %}\n", + // mistralai/Mistral-Large-Instruct-2411 (mistralai 'v7' template) + "{{ bos_token }}{% for message in messages %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + '[/INST]' }}{% elif message['role'] == 'system' %}{{ '[SYSTEM_PROMPT] ' + message['content'] + '[/SYSTEM_PROMPT]' }}{% elif message['role'] == 'assistant' %}{{ ' ' + message['content'] + eos_token }}{% else %}{{ raise_exception('Only user, system and assistant roles are supported!') }}{% endif %}{% endfor %}", }; std::vector expected_output = { // teknium/OpenHermes-2.5-Mistral-7B "<|im_start|>system\nYou are a helpful assistant<|im_end|>\n<|im_start|>user\nHello<|im_end|>\n<|im_start|>assistant\nHi there<|im_end|>\n<|im_start|>user\nWho are you<|im_end|>\n<|im_start|>assistant\n I am an assistant <|im_end|>\n<|im_start|>user\nAnother question<|im_end|>\n<|im_start|>assistant\n", - // mistralai/Mistral-7B-Instruct-v0.2 + // mistralai/Mistral-7B-Instruct-v0.2 (NOTE: Old pre-v1 without a system prompt) "[INST] You are a helpful assistant\nHello [/INST]Hi there[INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", // TheBloke/FusionNet_34Bx2_MoE-AWQ - "[INST] <>\nYou are a helpful assistant\n<>\n\nHello [/INST] Hi there [INST] Who are you [/INST] I am an assistant [INST] 
Another question [/INST]", + "[INST] <>\nYou are a helpful assistant\n<>\n\nHello [/INST]Hi there[INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", // bofenghuang/vigogne-2-70b-chat - "[INST] <>\nYou are a helpful assistant\n<>\n\nHello [/INST] Hi there [INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", + "[INST] <>\nYou are a helpful assistant\n<>\n\nHello [/INST]Hi there[INST] Who are you [/INST]I am an assistant[INST] Another question [/INST]", // mlabonne/AlphaMonarch-7B "system\nYou are a helpful assistant\nuser\nHello\nassistant\nHi there\nuser\nWho are you\nassistant\n I am an assistant \nuser\nAnother question\nassistant\n", // google/gemma-7b-it @@ -113,10 +121,29 @@ int main(void) { u8"You are a helpful assistant\n\nUser: Hello\n\nAssistant: Hi there<|end▁of▁sentence|>User: Who are you\n\nAssistant: I am an assistant <|end▁of▁sentence|>User: Another question\n\nAssistant:", // ibm-granite/granite-3.0-8b-instruct "<|start_of_role|>system<|end_of_role|>You are a helpful assistant<|end_of_text|>\n<|start_of_role|>user<|end_of_role|>Hello<|end_of_text|>\n<|start_of_role|>assistant<|end_of_role|>Hi there<|end_of_text|>\n<|start_of_role|>user<|end_of_role|>Who are you<|end_of_text|>\n<|start_of_role|>assistant<|end_of_role|> I am an assistant <|end_of_text|>\n<|start_of_role|>user<|end_of_role|>Another question<|end_of_text|>\n<|start_of_role|>assistant<|end_of_role|>\n", + // mistralai/Mistral-7B-Instruct-v0.2 (mistralai 'v1' template with a system prompt) + " [INST] You are a helpful assistant\n\nHello [/INST] Hi there [INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", + // Mistral-Large-Instruct-2407 (mistralai 'v3' template; modified to have system prompt at start) + "[INST] You are a helpful assistant\n\nHello[/INST] Hi there[INST] Who are you[/INST] I am an assistant[INST] Another question[/INST]", + // Mistral-Nemo-Instruct-2407 (mistralai 'v3-tekken' template; modified to have system prompt at start) + "[INST]You are a helpful assistant\n\nHello[/INST]Hi there[INST]Who are you[/INST] I am an assistant [INST]Another question[/INST]", + // mistralai/Mistral-Large-Instruct-2411 (mistralai 'v7' template) + "[SYSTEM_PROMPT] You are a helpful assistant[/SYSTEM_PROMPT][INST] Hello[/INST] Hi there[INST] Who are you[/INST] I am an assistant [INST] Another question[/INST]", }; std::vector formatted_chat(1024); int32_t res; + // list all supported templates + std::vector supported_tmpl; + res = llama_chat_builtin_templates(nullptr, 0); + assert(res > 0); + supported_tmpl.resize(res); + res = llama_chat_builtin_templates(supported_tmpl.data(), supported_tmpl.size()); + printf("Built-in chat templates:\n"); + for (auto tmpl : supported_tmpl) { + printf(" %s\n", tmpl); + } + // test invalid chat template res = llama_chat_apply_template(nullptr, "INVALID TEMPLATE", conversation, message_count, true, formatted_chat.data(), formatted_chat.size()); assert(res < 0); @@ -154,7 +181,13 @@ int main(void) { return output; }; assert(fmt_sys("chatml") == "<|im_start|>system\nYou are a helpful assistant<|im_end|>\n"); + assert(fmt_sys("mistral-v1") == " [INST] You are a helpful assistant\n\n"); + assert(fmt_sys("mistral-v3") == "[INST] You are a helpful assistant\n\n"); + assert(fmt_sys("mistral-v3-tekken") == "[INST]You are a helpful assistant\n\n"); + assert(fmt_sys("mistral-v7") == "[SYSTEM_PROMPT] You are a helpful assistant[/SYSTEM_PROMPT]"); assert(fmt_sys("llama2") == "[INST] You are a helpful assistant\n"); + 
assert(fmt_sys("llama2-sys") == "[INST] <>\nYou are a helpful assistant\n<>\n\n"); + assert(fmt_sys("mistral") == "[INST] You are a helpful assistant\n"); // for old pre-v1 templates assert(fmt_sys("gemma") == ""); // for gemma, system message is merged with user message assert(fmt_sys("llama3") == "<|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant<|eot_id|>"); @@ -173,9 +206,16 @@ int main(void) { return output; }; assert(fmt_single("chatml") == "\n<|im_start|>user\nHow are you<|im_end|>\n<|im_start|>assistant\n"); + assert(fmt_single("mistral-v1") == " [INST] How are you [/INST]"); + assert(fmt_single("mistral-v3") == "[INST] How are you[/INST]"); + assert(fmt_single("mistral-v3-tekken") == "[INST]How are you[/INST]"); + assert(fmt_single("mistral-v7") == "[INST] How are you[/INST]"); assert(fmt_single("llama2") == "[INST] How are you [/INST]"); + assert(fmt_single("mistral") == "[INST] How are you [/INST]"); // for old pre-v1 templates assert(fmt_single("gemma") == "\nuser\nHow are you\nmodel\n"); assert(fmt_single("llama3") == "<|start_header_id|>user<|end_header_id|>\n\nHow are you<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"); + printf("Test chat templates: OK\n"); + return 0; } diff --git a/tests/test-lora-conversion-inference.sh b/tests/test-lora-conversion-inference.sh index fe90ce0d1b801..fb308a9ffb76f 100755 --- a/tests/test-lora-conversion-inference.sh +++ b/tests/test-lora-conversion-inference.sh @@ -10,11 +10,16 @@ declare -a params=( MODELS_REPO=lora-tests MODELS_REPO_URL=https://huggingface.co/ggml-org/$MODELS_REPO +COMMIT=c26d5fb85b4070a9e9c4e65d132c783b98086890 # Clone the Hugging Face repository if the directory does not exist if [ ! -d "$MODELS_REPO" ]; then echo "Cloning the Hugging Face repository..." git clone $MODELS_REPO_URL --depth 1 + cd $MODELS_REPO + git fetch --depth=1 origin $COMMIT + git reset --hard $COMMIT + cd - else echo "Repository already exists. Skipping clone." fi diff --git a/tests/test-sampling.cpp b/tests/test-sampling.cpp index be370044d6f58..e5c9e75e4185f 100644 --- a/tests/test-sampling.cpp +++ b/tests/test-sampling.cpp @@ -284,7 +284,7 @@ static void test_perf() { data.reserve(n_vocab); for (int i = 0; i < n_vocab; i++) { - const float logit = 2.0f*((float)(rand())/RAND_MAX - 0.5f); + const float logit = 2.0f*((double)(rand())/RAND_MAX - 0.5); data.emplace_back(llama_token_data{i, logit, 0.0f}); }
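On the final test-sampling.cpp hunk: the intermediate is now computed in double, presumably because RAND_MAX is 2^31 - 1 on common platforms and is not exactly representable as a float (it silently rounds up to 2^31, a lossy implicit conversion that compilers may warn about), whereas a double holds it exactly; the value is narrowed back to float only once, when the logit is stored. A small standalone check of that rounding behaviour, assuming a platform with the large RAND_MAX (where RAND_MAX is 32767, both forms agree):

```cpp
#include <cstdio>
#include <cstdlib>

int main() {
    // With RAND_MAX == 2147483647 the float conversion rounds up to 2147483648.0,
    // while the double conversion is exact; this is the loss the (double) cast avoids.
    printf("RAND_MAX          = %d\n",   RAND_MAX);
    printf("(float)  RAND_MAX = %.1f\n", (float)  RAND_MAX);
    printf("(double) RAND_MAX = %.1f\n", (double) RAND_MAX);
    return 0;
}
```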