forked from ggerganov/llama.cpp
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #5 from arthw/cherry-1118
Cherry 1118
- Loading branch information
Showing
503 changed files
with
129,333 additions
and
78,865 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,26 @@ | ||
ARG UBUNTU_VERSION=22.04
# This needs to generally match the container host's environment.
ARG MUSA_VERSION=rc3.1.0
# Target the MUSA build image
ARG BASE_MUSA_DEV_CONTAINER=mthreads/musa:${MUSA_VERSION}-devel-ubuntu${UBUNTU_VERSION}

FROM ${BASE_MUSA_DEV_CONTAINER} AS build

# Toolchain + curl headers (for -DLLAMA_CURL=ON) + Python for the conversion
# scripts served by tools.sh. Skip recommended packages and drop the apt lists
# in the same layer so neither bloats the image.
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        build-essential \
        cmake \
        git \
        libcurl4-openssl-dev \
        libgomp1 \
        python3 \
        python3-pip \
    && rm -rf /var/lib/apt/lists/*

COPY requirements.txt requirements.txt
COPY requirements requirements

# --no-cache-dir keeps pip's download cache out of the layer.
RUN pip install --no-cache-dir --upgrade pip setuptools wheel \
    && pip install --no-cache-dir -r requirements.txt

WORKDIR /app

COPY . .

# ${CMAKE_ARGS} allows extra flags to be injected via the build environment.
RUN cmake -B build -DGGML_NATIVE=OFF -DGGML_MUSA=ON -DLLAMA_CURL=ON ${CMAKE_ARGS} -DCMAKE_EXE_LINKER_FLAGS=-Wl,--allow-shlib-undefined . && \
    cmake --build build --config Release -j$(nproc) && \
    cp build/bin/* .

# tools.sh dispatches to the various llama.cpp binaries copied above.
ENTRYPOINT ["/app/.devops/tools.sh"]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,44 @@ | ||
ARG ASCEND_VERSION=8.0.rc2.alpha003-910b-openeuler22.03-py3.8

FROM ascendai/cann:$ASCEND_VERSION AS build

WORKDIR /app

COPY . .

# openEuler base uses yum; clean the metadata cache in the same layer so it
# does not persist in the build stage.
RUN yum install -y gcc g++ cmake make && \
    yum clean all && \
    rm -rf /var/cache/yum

# Ascend CANN toolkit environment needed by the compiler and runtime libs.
ENV ASCEND_TOOLKIT_HOME=/usr/local/Ascend/ascend-toolkit/latest
ENV LIBRARY_PATH=${ASCEND_TOOLKIT_HOME}/lib64:$LIBRARY_PATH
ENV LD_LIBRARY_PATH=${ASCEND_TOOLKIT_HOME}/lib64:${ASCEND_TOOLKIT_HOME}/lib64/plugin/opskernel:${ASCEND_TOOLKIT_HOME}/lib64/plugin/nnengine:${ASCEND_TOOLKIT_HOME}/opp/built-in/op_impl/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH}
ENV PYTHONPATH=${ASCEND_TOOLKIT_HOME}/python/site-packages:${ASCEND_TOOLKIT_HOME}/opp/built-in/op_impl/ai_core/tbe:${PYTHONPATH}
ENV PATH=${ASCEND_TOOLKIT_HOME}/bin:${ASCEND_TOOLKIT_HOME}/compiler/ccec_compiler/bin:${PATH}
ENV ASCEND_AICPU_PATH=${ASCEND_TOOLKIT_HOME}
ENV ASCEND_OPP_PATH=${ASCEND_TOOLKIT_HOME}/opp
ENV TOOLCHAIN_HOME=${ASCEND_TOOLKIT_HOME}/toolkit
ENV ASCEND_HOME_PATH=${ASCEND_TOOLKIT_HOME}

# find libascend_hal.so, because the driver hasn't been mounted at build time;
# link against the toolkit's stub library instead.
ENV LD_LIBRARY_PATH=${ASCEND_TOOLKIT_HOME}/runtime/lib64/stub:$LD_LIBRARY_PATH

# Static libs so the runtime stage only needs the single llama-cli binary.
RUN echo "Building with static libs" && \
    source /usr/local/Ascend/ascend-toolkit/set_env.sh --force && \
    cmake -B build -DGGML_NATIVE=OFF -DGGML_CANN=ON -DBUILD_SHARED_LIBS=OFF && \
    cmake --build build --config Release --target llama-cli

# TODO: use image with NNRT
FROM ascendai/cann:$ASCEND_VERSION AS runtime
COPY --from=build /app/build/bin/llama-cli /llama-cli

ENV LC_ALL=C.utf8

# Same CANN environment is required at run time for the device libraries.
ENV ASCEND_TOOLKIT_HOME=/usr/local/Ascend/ascend-toolkit/latest
ENV LIBRARY_PATH=${ASCEND_TOOLKIT_HOME}/lib64:$LIBRARY_PATH
ENV LD_LIBRARY_PATH=${ASCEND_TOOLKIT_HOME}/lib64:${ASCEND_TOOLKIT_HOME}/lib64/plugin/opskernel:${ASCEND_TOOLKIT_HOME}/lib64/plugin/nnengine:${ASCEND_TOOLKIT_HOME}/opp/built-in/op_impl/ai_core/tbe/op_tiling:${LD_LIBRARY_PATH}
ENV PYTHONPATH=${ASCEND_TOOLKIT_HOME}/python/site-packages:${ASCEND_TOOLKIT_HOME}/opp/built-in/op_impl/ai_core/tbe:${PYTHONPATH}
ENV PATH=${ASCEND_TOOLKIT_HOME}/bin:${ASCEND_TOOLKIT_HOME}/compiler/ccec_compiler/bin:${PATH}
ENV ASCEND_AICPU_PATH=${ASCEND_TOOLKIT_HOME}
ENV ASCEND_OPP_PATH=${ASCEND_TOOLKIT_HOME}/opp
ENV TOOLCHAIN_HOME=${ASCEND_TOOLKIT_HOME}/toolkit
ENV ASCEND_HOME_PATH=${ASCEND_TOOLKIT_HOME}

ENTRYPOINT [ "/llama-cli" ]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,35 +1,38 @@ | ||
ARG UBUNTU_VERSION=22.04
# This needs to generally match the container host's environment.
ARG CUDA_VERSION=12.6.0
# Target the CUDA build image
ARG BASE_CUDA_DEV_CONTAINER=nvidia/cuda:${CUDA_VERSION}-devel-ubuntu${UBUNTU_VERSION}
# Target the CUDA runtime image
ARG BASE_CUDA_RUN_CONTAINER=nvidia/cuda:${CUDA_VERSION}-runtime-ubuntu${UBUNTU_VERSION}

FROM ${BASE_CUDA_DEV_CONTAINER} AS build

# CUDA architecture to build for (defaults to all supported archs)
ARG CUDA_DOCKER_ARCH=default

RUN apt-get update && \
    apt-get install -y --no-install-recommends build-essential cmake git && \
    rm -rf /var/lib/apt/lists/*

WORKDIR /app

COPY . .

# Use the default CUDA archs if not specified; otherwise pass the requested
# list to CMake via CMAKE_CUDA_ARCHITECTURES.
RUN if [ "${CUDA_DOCKER_ARCH}" != "default" ]; then \
        export CMAKE_ARGS="-DCMAKE_CUDA_ARCHITECTURES=${CUDA_DOCKER_ARCH}"; \
    fi && \
    cmake -B build -DGGML_NATIVE=OFF -DGGML_CUDA=ON ${CMAKE_ARGS} -DCMAKE_EXE_LINKER_FLAGS=-Wl,--allow-shlib-undefined . && \
    cmake --build build --config Release --target llama-cli -j$(nproc) && \
    mkdir -p /app/lib && \
    find build -name "*.so" -exec cp {} /app/lib \;

FROM ${BASE_CUDA_RUN_CONTAINER} AS runtime

# libgomp1 is required by the OpenMP-enabled ggml shared libraries.
RUN apt-get update && \
    apt-get install -y --no-install-recommends libgomp1 && \
    rm -rf /var/lib/apt/lists/*

# Shared libs go to / so the dynamic loader finds them next to the binary.
COPY --from=build /app/lib/ /
COPY --from=build /app/build/bin/llama-cli /

ENTRYPOINT [ "/llama-cli" ]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
ARG UBUNTU_VERSION=22.04
# This needs to generally match the container host's environment.
ARG MUSA_VERSION=rc3.1.0
# Target the MUSA build image
ARG BASE_MUSA_DEV_CONTAINER=mthreads/musa:${MUSA_VERSION}-devel-ubuntu${UBUNTU_VERSION}
# Target the MUSA runtime image
ARG BASE_MUSA_RUN_CONTAINER=mthreads/musa:${MUSA_VERSION}-runtime-ubuntu${UBUNTU_VERSION}

FROM ${BASE_MUSA_DEV_CONTAINER} AS build

RUN apt-get update && \
    apt-get install -y --no-install-recommends build-essential cmake git && \
    rm -rf /var/lib/apt/lists/*

WORKDIR /app

COPY . .

# ${CMAKE_ARGS} allows extra flags to be injected via the build environment.
RUN cmake -B build -DGGML_NATIVE=OFF -DGGML_MUSA=ON ${CMAKE_ARGS} -DCMAKE_EXE_LINKER_FLAGS=-Wl,--allow-shlib-undefined . && \
    cmake --build build --config Release --target llama-cli -j$(nproc) && \
    mkdir -p /app/lib && \
    find build -name "*.so" -exec cp {} /app/lib \;

FROM ${BASE_MUSA_RUN_CONTAINER} AS runtime

# libgomp1 is required by the OpenMP-enabled ggml shared libraries.
RUN apt-get update && \
    apt-get install -y --no-install-recommends libgomp1 && \
    rm -rf /var/lib/apt/lists/*

# Shared libs go to / so the dynamic loader finds them next to the binary.
COPY --from=build /app/lib/ /
COPY --from=build /app/build/bin/llama-cli /llama-cli

ENTRYPOINT [ "/llama-cli" ]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,36 @@ | ||
ARG UBUNTU_VERSION=22.04
# This needs to generally match the container host's environment.
ARG MUSA_VERSION=rc3.1.0
# Target the MUSA build image
ARG BASE_MUSA_DEV_CONTAINER=mthreads/musa:${MUSA_VERSION}-devel-ubuntu${UBUNTU_VERSION}
# Target the MUSA runtime image
ARG BASE_MUSA_RUN_CONTAINER=mthreads/musa:${MUSA_VERSION}-runtime-ubuntu${UBUNTU_VERSION}

FROM ${BASE_MUSA_DEV_CONTAINER} AS build

RUN apt-get update && \
    apt-get install -y --no-install-recommends build-essential cmake git libcurl4-openssl-dev && \
    rm -rf /var/lib/apt/lists/*

WORKDIR /app

COPY . .

# ${CMAKE_ARGS} allows extra flags to be injected via the build environment.
RUN cmake -B build -DGGML_NATIVE=OFF -DGGML_MUSA=ON -DLLAMA_CURL=ON ${CMAKE_ARGS} -DCMAKE_EXE_LINKER_FLAGS=-Wl,--allow-shlib-undefined . && \
    cmake --build build --config Release --target llama-server -j$(nproc) && \
    mkdir -p /app/lib && \
    find build -name "*.so" -exec cp {} /app/lib \;

FROM ${BASE_MUSA_RUN_CONTAINER} AS runtime

# Runtime only needs the libcurl shared library (not the -dev headers),
# libgomp1 for OpenMP, and curl for the HEALTHCHECK probe.
RUN apt-get update && \
    apt-get install -y --no-install-recommends curl libcurl4 libgomp1 && \
    rm -rf /var/lib/apt/lists/*

# Shared libs go to / so the dynamic loader finds them next to the binary.
COPY --from=build /app/lib/ /
COPY --from=build /app/build/bin/llama-server /llama-server

# Must be set to 0.0.0.0 so it can listen to requests from host machine
ENV LLAMA_ARG_HOST=0.0.0.0

HEALTHCHECK CMD [ "curl", "-f", "http://localhost:8080/health" ]

ENTRYPOINT [ "/llama-server" ]
Oops, something went wrong.