feature: makefile & updates (mudler#23)
Co-authored-by: mudler <[email protected]>
Co-authored-by: Ettore Di Giacinto <[email protected]>
3 people authored Apr 15, 2023
1 parent e8eab66 commit c371752
Showing 10 changed files with 105 additions and 15 deletions.
2 changes: 1 addition & 1 deletion .dockerignore
@@ -1 +1 @@
-models/*.bin
+models
4 changes: 3 additions & 1 deletion .env
@@ -1 +1,3 @@
-THREADS=14
+THREADS=14
+CONTEXT_SIZE=700
+MODELS_PATH=/models
9 changes: 8 additions & 1 deletion .gitignore
@@ -1,2 +1,9 @@
+# go-llama build artifacts
+go-llama
+
+# llama-cli build binary
 llama-cli
-models/*.bin
+
+# Ignore models
+models/*.bin
+models/ggml-*
16 changes: 16 additions & 0 deletions .vscode/launch.json
@@ -0,0 +1,16 @@
+{
+    "version": "0.2.0",
+    "configurations": [
+
+        {
+            "name": "Launch Go",
+            "type": "go",
+            "request": "launch",
+            "mode": "debug",
+            "program": "${workspaceFolder}/main.go",
+            "args": [
+                "api"
+            ]
+        }
+    ]
+}
52 changes: 52 additions & 0 deletions Makefile
@@ -0,0 +1,52 @@
+GOCMD=go
+GOTEST=$(GOCMD) test
+GOVET=$(GOCMD) vet
+BINARY_NAME=llama-cli
+GOLLAMA_VERSION?=llama.cpp-8b67998
+
+GREEN  := $(shell tput -Txterm setaf 2)
+YELLOW := $(shell tput -Txterm setaf 3)
+WHITE  := $(shell tput -Txterm setaf 7)
+CYAN   := $(shell tput -Txterm setaf 6)
+RESET  := $(shell tput -Txterm sgr0)
+
+.PHONY: all test build vendor
+
+all: help
+
+## Build:
+
+build: prepare ## Build the project
+	$(GOCMD) build -o $(BINARY_NAME) ./
+
+go-llama:
+	git clone -b $(GOLLAMA_VERSION) --recurse-submodules https://github.com/go-skynet/go-llama.cpp go-llama
+
+prepare: go-llama
+	$(MAKE) -C go-llama libbinding.a
+	$(GOCMD) mod edit -replace github.com/go-skynet/go-llama.cpp=$(shell pwd)/go-llama
+
+clean: ## Remove build related file
+	$(MAKE) -C go-llama clean
+	rm -fr ./go-llama
+	rm -f $(BINARY_NAME)
+
+## Run:
+run: prepare
+	C_INCLUDE_PATH=$(shell pwd)/go-llama.cpp LIBRARY_PATH=$(shell pwd)/go-llama.cpp $(GOCMD) run ./ api
+
+## Test:
+test: ## Run the tests of the project
+	$(GOTEST) -v -race ./... $(OUTPUT_OPTIONS)
+
+## Help:
+help: ## Show this help.
+	@echo ''
+	@echo 'Usage:'
+	@echo '  ${YELLOW}make${RESET} ${GREEN}<target>${RESET}'
+	@echo ''
+	@echo 'Targets:'
+	@awk 'BEGIN {FS = ":.*?## "} { \
+		if (/^[a-zA-Z_-]+:.*?##.*$$/) {printf "    ${YELLOW}%-20s${GREEN}%s${RESET}\n", $$1, $$2} \
+		else if (/^## .*$$/) {printf "  ${CYAN}%s${RESET}\n", substr($$1,4)} \
+		}' $(MAKEFILE_LIST)
4 changes: 2 additions & 2 deletions README.md
@@ -19,8 +19,8 @@ cd llama-cli
 # copy your models to models/
 cp your-model.bin models/
 
-# (optional) Edit the .env file to set the number of concurrent threads used for inference
-# echo "THREADS=14" > .env
+# (optional) Edit the .env file to set things like context size and threads
+# vim .env
 
 # start with docker-compose
 docker compose up -d --build
25 changes: 19 additions & 6 deletions docker-compose.yaml
@@ -1,15 +1,28 @@
 version: '3.6'
 
 services:
 
+  # chatgpt:
+  #   image: ghcr.io/mckaywrigley/chatbot-ui:main
+  #   # platform: linux/amd64
+  #   ports:
+  #     - 3000:3000
+  #   environment:
+  #     - 'OPENAI_API_KEY=sk-000000000000000'
+  #     - 'OPENAI_API_HOST=http://api:8080'
+
   api:
     image: quay.io/go-skynet/llama-cli:latest
-    build: .
-    volumes:
-      - ./models:/models
+    build:
+      context: .
+      dockerfile: Dockerfile
     ports:
       - 8080:8080
     environment:
-      - MODELS_PATH=/models
-      - CONTEXT_SIZE=700
-    command: api
+      - MODELS_PATH=$MODELS_PATH
+      - CONTEXT_SIZE=$CONTEXT_SIZE
+      - THREADS=$THREADS
+    volumes:
+      - ./models:/models:cached
+    command: api
+
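The compose service now forwards MODELS_PATH, CONTEXT_SIZE and THREADS from the host-side .env file instead of hard-coding them. As a rough illustration of how a Go process can consume those variables, here is a minimal sketch; the helper names, the direct os.Getenv/os.LookupEnv reads and the fallback defaults (./models, 512, 4) are assumptions for illustration only, since llama-cli most likely binds these values through its CLI flags instead.

// Illustrative sketch only: reading the variables that docker-compose now
// forwards from the host .env file. The defaults and helper functions below
// are assumptions, not the actual llama-cli wiring.
package main

import (
	"fmt"
	"os"
	"strconv"
)

// envOrDefault returns the value of key, or def when the variable is unset or empty.
func envOrDefault(key, def string) string {
	if v, ok := os.LookupEnv(key); ok && v != "" {
		return v
	}
	return def
}

// envIntOrDefault parses key as an integer, falling back to def when parsing fails.
func envIntOrDefault(key string, def int) int {
	if n, err := strconv.Atoi(os.Getenv(key)); err == nil {
		return n
	}
	return def
}

func main() {
	modelsPath := envOrDefault("MODELS_PATH", "./models") // assumed default
	contextSize := envIntOrDefault("CONTEXT_SIZE", 512)   // assumed default
	threads := envIntOrDefault("THREADS", 4)              // assumed default

	fmt.Printf("models=%s context=%d threads=%d\n", modelsPath, contextSize, threads)
}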
2 changes: 1 addition & 1 deletion go.mod
@@ -3,7 +3,7 @@ module github.com/go-skynet/llama-cli
 go 1.19
 
 require (
-	github.com/go-skynet/go-llama.cpp v0.0.0-20230415155049-9260bfd28bc4
+	github.com/go-skynet/go-llama.cpp v0.0.0-20230415213228-bac222030640
 	github.com/gofiber/fiber/v2 v2.42.0
 	github.com/urfave/cli/v2 v2.25.0
 )
4 changes: 2 additions & 2 deletions go.sum
@@ -3,8 +3,8 @@ github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHG
 github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w=
 github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0=
-github.com/go-skynet/go-llama.cpp v0.0.0-20230415155049-9260bfd28bc4 h1:u/y9MlPHOeIj636IQmrf9ptMjjdgCVIcsfb7lMFh39M=
-github.com/go-skynet/go-llama.cpp v0.0.0-20230415155049-9260bfd28bc4/go.mod h1:35AKIEMY+YTKCBJIa/8GZcNGJ2J+nQk1hQiWo/OnEWw=
+github.com/go-skynet/go-llama.cpp v0.0.0-20230415213228-bac222030640 h1:8SSVbQ3yvq7JnfLCLF4USV0PkQnnduUkaNCv/hHDa3E=
+github.com/go-skynet/go-llama.cpp v0.0.0-20230415213228-bac222030640/go.mod h1:35AKIEMY+YTKCBJIa/8GZcNGJ2J+nQk1hQiWo/OnEWw=
 github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
 github.com/gofiber/fiber/v2 v2.42.0 h1:Fnp7ybWvS+sjNQsFvkhf4G8OhXswvB6Vee8hM/LyS+8=
 github.com/gofiber/fiber/v2 v2.42.0/go.mod h1:3+SGNjqMh5VQH5Vz2Wdi43zTIV16ktlFd3x3R6O1Zlc=
2 changes: 1 addition & 1 deletion main.go
@@ -136,7 +136,7 @@ echo "An Alpaca (Vicugna pacos) is a domesticated species of South American came
 		},
 		&cli.StringFlag{
 			Name: "default-model",
-			EnvVars: []string{"default-model"},
+			EnvVars: []string{"DEFAULT_MODEL"},
 		},
 		&cli.StringFlag{
 			Name: "address",
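The only change in main.go swaps the EnvVars entry of the default-model flag from the lowercase "default-model" to the conventional upper-case DEFAULT_MODEL, so the flag can be populated from the environment (for example by docker-compose). Below is a minimal, self-contained urfave/cli v2 sketch of that mechanism; the fallback value "none" and the example model name are placeholders, not the values used by llama-cli.

// Illustrative sketch only: how urfave/cli v2 binds an environment variable
// to a string flag via EnvVars. Defaults and example values are placeholders.
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli/v2"
)

func main() {
	app := &cli.App{
		Flags: []cli.Flag{
			&cli.StringFlag{
				Name:    "default-model",
				EnvVars: []string{"DEFAULT_MODEL"}, // e.g. DEFAULT_MODEL=ggml-model.bin
				Value:   "none",                    // placeholder fallback
			},
		},
		Action: func(ctx *cli.Context) error {
			// The flag resolves from --default-model, then DEFAULT_MODEL, then Value.
			fmt.Println("default-model:", ctx.String("default-model"))
			return nil
		},
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}

With DEFAULT_MODEL exported in the environment, the flag takes that value even when no --default-model argument is passed on the command line.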
