Makefile
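
# Container image tag applied to every build and push below.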
TAG ?= 0.3.2

.PHONY: embeddings push

build: embeddings stablelm whisper
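
# Regenerate the pinned Python requirements from pyproject.toml.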
requirements:
	pip-compile -o leapfrogai/requirements.txt pyproject.toml
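
# Push the model images to the GitHub Container Registry.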
push:
	docker push ghcr.io/defenseunicorns/leapfrogai/stablelm-3b:${TAG}
	docker push ghcr.io/defenseunicorns/leapfrogai/all-minilm-l6-v2:${TAG}
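
# Build and push the shared base image that every model backend extends.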
base:
	docker build --network=host -t ghcr.io/defenseunicorns/leapfrogai/base:${TAG} -f leapfrogai/Dockerfile leapfrogai

base-push:
	docker push ghcr.io/defenseunicorns/leapfrogai/base:${TAG}
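
# Model backend images; each is built on top of the base image.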
stablelm: base
	cd models/llms/stablelm && \
	docker build --network=host --build-arg IMAGE_TAG=${TAG} -t ghcr.io/defenseunicorns/leapfrogai/stablelm-3b:${TAG} .

embeddings: base
	cd models/text2vec/all-minilm-l6-v2/ && \
	docker build --network=host --build-arg IMAGE_TAG=${TAG} -t ghcr.io/defenseunicorns/leapfrogai/all-minilm-l6-v2:${TAG} .

mpt-7b-chat: base
	cd models/llms/mpt-7b-chat && \
	docker build --network=host --build-arg IMAGE_TAG=${TAG} -t ghcr.io/defenseunicorns/leapfrogai/mpt-7b-chat:${TAG} .

whisper: base
	cd models/speech2text/whisper && \
	docker build --network=host --build-arg IMAGE_TAG=${TAG} -t ghcr.io/defenseunicorns/leapfrogai/whisper:${TAG} .

whisper-push:
	docker push ghcr.io/defenseunicorns/leapfrogai/whisper:${TAG}
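
# Repeater is a test backend that echoes requests back to the caller.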
repeater: base
	cd models/test/repeater && \
	docker build --network=host --build-arg IMAGE_TAG=${TAG} -t ghcr.io/defenseunicorns/leapfrogai/repeater:${TAG} .

repeater-push:
	docker push ghcr.io/defenseunicorns/leapfrogai/repeater:${TAG}
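
# Forward the in-cluster Zarf registry to localhost:5001 for the update-* targets.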
zarf-port-forward:
	kubectl port-forward -n zarf svc/zarf-docker-registry 5001:5000
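
# Regenerate the gRPC stubs from the protobuf definitions in proto/.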
gen: gen-go gen-python

gen-python:
	python3 -m grpc_tools.protoc --proto_path=proto chat/chat.proto --python_out=leapfrogai --pyi_out=leapfrogai --grpc_python_out=leapfrogai
	python3 -m grpc_tools.protoc --proto_path=proto completion/completion.proto --python_out=leapfrogai --pyi_out=leapfrogai --grpc_python_out=leapfrogai
	python3 -m grpc_tools.protoc --proto_path=proto audio/audio.proto --python_out=leapfrogai --pyi_out=leapfrogai --grpc_python_out=leapfrogai
	python3 -m grpc_tools.protoc --proto_path=proto embeddings/embeddings.proto --python_out=leapfrogai --pyi_out=leapfrogai --grpc_python_out=leapfrogai
	python3 -m grpc_tools.protoc --proto_path=proto name/name.proto --python_out=leapfrogai --pyi_out=leapfrogai --grpc_python_out=leapfrogai

gen-go:
	rm -rf pkg/client
	mkdir -p pkg/client
	protoc --go_out=pkg/client --go_opt=paths=source_relative --go-grpc_out=pkg/client --go-grpc_opt=paths=source_relative --proto_path=proto/ --experimental_allow_proto3_optional completion/completion.proto
	protoc --go_out=pkg/client --go_opt=paths=source_relative --go-grpc_out=pkg/client --go-grpc_opt=paths=source_relative --proto_path=proto/ --experimental_allow_proto3_optional chat/chat.proto
	protoc --go_out=pkg/client --go_opt=paths=source_relative --go-grpc_out=pkg/client --go-grpc_opt=paths=source_relative --proto_path=proto/ audio/audio.proto
	protoc --go_out=pkg/client --go_opt=paths=source_relative --go-grpc_out=pkg/client --go-grpc_opt=paths=source_relative --proto_path=proto/ name/name.proto
	protoc --go_out=pkg/client --go_opt=paths=source_relative --go-grpc_out=pkg/client --go-grpc_opt=paths=source_relative --proto_path=proto/ embeddings/embeddings.proto
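
# Rebuild an image, retag and push it into the local Zarf registry, then restart the matching pods in the leapfrogai namespace.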
update-embeddings: embeddings
	docker tag ghcr.io/defenseunicorns/leapfrogai/all-minilm-l6-v2:${TAG} localhost:5001/defenseunicorns/leapfrogai/embeddings:${TAG}-zarf-230844594
	docker push localhost:5001/defenseunicorns/leapfrogai/embeddings:${TAG}-zarf-230844594
	kubectl delete pods -n leapfrogai -l app=embeddings

update-repeater: repeater
	docker tag ghcr.io/defenseunicorns/leapfrogai/repeater:${TAG} localhost:5001/defenseunicorns/leapfrogai/repeater:${TAG}-zarf-4122428005
	docker push localhost:5001/defenseunicorns/leapfrogai/repeater:${TAG}-zarf-4122428005
	kubectl delete pods -n leapfrogai -l app=repeater

update-stablelm: stablelm
	docker tag ghcr.io/defenseunicorns/leapfrogai/stablelm-3b:${TAG} localhost:5001/defenseunicorns/leapfrogai/stablelm-3b:${TAG}-zarf-1442747400
	docker push localhost:5001/defenseunicorns/leapfrogai/stablelm-3b:${TAG}-zarf-1442747400
	kubectl delete pods -n leapfrogai -l app=stablelm

ctransformers: base
	cd models/llms/ctransformers && \
	docker build --network=host --build-arg IMAGE_TAG=${TAG} -t ghcr.io/defenseunicorns/leapfrogai/ctransformers:${TAG} .
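
# Smoke test: run the repeater backend locally on port 50051, exercise it, then tear it down.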
test-init:
	docker run -p 50051:50051 -d --rm --name repeater ghcr.io/defenseunicorns/leapfrogai/repeater:${TAG}

test:
	PYTHONPATH="." python3 models/test/repeater/test.py

teardown:
	docker kill repeater
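
# Build every image in one pass.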
images: base stablelm whisper repeater mpt-7b-chat embeddings