# yaml-language-server: $schema=https://raw.githubusercontent.com/defenseunicorns/uds-cli/v0.16.0/zarf.schema.json
kind: ZarfPackageConfig
metadata:
  name: leapfrogai-ui
  version: "###ZARF_PKG_TMPL_IMAGE_VERSION###"
  description: >
    A UI for LeapfrogAI
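
# Note: ###ZARF_PKG_TMPL_IMAGE_VERSION### is a create-time package template that Zarf
# substitutes when the package is built; the IMAGE_VERSION constant below carries the
# same value through to deploy time. A minimal build sketch (flag names assumed from
# the standard Zarf CLI, values illustrative):
#   zarf package create . --set IMAGE_VERSION=0.14.0 --flavor upstream --confirm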
constants:
  - name: IMAGE_VERSION
    value: "###ZARF_PKG_TMPL_IMAGE_VERSION###"
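
# Deploy-time variables: entries with `prompt: true` are asked for during an interactive
# `zarf package deploy` (or supplied via a zarf-config file or --set overrides), and
# `sensitive: true` keeps the value out of logs and prompts. Each variable is typically
# referenced in the chart's values files as ###ZARF_VAR_<NAME>###.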
variables:
  - name: LEAPFROGAI_API_BASE_URL
    description: The base URL for the LeapfrogAI API
    default: http://leapfrogai-api.leapfrogai.svc.cluster.local:8080
    prompt: true
    sensitive: true
  - name: OPENAI_API_KEY
    description: OpenAI API key - if specified, the UI will use OpenAI instead of LeapfrogAI
    prompt: true
    default: ""
    sensitive: true
  - name: SUBDOMAIN
    description: The subdomain for the application, used in the Istio ingress configuration
    default: "ai"
  - name: DOMAIN
    description: The domain for the application, used in the Istio ingress configuration
    default: "uds.dev"
    prompt: true
    sensitive: true
  - name: MODEL
    description: The default LLM model to use for chat and summarization
    default: llama-cpp-python
    prompt: true
    sensitive: false
  - name: SYSTEM_PROMPT
    description: The default system prompt to use for the LLM
    default: "You may be provided with a list of files and their content in the following structure: [{filename: test.pdf, text: some fake text}]. Using the content of these files as context, you should refer to specific files by their filename when relevant and use the text content to provide detailed, accurate, and relevant information or answers. If the user asks questions that can be answered based on the content of the provided files, use the appropriate files text in your response. If the user requests clarification, further details, or specific information about a file, respond using the most relevant file or files. If necessary, combine information from multiple files to form a comprehensive response."
    prompt: true
    sensitive: false
  - name: TEMPERATURE
    description: The default temperature for the LLM
    default: "0.1"
    prompt: true
    sensitive: false
  - name: SUPABASE_ANON_KEY
    default: ""
    description: Public key for Supabase when hosted outside of the cluster on a PaaS
    prompt: true
    sensitive: false
  - name: DISABLE_KEYCLOAK
    description: Whether to disable Keycloak (true or false)
    default: "true"
    prompt: true
    sensitive: false
  - name: MESSAGE_LENGTH_LIMIT
    description: The maximum length of a message allowed from the user to the backend
    default: "10000"
    prompt: true
    sensitive: false
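
# Single component that installs the UI Helm chart from the local `chart/` directory.
# `only.flavor: upstream` means this component is included when the package is built
# with the upstream flavor (e.g. `zarf package create --flavor upstream`).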
components:
  - name: leapfrogai-ui
    required: true
    only:
      flavor: upstream
    charts:
      - name: leapfrogai-ui
        namespace: leapfrogai
        localPath: chart
        valuesFiles:
          - "values/upstream-values.yaml"
        # x-release-please-start-version
        version: 0.14.0
        # x-release-please-end
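
    # Images listed here are pulled into the package at create time so the deploy can run
    # air-gapped; their tags are stamped from the same IMAGE_VERSION package template.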
    images:
      - ghcr.io/defenseunicorns/leapfrogai/leapfrogai-ui:###ZARF_PKG_TMPL_IMAGE_VERSION###
      - ghcr.io/defenseunicorns/leapfrogai/ui-migrations:###ZARF_PKG_TMPL_IMAGE_VERSION###
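
    # Post-deploy gates: block until the versioned migrations Job completes and the UI
    # Deployment reports Available, so the deploy only succeeds on a healthy rollout.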
    actions:
      onDeploy:
        after:
          - wait:
              cluster:
                kind: Job
                name: leapfrogai-ui-migrations-###ZARF_PKG_TMPL_IMAGE_VERSION###
                namespace: leapfrogai
                condition: complete
          - wait:
              cluster:
                kind: Deployment
                name: leapfrogai-ui
                namespace: leapfrogai
                condition: Available
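
# A minimal deploy sketch, assuming the standard Zarf package naming and CLI flags
# (the filename, architecture, and version shown are illustrative):
#   zarf package deploy zarf-package-leapfrogai-ui-amd64-0.14.0.tar.zst \
#     --set DOMAIN=uds.dev --set MODEL=llama-cpp-python --confirm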