-
Notifications
You must be signed in to change notification settings - Fork 16
/
Copy pathopenai
executable file
·367 lines (309 loc) · 10.7 KB
/
openai
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
#!/usr/bin/env bash
#
# OpenAI CLI v2.2.2
# Created by @janlay
#
# Exit on first error; a pipeline fails when any stage fails.
# NOTE(review): -u is not set, so unset variables silently expand to empty.
set -eo pipefail
# openai-cli accepts various exported environment variables:
# OPENAI_API_KEY : OpenAI's API key
# OPENAI_API_ENDPOINT : Custom API endpoint
# OPENAI_MAX_TOKENS : Maximum number of tokens to use
# OPENAI_CHAT_MODEL : ChatGPT model
# OPENAI_DATA_DIR : Directory to store data
OPENAI_API_ENDPOINT="${OPENAI_API_ENDPOINT:-https://api.openai.com}"
OPENAI_API_KEY="${OPENAI_API_KEY:-}"
OPENAI_MAX_TOKENS="${OPENAI_MAX_TOKENS:-2000}"
OPENAI_CHAT_MODEL="${OPENAI_CHAT_MODEL:-gpt-3.5-turbo-1106}"
# Data directory precedence: OPENAI_DATA_DIR, then $XDG_CONFIG_HOME, then ~/.openai.
# NOTE(review): when only XDG_CONFIG_HOME is set, data files land directly in that
# directory rather than in a dedicated subdirectory — confirm this is intended.
declare _config_dir="${OPENAI_DATA_DIR:-$XDG_CONFIG_HOME}"
OPENAI_DATA_DIR="${_config_dir:-$HOME/.openai}"
# defaults
readonly _app_name=openai _app_version=2.2.2
readonly default_api_version=1 default_api_name=chat/completions default_model="$OPENAI_CHAT_MODEL" default_topic=General
# chat_mode: set by -c; dry_run: set by -n
declare -i chat_mode=0 dry_run=0
declare tokens_file="$OPENAI_DATA_DIR/total_tokens" api_version=$default_api_version api_name=$default_api_name topic=$default_topic
declare dump_file dumped_file data_file temp_dir rest_args prompt_file prompt
# Remove the scratch directory on any exit. The trap handler name is resolved
# when the trap fires, so defining cleanup() below is fine.
trap cleanup EXIT
cleanup() {
  # EXIT-trap handler: discard the per-run scratch directory, if any.
  [[ -d $temp_dir ]] || return 0
  rm -rf -- "$temp_dir"
}
raise_error() {
  # Print a message to stderr and terminate the script.
  #   $1 - message (echo -e escape sequences are honored)
  #   $2 - exit status, default 1; status 0 omits the "openai: " prefix
  local status=${2:-1}
  if [ "$status" != 0 ]; then
    printf '%s: ' "$_app_name" >&2
  fi
  echo -e "$1" >&2
  exit "$status"
}
load_conversation() {
  # Emit the stored conversation JSON for the current topic,
  # or an empty object when no topic file exists yet.
  if [ -f "$data_file" ]; then
    cat "$data_file"
  else
    echo '{}'
  fi
}
update_conversation() {
  # Append one message to the topic's conversation file.
  #   $1 - role (system / user / assistant)
  #   $2 - content: plain text, or a ready-made JSON object
  local role_name=$1 entry=$2 snapshot
  # Plain text is wrapped as {"content": ...}; JSON objects pass through.
  if [[ $entry != \{* ]]; then
    entry=$(jq -n --arg content "$entry" '{$content}')
  fi
  entry=$(jq --arg role "$role_name" '. += {$role}' <<<"$entry")
  # Capture first: redirecting straight into $data_file would truncate it
  # before load_conversation could read it.
  snapshot=$(load_conversation)
  jq --argjson item "$entry" '.messages += [$item]' <<<"$snapshot" >"$data_file"
}
save_tokens() {
  # Record $1 consumed tokens: inside the topic's conversation file (when one
  # exists) and in the global running-total file.
  local -i num=$1
  local snapshot total=0
  if [ -f "$data_file" ]; then
    # Capture first: redirecting into $data_file truncates it before jq reads.
    snapshot=$(load_conversation)
    jq --argjson tokens "$num" '.total_tokens += $tokens' <<<"$snapshot" >"$data_file"
  fi
  [ -f "$tokens_file" ] && total=$(<"$tokens_file")
  echo "$((total + num))" >"$tokens_file"
}
read_prompt() {
  # Collect the prompt for the upcoming request from $rest_args.
  # Leading words of the form '+key=value' are gathered into a JSON object
  # written to $temp_dir/props; the first ordinary word ends property parsing
  # and starts the prompt text, which is written to $temp_dir/prompt.
  # When no prompt words were given, callers fall back to $prompt_file or stdin.
  local word accepts_props=1 props='{}' real_prompt
  if [ ${#rest_args[@]} -gt 0 ]; then
    # scan the remaining CLI words, extracting leading '+key=value' pairs
    for word in "${rest_args[@]}"; do
      if [ $accepts_props -eq 1 ] && [ "${word:0:1}" = '+' ]; then
        word="${word:1}"
        # determine value's type for jq: numbers, booleans, arrays and objects
        # are passed as JSON (--argjson); everything else stays a string (--arg)
        local options=(--arg key "${word%%=*}") value="${word#*=}" arg=--arg
        [[ $value =~ ^[+-]?\ ?[0-9.]+$ || $value = true || $value = false || $value == [\[\{]* ]] && arg=--argjson
        options+=("$arg" value "$value")
        props=$(jq "${options[@]}" '.[$key] = $value' <<<"$props")
      else
        # accumulate prompt words; the leading space is stripped below
        real_prompt="$real_prompt $word"
        accepts_props=0
      fi
    done
    # NOTE(review): $props starts as '{}', so this test is always true and the
    # props file is written even when no +key=value was supplied. Harmless today
    # (merging {} is a no-op), but presumably unintended — confirm.
    [ -n "$props" ] && echo "$props" >"$temp_dir/props"
  fi
  if [ -n "$real_prompt" ]; then
    [ -n "$prompt_file" ] && echo "* Prompt file \`$prompt_file' will be ignored as the prompt parameters are provided." >&2
    # drop the leading space introduced by the accumulation above
    echo -n "${real_prompt:1}" >"$temp_dir/prompt"
  elif [ -n "$prompt_file" ]; then
    [ -f "$prompt_file" ] || raise_error "File not found: $prompt_file." 3
    [[ -s $prompt_file ]] || raise_error "Empty file: $prompt_file." 4
  fi
}
openai_models() {
  # GET /models: pretty-print the full model listing.
  call_api | jq .
}
openai_moderations() {
  # POST /moderations: classify the prompt text; one compact JSON result per line.
  local prop_file="$temp_dir/props"
  local payload='{"model": "text-moderation-latest"}'
  read_prompt
  # merge user-supplied +key=value properties over the defaults
  if [ -f "$prop_file" ]; then
    payload=$(jq -n --argjson payload "$payload" '$payload | . += input' <"$prop_file")
  fi
  # the prompt text becomes the .input field; fall back to file or stdin
  local payload_file="$temp_dir/payload"
  local input_file="$temp_dir/prompt"
  [ -f "$input_file" ] || input_file="${prompt_file:-/dev/stdin}"
  jq -Rs -cn --argjson payload "$payload" '$payload | .input = input' "$input_file" >"$payload_file"
  call_api | jq -c '.results[]'
}
openai_images_generations() {
  # POST /images/generations: print one generated image URL per line.
  local prop_file="$temp_dir/props"
  local payload='{"n": 1, "size": "1024x1024"}'
  read_prompt
  # merge user properties over defaults; response_format is pinned to "url"
  # because the output parsing below reads .data[].url
  if [ -f "$prop_file" ]; then
    payload=$(jq -n --argjson payload "$payload" '$payload | . += input | . += {response_format: "url"}' <"$prop_file")
  fi
  # the prompt text becomes .prompt; fall back to file or stdin
  local payload_file="$temp_dir/payload"
  local input_file="$temp_dir/prompt"
  [ -f "$input_file" ] || input_file="${prompt_file:-/dev/stdin}"
  jq -Rs -cn --argjson payload "$payload" '$payload | .prompt = input' "$input_file" >"$payload_file"
  call_api | jq -r '.data[].url'
}
openai_embeddings() {
  # POST /embeddings: print the embedding response as compact JSON.
  local prop_file="$temp_dir/props"
  local payload='{"model": "text-embedding-ada-002"}'
  read_prompt
  # merge user-supplied +key=value properties over the defaults
  if [ -f "$prop_file" ]; then
    payload=$(jq -n --argjson payload "$payload" '$payload | . += input' <"$prop_file")
  fi
  # the prompt text becomes .input; fall back to file or stdin
  local payload_file="$temp_dir/payload"
  local input_file="$temp_dir/prompt"
  [ -f "$input_file" ] || input_file="${prompt_file:-/dev/stdin}"
  jq -Rs -cn --argjson payload "$payload" '$payload | .input = input' "$input_file" >"$payload_file"
  call_api | jq -c
}
openai_chat_completions() {
  # POST /chat/completions with streaming: build the payload (unless replaying
  # a dumped response via -i), stream the completion text to stdout, and in
  # chat mode append both sides of the exchange to the topic file.
  [ -n "$dumped_file" ] || {
    local prop_file="$temp_dir/props" payload="{\"model\": \"$default_model\", \"stream\": true, \"temperature\": 0.5, \"max_tokens\": $OPENAI_MAX_TOKENS}"
    # overwrite default properties with user's
    read_prompt
    [ -f "$prop_file" ] && {
      payload=$(jq -n --argjson payload "$payload" '$payload | . += input | . += {messages: []}' <"$prop_file")
    }
    local data
    data=$(load_conversation | jq .messages)
    [ "$topic" != "$default_topic" ] && {
      if [ $chat_mode -eq 1 ]; then
        # load all messages for chat mode
        payload=$(jq --argjson messages "$data" 'setpath(["messages"]; $messages)' <<<"$payload")
      else
        # load only first message for non-chat mode
        payload=$(jq --argjson messages "$data" 'setpath(["messages"]; [$messages[0]])' <<<"$payload")
      fi
    }
    # append user's prompt to messages
    local payload_file="$temp_dir/payload" input_file="$temp_dir/prompt"
    [ -f "$input_file" ] || input_file="${prompt_file:-/dev/stdin}"
    jq -Rs -cn --argjson payload "$payload" '$payload | .messages += [{role: "user", content: input}]' "$input_file" >"$payload_file"
    # FIX: $prompt was never assigned, so chat mode recorded an empty user
    # message. Recover the content actually sent from the built payload
    # (stdin may already be consumed, so re-reading the input is not possible).
    prompt=$(jq -r '.messages[-1].content' "$payload_file")
  }
  local chunk reason text role fn_name
  # FIX: read from a process substitution instead of `call_api | while ...`;
  # a pipeline runs this loop in a subshell, discarding $text/$role/$fn_name
  # before the chat-mode bookkeeping below ever sees them (ShellCheck SC2031).
  # call_api still sees the caller's $payload_file via bash dynamic scoping.
  while read -r chunk; do
    [ -z "$chunk" ] && continue
    # each SSE line looks like "data: {json}"; strip the field name
    chunk=$(cut -d: -f2- <<<"$chunk" | jq '.choices[0]')
    reason=$(jq -r '.finish_reason // empty' <<<"$chunk")
    [[ $reason = stop || $reason = function_call ]] && break
    [ -n "$reason" ] && raise_error "API error: $reason" 10
    # get role and function info from the first chunk
    [ -z "$role" ] && {
      role=$(jq -r '.delta.role // empty' <<<"$chunk")
      fn_name=$(jq -r '.delta.function_call.name // empty' <<<"$chunk")
    }
    # trailing-newline-preserving capture; workaround: https://stackoverflow.com/a/15184414
    chunk=$(
      jq -r '.delta | .function_call.arguments // .content // empty' <<<"$chunk"
      printf x
    )
    # ensure chunk is not empty
    [ ${#chunk} -ge 2 ] || continue
    chunk="${chunk:0:${#chunk}-2}"
    text="$text$chunk"
    echo -n "$chunk"
  done < <(call_api)
  # append the exchange to the topic file for chat mode
  [ "$chat_mode" -eq 1 ] && {
    [ -n "$fn_name" ] && text=$(jq -n --arg name "$fn_name" --argjson arguments "${text:-\{\}}" '{function_call: {$name, $arguments}}')
    update_conversation user "$prompt"
    update_conversation "$role" "$text"
  }
  echo
}
# shellcheck disable=SC2120
call_api() {
  # Perform the HTTP request for the current API, or short-circuit:
  #   -i : replay a previously dumped response instead of calling out
  #   -n : describe the would-be request on stderr and exit
  # NOTE: $payload_file is a dynamically-scoped local of the calling
  # openai_* function (bash dynamic scoping).
  if [ -n "$dumped_file" ]; then
    cat "$dumped_file"
    return
  fi
  local url="$OPENAI_API_ENDPOINT/v$api_version/$api_name"
  local auth="Bearer $OPENAI_API_KEY"
  # dry-run mode: show the request instead of sending it
  if [ "$dry_run" -eq 1 ]; then
    {
      echo "Dry-run mode, no API calls made."
      echo -e "\nRequest URL:\n--------------\n$url"
      echo -en "\nAuthorization:\n--------------\n"
      # mask most of the API key before displaying it
      sed -E 's/(sk-.{3}).{41}/\1****/' <<<"$auth"
      if [ -n "$payload_file" ]; then
        echo -e "\nPayload:\n--------------"
        jq <"$payload_file"
      fi
    } >&2
    exit 0
  fi
  local curl_args=("$url" --no-buffer -fsSL -H 'Content-Type: application/json' -H "Authorization: $auth")
  [ -n "$payload_file" ] && curl_args+=(-d @"$payload_file")
  [ $# -gt 0 ] && curl_args+=("$@")
  [ -n "$dump_file" ] && curl_args+=(-o "$dump_file")
  curl "${curl_args[@]}"
  # with -o, the response went to the dump file; nothing more to do
  [ -z "$dump_file" ] || exit 0
}
create_topic() {
  # Seed a new topic with a system message built from the remaining CLI
  # words, then report success and exit 0.
  local initial_prompt="${rest_args[*]}"
  update_conversation system "$initial_prompt"
  raise_error "Topic '$topic' created with initial prompt '$initial_prompt'" 0
}
# Print the CLI help text and exit successfully. raise_error with status 0
# prints the message without the "openai:" error prefix and exits 0.
# (The help text is a single runtime string; kept byte-identical.)
usage() {
raise_error "OpenAI Client v$_app_version
SYNOPSIS
ABSTRACT
$_app_name [-n] [-a api_name] [-v api_version] [-o dump_file] [INPUT...]
$_app_name -i dumped_file
DEFAULT_API (v$default_api_version/$default_api_name)
$_app_name [-c] [+property=value...] [@TOPIC] [-f file | prompt ...]
prompt
Prompt string for the request to OpenAI API. This can consist of multiple
arguments, which are considered to be separated by spaces.
-f file
A file to be read as prompt. If file is - or neither this parameter nor a prompt
is specified, read from standard input.
-c
Continues the topic, the default topic is '$default_topic'.
property=value
Overwrites default properties in payload. Prepend a plus sign '+' before property=value.
eg: +model=gpt-3.5-turbo-0301, +stream=false
TOPICS
Topic starts with an at sign '@'.
To create new topic, use \`$_app_name @new_topic initial prompt'
OTHER APIS
$_app_name -a models
GLOBAL OPTIONS
Global options apply to all APIs.
-v version
API version, default is '$default_api_version'.
-a name
API name, default is '$default_api_name'.
-n
Dry-run mode, don't call API.
-o filename
Dumps API response to a file and exits.
-i filename
Uses specified dumped file instead of requesting API.
Any request-related arguments and user input are ignored.
--
Ignores rest of arguments, useful when unquoted prompt consists of '-'.
-h
Shows this help" 0
}
parse() {
  # Parse command-line options into the script's globals; any remaining
  # words (after an optional leading @topic) become $rest_args.
  local opt
  while getopts 'v:a:f:i:o:cnh' opt; do
    case "$opt" in
      v) api_version=$OPTARG ;;
      a) api_name=$OPTARG ;;
      f)
        prompt_file=$OPTARG
        # '-' means stdin, which is already the default when no file is given
        [ "$prompt_file" = - ] && prompt_file=
        ;;
      c) chat_mode=1 ;;
      n) dry_run=1 ;;
      i) dumped_file=$OPTARG ;;
      o) dump_file=$OPTARG ;;
      h | ?) usage ;;
    esac
  done
  shift "$((OPTIND - 1))"
  # a leading @word selects the topic
  [[ "$1" =~ ^@ ]] && {
    topic="${1#@}"
    shift
  }
  # chatting (-c) requires an explicit, non-default topic
  if [ $chat_mode -ne 0 ]; then
    [[ -n $topic && $topic != "$default_topic" ]] || raise_error 'Topic is required for chatting.' 2
  fi
  rest_args=("$@")
}
check_bin() {
  # Abort with status $2 (default 1) unless executable $1 is on PATH.
  if ! command -v "$1" >/dev/null; then
    raise_error "$1 not found. Use package manager (Homebrew, apt-get etc.) to install it." "${2:-1}"
  fi
}
main() {
  # Entry point: parse arguments, ensure prerequisites, then dispatch to the
  # openai_<api_name> handler (slashes in the API name become underscores,
  # e.g. chat/completions -> openai_chat_completions).
  parse "$@"
  # jq is required by every code path
  check_bin jq 10
  mkdir -p "$OPENAI_DATA_DIR"
  # per-topic conversation store
  data_file="$OPENAI_DATA_DIR/$topic.json"
  temp_dir=$(mktemp -d)
  if [[ $topic == "$default_topic" || -f "$data_file" ]]; then
    # known topic (or the default one): call the requested API
    [ -z "$OPENAI_API_KEY" ] && raise_error 'OpenAI API key is required.' 11
    local fn="openai_${api_name//\//_}"
    [ "$(type -t "$fn")" = function ] || raise_error "API '$api_name' is not available." 12
    "$fn"
  else
    # unknown topic: the remaining words become its initial system prompt
    [ ${#rest_args[@]} -gt 0 ] || raise_error "Prompt for new topic is required" 13
    create_topic
  fi
}
main "$@"