-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path1_call_api_openai.py
49 lines (41 loc) · 1.41 KB
/
1_call_api_openai.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
#%%
import os
import openai
from tqdm import tqdm

# Input full texts and output folder for this run; the commented pair is the
# earlier prisma_amstar configuration, kept for reference.
#FULLTEXT_FOLDER = "data/prisma_amstar/fulltext/txt/"
#RESULTS_FOLDER = "docs/prisma_amstar/gpt3.5_amstar_rep/"
FULLTEXT_FOLDER = "data/precis2/fulltext/txt/"
RESULTS_FOLDER = "docs/precis2/gpt3.5_precis2_rep/"

# Read the API key with a context manager (no leaked file handle) and strip
# whitespace: a trailing newline would otherwise end up in the auth header.
with open("src/hidden/API_KEY_OPENAI") as key_file:
    openai.api_key = key_file.read().strip()

# Only genuine .txt files, sorted for a deterministic processing order.
# endswith(), not substring test: "foo.txt.bak" must not match.
files = sorted(
    name for name in os.listdir(FULLTEXT_FOLDER) if name.endswith(".txt")
)
#%%
# The system and user prompt templates are identical for every publication,
# so read them once here instead of re-opening the files on every iteration.
with open(RESULTS_FOLDER + "prompt_template/system.txt") as f:
    system_prompt = f.read()
with open(RESULTS_FOLDER + "prompt_template/user.txt") as f:
    user_prompt_template = f.read()

for fulltext_file in tqdm(files):
    fulltext_file = fulltext_file.lower()
    # Read the publication full text.
    with open(FULLTEXT_FOLDER + fulltext_file) as f:
        fulltext = f.read()
    # Inject this publication's full text into the user prompt template.
    user_prompt = user_prompt_template.replace("%FULLTEXT%", fulltext)
    try:
        # 16k-context model because full texts are long; temperature=0 for
        # (as far as the API allows) reproducible outputs.
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo-16k-0613",
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": user_prompt}
            ],
            temperature=0,
            max_tokens=2000
        )
    except Exception as e:
        # Best-effort batch: log the failing file and keep going.
        print(fulltext_file)
        print(e)
    else:
        # NOTE(review): mode "a" appends, so re-running accumulates multiple
        # responses in one .json file, and str(response) is not guaranteed to
        # be strict JSON — confirm downstream parsing tolerates both.
        with open(RESULTS_FOLDER + f"responses/{fulltext_file}.json", "a") as f:
            f.write(str(response))
# %%