From e523b08492dcf6bb8abfb36db1fb6ff317093505 Mon Sep 17 00:00:00 2001
From: sean1832
Date: Sun, 26 Mar 2023 06:59:26 +1100
Subject: [PATCH] fix: Final Summary Prompt causes value error when final summary mode is off.

---
 src/util.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/util.py b/src/util.py
index 7d00ded..b5dc2da 100644
--- a/src/util.py
+++ b/src/util.py
@@ -214,7 +214,6 @@ async def summarize_experimental_concurrently(content: str, chunk_size: int = 10
     chunks = convert_to_chunks(content, chunk_size)
     REC_PROMPT = PromptTemplate(template=st.session_state['OPENAI_PERSONA_REC'], input_variables=['text'])
-    FINAL_PROMPT = PromptTemplate(template=st.session_state['OPENAI_PERSONA_SUM'], input_variables=['text'])

     chain = LLMChain(llm=llm_rec, prompt=REC_PROMPT)

     tasks = []
@@ -232,11 +231,12 @@ async def summarize_experimental_concurrently(content: str, chunk_size: int = 10
         count += 1
     rec_result = sorted(outputs_rec, key=lambda x: x['chunk_id'])
     if st.session_state['FINAL_SUMMARY_MODE']:
+        FINAL_PROMPT = PromptTemplate(template=st.session_state['OPENAI_PERSONA_SUM'], input_variables=['text'])
         chain = load_summarize_chain(llm_final, chain_type='stuff', prompt=FINAL_PROMPT)
         docs = convert_to_docs(rec_result)
         final_result = chain.run(docs)
     else:
-        final_result = ''
+        final_result = None
     return rec_result, final_result

 @st.cache_data(show_spinner=False)
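
A minimal sketch of the failure mode this commit fixes, assuming the summary
persona template is left unset while final summary mode is off (the
session_state values below are illustrative, not from the original repo).
Building FINAL_PROMPT unconditionally then raises at construction time,
because PromptTemplate validates its inputs on creation; LangChain's pydantic
ValidationError is a ValueError subclass, matching the "value error" in the
subject. The fix defers construction into the branch that actually uses it.

    # Hypothetical reproduction of the bug and the fix.
    from langchain.prompts import PromptTemplate

    session_state = {
        'OPENAI_PERSONA_REC': 'Summarize: {text}',
        'OPENAI_PERSONA_SUM': None,   # unset while final summary mode is off
        'FINAL_SUMMARY_MODE': False,
    }

    # Before the fix: constructed unconditionally -> raises a ValueError
    # because template=None fails PromptTemplate's validation.
    try:
        PromptTemplate(template=session_state['OPENAI_PERSONA_SUM'],
                       input_variables=['text'])
    except ValueError as err:
        print(f'eager construction fails: {err}')

    # After the fix: built only when the final summary will actually run.
    if session_state['FINAL_SUMMARY_MODE']:
        FINAL_PROMPT = PromptTemplate(
            template=session_state['OPENAI_PERSONA_SUM'],
            input_variables=['text'])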