From fb7760eee9159a3e16b9a017ec043cafe4a7c00f Mon Sep 17 00:00:00 2001
From: Beckett <133655553+severian42@users.noreply.github.com>
Date: Mon, 15 Jul 2024 07:50:25 -0500
Subject: [PATCH 1/9] Update README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 0043daf5..850e7d78 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 
 Welcome to **GraphRAG Local with Ollama and Interactive UI**! This is an adaptation of Microsoft's [GraphRAG](https://github.com/microsoft/graphrag), tailored to support local models using Ollama and featuring a new interactive user interface.
 
-*NOTE: The app is fully functional but I am currently in the process of debugging certain aspects so everything will run more smoothly. This may mean you will need to update your version by the end of today if you encounter an error by chance. I am trying to be fluid with the adjustments. I will also be making some changes so the model provider is more agnostic and not reliant on just Ollama. Feel free to open an Issue if you run into an error and I will try to address it immediately so you don't run into any downtime*
+*NOTE: The app gained traction much quicker than I anticipated so I am frantically trying to get the bugs fixed and suggested improvements integrated. Right now it is functional but you may still run into some issues. This may mean you will need to update your version by the end of today if you encounter an error by chance. I am trying to be fluid with the adjustments. I will also be making some changes so the model provider is more agnostic and not reliant on just Ollama. Feel free to open an Issue if you run into an error and I will try to address it ASAP so you don't run into any downtime*
 
 ## 📄 Research Paper
 

From 4365f13c4686cd1070223fec9eeaaed38e543564 Mon Sep 17 00:00:00 2001
From: Beckett <133655553+severian42@users.noreply.github.com>
Date: Mon, 15 Jul 2024 10:21:27 -0500
Subject: [PATCH 2/9] Update README.md

---
 README.md | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 850e7d78..0d0b14cb 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,15 @@
 
 Welcome to **GraphRAG Local with Ollama and Interactive UI**! This is an adaptation of Microsoft's [GraphRAG](https://github.com/microsoft/graphrag), tailored to support local models using Ollama and featuring a new interactive user interface.
 
-*NOTE: The app gained traction much quicker than I anticipated so I am frantically trying to get the bugs fixed and suggested improvements integrated. Right now it is functional but you may still run into some issues. This may mean you will need to update your version by the end of today if you encounter an error by chance. I am trying to be fluid with the adjustments. I will also be making some changes so the model provider is more agnostic and not reliant on just Ollama. Feel free to open an Issue if you run into an error and I will try to address it ASAP so you don't run into any downtime*
+*NOTE: The app gained traction much quicker than I anticipated so I am frantically trying to get the bugs fixed and suggested improvements integrated. Right now it is functional but you may still run into some issues. This may mean you will need to update your version by the end of today if you encounter an error by chance. I am trying to be fluid with the adjustments.
+
+Changes being made right now:
+- LLM agnostic: Use Ollama or set your own base URL and local model for LLM and Embedder
+- Bug fixes on indexing and output file generation (missing _final/entities on some calls)
+- Launch your own GraphRAG API server so you can use the functions in your own external app
+- Dockerfile for easier deployment
+
+Feel free to open an Issue if you run into an error and I will try to address it ASAP so you don't run into any downtime*
 
 ## 📄 Research Paper
 

From 6ee17598a2ee5c9cd36d591f7584767f4cd63a68 Mon Sep 17 00:00:00 2001
From: Beckett <133655553+severian42@users.noreply.github.com>
Date: Mon, 15 Jul 2024 11:07:37 -0500
Subject: [PATCH 3/9] Update settings.yaml

---
 ragtest/settings.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ragtest/settings.yaml b/ragtest/settings.yaml
index 19a45c4d..4b3e2809 100644
--- a/ragtest/settings.yaml
+++ b/ragtest/settings.yaml
@@ -31,7 +31,7 @@ embeddings:
   llm:
     api_key: ${GRAPHRAG_API_KEY}
     type: openai_embedding # or azure_openai_embedding
-    model: nomic_embed_text
+    model: nomic-embed-text
     api_base: http://localhost:11434/v1
     # api_version: 2024-02-15-preview
     # organization:
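PATCH 3/9 changes the embedding model name to the hyphenated form that Ollama actually serves (`nomic-embed-text`). A quick way to confirm the renamed model is reachable before kicking off indexing is to request a single embedding through the `ollama` Python client already listed in requirements.txt. This is a minimal sketch, assuming a default Ollama install on `http://localhost:11434` and that the model has been pulled with `ollama pull nomic-embed-text`:

```python
# Sanity check for the embedding model referenced in ragtest/settings.yaml.
# Assumes the Ollama daemon is running locally with its default settings.
import sys

import ollama


def embedding_model_ready(model: str = "nomic-embed-text") -> bool:
    try:
        # ollama.embeddings returns a response containing an "embedding" vector.
        response = ollama.embeddings(model=model, prompt="ping")
        return len(response["embedding"]) > 0
    except Exception as exc:  # daemon not running, model not pulled, etc.
        print(f"Embedding check failed: {exc}", file=sys.stderr)
        print(f"Try: ollama pull {model}", file=sys.stderr)
        return False


if __name__ == "__main__":
    print("ready" if embedding_model_ready() else "not ready")
```

If this check fails while `settings.yaml` looks correct, the underscore/hyphen mismatch that PATCH 3/9 fixes is the first thing to rule out.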
From 308375d6abb086e6f80108f4b256504b64719949 Mon Sep 17 00:00:00 2001
From: Beckett <133655553+severian42@users.noreply.github.com>
Date: Tue, 16 Jul 2024 02:30:04 -0500
Subject: [PATCH 4/9] Update requirements.txt

---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index b80bab0c..aad2bea7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -17,7 +17,7 @@ pydantic
 rich
 typing-extensions
 ollama
-gradio
+gradio>=latest
 PyYAML
 matplotlib
 plotly

From d1405fbc5ae2379bc291b1f6c4c46930372f233d Mon Sep 17 00:00:00 2001
From: Beckett <133655553+severian42@users.noreply.github.com>
Date: Tue, 16 Jul 2024 04:01:20 -0500
Subject: [PATCH 5/9] Update app.py

---
 app.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/app.py b/app.py
index cd670b0a..3ddf1d27 100644
--- a/app.py
+++ b/app.py
@@ -681,13 +681,13 @@ def list_folder_contents(folder_path):
 default_model = settings['llm']['model']
 
 with gr.Blocks(css=custom_css, theme=gr.themes.Base()) as demo:
-    gr.Markdown("# GraphRAG UI", elem_id="title")
+    gr.Markdown("# GraphRAG Local UI", elem_id="title")
 
     with gr.Row(elem_id="main-container"):
         with gr.Column(scale=1, elem_id="left-column"):
             with gr.Tabs():
                 with gr.TabItem("Data Management"):
-                    with gr.Accordion("File Operations", open=False):
+                    with gr.Accordion("File Upload (.txt)", open=True):
                         file_upload = gr.File(label="Upload .txt File", file_types=[".txt"])
                         upload_btn = gr.Button("Upload File", variant="primary")
                         upload_output = gr.Textbox(label="Upload Status", visible=False)
@@ -705,7 +705,7 @@ def list_folder_contents(folder_path):
 
                         operation_status = gr.Textbox(label="Operation Status", visible=False)
 
-                    with gr.Accordion("Indexing", open=False):
+                    with gr.Accordion("Indexing", open=True):
                         root_dir = gr.Textbox(label="Root Directory", value=os.path.abspath("./ragtest"))
                         index_btn = gr.Button("Run Indexing", variant="primary")
                         index_output = gr.Textbox(label="Indexing Output", lines=5, visible=False)
From da381bebb6986c29404c2a1f581cc09d1c84b40d Mon Sep 17 00:00:00 2001
From: Beckett <133655553+severian42@users.noreply.github.com>
Date: Tue, 16 Jul 2024 04:13:13 -0500
Subject: [PATCH 6/9] Update app.py

---
 app.py | 51 ++++++++++++++++++++++++++++++++++++++++++++++-----
 1 file changed, 46 insertions(+), 5 deletions(-)

diff --git a/app.py b/app.py
index 3ddf1d27..d5a44199 100644
--- a/app.py
+++ b/app.py
@@ -87,12 +87,47 @@ def run_command(command):
     except subprocess.CalledProcessError as e:
         return f"Error: {e.stderr}"
 
-def index_graph(root_dir):
+def index_graph(root_dir, progress=gr.Progress()):
     command = f"python -m graphrag.index --root {root_dir}"
     logging.info(f"Running indexing command: {command}")
-    result = run_command(command)
+
+    # Create a queue to store the output
+    output_queue = queue.Queue()
+
+    def run_command_with_output():
+        process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
+        for line in iter(process.stdout.readline, ''):
+            output_queue.put(line)
+        process.stdout.close()
+        process.wait()
+
+    # Start the command in a separate thread
+    thread = threading.Thread(target=run_command_with_output)
+    thread.start()
+
+    # Initialize progress
+    progress(0, desc="Starting indexing...")
+
+    # Process the output and update progress
+    full_output = []
+    while thread.is_alive() or not output_queue.empty():
+        try:
+            line = output_queue.get_nowait()
+            full_output.append(line)
+
+            # Update progress based on the output
+            if "Processing file" in line:
+                progress((0.5, None), desc="Processing files...")
+            elif "Indexing completed" in line:
+                progress(1, desc="Indexing completed")
+
+            yield "\n".join(full_output), update_logs()
+        except queue.Empty:
+            time.sleep(0.1)
+
+    thread.join()
     logging.info("Indexing completed")
-    return result, update_logs()
+    return "\n".join(full_output), update_logs()
 
 def run_query(root_dir, method, query, history):
     command = f"python -m graphrag.query --root {root_dir} --method {method} \"{query}\""
@@ -708,7 +743,8 @@ def list_folder_contents(folder_path):
                     with gr.Accordion("Indexing", open=True):
                         root_dir = gr.Textbox(label="Root Directory", value=os.path.abspath("./ragtest"))
                         index_btn = gr.Button("Run Indexing", variant="primary")
-                        index_output = gr.Textbox(label="Indexing Output", lines=5, visible=False)
+                        index_output = gr.Textbox(label="Indexing Output", lines=10, visible=True)
+                        index_progress = gr.Textbox(label="Indexing Progress", visible=True)
 
                 with gr.TabItem("Indexing Outputs"):
                     output_folder_list = gr.Dropdown(label="Select Output Folder", choices=[], interactive=True)
@@ -767,7 +803,12 @@ def list_folder_contents(folder_path):
     )
     delete_btn.click(fn=delete_file, inputs=[file_list], outputs=[operation_status, file_list, log_output])
     save_btn.click(fn=save_file_content, inputs=[file_list, file_content], outputs=[operation_status, log_output])
-    index_btn.click(fn=index_graph, inputs=[root_dir], outputs=[index_output, log_output])
+    index_btn.click(
+        fn=index_graph,
+        inputs=[root_dir],
+        outputs=[index_output, log_output],
+        show_progress=True
+    )
     refresh_folder_btn.click(fn=update_output_folder_list, outputs=[output_folder_list]).then(
         fn=update_logs,
         outputs=[log_output]
     )

From 4d3c74cbbd622e8e6b77edf45c15b3d3815d1582 Mon Sep 17 00:00:00 2001
From: Beckett <133655553+severian42@users.noreply.github.com>
Date: Tue, 16 Jul 2024 06:51:44 -0500
Subject: [PATCH 7/9] Update requirements.txt

---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index aad2bea7..b80bab0c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -17,7 +17,7 @@ pydantic
 rich
 typing-extensions
 ollama
-gradio>=latest
+gradio
 PyYAML
 matplotlib
 plotly
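PATCH 6/9 reworks `index_graph` from a blocking `run_command` call into a generator: the indexing command runs in a background thread, each output line is pushed onto a `queue.Queue`, and the accumulated text is yielded so the UI can refresh while indexing is still running. The sketch below isolates that pattern in a standalone form; the command here is a placeholder, and the real function additionally reports through `gr.Progress` and `update_logs()`.

```python
# Standalone sketch of the thread + queue streaming pattern from PATCH 6/9.
# A worker thread runs the command and feeds lines into a queue; the generator
# drains the queue and yields the output collected so far.
import queue
import subprocess
import threading
import time


def stream_command(command: str):
    output_queue: "queue.Queue[str]" = queue.Queue()

    def worker() -> None:
        process = subprocess.Popen(
            command,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
        )
        for line in iter(process.stdout.readline, ""):
            output_queue.put(line)
        process.stdout.close()
        process.wait()

    thread = threading.Thread(target=worker)
    thread.start()

    collected = []
    while thread.is_alive() or not output_queue.empty():
        try:
            collected.append(output_queue.get_nowait())
            yield "".join(collected)  # partial output so far
        except queue.Empty:
            time.sleep(0.1)  # avoid busy-waiting while the command is quiet
    thread.join()
    yield "".join(collected)  # final, complete output


if __name__ == "__main__":
    # Toy usage; a POSIX shell is assumed for this example command.
    for snapshot in stream_command("echo start && sleep 1 && echo done"):
        print(snapshot.strip())
```

Because the patched `index_graph` now contains `yield`, its trailing `return` only terminates the generator; the last yielded value is what reaches the `index_output` textbox, which is also why PATCH 8/9 has to enable Gradio's queue.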
From 2e1189e117c633900c5fc245ff66fc9a86abad88 Mon Sep 17 00:00:00 2001
From: Beckett <133655553+severian42@users.noreply.github.com>
Date: Tue, 16 Jul 2024 06:57:03 -0500
Subject: [PATCH 8/9] Update app.py

---
 app.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/app.py b/app.py
index d5a44199..83b2b67b 100644
--- a/app.py
+++ b/app.py
@@ -867,4 +867,5 @@ def list_folder_contents(folder_path):
     """)
 
 if __name__ == "__main__":
+    demo.queue()
     demo.launch()

From df06e5a151ee2e8f5625e55b0b5da40b3d58f1a4 Mon Sep 17 00:00:00 2001
From: Beckett <133655553+severian42@users.noreply.github.com>
Date: Tue, 16 Jul 2024 08:13:05 -0500
Subject: [PATCH 9/9] Update README.md

---
 README.md | 1 -
 1 file changed, 1 deletion(-)

diff --git a/README.md b/README.md
index ce57169c..0d0b14cb 100644
--- a/README.md
+++ b/README.md
@@ -49,7 +49,6 @@ Follow these steps to set up and run GraphRAG Local with Ollama and Interactive
 
 4. **Launch the interactive UI:**
    ```bash
-   conda activate graphrag
    gradio app.py
    ```
    or
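PATCH 8/9 is a one-line change, but it is what makes the streaming handler usable: Gradio only delivers the intermediate `yield`s of a generator event handler when the app's queue is enabled. Below is a minimal sketch of that relationship with an illustrative handler rather than the app's real layout; the queue requirement is assumed for Gradio 3.x, while recent 4.x releases enable queuing by default.

```python
# Minimal sketch: a generator handler streams into a Textbox only when the
# Blocks queue is enabled, which is why PATCH 8/9 adds demo.queue().
import time

import gradio as gr


def slow_task():
    lines = []
    for step in range(1, 4):
        time.sleep(0.5)
        lines.append(f"step {step}/3 done")
        yield "\n".join(lines)  # each yield updates the bound output component


with gr.Blocks() as demo:
    run_btn = gr.Button("Run")
    live_output = gr.Textbox(label="Live Output", lines=5)
    run_btn.click(fn=slow_task, outputs=[live_output])

if __name__ == "__main__":
    demo.queue()  # older Gradio versions refuse generator handlers without this
    demo.launch()
```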