From 3f382cb95cbc2dbe078b6e4a82a31994e1f52c10 Mon Sep 17 00:00:00 2001
From: Pedro Rico Pinazo
Date: Wed, 8 May 2024 06:57:34 +0100
Subject: [PATCH] fix bug and improve settings

---
 README.md             | 51 +++++++++++++++++++++++++------------------
 pyproject.toml        |  1 +
 src/comai/chain.py    |  2 +-
 src/comai/cli.py      | 68 ++++++++++++++++++++++++++++++++++++++++----
 src/comai/menu.py     | 10 +++++--
 src/comai/settings.py |  9 +++++-
 6 files changed, 111 insertions(+), 30 deletions(-)

diff --git a/README.md b/README.md
index 973b7b9..25da813 100644
--- a/README.md
+++ b/README.md
@@ -1,16 +1,17 @@
 comai
-
- **The AI powered terminal assistant**
-
- [![Tests](https://github.com/ricopinazo/comai/actions/workflows/tests.yml/badge.svg)](https://github.com/ricopinazo/comai/actions/workflows/tests.yml)
- [![Latest release](https://img.shields.io/github/v/release/ricopinazo/comai?color=brightgreen&include_prereleases)](https://github.com/ricopinazo/comai/releases)
- [![PyPI](https://img.shields.io/pypi/v/comai)](https://pypi.org/project/comai/)
- [![Issues](https://img.shields.io/github/issues/ricopinazo/comai?color=brightgreen)](https://github.com/ricopinazo/comai/issues)
- [![PyPI - Downloads](https://img.shields.io/pypi/dm/comai)](https://pypi.org/project/comai/)
- [![License GPLv3](https://img.shields.io/badge/license-GPLv3-blue.svg)](./LICENSE)
- [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
- [![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/)
+
+**The AI powered terminal assistant**
+
+[![Tests](https://github.com/ricopinazo/comai/actions/workflows/tests.yml/badge.svg)](https://github.com/ricopinazo/comai/actions/workflows/tests.yml)
+[![Latest release](https://img.shields.io/github/v/release/ricopinazo/comai?color=brightgreen&include_prereleases)](https://github.com/ricopinazo/comai/releases)
+[![PyPI](https://img.shields.io/pypi/v/comai)](https://pypi.org/project/comai/)
+[![Issues](https://img.shields.io/github/issues/ricopinazo/comai?color=brightgreen)](https://github.com/ricopinazo/comai/issues)
+[![PyPI - Downloads](https://img.shields.io/pypi/dm/comai)](https://pypi.org/project/comai/)
+[![License GPLv3](https://img.shields.io/badge/license-GPLv3-blue.svg)](./LICENSE)
+[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
+[![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/)
+
 
 ## What is comai? 🎯
@@ -35,9 +36,22 @@ However, if you usually work with python environments, it is recommended to use
 pipx install comai
 ```
 
-The first time you run it, it'll ask you for an OpenAI API key. You can create a developer account [here](https://platform.openai.com/overview). Once in your account, go to `API Keys` section and `Create new secret key`. We recommend setting a usage limit under `Billing`/`Usage limits`.
+By default, `comai` is set up to use ollama under the hood, which allows you to run any popular open-source LLM locally. If you are happy with this, make sure ollama is installed and running. You can find the install instructions [here](https://ollama.com/download).
+
+Once it is installed, make sure to download the `llama3` model, since `comai` has been optimised for it:
+
+```shell
+ollama pull llama3
+```
+
+Otherwise, you can select any other model available in the ollama service via:
+
+```shell
+comai --config
+```
 
 > **_NOTE:_** `comai` uses the environment variable `TERM_SESSION_ID` to maintain context between calls so you don't need to repeat yourself giving instructions to it. You can check if it is available in your default terminal checking the output of `echo $TERM_SESSION_ID`, which should return some type of UUID. If the output is empty, you can simply add the following to your `.zshrc`/`.bashrc` file:
+>
 > ```shell
 > export TERM_SESSION_ID=$(uuidgen)
 > ```
@@ -49,6 +63,7 @@ Using `comai` is straightforward. Simply invoke the `comai` command followed by
 Let's dive into some exciting examples of how you can harness the power of `comai`:
 
 1. Manage your system like a pro:
+
 ```shell
 $ comai print my private ip address
 ❯ ifconfig | grep "inet " | grep -v 127.0.0.1 | awk '{print $2}'
 192.168.0.10
@@ -60,6 +75,7 @@ $ comai and my public one
 ```
 
 2. Master the intricacies of `git`:
+
 ```shell
 $ comai squash the last 3 commits into a single commit
 ❯ git rebase -i HEAD~3
@@ -72,12 +88,14 @@ $ comai show me all the branches having commit c4c0d2d in common
 ```
 
 3. Check the weather forecast for your location:
+
 ```shell
 $ comai show me the weather forecast
 ❯ curl wttr.in
 ```
 
 4. Find the annoying process using the port 8080:
+
 ```shell
 $ comai show me the process using the port 8080
 ❯ lsof -i :8080
@@ -93,6 +111,7 @@ $ comai kill it
 ```
 
 5. Swiftly get rid of all your docker containers:
+
 ```shell
 $ comai stop and remove all running docker containers
 ❯ docker stop $(docker ps -aq) && docker rm $(docker ps -aq)
@@ -108,7 +127,6 @@ If you're interested in joining the development of new features for `comai`, her
 2. Install the package in editable mode by running `pip install -e .`.
 3. Run the tests using `pytest`. Make sure you have the `OPENAI_API_KEY` environment variable set up with your OpenAI API key. Alternatively, you can create a file named `.env` and define the variable there.
 
-
 This project utilizes black for code formatting. To ensure your changes adhere to this format, simply follow these steps:
 
 ```shell
@@ -116,13 +134,6 @@ pip install black
 black .
 ```
 
-For users of VS Code, you can configure the following options after installing `black`:
-
-```json
-"editor.formatOnSave": true,
-"python.formatting.provider": "black"
-```
-
 ## License 📜
 
 Comai is licensed under the GPLv3. You can find the full text of the license in the [LICENSE](./LICENSE) file.
diff --git a/pyproject.toml b/pyproject.toml
index 823adbb..63f43a5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,6 +21,7 @@ dependencies = [
     "typer[all]==0.9.0",
     "langchain==0.1.17",
     "langchain-openai==0.1.6",
+    "ollama==0.1.9",
 ]
 
 [project.urls]
diff --git a/src/comai/chain.py b/src/comai/chain.py
index 5c6b557..baeda70 100644
--- a/src/comai/chain.py
+++ b/src/comai/chain.py
@@ -126,7 +126,7 @@ def query_command(
     query: str, settings: Settings, context: Context
 ) -> Iterator[StreamStart | Token | FinalCommand]:
     stream = create_chain_stream(settings, context)
-    output = stream({"question": "print your name"})
+    output = stream({"question": query})
 
     started = False
     buffer = ""
diff --git a/src/comai/cli.py b/src/comai/cli.py
index 589bb78..34f6dd5 100755
--- a/src/comai/cli.py
+++ b/src/comai/cli.py
@@ -1,12 +1,16 @@
 import os
+import sys
 import typer
+import click
 import itertools
 from typing import List, Optional, Iterator
 from typing_extensions import Annotated
+from langchain_community.llms.ollama import OllamaEndpointNotFoundError
 
 from comai import __version__
 from comai.chain import StreamStart, Token, FinalCommand, query_command
-from comai.settings import load_settings
+from comai.ollama import get_ollama_model_names
+from comai.settings import load_settings, write_settings, Settings
 from comai.context import get_context
 from comai.menu import get_option_from_menu, MenuOption
 from comai.animations import (
@@ -26,15 +30,56 @@ def version_callback(value: bool):
         raise typer.Exit()
 
 
-def main_normal_flow(instructions: List[str]):
+def show_settings_callback(value: bool):
+    if value:
+        settings = load_settings()
+        print("Current settings:")
+        print(settings.model_dump_json(indent=2))
+        raise typer.Exit()
+
+
+def settings_callback(value: bool):
+    if value:
+        settings = load_settings()
+        ollama_models = get_ollama_model_names()
+        if settings.model in ollama_models:
+            default_model = settings.model
+        elif "llama3" in ollama_models:
+            default_model = "llama3"
+        elif len(ollama_models) > 0:
+            default_model = ollama_models[0]
+        else:
+            default_model = "llama3"
+        model = click.prompt(
+            "Ollama model",
+            type=click.Choice(ollama_models),
+            default=default_model,
+            show_default=True,
+            show_choices=True,
+        )
+        verbose = click.prompt(
+            "Verbose mode",
+            type=click.BOOL,
+            default="yes" if settings.verbose else "no",
+            show_default=True,
+            show_choices=True,
+        )
+        settings.provider = "ollama"
+        settings.model = model
+        settings.verbose = verbose
+        write_settings(settings)
+        raise typer.Exit()
+
+
+def main_normal_flow(instructions: List[str], settings: Settings):
     final_command: str | None = None
     input_text = " ".join(instructions)
 
     hide_cursor()
-    settings = load_settings()
     context = get_context()
     output = query_command(input_text, settings, context)
+
     with query_animation():
         stream_start = next(output)
         assert type(stream_start) == StreamStart
@@ -50,7 +95,7 @@
     if final_command is None:
         raise Exception("failed to fetch command")
 
-    match get_option_from_menu():
+    match get_option_from_menu(settings):
         case MenuOption.run:
             os.system(final_command)
         case MenuOption.cancel:
@@ -63,9 +108,22 @@ def main(
     version: Annotated[
         Optional[bool], typer.Option("--version", callback=version_callback)
     ] = None,
+    config: Annotated[
+        Optional[bool], typer.Option("--config", callback=settings_callback)
+    ] = None,
+    show_config: Annotated[
+        Optional[bool], typer.Option("--show-config", callback=show_settings_callback)
+    ] = None,
 ):
+    settings = load_settings()
+
     try:
-        main_normal_flow(instructions)
+        main_normal_flow(instructions, settings)
+    except OllamaEndpointNotFoundError:
+        sys.stderr.write(
+            f"Model '{settings.model}' not found in the ollama service. Please download it with 'ollama pull {settings.model}' or select a different model with 'comai --config'.\n"
+        )
+        raise typer.Exit(1)
     except Exception as e:
         raise e
     finally:
diff --git a/src/comai/menu.py b/src/comai/menu.py
index 32d8b99..52016b3 100644
--- a/src/comai/menu.py
+++ b/src/comai/menu.py
@@ -1,9 +1,12 @@
 import click
 from enum import Enum
-from .animations import show_cursor
+
 from rich import print
 from rich.markup import escape
 
+from comai.settings import Settings
+from comai.animations import show_cursor
+
 
 class MenuOption(str, Enum):
     run = "r"
@@ -14,8 +17,9 @@ class MenuOption(str, Enum):
 MENU_PROMPT = f"[bright_black] ➜ [underline bold]r[/underline bold]un | [underline bold]c[/underline bold]ancel {DEFAULT_OPTION}:[/bright_black]"
 
 
-def get_option_from_menu() -> MenuOption:
-    print(MENU_PROMPT, end="", flush=True)
+def get_option_from_menu(settings: Settings) -> MenuOption:
+    if settings.verbose:
+        print(MENU_PROMPT, end="", flush=True)
     show_cursor()
     option = click.prompt(
         "",
diff --git a/src/comai/settings.py b/src/comai/settings.py
index fe02414..a3d3884 100644
--- a/src/comai/settings.py
+++ b/src/comai/settings.py
@@ -12,9 +12,10 @@ class Settings(BaseModel):
     provider: Literal["ollama", "openai"]
     # TODO: improve this, should be typed per provider, although possible models can be queried at runtime
     model: str = "llama3"
+    verbose: bool = True
 
 
-DEFAULT_SETTINGS: Settings = Settings(provider="ollama", model="llama3")
+DEFAULT_SETTINGS: Settings = Settings(provider="ollama")
 
 
 def load_settings() -> Settings:
@@ -23,3 +24,9 @@ def load_settings() -> Settings:
     except:
         # TODO: if there is indeed a file but the file is incorrect, we should complain instead of returning the default
         return DEFAULT_SETTINGS
+
+
+def write_settings(settings: Settings):
+    json = settings.model_dump_json(indent=2)
+    with open(settings_path, "w") as file:
+        file.write(json + "\n")
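
The `get_ollama_model_names` helper imported by `cli.py` lives in `src/comai/ollama.py`, which is not shown in this patch. For reference, a minimal sketch of what it could look like, assuming the `ollama` client's `list()` call as pinned above (`ollama==0.1.9`); the tag-stripping detail is illustrative, not necessarily the shipped implementation:

```python
# Hypothetical sketch of src/comai/ollama.py: list the models available in the
# local ollama service. Assumes ollama==0.1.9, whose list() returns a dict
# like {"models": [{"name": "llama3:latest", ...}, ...]}.
from typing import List

import ollama


def get_ollama_model_names() -> List[str]:
    """Return the names of the models pulled into the local ollama service."""
    models = ollama.list()["models"]
    # Strip the tag ("llama3:latest" -> "llama3") so the names match what
    # users type at the `comai --config` prompt.
    return [model["name"].split(":", 1)[0] for model in models]
```

With a helper along these lines, the `--config` prompt in `settings_callback` can offer the locally available models as its choice list.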