Merge pull request #95 from nollied/cleanup
Added new modules and methods, updated version number, and added new dependencies and imports.
Showing 24 changed files with 315 additions and 111 deletions.
@@ -1 +1 @@
-__version__ = "0.3.12"
+__version__ = "0.3.13"
@@ -1,19 +1,35 @@
+from typing import Optional
 from mindflow.settings import Settings
+from mindflow.utils.constants import MinimumReservedLength
+from mindflow.utils.prompts import CHAT_PROMPT_PREFIX
+from mindflow.utils.token import get_token_count

+
 def run_chat(prompt: str) -> str:
     """
     This function is used to generate a prompt and then use it as a prompt for GPT bot.
     """
     settings = Settings()
+    completion_model = settings.mindflow_models.query.model
+
+    if (
+        get_token_count(completion_model, CHAT_PROMPT_PREFIX + prompt)
+        > completion_model.hard_token_limit - MinimumReservedLength.CHAT.value
+    ):
+        print("The prompt is too long. Please try again with a shorter prompt.")
+        return ""

     # Prompt GPT through Mindflow API or locally
-    response: str = settings.mindflow_models.query.model(
+    response: Optional[str] = completion_model(
         [
             {
                 "role": "system",
-                "content": "You are a helpful virtual assistant responding to a users query using your general knowledge and the text provided below.",
+                "content": CHAT_PROMPT_PREFIX,
             },
             {"role": "user", "content": prompt},
         ]
     )
+
+    if response is None:
+        return "Unable to generate response. Please try again. If the problem persists, please raise an issue at: https://github.com/nollied/mindflow-cli/issues."
     return response
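The guard added to run_chat follows a common pattern: estimate the token count of the prefixed prompt and refuse to call the model when it would eat into the space reserved for the completion. The sketch below shows that pattern in isolation; `count_tokens`, `HARD_TOKEN_LIMIT`, `CHAT_RESERVED_TOKENS`, and `PROMPT_PREFIX` are illustrative stand-ins for `get_token_count`, `hard_token_limit`, `MinimumReservedLength.CHAT`, and `CHAT_PROMPT_PREFIX`, not mindflow's actual implementation.

```python
# Hypothetical sketch of the prompt-length guard, not mindflow's real code.
# The constants and the crude word-count tokenizer are assumptions; the real
# code uses a model-specific tokenizer via get_token_count.

HARD_TOKEN_LIMIT = 4096        # assumed per-model context window
CHAT_RESERVED_TOKENS = 512     # room reserved for the model's reply
PROMPT_PREFIX = "You are a helpful assistant. "  # stand-in for CHAT_PROMPT_PREFIX


def count_tokens(text: str) -> int:
    """Crude token estimate; a real tokenizer would be model-specific."""
    return len(text.split())


def guarded_prompt(prompt: str) -> str:
    full_prompt = PROMPT_PREFIX + prompt
    if count_tokens(full_prompt) > HARD_TOKEN_LIMIT - CHAT_RESERVED_TOKENS:
        # Bail out before spending an API call on a prompt that cannot fit.
        return "The prompt is too long. Please try again with a shorter prompt."
    # ...call the completion model here...
    return "ok"


if __name__ == "__main__":
    print(guarded_prompt("hello"))          # fits comfortably
    print(guarded_prompt("word " * 5000))   # rejected by the guard
```

Reserving a fixed budget for the reply keeps the request from being truncated server-side, which is why the check subtracts the reserved length from the hard limit rather than comparing against the limit directly.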
@@ -1,8 +1,9 @@
 import subprocess
-from typing import Tuple, Optional
+from typing import Tuple, Optional, Union

 from mindflow.core.git.diff import run_diff
 from mindflow.settings import Settings
+from mindflow.utils.errors import ModelError
 from mindflow.utils.prompt_builders import build_context_prompt
 from mindflow.utils.prompts import COMMIT_PROMPT_PREFIX

@@ -20,9 +21,11 @@ def run_commit(args: Tuple[str], message_overwrite: Optional[str] = None) -> str
     if diff_output == "No staged changes.":
         return diff_output

-    response: str = settings.mindflow_models.query.model(
+    response: Union[ModelError, str] = settings.mindflow_models.query.model(
         build_context_prompt(COMMIT_PROMPT_PREFIX, diff_output)
     )
+    if isinstance(response, ModelError):
+        return response.commit_message

     # add co-authorship to commit message
     response += "\n\nCo-authored-by: MindFlow <[email protected]>"
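This change switches the commit-message call to an error-as-value style: the model call returns either a generated string or a `ModelError`, and the caller branches with `isinstance` instead of catching an exception. Below is a minimal, self-contained sketch of that pattern under assumptions: the `ModelError` dataclass, its `commit_message` property, and `fake_model_call` are illustrative stand-ins, not mindflow's real types.

```python
# Hypothetical sketch of the Union[ModelError, str] handling shown above.
from dataclasses import dataclass
from typing import Union


@dataclass
class ModelError:
    """Stand-in for mindflow.utils.errors.ModelError (assumed shape)."""
    message: str

    @property
    def commit_message(self) -> str:
        # Fallback text surfaced to the user when generation fails.
        return f"Unable to generate commit message: {self.message}"


def fake_model_call(diff_output: str) -> Union[ModelError, str]:
    """Pretend model call: fail on an empty diff, otherwise return text."""
    if not diff_output:
        return ModelError("empty diff")
    return f"Summarize changes across {len(diff_output.splitlines())} diff lines"


def generate_commit_message(diff_output: str) -> str:
    response: Union[ModelError, str] = fake_model_call(diff_output)
    if isinstance(response, ModelError):
        # Short-circuit with the error's message instead of raising.
        return response.commit_message
    # Mirror the co-authorship line appended in run_commit.
    return response + "\n\nCo-authored-by: MindFlow <...>"


if __name__ == "__main__":
    print(generate_commit_message(""))                       # error path
    print(generate_commit_message("+ added\n- removed"))      # success path
```

Returning the error as a value keeps the CLI from crashing on a failed model call and lets the caller decide what to print, which matches the `isinstance(response, ModelError)` branch introduced in the diff.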