
Commit

style: fmt code
cvauclair committed Sep 4, 2024
1 parent 4db38e8 commit 5c4f278
Showing 7 changed files with 39 additions and 39 deletions.
8 changes: 4 additions & 4 deletions rig-core/src/agent.rs
@@ -33,19 +33,19 @@
//! let chat_response = agent.chat("Prompt", chat_history)
//! .await
//! .expect("Failed to chat with Agent");
//!
//! // Generate a prompt completion response from a simple prompt
//! let chat_response = agent.prompt("Prompt")
//! .await
//! .expect("Failed to prompt the Agent");
//!
//! // Generate a completion request builder from a prompt and chat history. The builder
//! // will contain the agent's configuration (i.e.: preamble, context documents, tools,
//! // model parameters, etc.), but these can be overwritten.
//! let completion_req_builder = agent.completion("Prompt", chat_history)
//! .await
//! .expect("Failed to create completion request builder");
//!
//! let response = completion_req_builder
//! .temperature(0.9) // Overwrite the agent's temperature
//! .send()
2 changes: 1 addition & 1 deletion rig-core/src/cli_chatbot.rs
@@ -2,7 +2,7 @@ use std::io::{self, Write};

use crate::completion::{Chat, Message, PromptError};

/// Utility function to create a simple REPL CLI chatbot from a type that implements the
/// `Chat` trait.
pub async fn cli_chatbot(chatbot: impl Chat) -> Result<(), PromptError> {
let stdin = io::stdin();
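For reference, a minimal sketch of driving this utility, based only on the signature shown above: any value implementing `Chat` (per the agent.rs and model.rs docs, agents and models qualify) can be handed straight to `cli_chatbot`. The `rig::cli_chatbot` module path is assumed from the file location.

```rust
use rig::{
    cli_chatbot::cli_chatbot,
    completion::{Chat, PromptError},
};

// Launch the REPL loop with any `Chat` implementor; construction of the agent or
// model is elided here (see the agent.rs and model.rs examples in this commit).
async fn run_repl(chatbot: impl Chat) -> Result<(), PromptError> {
    cli_chatbot(chatbot).await
}
```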
36 changes: 18 additions & 18 deletions rig-core/src/completion.rs
@@ -133,12 +133,12 @@ pub struct ToolDefinition {
/// Trait defining a high-level LLM simple prompt interface (i.e.: prompt in, response out).
pub trait Prompt: Send + Sync {
/// Send a simple prompt to the underlying completion model.
///
/// If the completion model's response is a message, then it is returned as a string.
///
/// If the completion model's response is a tool call, then the tool is called and
/// the result is returned as a string.
///
/// If the tool does not exist, or the tool call fails, then an error is returned.
fn prompt(
&self,
@@ -149,12 +149,12 @@ pub trait Prompt: Send + Sync {
/// Trait defining a high-level LLM chat interface (i.e.: prompt and chat history in, response out).
pub trait Chat: Send + Sync {
/// Send a prompt with optional chat history to the underlying completion model.
///
/// If the completion model's response is a message, then it is returned as a string.
///
/// If the completion model's response is a tool call, then the tool is called and the result
/// is returned as a string.
///
/// If the tool does not exist, or the tool call fails, then an error is returned.
fn chat(
&self,
@@ -242,38 +242,38 @@ pub struct CompletionRequest {
}

/// Builder struct for constructing a completion request.
///
/// Example usage:
/// ```rust
/// use rig::{
/// providers::openai::{Client, self},
/// completion::CompletionRequestBuilder,
/// };
///
/// let openai = Client::new("your-openai-api-key");
/// let model = openai.completion_model(openai::GPT_4O).build();
///
/// // Create the completion request and execute it separately
/// let request = CompletionRequestBuilder::new(model, "Who are you?".to_string())
/// .preamble("You are Marvin from the Hitchhiker's Guide to the Galaxy.".to_string())
/// .temperature(0.5)
/// .build();
///
/// let response = model.completion(request)
/// .await
/// .expect("Failed to get completion response");
/// ```
///
/// Alternatively, you can execute the completion request directly from the builder:
/// ```rust
/// use rig::{
/// providers::openai::{Client, self},
/// completion::CompletionRequestBuilder,
/// };
///
/// let openai = Client::new("your-openai-api-key");
/// let model = openai.completion_model(openai::GPT_4O).build();
///
/// // Create the completion request and execute it directly
/// let response = CompletionRequestBuilder::new(model, "Who are you?".to_string())
/// .preamble("You are Marvin from the Hitchhiker's Guide to the Galaxy.".to_string())
@@ -282,7 +282,7 @@ pub struct CompletionRequest {
/// .await
/// .expect("Failed to get completion response");
/// ```
///
/// Note: It is usually unnecessary to create a completion request builder directly.
/// Instead, use the [CompletionModel::completion_request] method.
pub struct CompletionRequestBuilder<M: CompletionModel> {
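A short sketch tying the two traits above together, assuming (as the docs state) that both `prompt` and `chat` resolve to a `Result<String, PromptError>` and that the agents/models from agent.rs and model.rs implement them; the concrete type is deliberately left generic here.

```rust
use rig::completion::{Chat, Message, Prompt, PromptError};

// Works with anything implementing the traits documented above.
async fn demo(llm: impl Prompt + Chat) -> Result<(), PromptError> {
    // Single-turn prompt: a plain message comes back as a String; a tool call is
    // executed by the implementor and its result returned as a String.
    let answer = llm.prompt("Who are you?").await?;

    // Same idea with an explicit (here empty) chat history.
    let history: Vec<Message> = vec![];
    let follow_up = llm.chat("And what can you do?", history).await?;

    println!("{answer}\n{follow_up}");
    Ok(())
}
```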
8 changes: 4 additions & 4 deletions rig-core/src/embeddings.rs
@@ -134,10 +134,10 @@ impl Embedding {
///
/// The struct is designed to model any kind of documents that can be serialized to JSON
/// (including a simple string).
///
/// Moreover, it can hold multiple embeddings for the same document, thus allowing a
/// large document to be retrieved from a query that matches multiple smaller and
/// distinct text documents. For example, if the document is a textbook, a summary of
/// each chapter could serve as the book's embeddings.
#[derive(Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct DocumentEmbeddings {
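To make the chapter-summary idea above concrete, here is an illustrative construction. The field names (`id`, `document`, `embeddings`) and the `Embedding { document, vec }` layout are assumptions for the sketch; they are not shown in this diff.

```rust
use rig::embeddings::{DocumentEmbeddings, Embedding};
use serde_json::json;

fn textbook_embeddings() -> DocumentEmbeddings {
    DocumentEmbeddings {
        // Assumed fields, for illustration only.
        id: "intro-to-rust".to_string(),
        // Any JSON-serializable payload can stand in for the document itself.
        document: json!({ "title": "Intro to Rust", "chapters": 2 }),
        // One embedding per chapter summary, so a query matching a single chapter
        // can retrieve the whole book.
        embeddings: vec![
            Embedding { document: "Chapter 1: ownership".to_string(), vec: vec![0.1, 0.2] },
            Embedding { document: "Chapter 2: borrowing".to_string(), vec: vec![0.3, 0.4] },
        ],
    }
}
```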
4 changes: 2 additions & 2 deletions rig-core/src/extractor.rs
@@ -1,6 +1,6 @@
//! This module provides high-level abstractions for extracting structured data from text using LLMs.
//!
//! Note: The target structure must implement the `serde::Deserialize`, `serde::Serialize`,
//! and `schemars::JsonSchema` traits. Those can be easily derived using the `derive` macro.
//!
//! # Example
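Since the example that follows is truncated in this view, here is a minimal target structure satisfying the requirement stated above; the three derives are exactly the traits the note calls out. How the extractor itself is constructed is not shown in this diff, so that part is omitted.

```rust
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

// Deserialize + Serialize + JsonSchema, all derived, as the module docs require.
#[derive(Debug, Deserialize, Serialize, JsonSchema)]
struct Person {
    name: String,
    age: u8,
}
```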
14 changes: 7 additions & 7 deletions rig-core/src/model.rs
@@ -1,12 +1,12 @@
//! This module contains the implementation of the [Model] struct and its builder.
//!
//! The [Model] type is the simplest building block for creating an LLM powered application
//! and can be used to prompt completions from a completion model. This struct acts as a
//! thin wrapper around a completion model (i.e.: a struct implementing the
//! [CompletionModel](crate::completion::CompletionModel) trait).
//!
//! The [ModelBuilder] struct provides a builder interface for creating [Model] instances
//! and allows the user to set the underlying model and other common parameters such as
//! the temperature of the model.
//!
//! # Example
@@ -28,19 +28,19 @@
//! let chat_response = agent.chat("Prompt", chat_history)
//! .await
//! .expect("Failed to chat with model");
//!
//! // Generate a prompt completion response from a simple prompt
//! let chat_response = agent.prompt("Prompt")
//! .await
//! .expect("Failed to prompt the model");
//!
//! // Generate a completion request builder from a prompt and chat history. The builder
//! // will contain the model's configuration (i.e.: model parameters, etc.), but these
//! // can be overwritten.
//! let completion_req_builder = agent.completion("Prompt", chat_history)
//! .await
//! .expect("Failed to create completion request builder");
//!
//! let response = completion_req_builder
//! .temperature(0.9) // Overwrite the model's temperature
//! .send()
6 changes: 3 additions & 3 deletions rig-mongodb/src/lib.rs
@@ -83,11 +83,11 @@ impl MongoDbVectorStore {
}

/// Create a new `MongoDbVectorIndex` from an existing `MongoDbVectorStore`.
///
/// The index (of type "vector") must already exist for the MongoDB collection.
/// See the MongoDB [documentation](https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-type/) for more information on creating indexes.
///
/// An additional filter can be provided to further restrict the documents that are
/// considered in the search.
pub fn index<M: EmbeddingModel>(
&self,
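A hedged sketch of calling `index` based on the docs above: the parameter names and order (embedding model, index name, filter) are assumed, since the full signature is truncated in this diff, and `embedding_model`/`TEXT_EMBEDDING_ADA_002` are assumed provider helpers rather than items shown here.

```rust
use mongodb::bson::doc;
use rig::providers::openai;
use rig_mongodb::MongoDbVectorStore;

fn build_index(store: &MongoDbVectorStore, openai: &openai::Client) {
    // Assumed helper for obtaining an EmbeddingModel from the OpenAI client.
    let model = openai.embedding_model(openai::TEXT_EMBEDDING_ADA_002);

    // The "vector_index" index of type "vector" must already exist on the collection
    // (see the linked MongoDB docs); the filter restricts the searchable documents.
    let _index = store.index(model, "vector_index", doc! { "category": "fiction" });
}
```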
