diff --git a/Cargo.lock b/Cargo.lock
index f07bff78..8f9f22d0 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1211,7 +1211,7 @@ dependencies = [
 
 [[package]]
 name = "rig-core"
-version = "0.1.0"
+version = "0.0.0"
 dependencies = [
  "anyhow",
  "futures",
diff --git a/rig-core/Cargo.toml b/rig-core/Cargo.toml
index 12ecfbaa..37dc681a 100644
--- a/rig-core/Cargo.toml
+++ b/rig-core/Cargo.toml
@@ -4,6 +4,7 @@ version = "0.0.0"
 edition = "2021"
 
 [lib]
+name="rig"
 path="src/lib.rs"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
diff --git a/rig-core/examples/agent.rs b/rig-core/examples/agent.rs
index 218b424d..9ff15242 100644
--- a/rig-core/examples/agent.rs
+++ b/rig-core/examples/agent.rs
@@ -1,6 +1,6 @@
 use std::env;
 
-use llm::{completion::Prompt, providers};
+use rig::{completion::Prompt, providers};
 
 #[tokio::main]
 async fn main() -> Result<(), anyhow::Error> {
diff --git a/rig-core/examples/agent_with_context.rs b/rig-core/examples/agent_with_context.rs
index 0050ac0e..840ef317 100644
--- a/rig-core/examples/agent_with_context.rs
+++ b/rig-core/examples/agent_with_context.rs
@@ -1,6 +1,6 @@
 use std::env;
 
-use llm::{agent::AgentBuilder, completion::Prompt, providers::cohere};
+use rig::{agent::AgentBuilder, completion::Prompt, providers::cohere};
 
 #[tokio::main]
 async fn main() -> Result<(), anyhow::Error> {
diff --git a/rig-core/examples/agent_with_tools.rs b/rig-core/examples/agent_with_tools.rs
index f5838081..782dc98b 100644
--- a/rig-core/examples/agent_with_tools.rs
+++ b/rig-core/examples/agent_with_tools.rs
@@ -1,5 +1,5 @@
 use anyhow::Result;
-use llm::{
+use rig::{
     completion::{Prompt, ToolDefinition},
     providers,
     tool::Tool,
diff --git a/rig-core/examples/calculator_chatbot.rs b/rig-core/examples/calculator_chatbot.rs
index f67cc628..78f2ed77 100644
--- a/rig-core/examples/calculator_chatbot.rs
+++ b/rig-core/examples/calculator_chatbot.rs
@@ -1,5 +1,5 @@
 use anyhow::Result;
-use llm::{
+use rig::{
     cli_chatbot::cli_chatbot,
     completion::ToolDefinition,
     embeddings::EmbeddingsBuilder,
diff --git a/rig-core/examples/cohere_connector.rs b/rig-core/examples/cohere_connector.rs
index 6e172ff7..c9c7e25c 100644
--- a/rig-core/examples/cohere_connector.rs
+++ b/rig-core/examples/cohere_connector.rs
@@ -1,6 +1,6 @@
 use std::env;
 
-use llm::{
+use rig::{
     completion::{Completion, Prompt},
     providers::cohere::Client as CohereClient,
 };
diff --git a/rig-core/examples/debate.rs b/rig-core/examples/debate.rs
index 33544813..65d99ab2 100644
--- a/rig-core/examples/debate.rs
+++ b/rig-core/examples/debate.rs
@@ -1,7 +1,7 @@
 use std::env;
 
 use anyhow::Result;
-use llm::{
+use rig::{
     agent::Agent,
     completion::{Message, Prompt},
     providers::{cohere, openai},
diff --git a/rig-core/examples/extractor.rs b/rig-core/examples/extractor.rs
index de83a7d4..b9d7b342 100644
--- a/rig-core/examples/extractor.rs
+++ b/rig-core/examples/extractor.rs
@@ -1,7 +1,6 @@
 use std::env;
 
-// use llm::client::{Client, OpenAIClient};
-use llm::providers::{cohere::Client as CohereClient, openai::Client as OpenAIClient};
+use rig::providers::{cohere::Client as CohereClient, openai::Client as OpenAIClient};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 
diff --git a/rig-core/examples/multi_agent.rs b/rig-core/examples/multi_agent.rs
index 09e1ea2f..f23fdbac 100644
--- a/rig-core/examples/multi_agent.rs
+++ b/rig-core/examples/multi_agent.rs
@@ -1,6 +1,6 @@
 use std::env;
 
-use llm::{
+use rig::{
     agent::{Agent, AgentBuilder},
     cli_chatbot::cli_chatbot,
     completion::{CompletionModel, Message, Prompt},
diff --git a/rig-core/examples/rag.rs b/rig-core/examples/rag.rs
index db78a15b..e66e6058 100644
--- a/rig-core/examples/rag.rs
+++ b/rig-core/examples/rag.rs
@@ -1,6 +1,6 @@
 use std::env;
 
-use llm::{
+use rig::{
     completion::Prompt,
     embeddings::EmbeddingsBuilder,
     providers::openai::Client,
diff --git a/rig-core/examples/rag_dynamic_tools.rs b/rig-core/examples/rag_dynamic_tools.rs
index 41c9ff24..c6998582 100644
--- a/rig-core/examples/rag_dynamic_tools.rs
+++ b/rig-core/examples/rag_dynamic_tools.rs
@@ -1,5 +1,5 @@
 use anyhow::Result;
-use llm::{
+use rig::{
     completion::{Prompt, ToolDefinition},
     embeddings::EmbeddingsBuilder,
     providers::openai::Client,
diff --git a/rig-core/examples/scratch.rs b/rig-core/examples/scratch.rs
deleted file mode 100644
index 172fd337..00000000
--- a/rig-core/examples/scratch.rs
+++ /dev/null
@@ -1,34 +0,0 @@
-use std::collections::HashMap;
-
-use serde::{Deserialize, Serialize};
-
-#[derive(Clone, Debug, Deserialize, Serialize)]
-pub struct Document {
-    text: String,
-    #[serde(flatten)]
-    additional_prop: HashMap<String, String>,
-}
-
-#[tokio::main]
-async fn main() -> Result<(), anyhow::Error> {
-    let document = Document {
-        text: "Hello, world!".to_string(),
-        additional_prop: HashMap::from([
-            ("key1".to_string(), "value1".to_string()),
-            ("key2".to_string(), "value2".to_string()),
-        ]),
-    };
-
-    println!("{}", serde_json::to_string_pretty(&document)?);
-
-    let document_json = r#"{
-        "text": "Hello, world!",
-        "key1": "value1",
-        "key2": "value2"
-    }"#;
-
-    let document: Document = serde_json::from_str(document_json)?;
-    println!("{:?}", document);
-
-    Ok(())
-}
diff --git a/rig-core/examples/simple_model.rs b/rig-core/examples/simple_model.rs
index 29277b9b..dce2cd45 100644
--- a/rig-core/examples/simple_model.rs
+++ b/rig-core/examples/simple_model.rs
@@ -1,6 +1,6 @@
 use std::env;
 
-use llm::{
+use rig::{
     completion::Prompt,
     providers::{cohere, openai},
 };
diff --git a/rig-core/examples/vector_search.rs b/rig-core/examples/vector_search.rs
index d34e9433..f9ac6cf0 100644
--- a/rig-core/examples/vector_search.rs
+++ b/rig-core/examples/vector_search.rs
@@ -1,6 +1,6 @@
 use std::env;
 
-use llm::{
+use rig::{
     embeddings::EmbeddingsBuilder,
     providers::openai::Client,
     vector_store::{in_memory_store::InMemoryVectorStore, VectorStore, VectorStoreIndex},
diff --git a/rig-core/examples/vector_search_cohere.rs b/rig-core/examples/vector_search_cohere.rs
index f29348a3..7e9226af 100644
--- a/rig-core/examples/vector_search_cohere.rs
+++ b/rig-core/examples/vector_search_cohere.rs
@@ -1,6 +1,6 @@
 use std::env;
 
-use llm::{
+use rig::{
     embeddings::EmbeddingsBuilder,
     providers::cohere::Client,
     vector_store::{in_memory_store::InMemoryVectorStore, VectorStore, VectorStoreIndex},
diff --git a/rig-core/examples/vector_search_mongodb.rs b/rig-core/examples/vector_search_mongodb.rs
deleted file mode 100644
index 6c23b656..00000000
--- a/rig-core/examples/vector_search_mongodb.rs
+++ /dev/null
@@ -1,67 +0,0 @@
-use aws_config::{meta::region::RegionProviderChain, BehaviorVersion};
-use mongodb::{bson::doc, options::ClientOptions, Client as MongoClient, Collection};
-use mongodb_utils::mongo_connection_string;
-use std::env;
-
-use llm::{
-    embeddings::{DocumentEmbeddings, EmbeddingsBuilder},
-    providers::openai::Client,
-    vector_store::{mongodb_store::MongoDbVectorStore, VectorStore, VectorStoreIndex},
-};
-
-#[tokio::main]
-async fn main() -> Result<(), anyhow::Error> {
-    // Create OpenAI client
-    let openai_api_key = env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY not set");
-    let openai_client = Client::new(&openai_api_key);
-
-    let aws_config = aws_config::defaults(BehaviorVersion::latest())
-        .region(
-            RegionProviderChain::default_provider()
-                .or_else("us-east-1")
-                .or_default_provider(),
-        )
-        .load()
-        .await;
-
-    // Init MongoDB client
-    let options = ClientOptions::parse(mongo_connection_string(&aws_config).await)
-        .await
-        .expect("MongoDB connection string should be valid");
-
-    let mongodb_client =
-        MongoClient::with_options(options).expect("MongoDB client options should be valid");
-
-    let model = openai_client.embedding_model("text-embedding-ada-002");
-
-    let collection: Collection<DocumentEmbeddings> = mongodb_client
-        .database("knowledgebase")
-        .collection("context");
-
-    let mut vector_store = MongoDbVectorStore::new(collection);
-
-    let embeddings = EmbeddingsBuilder::new(model.clone())
-        .simple_document("doc0", "Definition of a *flurbo*: A flurbo is a green alien that lives on cold planets")
-        .simple_document("doc1", "Definition of a *glarb-glarb*: A glarb-glarb is a ancient tool used by the ancestors of the inhabitants of planet Jiro to farm the land.")
-        .simple_document("doc2", "Definition of a *linglingdong*: A term used by inhabitants of the far side of the moon to describe humans.")
-        .build()
-        .await?;
-
-    match vector_store.add_documents(embeddings).await {
-        Ok(_) => println!("Documents added successfully"),
-        Err(e) => println!("Error adding documents: {:?}", e),
-    }
-
-    let index = vector_store.index(model, "context_vector_index", doc! {})?;
-
-    let results = index
-        .top_n_from_query("What is a linglingdong?", 1)
-        .await?
-        .into_iter()
-        .map(|(score, doc)| (score, doc.id, doc.document))
-        .collect::<Vec<_>>();
-
-    println!("Results: {:?}", results);
-
-    Ok(())
-}