Skip to content

Commit

Permalink
fix(galadriel): missed fixes from messages pr (0xPlaygrounds#270)
Browse files Browse the repository at this point in the history
  • Loading branch information
0xMochan authored Feb 5, 2025
1 parent 4dc5eee commit 83be500
Show file tree
Hide file tree
Showing 2 changed files with 205 additions and 17 deletions.
10 changes: 8 additions & 2 deletions rig-core/src/one_or_many.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,10 @@ impl<T: Clone> OneOrMany<T> {
}

/// Create a `OneOrMany` object with a vector of items of any type.
pub fn many(items: Vec<T>) -> Result<Self, EmptyListError> {
pub fn many<I>(items: I) -> Result<Self, EmptyListError>
where
I: IntoIterator<Item = T>,
{
let mut iter = items.into_iter();
Ok(OneOrMany {
first: match iter.next() {
Expand All @@ -82,7 +85,10 @@ impl<T: Clone> OneOrMany<T> {
}

/// Merge a list of OneOrMany items into a single OneOrMany item.
pub fn merge(one_or_many_items: Vec<OneOrMany<T>>) -> Result<Self, EmptyListError> {
pub fn merge<I>(one_or_many_items: I) -> Result<Self, EmptyListError>
where
I: IntoIterator<Item = OneOrMany<T>>,
{
let items = one_or_many_items
.into_iter()
.flat_map(|one_or_many| one_or_many.into_iter())
Expand Down
212 changes: 197 additions & 15 deletions rig-core/src/providers/galadriel.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,14 @@ use crate::{
agent::AgentBuilder,
completion::{self, CompletionError, CompletionRequest},
extractor::ExtractorBuilder,
json_utils,
providers::openai,
json_utils, message, OneOrMany,
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::json;

use super::openai;

// ================================================================
// Main Galadriel Client
// ================================================================
Expand Down Expand Up @@ -203,6 +204,186 @@ pub const GPT_35_TURBO_1106: &str = "gpt-3.5-turbo-1106";
/// `gpt-3.5-turbo-instruct` completion model
pub const GPT_35_TURBO_INSTRUCT: &str = "gpt-3.5-turbo-instruct";

/// Raw response payload from the Galadriel chat-completions endpoint.
/// The shape mirrors the OpenAI chat-completion response (Galadriel serves
/// OpenAI-compatible models — see the `GPT_*` model constants above).
#[derive(Debug, Deserialize)]
pub struct CompletionResponse {
    // Unique identifier for this completion.
    pub id: String,
    // Object type tag returned by the API (e.g. "chat.completion").
    pub object: String,
    // Unix timestamp (seconds) of when the completion was created.
    pub created: u64,
    // Model that produced the completion.
    pub model: String,
    // Backend configuration fingerprint; not always present.
    pub system_fingerprint: Option<String>,
    // Candidate completions; only the first is consumed downstream.
    pub choices: Vec<Choice>,
    // Token usage accounting; absent on some responses.
    pub usage: Option<Usage>,
}

impl From<ApiErrorResponse> for CompletionError {
fn from(err: ApiErrorResponse) -> Self {
CompletionError::ProviderError(err.message)
}
}

impl TryFrom<CompletionResponse> for completion::CompletionResponse<CompletionResponse> {
    type Error = CompletionError;

    /// Converts a raw Galadriel response into rig's generic
    /// [`completion::CompletionResponse`], gathering the first choice's text
    /// content and tool calls into a non-empty `OneOrMany`.
    ///
    /// # Errors
    ///
    /// Returns [`CompletionError::ResponseError`] when the response has no
    /// choices, or when the first choice carries neither text nor tool calls.
    fn try_from(response: CompletionResponse) -> Result<Self, Self::Error> {
        // Only the first choice is consumed; the API's primary completion
        // lives there.
        let Choice { message, .. } = response.choices.first().ok_or_else(|| {
            CompletionError::ResponseError("Response contained no choices".to_owned())
        })?;

        // Start with the optional text content, if any.
        let mut content = message
            .content
            .as_ref()
            .map(|c| vec![completion::AssistantContent::text(c)])
            .unwrap_or_default();

        // Append each tool call. Fix: pass the tool call's `id` as the id
        // argument — previously the function name was (incorrectly) passed
        // for both the id and the name, breaking tool-result correlation.
        content.extend(message.tool_calls.iter().map(|call| {
            completion::AssistantContent::tool_call(
                &call.id,
                &call.function.name,
                call.function.arguments.clone(),
            )
        }));

        // A completion must contain at least one piece of content.
        let choice = OneOrMany::many(content).map_err(|_| {
            CompletionError::ResponseError(
                "Response contained no message or tool call (empty)".to_owned(),
            )
        })?;

        Ok(completion::CompletionResponse {
            choice,
            raw_response: response,
        })
    }
}

/// A single candidate completion inside a [`CompletionResponse`].
#[derive(Debug, Deserialize)]
pub struct Choice {
    // Position of this choice in the response's `choices` array.
    pub index: usize,
    // The assistant message produced for this choice.
    pub message: Message,
    // Log-probability details; kept as raw JSON since it is not consumed here.
    pub logprobs: Option<serde_json::Value>,
    // Why generation stopped (e.g. "stop", "tool_calls", "length").
    pub finish_reason: String,
}

/// Wire-format chat message exchanged with the Galadriel API
/// (used both when building requests and when parsing responses).
#[derive(Debug, Serialize, Deserialize)]
pub struct Message {
    // One of "system", "user", or "assistant" (see the TryFrom impls below).
    pub role: String,
    // Text body; `None` when the message only carries tool calls.
    pub content: Option<String>,
    // Tool calls requested by the assistant. `null_or_vec` maps a JSON `null`
    // (and a missing field, via `default`) to an empty vec.
    #[serde(default, deserialize_with = "json_utils::null_or_vec")]
    pub tool_calls: Vec<openai::ToolCall>,
}

impl TryFrom<Message> for message::Message {
type Error = message::MessageError;

fn try_from(message: Message) -> Result<Self, Self::Error> {
let tool_calls: Vec<message::ToolCall> = message
.tool_calls
.into_iter()
.map(|tool_call| tool_call.into())
.collect();

match message.role.as_str() {
"user" => Ok(Self::User {
content: OneOrMany::one(
message
.content
.map(|content| message::UserContent::text(&content))
.ok_or_else(|| {
message::MessageError::ConversionError("Empty user message".to_string())
})?,
),
}),
"assistant" => Ok(Self::Assistant {
content: OneOrMany::many(
tool_calls
.into_iter()
.map(message::AssistantContent::ToolCall)
.chain(
message
.content
.map(|content| message::AssistantContent::text(&content))
.into_iter(),
),
)
.map_err(|_| {
message::MessageError::ConversionError("Empty assistant message".to_string())
})?,
}),
_ => Err(message::MessageError::ConversionError(format!(
"Unknown role: {}",
message.role
))),
}
}
}

impl TryFrom<message::Message> for Message {
type Error = message::MessageError;

fn try_from(message: message::Message) -> Result<Self, Self::Error> {
match message {
message::Message::User { content } => Ok(Self {
role: "user".to_string(),
content: content.iter().find_map(|c| match c {
message::UserContent::Text(text) => Some(text.text.clone()),
_ => None,
}),
tool_calls: vec![],
}),
message::Message::Assistant { content } => {
let mut text_content: Option<String> = None;
let mut tool_calls = vec![];

for c in content.iter() {
match c {
message::AssistantContent::Text(text) => {
text_content = Some(
text_content
.map(|mut existing| {
existing.push('\n');
existing.push_str(&text.text);
existing
})
.unwrap_or_else(|| text.text.clone()),
);
}
message::AssistantContent::ToolCall(tool_call) => {
tool_calls.push(tool_call.clone().into());
}
}
}

Ok(Self {
role: "assistant".to_string(),
content: text_content,
tool_calls,
})
}
}
}
}

/// Wire-format tool definition: wraps a rig tool definition in the
/// OpenAI-style `{"type": "function", "function": {...}}` envelope.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ToolDefinition {
    // Always "function" when built via the From impl below.
    pub r#type: String,
    // The underlying tool definition (name, description, parameters).
    pub function: completion::ToolDefinition,
}

impl From<completion::ToolDefinition> for ToolDefinition {
fn from(tool: completion::ToolDefinition) -> Self {
Self {
r#type: "function".into(),
function: tool,
}
}
}

/// A function invocation as returned inside a tool call.
#[derive(Debug, Deserialize)]
pub struct Function {
    // Name of the function the model wants to invoke.
    pub name: String,
    // Raw JSON-encoded argument string, as produced by the model.
    pub arguments: String,
}

#[derive(Clone)]
pub struct CompletionModel {
client: Client,
Expand All @@ -220,35 +401,36 @@ impl CompletionModel {
}

impl completion::CompletionModel for CompletionModel {
type Response = openai::CompletionResponse;
type Response = CompletionResponse;

#[cfg_attr(feature = "worker", worker::send)]
async fn completion(
&self,
completion_request: CompletionRequest,
) -> Result<completion::CompletionResponse<openai::CompletionResponse>, CompletionError> {
) -> Result<completion::CompletionResponse<CompletionResponse>, CompletionError> {
// Add preamble to chat history (if available)
let mut full_history: Vec<openai::Message> = match &completion_request.preamble {
Some(preamble) => vec![openai::Message::system(preamble)],
let mut full_history: Vec<Message> = match &completion_request.preamble {
Some(preamble) => vec![Message {
role: "system".to_string(),
content: Some(preamble.to_string()),
tool_calls: vec![],
}],
None => vec![],
};

// Convert prompt to user message
let prompt: Vec<openai::Message> = completion_request.prompt_with_context().try_into()?;
let prompt: Message = completion_request.prompt_with_context().try_into()?;

// Convert existing chat history
let chat_history: Vec<openai::Message> = completion_request
let chat_history: Vec<Message> = completion_request
.chat_history
.into_iter()
.map(|message| message.try_into())
.collect::<Result<Vec<Vec<openai::Message>>, _>>()?
.into_iter()
.flatten()
.collect();
.collect::<Result<Vec<Message>, _>>()?;

// Combine all messages into a single history
full_history.extend(chat_history);
full_history.extend(prompt);
full_history.push(prompt);

let request = if completion_request.tools.is_empty() {
json!({
Expand All @@ -261,7 +443,7 @@ impl completion::CompletionModel for CompletionModel {
"model": self.model,
"messages": full_history,
"temperature": completion_request.temperature,
"tools": completion_request.tools.into_iter().map(openai::ToolDefinition::from).collect::<Vec<_>>(),
"tools": completion_request.tools.into_iter().map(ToolDefinition::from).collect::<Vec<_>>(),
"tool_choice": "auto",
})
};
Expand All @@ -283,7 +465,7 @@ impl completion::CompletionModel for CompletionModel {
let t = response.text().await?;
tracing::debug!(target: "rig", "Galadriel completion error: {}", t);

match serde_json::from_str::<ApiResponse<openai::CompletionResponse>>(&t)? {
match serde_json::from_str::<ApiResponse<CompletionResponse>>(&t)? {
ApiResponse::Ok(response) => {
tracing::info!(target: "rig",
"Galadriel completion token usage: {:?}",
Expand Down

0 comments on commit 83be500

Please sign in to comment.