Skip to content

Commit

Permalink
feat(gemini): move system prompt to correct request field
Browse files Browse the repository at this point in the history
  • Loading branch information
mateobelanger committed Oct 15, 2024
1 parent e5e763e commit 1dca1da
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 25 deletions.
21 changes: 10 additions & 11 deletions rig-core/examples/gemini_agent.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,23 +12,22 @@ async fn main() -> Result<(), anyhow::Error> {
// Create agent with a single context prompt
let agent = client
.agent(gemini::completion::GEMINI_1_5_PRO)
.preamble("Be precise and concise.")
.preamble("Be creative and concise. Answer directly and clearly.")
.temperature(0.5)
.max_tokens(8192)
.additional_params(
serde_json::to_value(GenerationConfig {
top_k: Some(1),
top_p: Some(0.95),
candidate_count: Some(1),
..Default::default()
})
.unwrap(),
) // Unwrap the Result to get the Value
.additional_params(serde_json::to_value(GenerationConfig {
top_k: Some(1),
top_p: Some(0.95),
candidate_count: Some(1),
..Default::default()
})?) // `?` propagates the serde_json serialization error instead of panicking
.build();

tracing::info!("Prompting the agent...");

// Prompt the agent and print the response
let response = agent
.prompt("How much wood would a woodchuck chuck if a woodchuck could chuck wood?")
.prompt("How much wood would a woodchuck chuck if a woodchuck could chuck wood? Infer an answer.")
.await?;
println!("{}", response);

Expand Down
24 changes: 10 additions & 14 deletions rig-core/src/providers/gemini/completion.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@ pub struct GenerateContentResponse {
pub usage_metadata: Option<UsageMetadata>,
}

// Define the struct for a Candidate
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ContentCandidate {
Expand Down Expand Up @@ -341,7 +340,7 @@ pub struct GenerateContentRequest {
pub safety_settings: Option<Vec<SafetySetting>>,
/// Optional. Developer set system instruction(s). Currently, text only.
/// https://ai.google.dev/gemini-api/docs/system-instructions?lang=rest
pub system_instruction: Option<String>,
pub system_instruction: Option<Content>,
// cachedContent: Optional<String>
}

Expand Down Expand Up @@ -414,17 +413,7 @@ impl completion::CompletionModel for CompletionModel {
&self,
mut completion_request: CompletionRequest,
) -> Result<completion::CompletionResponse<GenerateContentResponse>, CompletionError> {
// QUESTION: Why do Anthropic/openAi implementation differ here? OpenAI adds the preamble but Anthropic does not.

let mut full_history = if let Some(preamble) = &completion_request.preamble {
vec![completion::Message {
role: "system".into(),
content: preamble.clone(),
}]
} else {
vec![]
};

let mut full_history = Vec::new();
full_history.append(&mut completion_request.chat_history);

let prompt_with_context = completion_request.prompt_with_context();
Expand Down Expand Up @@ -471,9 +460,16 @@ impl completion::CompletionModel for CompletionModel {
.collect(),
),
tool_config: None,
system_instruction: None,
system_instruction: Some(Content {
parts: vec![Part {
text: "system".to_string(),
}],
role: Some("system".to_string()),
}),
};

tracing::info!("Request: {:?}", request);

let response = self
.client
.post(&format!("/v1beta/models/{}:generateContent", self.model))
Expand Down

0 comments on commit 1dca1da

Please sign in to comment.