
Commit

docs and gemini key in header
anish-palakurthi committed Jun 13, 2024
1 parent 3dbc0cd commit 973f229
Showing 12 changed files with 96 additions and 58 deletions.
5 changes: 2 additions & 3 deletions docs/docs/syntax/client/client.mdx
@@ -153,18 +153,17 @@ client<llm> MyClient {
Provider names:
- `google-ai`

-Accepts any options as defined by the [Gemini SDK](https://ai.google.dev/gemini-api/docs/api-overview).
+Accepts any options as defined by the [Gemini SDK](https://ai.google.dev/gemini-api/docs/get-started/tutorial?lang=rest).

```rust
client<llm> MyGoogleClient {
provider google-ai
options{
model "gemini-1.5-pro-001"
api_key env.GOOGLE_API_KEY
}
}
```
This is not the Vertex AI Gemini API, but the Google Generative AI Gemini API, which supports the same models but at a different endpoint.


### Fallback
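The endpoint note above is easiest to see side by side. A small illustrative sketch (TypeScript; the Vertex AI URL shape and the placeholder project value are assumptions for contrast, not part of this commit):

```typescript
// Illustrative only: the `google-ai` provider targets the Google Generative AI
// (Generative Language) API, which authenticates with an API key, e.g. via the
// x-goog-api-key header used elsewhere in this commit.
const generativeAiUrl =
  'https://generativelanguage.googleapis.com/v1/models/gemini-1.5-pro-001:generateContent'

// Vertex AI serves the same Gemini models from a different, project-scoped
// endpoint with OAuth credentials (approximate shape, shown only for contrast).
const vertexAiUrl =
  'https://us-central1-aiplatform.googleapis.com/v1/projects/{project}/locations/us-central1/publishers/google/models/gemini-1.5-pro-001:generateContent'
```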
@@ -267,19 +267,15 @@ impl RequestBuilder for GoogleClient {
prompt: either::Either<&String, &Vec<RenderedChatMessage>>,
stream: bool,
) -> reqwest::RequestBuilder {
-let mut should_stream = "generateContent?";
+let mut should_stream = "generateContent";
if stream {
-should_stream = "streamGenerateContent?alt=sse&";
+should_stream = "streamGenerateContent?alt=sse";
}

let baml_original_url = format!(
-"https://generativelanguage.googleapis.com/v1/models/{}:{}key={}",
+"https://generativelanguage.googleapis.com/v1/models/{}:{}",
self.properties.model_id.as_ref().unwrap_or(&"".to_string()),
-should_stream,
-self.properties
-    .api_key
-    .clone()
-    .unwrap_or_else(|| "".to_string())
+should_stream
);

let mut req = self.client.post(
@@ -295,6 +291,13 @@ impl RequestBuilder for GoogleClient {
}

req = req.header("baml-original-url", baml_original_url);
+req = req.header(
+    "x-goog-api-key",
+    self.properties
+        .api_key
+        .clone()
+        .unwrap_or_else(|| "".to_string()),
+);

let mut body = json!(self.properties.properties);
let body_obj = body.as_object_mut().unwrap();
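The net effect of the change above is that the API key moves out of the request URL's query string and into the `x-goog-api-key` header. A minimal TypeScript sketch of the resulting request shape (the model name, env var handling, and request body follow the public Generative Language API and are assumptions, not code from this commit):

```typescript
// Sketch of the request the client now builds: key in a header, not in the URL.
async function callGemini(prompt: string, stream = false): Promise<unknown> {
  const model = 'gemini-1.5-pro-001'
  const method = stream ? 'streamGenerateContent?alt=sse' : 'generateContent'
  const url = `https://generativelanguage.googleapis.com/v1/models/${model}:${method}`

  const res = await fetch(url, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      // The key no longer appears in the URL; it travels as a header instead.
      'x-goog-api-key': process.env.GOOGLE_API_KEY ?? '',
    },
    body: JSON.stringify({
      contents: [{ role: 'user', parts: [{ text: prompt }] }],
    }),
  })
  return res.json()
}
```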
2 changes: 1 addition & 1 deletion typescript/fiddle-frontend/package.json
@@ -3,7 +3,7 @@
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "infisical run --env=test -- next dev",
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint"
39 changes: 0 additions & 39 deletions typescript/fiddle-frontend/public/_examples/clients.baml

This file was deleted.

1 change: 1 addition & 0 deletions typescript/fiddle-frontend/public/_examples/clients.baml
@@ -1,4 +1,6 @@
-// These are LLM clients you can use in your functions. We currently support Anthropic, OpenAI / Azure, and Ollama as providers but are expanding to many more.
+// These are LLM clients you can use in your functions. We currently support Anthropic, OpenAI / Azure, Gemini, and Ollama as providers but are expanding to many more.

// We also support any other provider that follows the OpenAI API specification, such as HuggingFace.

// For this playground, we have setup a few clients for you to use already with some free credits.

@@ -36,4 +38,12 @@ client<llm> Claude {
max_tokens 1000

}
}

+client<llm> Gemini {
+  provider google-ai
+  options{
+    model "gemini-1.5-pro-001"
+    api_key env.GOOGLE_API_KEY
+  }
+}
@@ -1,4 +1,6 @@
-// These are LLM clients you can use in your functions. We currently support Anthropic, OpenAI / Azure, and Ollama as providers but are expanding to many more.
+// These are LLM clients you can use in your functions. We currently support Anthropic, OpenAI / Azure, Gemini, and Ollama as providers but are expanding to many more.

// We also support any other provider that follows the OpenAI API specification, such as HuggingFace.

// For this playground, we have setup a few clients for you to use already with some free credits.

@@ -36,4 +38,12 @@ client<llm> Claude {
max_tokens 1000

}
}

+client<llm> Gemini {
+  provider google-ai
+  options{
+    model "gemini-1.5-pro-001"
+    api_key env.GOOGLE_API_KEY
+  }
+}
@@ -1,4 +1,6 @@
-// These are LLM clients you can use in your functions. We currently support Anthropic, OpenAI / Azure, and Ollama as providers but are expanding to many more.
+// These are LLM clients you can use in your functions. We currently support Anthropic, OpenAI / Azure, Gemini, and Ollama as providers but are expanding to many more.

// We also support any other provider that follows the OpenAI API specification, such as HuggingFace.

// For this playground, we have setup a few clients for you to use already with some free credits.

@@ -36,4 +38,12 @@ client<llm> Claude {
max_tokens 1000

}
}

+client<llm> Gemini {
+  provider google-ai
+  options{
+    model "gemini-1.5-pro-001"
+    api_key env.GOOGLE_API_KEY
+  }
+}
@@ -1,4 +1,6 @@
-// These are LLM clients you can use in your functions. We currently support Anthropic, OpenAI / Azure, and Ollama as providers but are expanding to many more.
+// These are LLM clients you can use in your functions. We currently support Anthropic, OpenAI / Azure, Gemini, and Ollama as providers but are expanding to many more.

// We also support any other provider that follows the OpenAI API specification, such as HuggingFace.

// For this playground, we have setup a few clients for you to use already with some free credits.

@@ -36,4 +38,12 @@ client<llm> Claude {
max_tokens 1000

}
}

+client<llm> Gemini {
+  provider google-ai
+  options{
+    model "gemini-1.5-pro-001"
+    api_key env.GOOGLE_API_KEY
+  }
+}
3 changes: 2 additions & 1 deletion typescript/fiddle-proxy/.gitignore
@@ -1 +1,2 @@
-node_modules
+node_modules
+.env
18 changes: 17 additions & 1 deletion typescript/fiddle-proxy/package-lock.json

Some generated files are not rendered by default.

5 changes: 4 additions & 1 deletion typescript/fiddle-proxy/package.json
@@ -11,11 +11,14 @@
"author": "",
"license": "ISC",
"devDependencies": {
"@flydotio/node-demo": "^0.2.1"
"@flydotio/node-demo": "^0.2.1",
"cors": "^2.8.5"

},
"dependencies": {
"cors": "^2.8.5",
"express": "^4.19.2",
"dotenv": "16.4.5",
"http-proxy-middleware": "^3.0.0"
}
}
14 changes: 14 additions & 0 deletions typescript/fiddle-proxy/server.js
@@ -1,12 +1,20 @@
const cors = require('cors')
const { createProxyMiddleware } = require('http-proxy-middleware')
const app = require('express')()
+require('dotenv').config()

app.use(cors())

app.use(
createProxyMiddleware({
changeOrigin: true,
+      pathRewrite: (path, req) => {
+        // Ensure the URL does not end with a slash
+        if (path.endsWith('/')) {
+          return path.slice(0, -1)
+        }
+        return path
+      },
router: (req) => {
// Extract the original target URL from the custom header
const originalUrl = req.headers['baml-original-url']
@@ -32,6 +40,12 @@ app.use(
}
proxyReq.setHeader('x-api-key', process.env.ANTHROPIC_API_KEY)
}
+      if (req.headers['baml-original-url'].includes('gemini')) {
+        // if (process.env.GOOGLE_API_KEY === undefined) {
+        //   throw new Error('GOOGLE_API_KEY is missing')
+        // }
+        proxyReq.setHeader('x-goog-api-key', process.env.GOOGLE_API_KEY)
+      }
},
proxyRes: (proxyRes, req, res) => {
proxyRes.headers['Access-Control-Allow-Origin'] = '*'
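Taken together with the client change above, the contract is: the caller sends the upstream URL in the `baml-original-url` header and no API key at all, while the proxy loads keys from its own environment (via `dotenv`) and injects the matching auth header, such as `x-goog-api-key`, before forwarding. A rough TypeScript sketch of a caller, with an assumed local port and simplified path handling (how the request path combines with the `router()` target may differ in the real setup):

```typescript
// Hypothetical caller going through the fiddle proxy; the port, path handling,
// and request body shape are assumptions for illustration.
const PROXY_ORIGIN = 'http://localhost:3000' // assumed dev port
const UPSTREAM =
  'https://generativelanguage.googleapis.com/v1/models/gemini-1.5-pro-001:generateContent'

async function generateViaProxy(prompt: string): Promise<unknown> {
  const res = await fetch(`${PROXY_ORIGIN}/v1/models/gemini-1.5-pro-001:generateContent`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      // router() in server.js reads this header to choose the upstream target;
      // the proxy attaches x-goog-api-key from its own .env before forwarding.
      'baml-original-url': UPSTREAM,
    },
    body: JSON.stringify({
      contents: [{ role: 'user', parts: [{ text: prompt }] }],
    }),
  })
  return res.json()
}
```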
