Fix reqwest caching images on the playground tests #1228

Merged 1 commit on Dec 10, 2024
5 changes: 4 additions & 1 deletion engine/baml-runtime/src/internal/llm_client/traits/mod.rs
@@ -655,8 +655,11 @@ async fn fetch_with_proxy(
     proxy_url: Option<&str>,
 ) -> Result<reqwest::Response, anyhow::Error> {
     let client = reqwest::Client::new();
+
     let request = if let Some(proxy) = proxy_url {
-        client.get(proxy).header("baml-original-url", url)
+        client
+            .get(format!("{}/{}", proxy, url))
Review comment from a contributor on this line:
The URL construction with the proxy might be incorrect. Typically, a proxy URL is used to route requests, not concatenate with the original URL. This could lead to incorrect URL formation.

+            .header("baml-original-url", url)
     } else {
         client.get(url)
     };
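
For context on the review comment above, here is a minimal, standalone sketch (not part of the PR) contrasting the two ways the request URL ends up being formed. The proxy endpoint and image URL below are hypothetical placeholders; the point is only that the old approach sends every image request to the same URL, while the new approach makes each request URL unique, which is presumably what matters for the reqwest caching issue named in the PR title.

fn main() {
    // Hypothetical values, purely for illustration.
    let proxy = "http://localhost:4000";
    let url = "https://example.com/image.png";

    // Old behavior: the request targets the proxy root and the real destination
    // travels only in the `baml-original-url` header, so every image fetch hits
    // the same request URL.
    let old_request_url = proxy.to_string();

    // New behavior (this PR): the original URL is appended to the proxy path,
    // so each image produces a distinct request URL.
    let new_request_url = format!("{}/{}", proxy, url);

    println!("old: {old_request_url}");
    println!("new: {new_request_url}");
}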
3 changes: 3 additions & 0 deletions engine/baml-runtime/src/lib.rs
@@ -228,6 +228,7 @@ impl BamlRuntime {
         let rctx = ctx.create_ctx(None, None)?;
         let (params, constraints) =
             self.get_test_params_and_constraints(function_name, test_name, &rctx, true)?;
+        log::info!("params: {:#?}", params);
         let rctx_stream = ctx.create_ctx(None, None)?;
         let mut stream = self.inner.stream_function_impl(
             function_name.into(),
@@ -238,12 +239,14 @@
             self.async_runtime.clone(),
         )?;
         let (response_res, span_uuid) = stream.run(on_event, ctx, None, None).await;
+        log::info!("response_res: {:#?}", response_res);
         let res = response_res?;
         let (_, llm_resp, _, val) = res
             .event_chain()
             .iter()
             .last()
             .context("Expected non-empty event chain")?;
+        log::info!("llm_resp: {:#?}", llm_resp);
         let complete_resp = match llm_resp {
             LLMResponse::Success(complete_llm_response) => Ok(complete_llm_response),
             LLMResponse::InternalFailure(e) => Err(anyhow::anyhow!("{}", e)),
6 changes: 4 additions & 2 deletions engine/baml-schema-wasm/src/runtime_wasm/mod.rs
@@ -60,8 +60,8 @@ pub fn on_wasm_init() {
             const LOG_LEVEL: log::Level = log::Level::Warn;
         }
     };
-    // This line is required if we want to see normal log::info! messages in JS console logs.
-    wasm_logger::init(wasm_logger::Config::new(LOG_LEVEL));
+    // I don't think we need this line anymore -- it seems to break logging if you add it.
+    //wasm_logger::init(wasm_logger::Config::new(LOG_LEVEL));
     match console_log::init_with_level(LOG_LEVEL) {
         Ok(_) => web_sys::console::log_1(
             &format!("Initialized BAML runtime logging as log::{}", LOG_LEVEL).into(),
@@ -1661,6 +1661,8 @@ impl WasmFunction {
             .run_test(&function_name, &test_name, &ctx, Some(cb))
             .await;

+        log::info!("test_response: {:#?}", test_response);
+
         Ok(WasmTestResponse {
             test_response,
             span,
56 changes: 56 additions & 0 deletions engine/baml-schema-wasm/tests/test_file_manager.rs
@@ -1,5 +1,6 @@
 // Run from the baml-schema-wasm folder with:
 // wasm-pack test --node
+// and make sure to set the rust-analyzer target in VS Code settings to: "rust-analyzer.cargo.target": "wasm32-unknown-unknown",
 #[cfg(target_arch = "wasm32")]
 #[cfg(test)]
 mod tests {
@@ -173,4 +174,59 @@ function PredictAgeBare(inp: string @assert(big_enough, {{this|length > 1}}) ) -

     assert!(diagnostics.errors().is_empty());
     }
+
+    #[wasm_bindgen_test]
+    fn test_run_tests() {
+        wasm_logger::init(wasm_logger::Config::new(log::Level::Info));
+        let sample_baml_content = r##"
+        function Func(name: string ) -> string {
+            client "openai/gpt-4o"
+            prompt #"
+                Return the name of {{name}}
+            "#
+        }
+
+        test One {
+            functions [Func]
+            args {
+                name "john"
+            }
+        }
+
+        test Two {
+            functions [Func]
+            args {
+                name "jane"
+            }
+        }
+
+
+        "##;
+        let mut files = HashMap::new();
+        files.insert("error.baml".to_string(), sample_baml_content.to_string());
+        let files_js = to_value(&files).unwrap();
+        let project = WasmProject::new("baml_src", files_js)
+            .map_err(JsValue::from)
+            .unwrap();
+
+        let env_vars = [("OPENAI_API_KEY", "12345")]
+            .iter()
+            .cloned()
+            .collect::<HashMap<_, _>>();
+        let env_vars_js = to_value(&env_vars).unwrap();
+
+        let current_runtime = project.runtime(env_vars_js).map_err(JsValue::from).unwrap();
+
+        let diagnostics = project.diagnostics(&current_runtime);
+        let functions = current_runtime.list_functions();
+        functions.iter().for_each(|f| {
+            log::info!("function: {:#?}", f);
+            f.test_cases.iter().for_each(|t| {
+                log::info!("test case: {:#?}", t);
+            });
+            // f.run_test(&mut current_runtime, "One".to_string(), None, None);
+        });
+
+        assert!(diagnostics.errors().is_empty());
+    }
 }