From f004447d94df3c66d958534dae6dc7598f118406 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Fri, 6 Sep 2024 14:57:44 +0300
Subject: [PATCH] batched.swift : fix build [no ci]

---
 examples/batched.swift/Sources/main.swift | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/examples/batched.swift/Sources/main.swift b/examples/batched.swift/Sources/main.swift
index e9acdc7ac86aa9..fbb9a92b349a9d 100644
--- a/examples/batched.swift/Sources/main.swift
+++ b/examples/batched.swift/Sources/main.swift
@@ -199,9 +199,10 @@ if n_parallel > 1 {
 
 let t_main_end = ggml_time_us()
 
-print("decoded \(n_decode) tokens in \(String(format: "%.2f", Double(t_main_end - t_main_start) / 1_000_000.0)) s, speed: \(String(format: "%.2f", Double(n_decode) / (Double(t_main_end - t_main_start) / 1_000_000.0))) t/s\n")
+print("decoded \(n_decode) tokens in \(String(format: "%.2f", Double(t_main_end - t_main_start) / 1_000_000.0)) s, speed: \(String(format: "%.2f", Double(n_decode) / (Double(t_main_end - t_main_start) / 1_000_000.0))) t/s\n\n")
 
-llama_print_timings(context, smpl)
+llama_perf_print(context, LLAMA_PERF_TYPE_CONTEXT)
+llama_perf_print(smpl, LLAMA_PERF_TYPE_SAMPLER_CHAIN)
 
 private func tokenize(text: String, add_bos: Bool) -> [llama_token] {
     let utf8Count = text.utf8.count
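
Note: for context, a minimal sketch of how the patched timing/report section of main.swift reads once the hunk above is applied. The intermediate `elapsed` and `speed` names are introduced here only for readability and are not part of the patch; it assumes `context`, `smpl`, `n_decode`, and `t_main_start` are set up earlier in the example exactly as they are upstream.

    // end of timing window; ggml_time_us() reports microseconds
    let t_main_end = ggml_time_us()

    // convert to seconds and derive tokens-per-second throughput
    let elapsed = Double(t_main_end - t_main_start) / 1_000_000.0
    let speed   = Double(n_decode) / elapsed

    print("decoded \(n_decode) tokens in \(String(format: "%.2f", elapsed)) s, speed: \(String(format: "%.2f", speed)) t/s\n\n")

    // the single llama_print_timings(context, smpl) call is replaced by one
    // llama_perf_print call per object, selected via the perf type constant
    llama_perf_print(context, LLAMA_PERF_TYPE_CONTEXT)
    llama_perf_print(smpl, LLAMA_PERF_TYPE_SAMPLER_CHAIN)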