batched.swift : fix build [no ci]
ggerganov committed Sep 6, 2024
1 parent 43eda32 commit f004447
Showing 1 changed file with 3 additions and 2 deletions.
examples/batched.swift/Sources/main.swift (5 changes: 3 additions & 2 deletions)
@@ -199,9 +199,10 @@ if n_parallel > 1 {
 
 let t_main_end = ggml_time_us()
 
-print("decoded \(n_decode) tokens in \(String(format: "%.2f", Double(t_main_end - t_main_start) / 1_000_000.0)) s, speed: \(String(format: "%.2f", Double(n_decode) / (Double(t_main_end - t_main_start) / 1_000_000.0))) t/s\n")
+print("decoded \(n_decode) tokens in \(String(format: "%.2f", Double(t_main_end - t_main_start) / 1_000_000.0)) s, speed: \(String(format: "%.2f", Double(n_decode) / (Double(t_main_end - t_main_start) / 1_000_000.0))) t/s\n\n")
 
-llama_print_timings(context, smpl)
+llama_perf_print(context, LLAMA_PERF_TYPE_CONTEXT)
+llama_perf_print(smpl, LLAMA_PERF_TYPE_SAMPLER_CHAIN)
 
 private func tokenize(text: String, add_bos: Bool) -> [llama_token] {
     let utf8Count = text.utf8.count
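For context, the build fix tracks the llama.cpp timing API change visible in the diff: the single llama_print_timings call is replaced by llama_perf_print, which takes the object to report on plus a LLAMA_PERF_TYPE_* selector. Below is a minimal sketch of how the timing epilogue of batched.swift reads after this change. It assumes the llama Swift module from the example's SwiftPM setup, and that context, smpl, n_decode, and t_main_start were created earlier in main.swift; the t_total_s helper variable is only for readability and is not in the original diff.

    // End-of-run reporting in the batched.swift example after the perf API change.
    // Assumes: `context` is the llama_context handle, `smpl` is the sampler chain,
    // and `n_decode` / `t_main_start` were set during the decode loop.
    let t_main_end = ggml_time_us()
    let t_total_s = Double(t_main_end - t_main_start) / 1_000_000.0

    print("decoded \(n_decode) tokens in \(String(format: "%.2f", t_total_s)) s, " +
          "speed: \(String(format: "%.2f", Double(n_decode) / t_total_s)) t/s\n\n")

    // New perf API: one call per object type instead of a single llama_print_timings.
    llama_perf_print(context, LLAMA_PERF_TYPE_CONTEXT)    // context-level timings
    llama_perf_print(smpl, LLAMA_PERF_TYPE_SAMPLER_CHAIN) // sampler-chain timings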
