diff --git a/src/Cellm/Models/Llamafile/LlamafileClient.cs b/src/Cellm/Models/Llamafile/LlamafileClient.cs
index 3f3d384..3fd370b 100644
--- a/src/Cellm/Models/Llamafile/LlamafileClient.cs
+++ b/src/Cellm/Models/Llamafile/LlamafileClient.cs
@@ -127,7 +127,7 @@ private async Task WaitForLlamafile(Process process)
-        var cancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(1));
         var startTime = DateTime.UtcNow;
 
-        while ((DateTime.UtcNow - startTime).TotalSeconds < 30) // Max 30 seconds timeout
+        // Poll the health endpoint for at most 30 seconds before giving up.
+        while ((DateTime.UtcNow - startTime).TotalSeconds < 30)
         {
             if (process.HasExited)
             {
@@ -136,10 +136,14 @@ private async Task WaitForLlamafile(Process process)
 
             try
             {
-            var response = await _httpClient.GetAsync($"{_openAiConfiguration.BaseAddress}/health", cancellationTokenSource.Token);
+                // Fresh 1-second timeout per probe; a single shared token stays
+                // cancelled after the first second and would fail every later attempt.
+                using var healthCheckTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(1));
+                var response = await _httpClient.GetAsync(new Uri(_openAiConfiguration.BaseAddress, "health"), healthCheckTimeout.Token);
                 if (response.StatusCode == System.Net.HttpStatusCode.OK)
                 {
-                return; // Server is healthy
+                    // Server is ready.
+                    return;
                 }
             }
             catch (TaskCanceledException)
@@ -149,9 +153,15 @@ private async Task WaitForLlamafile(Process process)
             {
             }
 
-        await Task.Delay(500); // Wait for 500ms before next attempt
+            // Wait 500 ms before the next probe.
+            await Task.Delay(500);
         }
 
+        // Don't leave an orphaned server process behind on timeout.
+        if (!process.HasExited)
+        {
+            process.Kill();
+        }
         throw new CellmException("Timeout waiting for Llamafile server to be ready");
     }
 }