fix: loading embedded ai controller spinner size
oliverqx committed Jan 26, 2025
1 parent 1f2e505 commit f8d170c
Showing 4 changed files with 6 additions and 9 deletions.
@@ -30,8 +30,4 @@ export const EmbeddedAiProvider: ProviderMetadata = {
port: settings.embeddedLLM.port.toString()
}
},
}
// essentially when embedded is chosen
// show form, save settings
// once settings are saved, enable llm model control component
// this component should start, stop and show status and logs
}
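
The comments deleted in this hunk sketched the intended flow: once the embedded provider is chosen, show the settings form, persist the settings, and only then enable the model control component that starts, stops, and reports status and logs. A minimal sketch of that gating, assuming hypothetical names (EmbeddedAiForm, onSaved, and the import paths) that are not in this diff:

// hypothetical sketch of the flow the deleted comments describe; EmbeddedAiForm
// and its onSaved prop are assumptions, LLMControler is the component changed below
import { useState } from "react";
import { EmbeddedAiForm } from "./embedded-ai-form"; // assumed path
import { LLMControler } from "./llm-controler";      // assumed path

export function EmbeddedAiSetup() {
  const [settingsSaved, setSettingsSaved] = useState(false);

  return (
    <div className="space-y-4">
      {/* show the form; persist the embeddedLLM settings on save */}
      <EmbeddedAiForm onSaved={() => setSettingsSaved(true)} />
      {/* once settings are saved, enable the start/stop + status/logs control */}
      {settingsSaved && <LLMControler />}
    </div>
  );
}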
@@ -125,7 +125,7 @@ export function EmbeddedControlCenter({
control center
</Label>
<p className="text-sm text-muted-foreground">
control and find useful information about screenpipe's embedded ai
control and find useful information about screenpipe's embedded ai
</p>
</div>
<div className="grid grid-cols-2 gap-3">
@@ -30,7 +30,7 @@ export function LLMControler({
description:
"downloading and initializing the embedded ai, may take a while (check $HOME/.ollama/models)...",
});
// TODO: separate the start-server and start-model rust commands

try {
// const result = await invoke<string>("start_ollama_sidecar", {
// settings: {
@@ -132,7 +132,9 @@ export function LLMControler({
className="w-[40px] h-[40px]"
>
{stopIsPending || startIsPending
? <Spinner/>
? <div className="w-[60%] h-[80%]">
<Spinner/>
</div>
: <Component className="h-5 w-5"/>
}
</Button>
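
The sizing fix itself is the last change in this hunk: the Spinner previously rendered at its natural size inside the 40×40px icon button and overflowed it, so it is now wrapped in a div constrained to 60% of the button's width and 80% of its height. A sketch of the resulting button, with the click handler, running flag, and idle icon assumed rather than copied from the file:

{/* sketch of the fixed button; the onClick handler, isRunning flag, and the
    idle Component icon are assumptions about the surrounding component state */}
<Button
  variant="outline"
  size="icon"
  className="w-[40px] h-[40px]"
  disabled={stopIsPending || startIsPending}
  onClick={() => (isRunning ? stopLLM() : startLLM())}
>
  {stopIsPending || startIsPending ? (
    // constrain the spinner so it stays inside the 40px button
    <div className="w-[60%] h-[80%]">
      <Spinner />
    </div>
  ) : (
    <Component className="h-5 w-5" />
  )}
</Button>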
1 change: 0 additions & 1 deletion screenpipe-app-tauri/src-tauri/src/llm_sidecar.rs
@@ -59,7 +59,6 @@ impl LLMSidecar {
}

info!("Spawning Ollama serve command");
// serve_command.spawn()?;
serve_command.spawn()?;

info!("Ollama serve command spawned. Please wait a few seconds to check ollama's server health");
