Merge pull request #242 from grafana/ui-fix
Healthcheck fix when disabled, UI bug fixes
gerboland authored Feb 29, 2024
2 parents c32a1d5 + 809241a commit 286154b
Showing 6 changed files with 47 additions and 28 deletions.
4 changes: 2 additions & 2 deletions pkg/plugin/health.go
```diff
@@ -88,7 +88,7 @@ func (a *App) openAIHealth(ctx context.Context, req *backend.CheckHealthRequest)
 
 	d := openAIHealthDetails{
 		OK:         true,
-		Configured: a.settings.OpenAI.apiKey != "" || a.settings.OpenAI.Provider == openAIProviderGrafana,
+		Configured: ((a.settings.OpenAI.Provider == openAIProviderAzure || a.settings.OpenAI.Provider == openAIProviderOpenAI) && a.settings.OpenAI.apiKey != "") || a.settings.OpenAI.Provider == openAIProviderGrafana,
 		Models:     map[string]openAIModelHealth{},
 	}
 
@@ -114,7 +114,7 @@ func (a *App) openAIHealth(ctx context.Context, req *backend.CheckHealthRequest)
 	}
 	if !anyOK {
 		d.OK = false
-		d.Error = "No models are working"
+		d.Error = "No functioning models are available"
 	}
 
 	// Only cache result if openAI is ok to use.
```
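The substance of the fix is the `Configured` expression: an API key now only counts for the Azure and OpenAI providers, the Grafana-managed provider needs no key, and an unset provider (LLM features disabled) is never reported as configured. A minimal sketch of that logic follows; the type and constant names are hypothetical stand-ins for the plugin's unexported ones:

```go
package main

import "fmt"

// provider and the constants below are hypothetical stand-ins for the
// plugin's unexported provider type and constants.
type provider string

const (
	providerOpenAI  provider = "openai"
	providerAzure   provider = "azure"
	providerGrafana provider = "grafana"
)

// configured mirrors the updated health-check expression: a key only
// matters for the Azure and OpenAI providers, the Grafana-managed
// provider needs no key, and an empty provider is never configured.
func configured(p provider, apiKey string) bool {
	return ((p == providerAzure || p == providerOpenAI) && apiKey != "") ||
		p == providerGrafana
}

func main() {
	fmt.Println(configured("", "sk-abc"))        // false: a leftover key alone no longer counts
	fmt.Println(configured(providerOpenAI, ""))  // false: OpenAI provider requires a key
	fmt.Println(configured(providerGrafana, "")) // true: no key needed
}
```

Under the previous check, the first call would have returned true: a leftover API key made a disabled plugin report itself as configured.
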
7 changes: 5 additions & 2 deletions pkg/plugin/health_test.go
```diff
@@ -57,7 +57,7 @@ func TestCheckHealth(t *testing.T) {
 			},
 			expDetails: healthCheckDetails{
 				OpenAI: openAIHealthDetails{
-					Error:  "No models are working",
+					Error:  "No functioning models are available",
 					Models: map[string]openAIModelHealth{},
 				},
 				Vector: vectorHealthDetails{},
@@ -122,7 +122,7 @@ func TestCheckHealth(t *testing.T) {
 			vService: &mockVectorService{},
 			expDetails: healthCheckDetails{
 				OpenAI: openAIHealthDetails{
-					Error:  "No models are working",
+					Error:  "No functioning models are available",
 					Models: map[string]openAIModelHealth{},
 				},
 				Vector: vectorHealthDetails{
@@ -136,6 +136,9 @@ func TestCheckHealth(t *testing.T) {
 			name: "vector enabled with openai",
 			settings: backend.AppInstanceSettings{
 				JSONData: json.RawMessage(`{
+					"openai": {
+						"provider": "openai"
+					},
 					"vector": {
 						"enabled": true,
 						"embed": {
```
5 changes: 5 additions & 0 deletions pkg/plugin/resources.go
```diff
@@ -394,6 +394,11 @@ func (app *App) handleSaveLLMOptInState(w http.ResponseWriter, req *http.Request
 
 	user := httpadapter.UserFromContext(req.Context())
 
+	devMode := os.Getenv("DEV_MODE") != ""
+	if devMode {
+		user.Email = "[email protected]"
+	}
+
 	if user == nil || user.Email == "" {
 		handleError(w, fmt.Errorf("valid user not found (please sign in and retry)"), http.StatusUnauthorized)
 		return
```
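The new `DEV_MODE` branch lets the opt-in endpoint be exercised in local development, where no signed-in Grafana user exists. A rough sketch of the behaviour using plain `net/http` (the real handler goes through the plugin SDK's `httpadapter`); the `user` type, route path, and nil-guard here are simplifications for illustration, not the plugin's actual code:

```go
package main

import (
	"fmt"
	"net/http"
	"os"
)

// user is a simplified stand-in for the SDK's user type.
type user struct{ Email string }

func handleSaveOptIn(w http.ResponseWriter, u *user) {
	// With DEV_MODE set, substitute a fixed email so the endpoint can
	// be exercised without a signed-in Grafana user. The sketch guards
	// against a nil user before assigning.
	if os.Getenv("DEV_MODE") != "" && u != nil {
		u.Email = "[email protected]"
	}
	if u == nil || u.Email == "" {
		http.Error(w, "valid user not found (please sign in and retry)", http.StatusUnauthorized)
		return
	}
	fmt.Fprintf(w, "opt-in state saved for %s\n", u.Email)
}

func main() {
	// Hypothetical route path, for demonstration only.
	http.HandleFunc("/save-llm-state", func(w http.ResponseWriter, r *http.Request) {
		handleSaveOptIn(w, &user{}) // anonymous user: rejected unless DEV_MODE is set
	})
	_ = http.ListenAndServe(":8080", nil)
}
```
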
7 changes: 1 addition & 6 deletions pkg/plugin/settings.go
```diff
@@ -77,12 +77,7 @@ type Settings struct {
 }
 
 func loadSettings(appSettings backend.AppInstanceSettings) (*Settings, error) {
-	settings := Settings{
-		OpenAI: OpenAISettings{
-			URL:      "https://api.openai.com",
-			Provider: openAIProviderOpenAI,
-		},
-	}
+	settings := Settings{}
 
 	if len(appSettings.JSONData) != 0 {
 		err := json.Unmarshal(appSettings.JSONData, &settings)
```
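Dropping the hard-coded defaults is what makes the health-check fix possible: with no `openai` block in the stored JSON, the provider now stays empty and reads as "disabled" instead of silently defaulting to the OpenAI provider at `https://api.openai.com`. A small sketch of the assumed effect, with simplified versions of the settings structs:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Simplified versions of the plugin's settings structs.
type openAISettings struct {
	URL      string `json:"url"`
	Provider string `json:"provider"`
}

type settings struct {
	OpenAI openAISettings `json:"openai"`
}

func loadSettings(raw []byte) (settings, error) {
	var s settings // zero value: no default provider or URL
	if len(raw) != 0 {
		if err := json.Unmarshal(raw, &s); err != nil {
			return s, err
		}
	}
	return s, nil
}

func main() {
	// No "openai" block: Provider stays "", which the health check and
	// frontend now treat as "LLM features disabled".
	s, _ := loadSettings([]byte(`{"vector": {"enabled": true}}`))
	fmt.Printf("provider=%q url=%q\n", s.OpenAI.Provider, s.OpenAI.URL)
}
```
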
43 changes: 31 additions & 12 deletions src/components/AppConfig/AppConfig.tsx
```diff
@@ -68,11 +68,6 @@ export const AppConfig = ({ plugin }: AppConfigProps) => {
   };
   const errorState = validateInputs();
 
-  const updateManagedLLMOptIn = async (newOptIn: boolean): Promise<void> => {
-    await saveLLMOptInState(newOptIn);
-    setManagedLLMOptIn(newOptIn);
-  };
-
   useEffect(() => {
     const fetchData = async () => {
       const optIn = await getLLMOptInState();
@@ -84,6 +79,13 @@ export const AppConfig = ({ plugin }: AppConfigProps) => {
     }
   }, [settings.enableGrafanaManagedLLM]);
 
+  useEffect(() => {
+    // clear health check status if any setting changed
+    if (updated) {
+      setHealthCheck(undefined);
+    }
+  }, [updated]);
+
   const doSave = async () => {
     if (errorState !== undefined) {
       return;
@@ -108,17 +110,27 @@ export const AppConfig = ({ plugin }: AppConfigProps) => {
       });
     } catch (e) {
       setIsUpdating(false);
-      throw (e);
+      throw e;
    }
-    // If disabling LLM features, no health check needed
+
+    // Note: health-check uses the state saved in the plugin settings.
+    let healthCheckResult: HealthCheckResult | undefined = undefined;
    if (settings.openAI?.provider !== undefined) {
      const result = await checkPluginHealth(plugin.meta.id);
-      setHealthCheck(result.data);
+      healthCheckResult = result.data;
    }
+    setHealthCheck(healthCheckResult);
+
    // If moving away from Grafana-managed LLM, opt-out of the feature automatically
    if (managedLLMOptIn && settings.openAI?.provider !== 'grafana') {
-      updateManagedLLMOptIn(false);
+      await saveLLMOptInState(false);
+    } else {
+      await saveLLMOptInState(managedLLMOptIn);
    }
+
+    // Update the frontend settings explicitly, it is otherwise not updated until page reload
+    plugin.meta.jsonData = settings;
+
    setIsUpdating(false);
    setUpdated(false);
  };
@@ -134,7 +146,10 @@ export const AppConfig = ({ plugin }: AppConfigProps) => {
        secrets={newSecrets}
        secretsSet={configuredSecrets}
        optIn={managedLLMOptIn}
-        setOptIn={updateManagedLLMOptIn}
+        setOptIn={(optIn) => {
+          setManagedLLMOptIn(optIn);
+          setUpdated(true);
+        }}
        onChangeSecrets={(secrets: Secrets) => {
          // Update the new secrets.
          setNewSecrets(secrets);
@@ -212,13 +227,17 @@ export const updateGcomProvisionedPluginSettings = (data: Partial<PluginMeta>) =
  const response = getBackendSrv().fetch({
    url: `/api/plugins/grafana-llm-app/resources/save-plugin-settings`,
    method: 'POST',
-    data
+    data,
  });
 
  return lastValueFrom(response);
 };
 
-export const updateAndSavePluginSettings = async (pluginId: string, persistToGcom = false, data: Partial<PluginMeta>) => {
+export const updateAndSavePluginSettings = async (
+  pluginId: string,
+  persistToGcom = false,
+  data: Partial<PluginMeta>
+) => {
  const gcomPluginData = {
    jsonData: data.jsonData,
    secureJsonData: data.secureJsonData,
```
9 changes: 3 additions & 6 deletions src/components/AppConfig/LLMConfig.tsx
```diff
@@ -44,7 +44,8 @@ export function LLMConfig({
   const allowGrafanaManagedLLM = settings.enableGrafanaManagedLLM === true;
 
   // llmOption is the currently chosen LLM option in the UI
-  const [llmOption, setLLMOption] = useState<LLMOptions>(getLLMOptionFromSettings(settings));
+  const llmOption = getLLMOptionFromSettings(settings);
+
   // previousOpenAIProvider caches the value of the openAI provider, as it is overwritten by the grafana option
   const [previousOpenAIProvider, setPreviousOpenAIProvider] = useState<OpenAIProvider>();
 
@@ -61,7 +62,6 @@ export function LLMConfig({
     }
 
     onChange({ ...settings, openAI: { provider: undefined } });
-      setLLMOption('disabled');
    }
  };
 
@@ -73,7 +73,6 @@ export function LLMConfig({
     }
 
     onChange({ ...settings, openAI: { provider: 'grafana' } });
-      setLLMOption('grafana-provided');
    }
  };
 
@@ -86,11 +85,9 @@ export function LLMConfig({
      onChange({ ...settings, openAI: { provider: previousOpenAIProvider } });
      setPreviousOpenAIProvider(undefined);
    } else {
-      onChange({ ...settings, openAI: { provider: "openai" } });
+      onChange({ ...settings, openAI: { provider: 'openai' } });
      setPreviousOpenAIProvider(undefined);
    }
-
-    setLLMOption('openai');
   }
 };
 
```
