forked from home-assistant/core
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Anthropic Claude conversation integration (home-assistant#122526)
* Initial commit * Use add_suggested_values * Update homeassistant/components/anthropic/conversation.py Co-authored-by: Joost Lekkerkerker <[email protected]> * Update strings.json * Update config_flow.py * Update config_flow.py * Fix tests * Update homeassistant/components/anthropic/conversation.py Co-authored-by: Paulus Schoutsen <[email protected]> * Removed agent registration * Moved message_convert inline function outside --------- Co-authored-by: Joost Lekkerkerker <[email protected]> Co-authored-by: Paulus Schoutsen <[email protected]> Co-authored-by: Paulus Schoutsen <[email protected]>
- Loading branch information
1 parent
80aa2c2
commit 262d778
Showing
17 changed files
with
1,509 additions
and
0 deletions.
There are no files selected for viewing
Validating CODEOWNERS rules …
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,46 @@ | ||
"""The Anthropic integration.""" | ||
|
||
from __future__ import annotations | ||
|
||
import anthropic | ||
|
||
from homeassistant.config_entries import ConfigEntry | ||
from homeassistant.const import CONF_API_KEY, Platform | ||
from homeassistant.core import HomeAssistant | ||
from homeassistant.exceptions import ConfigEntryNotReady | ||
from homeassistant.helpers import config_validation as cv | ||
|
||
from .const import DOMAIN, LOGGER | ||
|
||
# This integration only sets up the conversation platform.
PLATFORMS = (Platform.CONVERSATION,)
# Config-entry-only integration: reject any YAML configuration under the domain.
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

# Config entry whose runtime_data carries the shared async Anthropic client.
type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]
|
||
|
||
async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
    """Set up Anthropic from a config entry.

    Creates the API client, verifies the stored key with a minimal request,
    stores the client in ``entry.runtime_data`` and forwards platform setup.
    """
    client = anthropic.AsyncAnthropic(api_key=entry.data[CONF_API_KEY])

    # Probe the API with the cheapest possible call (one token, smallest
    # model) to confirm the key is still valid before loading platforms.
    try:
        await client.messages.create(
            model="claude-3-haiku-20240307",
            max_tokens=1,
            messages=[{"role": "user", "content": "Hi"}],
            timeout=10.0,
        )
    except anthropic.AuthenticationError as exc:
        # Bad credentials: fail setup permanently rather than retrying.
        LOGGER.error("Invalid API key: %s", exc)
        return False
    except anthropic.AnthropicError as exc:
        # Transient API problem: let Home Assistant retry setup later.
        raise ConfigEntryNotReady(exc) from exc

    entry.runtime_data = client
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True
|
||
|
||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload Anthropic."""
    # Tear down every platform that was forwarded during setup.
    unload_ok: bool = await hass.config_entries.async_unload_platforms(
        entry, PLATFORMS
    )
    return unload_ok
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,210 @@ | ||
"""Config flow for Anthropic integration.""" | ||
|
||
from __future__ import annotations | ||
|
||
import logging | ||
from types import MappingProxyType | ||
from typing import Any | ||
|
||
import anthropic | ||
import voluptuous as vol | ||
|
||
from homeassistant.config_entries import ( | ||
ConfigEntry, | ||
ConfigFlow, | ||
ConfigFlowResult, | ||
OptionsFlow, | ||
) | ||
from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API | ||
from homeassistant.core import HomeAssistant | ||
from homeassistant.helpers import llm | ||
from homeassistant.helpers.selector import ( | ||
NumberSelector, | ||
NumberSelectorConfig, | ||
SelectOptionDict, | ||
SelectSelector, | ||
SelectSelectorConfig, | ||
TemplateSelector, | ||
) | ||
|
||
from .const import ( | ||
CONF_CHAT_MODEL, | ||
CONF_MAX_TOKENS, | ||
CONF_PROMPT, | ||
CONF_RECOMMENDED, | ||
CONF_TEMPERATURE, | ||
DOMAIN, | ||
RECOMMENDED_CHAT_MODEL, | ||
RECOMMENDED_MAX_TOKENS, | ||
RECOMMENDED_TEMPERATURE, | ||
) | ||
|
||
_LOGGER = logging.getLogger(__name__)

# Initial user step collects only the API key.
STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_API_KEY): str,
    }
)

# Options stored on a freshly created entry: recommended mode enabled,
# Assist API selected, and the default instructions prompt.
RECOMMENDED_OPTIONS = {
    CONF_RECOMMENDED: True,
    CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
    CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
}
|
||
|
||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
    """Validate the user input allows us to connect.

    Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.

    Raises an ``anthropic.AnthropicError`` subclass on failure; the caller
    translates those into form errors.
    """
    client = anthropic.AsyncAnthropic(api_key=data[CONF_API_KEY])
    try:
        # Cheapest possible probe: a single token against the smallest model.
        await client.messages.create(
            model="claude-3-haiku-20240307",
            max_tokens=1,
            messages=[{"role": "user", "content": "Hi"}],
            timeout=10.0,
        )
    finally:
        # This client exists only for validation — release its HTTP
        # connection pool instead of leaking it on every flow attempt.
        await client.close()
|
||
|
||
class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Anthropic."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step.

        Shows the API-key form, validates the key against the API and creates
        the entry with the recommended options on success.
        """
        errors: dict[str, str] = {}

        if user_input is not None:
            try:
                await validate_input(self.hass, user_input)
            except anthropic.APITimeoutError:
                errors["base"] = "timeout_connect"
            except anthropic.APIConnectionError:
                errors["base"] = "cannot_connect"
            except anthropic.APIStatusError as e:
                # Error bodies are normally {"error": {"type": ..., ...}},
                # but guard each level: a non-dict body or a non-dict
                # "error" value would otherwise raise AttributeError inside
                # this handler and crash the flow instead of showing a form
                # error.
                error_type = "unknown"
                if isinstance(e.body, dict):
                    error = e.body.get("error")
                    if isinstance(error, dict):
                        error_type = error.get("type", "unknown")
                errors["base"] = error_type
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                return self.async_create_entry(
                    title="Claude",
                    data=user_input,
                    options=RECOMMENDED_OPTIONS,
                )

        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors or None
        )

    @staticmethod
    def async_get_options_flow(
        config_entry: ConfigEntry,
    ) -> OptionsFlow:
        """Create the options flow."""
        return AnthropicOptionsFlow(config_entry)
|
||
|
||
class AnthropicOptionsFlow(OptionsFlow):
    """Anthropic config flow options handler."""

    def __init__(self, config_entry: ConfigEntry) -> None:
        """Initialize options flow."""
        self.config_entry = config_entry
        # Remember the "recommended" toggle as it was last rendered so a
        # flip can be detected and the form redisplayed instead of saved.
        self.last_rendered_recommended = config_entry.options.get(
            CONF_RECOMMENDED, False
        )

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Manage the options."""
        options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options

        if user_input is not None:
            recommended = user_input[CONF_RECOMMENDED]
            if recommended == self.last_rendered_recommended:
                # Toggle unchanged: persist. "none" is the sentinel for
                # "no LLM API selected" and is stored as an absent key.
                if user_input[CONF_LLM_HASS_API] == "none":
                    user_input.pop(CONF_LLM_HASS_API)
                return self.async_create_entry(title="", data=user_input)

            # Toggle flipped: re-render with the recommended options
            # shown/hidden, keeping what the user already typed.
            self.last_rendered_recommended = recommended
            options = {
                CONF_RECOMMENDED: recommended,
                CONF_PROMPT: user_input[CONF_PROMPT],
                CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
            }

        suggested_values = dict(options)
        if not suggested_values.get(CONF_PROMPT):
            # Pre-fill an empty prompt with the default instructions.
            suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT

        schema = self.add_suggested_values_to_schema(
            vol.Schema(anthropic_config_option_schema(self.hass, options)),
            suggested_values,
        )
        return self.async_show_form(step_id="init", data_schema=schema)
|
||
|
||
def anthropic_config_option_schema(
    hass: HomeAssistant,
    options: dict[str, Any] | MappingProxyType[str, Any],
) -> dict:
    """Return a schema for Anthropic completion options."""
    # LLM API choices: a "no control" sentinel followed by every
    # registered API instance.
    hass_apis: list[SelectOptionDict] = [
        SelectOptionDict(label="No control", value="none")
    ]
    for api in llm.async_get_apis(hass):
        hass_apis.append(SelectOptionDict(label=api.name, value=api.id))

    schema: dict = {
        vol.Optional(CONF_PROMPT): TemplateSelector(),
        vol.Optional(CONF_LLM_HASS_API, default="none"): SelectSelector(
            SelectSelectorConfig(options=hass_apis)
        ),
        vol.Required(
            CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
        ): bool,
    }

    # Recommended mode hides the advanced tuning fields entirely.
    if not options.get(CONF_RECOMMENDED):
        schema[
            vol.Optional(CONF_CHAT_MODEL, default=RECOMMENDED_CHAT_MODEL)
        ] = str
        schema[
            vol.Optional(CONF_MAX_TOKENS, default=RECOMMENDED_MAX_TOKENS)
        ] = int
        schema[
            vol.Optional(CONF_TEMPERATURE, default=RECOMMENDED_TEMPERATURE)
        ] = NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05))

    return schema
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,15 @@ | ||
"""Constants for the Anthropic integration.""" | ||
|
||
import logging | ||
|
||
DOMAIN = "anthropic" | ||
LOGGER = logging.getLogger(__package__) | ||
|
||
CONF_RECOMMENDED = "recommended" | ||
CONF_PROMPT = "prompt" | ||
CONF_CHAT_MODEL = "chat_model" | ||
RECOMMENDED_CHAT_MODEL = "claude-3-5-sonnet-20240620" | ||
CONF_MAX_TOKENS = "max_tokens" | ||
RECOMMENDED_MAX_TOKENS = 1024 | ||
CONF_TEMPERATURE = "temperature" | ||
RECOMMENDED_TEMPERATURE = 1.0 |
Oops, something went wrong.