Add memory scope for conversation history

This commit is contained in:
Your Name 2025-12-20 11:49:46 -06:00
parent 6a5e2a067a
commit 67f80de978
4 changed files with 42 additions and 3 deletions

View file

@ -27,6 +27,7 @@ from .const import (
CONF_CONTEXT_MESSAGES,
CONF_FREQUENCY_PENALTY,
CONF_PARALLEL_TOOL_CALLS,
CONF_MEMORY_SCOPE,
CONF_PRESENCE_PENALTY,
CONF_PROMPT,
CONF_RESPONSE_FORMAT,
@ -45,6 +46,7 @@ from .const import (
DEFAULT_TEMPERATURE,
DEFAULT_TOOL_CHOICE,
DEFAULT_TOP_P,
DEFAULT_MEMORY_SCOPE,
DOMAIN,
)
from .const import DEFAULT_MAX_TOKENS, CONF_MAX_TOKENS
@ -232,6 +234,21 @@ class GroqdOptionsFlow(OptionsFlow):
]
)
),
vol.Optional(
CONF_MEMORY_SCOPE,
description={"suggested_value": options.get(CONF_MEMORY_SCOPE, DEFAULT_MEMORY_SCOPE)},
default=options.get(CONF_MEMORY_SCOPE, DEFAULT_MEMORY_SCOPE),
): SelectSelector(
SelectSelectorConfig(
options=[
SelectOptionDict(label="none", value="none"),
SelectOptionDict(label="conversation", value="conversation"),
SelectOptionDict(label="device", value="device"),
SelectOptionDict(label="user", value="user"),
SelectOptionDict(label="global", value="global"),
]
)
),
vol.Optional(
CONF_LLM_HASS_API,
description={"suggested_value": options.get(CONF_LLM_HASS_API)},

View file

@ -22,6 +22,7 @@ CONF_STOP = "stop_sequences"
CONF_TOOL_CHOICE = "tool_choice"
CONF_PARALLEL_TOOL_CALLS = "parallel_tool_calls"
CONF_RESPONSE_FORMAT = "response_format"
CONF_MEMORY_SCOPE = "memory_scope"
DEFAULT_CHAT_MODEL = "meta-llama/llama-4-maverick-17b-128e-instruct"
DEFAULT_CONTEXT_MESSAGES = 20
@ -33,3 +34,4 @@ DEFAULT_PRESENCE_PENALTY = 0.0
DEFAULT_TOOL_CHOICE = "auto"
DEFAULT_PARALLEL_TOOL_CALLS = True
DEFAULT_RESPONSE_FORMAT = "text"
DEFAULT_MEMORY_SCOPE = "device"

View file

@ -38,6 +38,7 @@ from .const import (
CONF_CHAT_MODEL,
CONF_CONTEXT_MESSAGES,
CONF_FREQUENCY_PENALTY,
CONF_MEMORY_SCOPE,
CONF_MAX_TOKENS,
CONF_PARALLEL_TOOL_CALLS,
CONF_PRESENCE_PENALTY,
@ -58,6 +59,7 @@ from .const import (
DEFAULT_TEMPERATURE,
DEFAULT_TOOL_CHOICE,
DEFAULT_TOP_P,
DEFAULT_MEMORY_SCOPE,
DOMAIN,
LOGGER,
)
@ -123,6 +125,7 @@ class GroqdConversationEntity(
def __init__(self, entry: GroqdConfigEntry) -> None:
self.entry = entry
self.history: dict[str, list[ChatCompletionMessageParam]] = {}
self._memory_index: dict[str, str] = {}
self._attr_unique_id = entry.entry_id
self._attr_device_info = dr.DeviceInfo(
identifiers={(DOMAIN, entry.entry_id)},
@ -185,7 +188,20 @@ class GroqdConversationEntity(
)
tools = [_format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools]
memory_scope = options.get(CONF_MEMORY_SCOPE, DEFAULT_MEMORY_SCOPE)
memory_key = None
if memory_scope == "device" and user_input.device_id:
memory_key = f"device:{user_input.device_id}"
elif memory_scope == "user" and user_input.context and user_input.context.user_id:
memory_key = f"user:{user_input.context.user_id}"
elif memory_scope == "global":
memory_key = "global"
if user_input.conversation_id is None:
if memory_key and memory_key in self._memory_index:
conversation_id = self._memory_index[memory_key]
history = self.history.get(conversation_id, [])
else:
conversation_id = ulid.ulid_now()
history = []
elif user_input.conversation_id in self.history:
@ -360,6 +376,8 @@ class GroqdConversationEntity(
history = history[-limit:]
self.history[conversation_id] = history
if memory_key:
self._memory_index[memory_key] = conversation_id
intent_response.async_set_speech(response.content or "")
return conversation.ConversationResult(

View file

@ -40,10 +40,12 @@
"tool_choice": "Tool choice (auto/none/required/tool:<name>)",
"parallel_tool_calls": "Parallel tool calls",
"response_format": "Response format",
"memory_scope": "Memory scope",
"llm_hass_api": "Home Assistant LLM API"
},
"data_description": {
"prompt": "System prompt for the assistant. Supports templates."
"prompt": "System prompt for the assistant. Supports templates.",
"memory_scope": "How to reuse conversation history across requests."
}
}
}