From 67f80de978d7295e0f3b08992f79cc15dca85070 Mon Sep 17 00:00:00 2001 From: Your Name Date: Sat, 20 Dec 2025 11:49:46 -0600 Subject: [PATCH] Add memory scope for conversation history --- custom_components/groqd/config_flow.py | 17 +++++++++++++++ custom_components/groqd/const.py | 2 ++ custom_components/groqd/conversation.py | 22 ++++++++++++++++++-- custom_components/groqd/translations/en.json | 4 +++- 4 files changed, 42 insertions(+), 3 deletions(-) diff --git a/custom_components/groqd/config_flow.py b/custom_components/groqd/config_flow.py index d79aa97..2d4ec2c 100644 --- a/custom_components/groqd/config_flow.py +++ b/custom_components/groqd/config_flow.py @@ -27,6 +27,7 @@ from .const import ( CONF_CONTEXT_MESSAGES, CONF_FREQUENCY_PENALTY, CONF_PARALLEL_TOOL_CALLS, + CONF_MEMORY_SCOPE, CONF_PRESENCE_PENALTY, CONF_PROMPT, CONF_RESPONSE_FORMAT, @@ -45,6 +46,7 @@ from .const import ( DEFAULT_TEMPERATURE, DEFAULT_TOOL_CHOICE, DEFAULT_TOP_P, + DEFAULT_MEMORY_SCOPE, DOMAIN, ) from .const import DEFAULT_MAX_TOKENS, CONF_MAX_TOKENS @@ -232,6 +234,21 @@ class GroqdOptionsFlow(OptionsFlow): ] ) ), + vol.Optional( + CONF_MEMORY_SCOPE, + description={"suggested_value": options.get(CONF_MEMORY_SCOPE, DEFAULT_MEMORY_SCOPE)}, + default=options.get(CONF_MEMORY_SCOPE, DEFAULT_MEMORY_SCOPE), + ): SelectSelector( + SelectSelectorConfig( + options=[ + SelectOptionDict(label="none", value="none"), + SelectOptionDict(label="conversation", value="conversation"), + SelectOptionDict(label="device", value="device"), + SelectOptionDict(label="user", value="user"), + SelectOptionDict(label="global", value="global"), + ] + ) + ), vol.Optional( CONF_LLM_HASS_API, description={"suggested_value": options.get(CONF_LLM_HASS_API)}, diff --git a/custom_components/groqd/const.py b/custom_components/groqd/const.py index da88eae..1cbb949 100644 --- a/custom_components/groqd/const.py +++ b/custom_components/groqd/const.py @@ -22,6 +22,7 @@ CONF_STOP = "stop_sequences" 
CONF_TOOL_CHOICE = "tool_choice" CONF_PARALLEL_TOOL_CALLS = "parallel_tool_calls" CONF_RESPONSE_FORMAT = "response_format" +CONF_MEMORY_SCOPE = "memory_scope" DEFAULT_CHAT_MODEL = "meta-llama/llama-4-maverick-17b-128e-instruct" DEFAULT_CONTEXT_MESSAGES = 20 @@ -33,3 +34,4 @@ DEFAULT_PRESENCE_PENALTY = 0.0 DEFAULT_TOOL_CHOICE = "auto" DEFAULT_PARALLEL_TOOL_CALLS = True DEFAULT_RESPONSE_FORMAT = "text" +DEFAULT_MEMORY_SCOPE = "device" diff --git a/custom_components/groqd/conversation.py b/custom_components/groqd/conversation.py index df0af16..7f55645 100644 --- a/custom_components/groqd/conversation.py +++ b/custom_components/groqd/conversation.py @@ -38,6 +38,7 @@ from .const import ( CONF_CHAT_MODEL, CONF_CONTEXT_MESSAGES, CONF_FREQUENCY_PENALTY, + CONF_MEMORY_SCOPE, CONF_MAX_TOKENS, CONF_PARALLEL_TOOL_CALLS, CONF_PRESENCE_PENALTY, @@ -58,6 +59,7 @@ from .const import ( DEFAULT_TEMPERATURE, DEFAULT_TOOL_CHOICE, DEFAULT_TOP_P, + DEFAULT_MEMORY_SCOPE, DOMAIN, LOGGER, ) @@ -123,6 +125,7 @@ class GroqdConversationEntity( def __init__(self, entry: GroqdConfigEntry) -> None: self.entry = entry self.history: dict[str, list[ChatCompletionMessageParam]] = {} + self._memory_index: dict[str, str] = {} self._attr_unique_id = entry.entry_id self._attr_device_info = dr.DeviceInfo( identifiers={(DOMAIN, entry.entry_id)}, @@ -185,9 +188,22 @@ class GroqdConversationEntity( ) tools = [_format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools] + memory_scope = options.get(CONF_MEMORY_SCOPE, DEFAULT_MEMORY_SCOPE) + memory_key = None + if memory_scope == "device" and user_input.device_id: + memory_key = f"device:{user_input.device_id}" + elif memory_scope == "user" and user_input.context and user_input.context.user_id: + memory_key = f"user:{user_input.context.user_id}" + elif memory_scope == "global": + memory_key = "global" + if user_input.conversation_id is None: - conversation_id = ulid.ulid_now() - history = [] + if memory_key and memory_key in self._memory_index: + 
conversation_id = self._memory_index[memory_key] + history = self.history.get(conversation_id, []) + else: + conversation_id = ulid.ulid_now() + history = [] elif user_input.conversation_id in self.history: conversation_id = user_input.conversation_id history = self.history[conversation_id] @@ -360,6 +376,8 @@ class GroqdConversationEntity( history = history[-limit:] self.history[conversation_id] = history + if memory_key: + self._memory_index[memory_key] = conversation_id intent_response.async_set_speech(response.content or "") return conversation.ConversationResult( diff --git a/custom_components/groqd/translations/en.json b/custom_components/groqd/translations/en.json index 008f312..bc7eb1f 100644 --- a/custom_components/groqd/translations/en.json +++ b/custom_components/groqd/translations/en.json @@ -40,10 +40,12 @@ "tool_choice": "Tool choice (auto/none/required/tool:)", "parallel_tool_calls": "Parallel tool calls", "response_format": "Response format", + "memory_scope": "Memory scope", "llm_hass_api": "Home Assistant LLM API" }, "data_description": { - "prompt": "System prompt for the assistant. Supports templates." + "prompt": "System prompt for the assistant. Supports templates.", + "memory_scope": "How to reuse conversation history across requests." } } }