From cbb4e95928138e63cb8948f89d6e8d282327cf0d Mon Sep 17 00:00:00 2001
From: -LAN-
Date: Tue, 26 Nov 2024 13:07:32 +0800
Subject: [PATCH] fix(llm_node): Ignore user query when memory is disabled.
 (#11106)

---
 api/core/workflow/nodes/llm/node.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py
index 9f5df1edc..8653f539a 100644
--- a/api/core/workflow/nodes/llm/node.py
+++ b/api/core/workflow/nodes/llm/node.py
@@ -137,12 +137,12 @@ class LLMNode(BaseNode[LLMNodeData]):
         query = None
         if self.node_data.memory:
             query = self.node_data.memory.query_prompt_template
-        if not query and (
-            query_variable := self.graph_runtime_state.variable_pool.get(
-                (SYSTEM_VARIABLE_NODE_ID, SystemVariableKey.QUERY)
-            )
-        ):
-            query = query_variable.text
+            if not query and (
+                query_variable := self.graph_runtime_state.variable_pool.get(
+                    (SYSTEM_VARIABLE_NODE_ID, SystemVariableKey.QUERY)
+                )
+            ):
+                query = query_variable.text
 
         prompt_messages, stop = self._fetch_prompt_messages(
             user_query=query,
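
Below is a minimal, runnable sketch of the control flow this patch enforces; it is not the actual LLMNode code. resolve_user_query, memory_enabled, query_prompt_template, and system_query are hypothetical stand-ins for the node's memory configuration and the (SYSTEM_VARIABLE_NODE_ID, SystemVariableKey.QUERY) variable. The point is that the fallback to the system query now only runs when memory is enabled.

from typing import Optional


def resolve_user_query(
    memory_enabled: bool,
    query_prompt_template: Optional[str],
    system_query: Optional[str],
) -> Optional[str]:
    """Hypothetical stand-in for the query resolution step in LLMNode."""
    query = None
    if memory_enabled:
        query = query_prompt_template
        # After this patch, the fallback to the system query is nested under
        # the memory check, so the user query is ignored when memory is off.
        if not query and system_query:
            query = system_query
    return query


# Memory disabled: the user query is ignored entirely.
assert resolve_user_query(memory_enabled=False, query_prompt_template=None, system_query="hi") is None
# Memory enabled with no template: fall back to the system query.
assert resolve_user_query(memory_enabled=True, query_prompt_template=None, system_query="hi") == "hi"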