From 7535b67423f4c577956ed5de8f911daf255eb927 Mon Sep 17 00:00:00 2001 From: Stream Date: Wed, 4 Feb 2026 07:23:02 +0800 Subject: [PATCH] fix: handle prompt template correctly to extract selectors for step run Previously, variable selectors were only extracted from chat messages whose edition_type was "jinja2", so selectors embedded in non-jinja2 chat messages were missed when collecting selectors for a step run. Extract selectors from every LLMNodeChatModelMessage, and raise InvalidVariableTypeError for unrecognized item types instead of silently skipping them. NOTE(review): the new error message interpolates type(prompt_template) although it is raised per-item — type(item) may be the intended value; also verify that non-jinja2 templates tolerate selector extraction downstream. --- api/core/workflow/nodes/llm/node.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index 8894d75998..f291d64639 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -1441,9 +1441,11 @@ class LLMNode(Node[LLMNodeData]): if isinstance(item, PromptMessageContext): if len(item.value_selector) >= 2: prompt_context_selectors.append(item.value_selector) - elif isinstance(item, LLMNodeChatModelMessage) and item.edition_type == "jinja2": + elif isinstance(item, LLMNodeChatModelMessage): variable_template_parser = VariableTemplateParser(template=item.text) variable_selectors.extend(variable_template_parser.extract_variable_selectors()) + else: + raise InvalidVariableTypeError(f"Invalid prompt template type: {type(prompt_template)}") elif isinstance(prompt_template, LLMNodeCompletionModelPromptTemplate): if prompt_template.edition_type != "jinja2": variable_template_parser = VariableTemplateParser(template=prompt_template.text)