From 54874633859998b056df4b40360a5918cf6298a6 Mon Sep 17 00:00:00 2001
From: Yeuoly <45712896+Yeuoly@users.noreply.github.com>
Date: Wed, 2 Jul 2025 19:14:21 +0800
Subject: [PATCH] fix: add list contents handling in structured LLM output (#21837)

---
 .../llm_generator/output_parser/structured_output.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/api/core/llm_generator/output_parser/structured_output.py b/api/core/llm_generator/output_parser/structured_output.py
index 0aaf5abef0..151cef1bc3 100644
--- a/api/core/llm_generator/output_parser/structured_output.py
+++ b/api/core/llm_generator/output_parser/structured_output.py
@@ -23,6 +23,7 @@ from core.model_runtime.entities.message_entities import (
     PromptMessage,
     PromptMessageTool,
     SystemPromptMessage,
+    TextPromptMessageContent,
 )
 from core.model_runtime.entities.model_entities import AIModelEntity, ParameterRule
 
@@ -170,10 +171,15 @@ def invoke_llm_with_structured_output(
     system_fingerprint: Optional[str] = None
     for event in llm_result:
         if isinstance(event, LLMResultChunk):
+            prompt_messages = event.prompt_messages
+            system_fingerprint = event.system_fingerprint
+
             if isinstance(event.delta.message.content, str):
                 result_text += event.delta.message.content
-            prompt_messages = event.prompt_messages
-            system_fingerprint = event.system_fingerprint
+            elif isinstance(event.delta.message.content, list):
+                for item in event.delta.message.content:
+                    if isinstance(item, TextPromptMessageContent):
+                        result_text += item.data
 
             yield LLMResultChunkWithStructuredOutput(
                 model=model_schema.model,
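
Note: the prompt_messages and system_fingerprint assignments are only reordered ahead of the content handling; the behavioral change is the new elif branch, which lets text be accumulated when a chunk delta carries a list of content parts rather than a plain string. Below is a minimal, self-contained sketch of that accumulation logic. The dataclass stand-in for TextPromptMessageContent is hypothetical (the real class is imported from core.model_runtime.entities.message_entities); only the str/list branching and the item.data access come from the diff.

    # Minimal sketch, assuming a simplified stand-in for TextPromptMessageContent.
    from dataclasses import dataclass


    @dataclass
    class TextPromptMessageContent:
        data: str


    def accumulate_delta_text(content) -> str:
        # Mirrors the patched branching: a plain string is appended verbatim,
        # while list contents contribute only their text items.
        if isinstance(content, str):
            return content
        if isinstance(content, list):
            return "".join(
                item.data for item in content if isinstance(item, TextPromptMessageContent)
            )
        return ""


    # Usage: chunk deltas may arrive as plain strings or as lists of content parts.
    assert accumulate_delta_text("plain chunk") == "plain chunk"
    assert accumulate_delta_text(
        [TextPromptMessageContent(data='{"answer": '), TextPromptMessageContent(data="42}")]
    ) == '{"answer": 42}'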