From 1b9165624f2cf7534b1ffebc46753061ace2cdb4 Mon Sep 17 00:00:00 2001
From: znn
Date: Wed, 10 Dec 2025 06:49:13 +0530
Subject: [PATCH 1/3] adding llm_usage and error_type (#26546)

---
 api/core/workflow/nodes/llm/node.py                        | 3 +++
 .../nodes/question_classifier/question_classifier_node.py | 1 +
 2 files changed, 4 insertions(+)

diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py
index 10682ae38a..a5973862b2 100644
--- a/api/core/workflow/nodes/llm/node.py
+++ b/api/core/workflow/nodes/llm/node.py
@@ -334,6 +334,7 @@ class LLMNode(Node[LLMNodeData]):
                     inputs=node_inputs,
                     process_data=process_data,
                     error_type=type(e).__name__,
+                    llm_usage=usage,
                 )
             )
         except Exception as e:
@@ -344,6 +345,8 @@ class LLMNode(Node[LLMNodeData]):
                     error=str(e),
                     inputs=node_inputs,
                     process_data=process_data,
+                    error_type=type(e).__name__,
+                    llm_usage=usage,
                 )
             )
 
diff --git a/api/core/workflow/nodes/question_classifier/question_classifier_node.py b/api/core/workflow/nodes/question_classifier/question_classifier_node.py
index db3d4d4aac..4a3e8e56f8 100644
--- a/api/core/workflow/nodes/question_classifier/question_classifier_node.py
+++ b/api/core/workflow/nodes/question_classifier/question_classifier_node.py
@@ -221,6 +221,7 @@ class QuestionClassifierNode(Node[QuestionClassifierNodeData]):
                 status=WorkflowNodeExecutionStatus.FAILED,
                 inputs=variables,
                 error=str(e),
+                error_type=type(e).__name__,
                 metadata={
                     WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: usage.total_tokens,
                     WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: usage.total_price,

From 4a88c8fd1932c3f43f242ba697ad7bdd1b121540 Mon Sep 17 00:00:00 2001
From: wangxiaolei
Date: Wed, 10 Dec 2025 09:44:47 +0800
Subject: [PATCH 2/3] chore: set is_multimodal db define default = false (#29362)

---
 api/models/dataset.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/models/dataset.py b/api/models/dataset.py
index 5bbf44050c..ba2eaf6749 100644
--- a/api/models/dataset.py
+++ b/api/models/dataset.py
@@ -78,7 +78,7 @@ class Dataset(Base):
     pipeline_id = mapped_column(StringUUID, nullable=True)
     chunk_structure = mapped_column(sa.String(255), nullable=True)
     enable_api = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true"))
-    is_multimodal = mapped_column(sa.Boolean, nullable=False, server_default=db.text("false"))
+    is_multimodal = mapped_column(sa.Boolean, default=False, nullable=False, server_default=db.text("false"))
 
     @property
     def total_documents(self):

From e205182e1f9cf6dfa374bfe3240fdc909a15b42a Mon Sep 17 00:00:00 2001
From: wangxiaolei
Date: Wed, 10 Dec 2025 10:01:45 +0800
Subject: [PATCH 3/3] fix: Parent instance  is not bound… (#29377)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 api/core/rag/datasource/retrieval_service.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py
index cbd7cbeb64..e644e754ec 100644
--- a/api/core/rag/datasource/retrieval_service.py
+++ b/api/core/rag/datasource/retrieval_service.py
@@ -483,7 +483,7 @@ class RetrievalService:
                         DocumentSegment.status == "completed",
                         DocumentSegment.id == segment_id,
                     )
-                    segment = db.session.scalar(document_segment_stmt)
+                    segment = session.scalar(document_segment_stmt)
                     if segment:
                         segment_file_map[segment.id] = [attachment_info]
                     else:
@@ -496,7 +496,7 @@ class RetrievalService:
                         DocumentSegment.status == "completed",
                         DocumentSegment.index_node_id == index_node_id,
                     )
-                    segment = db.session.scalar(document_segment_stmt)
+                    segment = session.scalar(document_segment_stmt)
                     if not segment:
                         continue
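
Note on PATCH 1/3: the change makes a failed LLM / question-classifier run carry both the exception class name (`error_type`) and whatever token usage was accumulated before the failure (`llm_usage`). The sketch below is a minimal, generic illustration of that pattern only; `FailedNodeResult`, `LLMUsage`, and `run_llm` are hypothetical stand-ins, not Dify's real `NodeRunResult` / `LLMNode` API.

```python
# Minimal sketch of the failure-result pattern added in PATCH 1/3.
# FailedNodeResult, LLMUsage and run_llm are illustrative stand-ins, not
# Dify's NodeRunResult / LLMNode APIs; only the field names mirror the diff.
from dataclasses import dataclass, field
from typing import Any


@dataclass
class LLMUsage:
    prompt_tokens: int = 0
    completion_tokens: int = 0

    @property
    def total_tokens(self) -> int:
        return self.prompt_tokens + self.completion_tokens


@dataclass
class FailedNodeResult:
    error: str
    error_type: str      # exception class name, e.g. "TimeoutError"
    llm_usage: LLMUsage  # tokens consumed before the failure
    inputs: dict[str, Any] = field(default_factory=dict)


def run_llm(node_inputs: dict[str, Any]) -> FailedNodeResult:
    usage = LLMUsage(prompt_tokens=120)  # usage gathered from streamed chunks so far
    try:
        raise TimeoutError("provider did not respond")  # stand-in for the model call
    except Exception as e:
        # Same idea as the patch: record the exception class name and the partial
        # usage, so billing/observability still see the cost of a failed run.
        return FailedNodeResult(
            error=str(e),
            error_type=type(e).__name__,
            llm_usage=usage,
            inputs=node_inputs,
        )


if __name__ == "__main__":
    result = run_llm({"query": "hello"})
    print(result.error_type, result.llm_usage.total_tokens)  # TimeoutError 120
```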
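
Note on PATCH 2/3: `server_default=...` only affects the generated DDL, while `default=False` additionally makes the ORM fill the value client-side when the attribute is left unset on a new object. A self-contained sketch of that distinction, assuming SQLAlchemy 2.x and an in-memory SQLite database; `ExampleDataset` is a toy model, not Dify's `Dataset`.

```python
# Sketch of the column change in PATCH 2/3 (SQLAlchemy 2.x, in-memory SQLite).
# ExampleDataset is a toy model, not Dify's Dataset.
import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class ExampleDataset(Base):
    __tablename__ = "example_datasets"

    id: Mapped[int] = mapped_column(primary_key=True)
    # server_default puts "DEFAULT false" into the DDL (covers raw SQL inserts);
    # default=False also makes the ORM assign False client-side whenever the
    # attribute is left unset on a new object.
    is_multimodal: Mapped[bool] = mapped_column(
        sa.Boolean, default=False, nullable=False, server_default=sa.text("false")
    )


if __name__ == "__main__":
    engine = sa.create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        ds = ExampleDataset()    # is_multimodal never set explicitly
        session.add(ds)
        session.commit()
        print(ds.is_multimodal)  # False, supplied by the Python-side default
```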
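
Note on PATCH 3/3: switching `db.session.scalar(...)` to `session.scalar(...)` keeps the loaded `DocumentSegment` bound to the session that is still open where the result is used, which is the usual cure for SQLAlchemy's `DetachedInstanceError` ("Parent instance <...> is not bound to a Session"). Below is a generic sketch of that failure mode and the fix, assuming SQLAlchemy 2.x; `Parent` and `Child` are toy models unrelated to Dify's schema.

```python
# Generic reproduction of the detached-instance error addressed in PATCH 3/3.
# Parent/Child are toy models; the point is only that an object must be queried
# and lazy-loaded through the same, still-open session.
import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column, relationship
from sqlalchemy.orm.exc import DetachedInstanceError


class Base(DeclarativeBase):
    pass


class Parent(Base):
    __tablename__ = "parents"
    id: Mapped[int] = mapped_column(primary_key=True)
    children: Mapped[list["Child"]] = relationship(back_populates="parent")


class Child(Base):
    __tablename__ = "children"
    id: Mapped[int] = mapped_column(primary_key=True)
    parent_id: Mapped[int] = mapped_column(sa.ForeignKey("parents.id"))
    parent: Mapped["Parent"] = relationship(back_populates="children")


engine = sa.create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as setup:
    setup.add(Parent(id=1, children=[Child(id=1)]))
    setup.commit()

# Broken pattern: load through one session, touch a lazy relationship after
# that session has closed.
with Session(engine) as short_lived:
    parent = short_lived.scalar(sa.select(Parent).where(Parent.id == 1))
try:
    _ = parent.children  # raises "Parent instance <Parent ...> is not bound to a Session"
except DetachedInstanceError as e:
    print("broken:", e)

# Fixed pattern: query with the session that is still open where the result is
# used -- the same idea as the patch's session.scalar(document_segment_stmt).
with Session(engine) as session:
    parent = session.scalar(sa.select(Parent).where(Parent.id == 1))
    print("fixed:", parent.children)
```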