From f610f6895f3cd0fe3a1a47b68cb3136f4a250b21 Mon Sep 17 00:00:00 2001 From: Jyong <76649700+JohnJyong@users.noreply.github.com> Date: Fri, 26 Dec 2025 21:42:06 +0800 Subject: [PATCH 01/20] fix: retrieval test and knowledge retrieval node failed in multimodal mode (#30210) Co-authored-by: Stephen Zhou <38493346+hyoban@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- api/core/rag/datasource/retrieval_service.py | 169 ++++++++++--------- 1 file changed, 85 insertions(+), 84 deletions(-) diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py index 9807cb4e6a..43912cd75d 100644 --- a/api/core/rag/datasource/retrieval_service.py +++ b/api/core/rag/datasource/retrieval_service.py @@ -13,7 +13,7 @@ from core.model_runtime.entities.model_entities import ModelType from core.rag.data_post_processor.data_post_processor import DataPostProcessor from core.rag.datasource.keyword.keyword_factory import Keyword from core.rag.datasource.vdb.vector_factory import Vector -from core.rag.embedding.retrieval import RetrievalSegments +from core.rag.embedding.retrieval import RetrievalChildChunk, RetrievalSegments from core.rag.entities.metadata_entities import MetadataCondition from core.rag.index_processor.constant.doc_type import DocType from core.rag.index_processor.constant.index_type import IndexStructureType @@ -381,10 +381,9 @@ class RetrievalService: records = [] include_segment_ids = set() segment_child_map = {} - segment_file_map = {} valid_dataset_documents = {} - image_doc_ids = [] + image_doc_ids: list[Any] = [] child_index_node_ids = [] index_node_ids = [] doc_to_document_map = {} @@ -417,28 +416,39 @@ class RetrievalService: child_index_node_ids = [i for i in child_index_node_ids if i] index_node_ids = [i for i in index_node_ids if i] - segment_ids = [] + segment_ids: list[str] = [] index_node_segments: list[DocumentSegment] = [] segments: list[DocumentSegment] = [] - attachment_map = {} - child_chunk_map = {} - doc_segment_map = {} + attachment_map: dict[str, list[dict[str, Any]]] = {} + child_chunk_map: dict[str, list[ChildChunk]] = {} + doc_segment_map: dict[str, list[str]] = {} with session_factory.create_session() as session: attachments = cls.get_segment_attachment_infos(image_doc_ids, session) for attachment in attachments: segment_ids.append(attachment["segment_id"]) - attachment_map[attachment["segment_id"]] = attachment - doc_segment_map[attachment["segment_id"]] = attachment["attachment_id"] - + if attachment["segment_id"] in attachment_map: + attachment_map[attachment["segment_id"]].append(attachment["attachment_info"]) + else: + attachment_map[attachment["segment_id"]] = [attachment["attachment_info"]] + if attachment["segment_id"] in doc_segment_map: + doc_segment_map[attachment["segment_id"]].append(attachment["attachment_id"]) + else: + doc_segment_map[attachment["segment_id"]] = [attachment["attachment_id"]] child_chunk_stmt = select(ChildChunk).where(ChildChunk.index_node_id.in_(child_index_node_ids)) child_index_nodes = session.execute(child_chunk_stmt).scalars().all() for i in child_index_nodes: segment_ids.append(i.segment_id) - child_chunk_map[i.segment_id] = i - doc_segment_map[i.segment_id] = i.index_node_id + if i.segment_id in child_chunk_map: + child_chunk_map[i.segment_id].append(i) + else: + child_chunk_map[i.segment_id] = [i] + if i.segment_id in doc_segment_map: + doc_segment_map[i.segment_id].append(i.index_node_id) + else: + 
doc_segment_map[i.segment_id] = [i.index_node_id] if index_node_ids: document_segment_stmt = select(DocumentSegment).where( @@ -448,7 +458,7 @@ class RetrievalService: ) index_node_segments = session.execute(document_segment_stmt).scalars().all() # type: ignore for index_node_segment in index_node_segments: - doc_segment_map[index_node_segment.id] = index_node_segment.index_node_id + doc_segment_map[index_node_segment.id] = [index_node_segment.index_node_id] if segment_ids: document_segment_stmt = select(DocumentSegment).where( DocumentSegment.enabled == True, @@ -461,95 +471,86 @@ class RetrievalService: segments.extend(index_node_segments) for segment in segments: - doc_id = doc_segment_map.get(segment.id) - child_chunk = child_chunk_map.get(segment.id) - attachment_info = attachment_map.get(segment.id) + child_chunks: list[ChildChunk] = child_chunk_map.get(segment.id, []) + attachment_infos: list[dict[str, Any]] = attachment_map.get(segment.id, []) + ds_dataset_document: DatasetDocument | None = valid_dataset_documents.get(segment.document_id) - if doc_id: - document = doc_to_document_map[doc_id] - ds_dataset_document: DatasetDocument | None = valid_dataset_documents.get( - document.metadata.get("document_id") - ) - - if ds_dataset_document and ds_dataset_document.doc_form == IndexStructureType.PARENT_CHILD_INDEX: - if segment.id not in include_segment_ids: - include_segment_ids.add(segment.id) - if child_chunk: + if ds_dataset_document and ds_dataset_document.doc_form == IndexStructureType.PARENT_CHILD_INDEX: + if segment.id not in include_segment_ids: + include_segment_ids.add(segment.id) + if child_chunks or attachment_infos: + child_chunk_details = [] + max_score = 0.0 + for child_chunk in child_chunks: + document = doc_to_document_map[child_chunk.index_node_id] child_chunk_detail = { "id": child_chunk.id, "content": child_chunk.content, "position": child_chunk.position, "score": document.metadata.get("score", 0.0) if document else 0.0, } - map_detail = { - "max_score": document.metadata.get("score", 0.0) if document else 0.0, - "child_chunks": [child_chunk_detail], - } - segment_child_map[segment.id] = map_detail - record = { - "segment": segment, + child_chunk_details.append(child_chunk_detail) + max_score = max(max_score, document.metadata.get("score", 0.0) if document else 0.0) + for attachment_info in attachment_infos: + file_document = doc_to_document_map[attachment_info["id"]] + max_score = max( + max_score, file_document.metadata.get("score", 0.0) if file_document else 0.0 + ) + + map_detail = { + "max_score": max_score, + "child_chunks": child_chunk_details, } - if attachment_info: - segment_file_map[segment.id] = [attachment_info] - records.append(record) - else: - if child_chunk: - child_chunk_detail = { - "id": child_chunk.id, - "content": child_chunk.content, - "position": child_chunk.position, - "score": document.metadata.get("score", 0.0), - } - if segment.id in segment_child_map: - segment_child_map[segment.id]["child_chunks"].append(child_chunk_detail) # type: ignore - segment_child_map[segment.id]["max_score"] = max( - segment_child_map[segment.id]["max_score"], - document.metadata.get("score", 0.0) if document else 0.0, - ) - else: - segment_child_map[segment.id] = { - "max_score": document.metadata.get("score", 0.0) if document else 0.0, - "child_chunks": [child_chunk_detail], - } - if attachment_info: - if segment.id in segment_file_map: - segment_file_map[segment.id].append(attachment_info) - else: - segment_file_map[segment.id] = [attachment_info] - else: - if 
segment.id not in include_segment_ids: - include_segment_ids.add(segment.id) - record = { - "segment": segment, - "score": document.metadata.get("score", 0.0), # type: ignore - } - if attachment_info: - segment_file_map[segment.id] = [attachment_info] - records.append(record) - else: - if attachment_info: - attachment_infos = segment_file_map.get(segment.id, []) - if attachment_info not in attachment_infos: - attachment_infos.append(attachment_info) - segment_file_map[segment.id] = attachment_infos + segment_child_map[segment.id] = map_detail + record: dict[str, Any] = { + "segment": segment, + } + records.append(record) + else: + if segment.id not in include_segment_ids: + include_segment_ids.add(segment.id) + max_score = 0.0 + segment_document = doc_to_document_map.get(segment.index_node_id) + if segment_document: + max_score = max(max_score, segment_document.metadata.get("score", 0.0)) + for attachment_info in attachment_infos: + file_doc = doc_to_document_map.get(attachment_info["id"]) + if file_doc: + max_score = max(max_score, file_doc.metadata.get("score", 0.0)) + record = { + "segment": segment, + "score": max_score, + } + records.append(record) # Add child chunks information to records for record in records: if record["segment"].id in segment_child_map: record["child_chunks"] = segment_child_map[record["segment"].id].get("child_chunks") # type: ignore record["score"] = segment_child_map[record["segment"].id]["max_score"] # type: ignore - if record["segment"].id in segment_file_map: - record["files"] = segment_file_map[record["segment"].id] # type: ignore[assignment] + if record["segment"].id in attachment_map: + record["files"] = attachment_map[record["segment"].id] # type: ignore[assignment] - result = [] + result: list[RetrievalSegments] = [] for record in records: # Extract segment segment = record["segment"] # Extract child_chunks, ensuring it's a list or None - child_chunks = record.get("child_chunks") - if not isinstance(child_chunks, list): - child_chunks = None + raw_child_chunks = record.get("child_chunks") + child_chunks_list: list[RetrievalChildChunk] | None = None + if isinstance(raw_child_chunks, list): + # Sort by score descending + sorted_chunks = sorted(raw_child_chunks, key=lambda x: x.get("score", 0.0), reverse=True) + child_chunks_list = [ + RetrievalChildChunk( + id=chunk["id"], + content=chunk["content"], + score=chunk.get("score", 0.0), + position=chunk["position"], + ) + for chunk in sorted_chunks + ] # Extract files, ensuring it's a list or None files = record.get("files") @@ -566,11 +567,11 @@ class RetrievalService: # Create RetrievalSegments object retrieval_segment = RetrievalSegments( - segment=segment, child_chunks=child_chunks, score=score, files=files + segment=segment, child_chunks=child_chunks_list, score=score, files=files ) result.append(retrieval_segment) - return result + return sorted(result, key=lambda x: x.score if x.score is not None else 0.0, reverse=True) except Exception as e: db.session.rollback() raise e From c393d7a2dc28875e9dad8daec9ddb6b1a055d966 Mon Sep 17 00:00:00 2001 From: Shemol Date: Sat, 27 Dec 2025 10:07:10 +0800 Subject: [PATCH 02/20] test(web): add unit tests for Avatar component (#30201) --- web/app/components/base/avatar/index.spec.tsx | 308 ++++++++++++++++++ 1 file changed, 308 insertions(+) create mode 100644 web/app/components/base/avatar/index.spec.tsx diff --git a/web/app/components/base/avatar/index.spec.tsx b/web/app/components/base/avatar/index.spec.tsx new file mode 100644 index 0000000000..e85690880b --- 
/dev/null +++ b/web/app/components/base/avatar/index.spec.tsx @@ -0,0 +1,308 @@ +import { fireEvent, render, screen, waitFor } from '@testing-library/react' +import Avatar from './index' + +describe('Avatar', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + // Rendering tests - verify component renders correctly in different states + describe('Rendering', () => { + it('should render img element with correct alt and src when avatar URL is provided', () => { + const avatarUrl = 'https://example.com/avatar.jpg' + const props = { name: 'John Doe', avatar: avatarUrl } + + render() + + const img = screen.getByRole('img', { name: 'John Doe' }) + expect(img).toBeInTheDocument() + expect(img).toHaveAttribute('src', avatarUrl) + }) + + it('should render fallback div with uppercase initial when avatar is null', () => { + const props = { name: 'alice', avatar: null } + + render() + + expect(screen.queryByRole('img')).not.toBeInTheDocument() + expect(screen.getByText('A')).toBeInTheDocument() + }) + }) + + // Props tests - verify all props are applied correctly + describe('Props', () => { + describe('size prop', () => { + it.each([ + { size: undefined, expected: '30px', label: 'default (30px)' }, + { size: 50, expected: '50px', label: 'custom (50px)' }, + ])('should apply $label size to img element', ({ size, expected }) => { + const props = { name: 'Test', avatar: 'https://example.com/avatar.jpg', size } + + render() + + expect(screen.getByRole('img')).toHaveStyle({ + width: expected, + height: expected, + fontSize: expected, + lineHeight: expected, + }) + }) + + it('should apply size to fallback div when avatar is null', () => { + const props = { name: 'Test', avatar: null, size: 40 } + + render() + + const textElement = screen.getByText('T') + const outerDiv = textElement.parentElement as HTMLElement + expect(outerDiv).toHaveStyle({ width: '40px', height: '40px' }) + }) + }) + + describe('className prop', () => { + it('should merge className with default avatar classes on img', () => { + const props = { + name: 'Test', + avatar: 'https://example.com/avatar.jpg', + className: 'custom-class', + } + + render() + + const img = screen.getByRole('img') + expect(img).toHaveClass('custom-class') + expect(img).toHaveClass('shrink-0', 'flex', 'items-center', 'rounded-full', 'bg-primary-600') + }) + + it('should merge className with default avatar classes on fallback div', () => { + const props = { + name: 'Test', + avatar: null, + className: 'my-custom-class', + } + + render() + + const textElement = screen.getByText('T') + const outerDiv = textElement.parentElement as HTMLElement + expect(outerDiv).toHaveClass('my-custom-class') + expect(outerDiv).toHaveClass('shrink-0', 'flex', 'items-center', 'rounded-full', 'bg-primary-600') + }) + }) + + describe('textClassName prop', () => { + it('should apply textClassName to the initial text element', () => { + const props = { + name: 'Test', + avatar: null, + textClassName: 'custom-text-class', + } + + render() + + const textElement = screen.getByText('T') + expect(textElement).toHaveClass('custom-text-class') + expect(textElement).toHaveClass('scale-[0.4]', 'text-center', 'text-white') + }) + }) + }) + + // State Management tests - verify useState and useEffect behavior + describe('State Management', () => { + it('should switch to fallback when image fails to load', async () => { + const props = { name: 'John', avatar: 'https://example.com/broken.jpg' } + render() + const img = screen.getByRole('img') + + fireEvent.error(img) + + await waitFor(() => { + 
expect(screen.queryByRole('img')).not.toBeInTheDocument() + }) + expect(screen.getByText('J')).toBeInTheDocument() + }) + + it('should reset error state when avatar URL changes', async () => { + const initialProps = { name: 'John', avatar: 'https://example.com/broken.jpg' } + const { rerender } = render() + const img = screen.getByRole('img') + + // First, trigger error + fireEvent.error(img) + await waitFor(() => { + expect(screen.queryByRole('img')).not.toBeInTheDocument() + }) + expect(screen.getByText('J')).toBeInTheDocument() + + rerender() + + await waitFor(() => { + expect(screen.getByRole('img')).toBeInTheDocument() + }) + expect(screen.queryByText('J')).not.toBeInTheDocument() + }) + + it('should not reset error state if avatar becomes null', async () => { + const initialProps = { name: 'John', avatar: 'https://example.com/broken.jpg' } + const { rerender } = render() + + // Trigger error + fireEvent.error(screen.getByRole('img')) + await waitFor(() => { + expect(screen.getByText('J')).toBeInTheDocument() + }) + + rerender() + + await waitFor(() => { + expect(screen.queryByRole('img')).not.toBeInTheDocument() + }) + expect(screen.getByText('J')).toBeInTheDocument() + }) + }) + + // Event Handlers tests - verify onError callback behavior + describe('Event Handlers', () => { + it('should call onError with true when image fails to load', () => { + const onErrorMock = vi.fn() + const props = { + name: 'John', + avatar: 'https://example.com/broken.jpg', + onError: onErrorMock, + } + render() + + fireEvent.error(screen.getByRole('img')) + + expect(onErrorMock).toHaveBeenCalledTimes(1) + expect(onErrorMock).toHaveBeenCalledWith(true) + }) + + it('should call onError with false when image loads successfully', () => { + const onErrorMock = vi.fn() + const props = { + name: 'John', + avatar: 'https://example.com/avatar.jpg', + onError: onErrorMock, + } + render() + + fireEvent.load(screen.getByRole('img')) + + expect(onErrorMock).toHaveBeenCalledTimes(1) + expect(onErrorMock).toHaveBeenCalledWith(false) + }) + + it('should not throw when onError is not provided', async () => { + const props = { name: 'John', avatar: 'https://example.com/broken.jpg' } + render() + + expect(() => fireEvent.error(screen.getByRole('img'))).not.toThrow() + await waitFor(() => { + expect(screen.getByText('J')).toBeInTheDocument() + }) + }) + }) + + // Edge Cases tests - verify handling of unusual inputs + describe('Edge Cases', () => { + it('should handle empty string name gracefully', () => { + const props = { name: '', avatar: null } + + const { container } = render() + + // Note: Using querySelector here because empty name produces no visible text, + // making semantic queries (getByRole, getByText) impossible + const textElement = container.querySelector('.text-white') as HTMLElement + expect(textElement).toBeInTheDocument() + expect(textElement.textContent).toBe('') + }) + + it.each([ + { name: '中文名', expected: '中', label: 'Chinese characters' }, + { name: '123User', expected: '1', label: 'number' }, + ])('should display first character when name starts with $label', ({ name, expected }) => { + const props = { name, avatar: null } + + render() + + expect(screen.getByText(expected)).toBeInTheDocument() + }) + + it('should handle empty string avatar as falsy value', () => { + const props = { name: 'Test', avatar: '' as string | null } + + render() + + expect(screen.queryByRole('img')).not.toBeInTheDocument() + expect(screen.getByText('T')).toBeInTheDocument() + }) + + it('should handle undefined className and 
textClassName', () => { + const props = { name: 'Test', avatar: null } + + render() + + const textElement = screen.getByText('T') + const outerDiv = textElement.parentElement as HTMLElement + expect(outerDiv).toHaveClass('shrink-0', 'flex', 'items-center', 'rounded-full', 'bg-primary-600') + }) + + it.each([ + { size: 0, expected: '0px', label: 'zero' }, + { size: 1000, expected: '1000px', label: 'very large' }, + ])('should handle $label size value', ({ size, expected }) => { + const props = { name: 'Test', avatar: null, size } + + render() + + const textElement = screen.getByText('T') + const outerDiv = textElement.parentElement as HTMLElement + expect(outerDiv).toHaveStyle({ width: expected, height: expected }) + }) + }) + + // Combined props tests - verify props work together correctly + describe('Combined Props', () => { + it('should apply all props correctly when used together', () => { + const onErrorMock = vi.fn() + const props = { + name: 'Test User', + avatar: 'https://example.com/avatar.jpg', + size: 64, + className: 'custom-avatar', + onError: onErrorMock, + } + + render() + + const img = screen.getByRole('img') + expect(img).toHaveAttribute('alt', 'Test User') + expect(img).toHaveAttribute('src', 'https://example.com/avatar.jpg') + expect(img).toHaveStyle({ width: '64px', height: '64px' }) + expect(img).toHaveClass('custom-avatar') + + // Trigger load to verify onError callback + fireEvent.load(img) + expect(onErrorMock).toHaveBeenCalledWith(false) + }) + + it('should apply all fallback props correctly when used together', () => { + const props = { + name: 'Fallback User', + avatar: null, + size: 48, + className: 'fallback-custom', + textClassName: 'custom-text-style', + } + + render() + + const textElement = screen.getByText('F') + const outerDiv = textElement.parentElement as HTMLElement + expect(outerDiv).toHaveClass('fallback-custom') + expect(outerDiv).toHaveStyle({ width: '48px', height: '48px' }) + expect(textElement).toHaveClass('custom-text-style') + }) + }) +}) From b85564cae573011fb815fdc39c23f905dd009ab8 Mon Sep 17 00:00:00 2001 From: Wu Tianwei <30284043+WTW0313@users.noreply.github.com> Date: Sun, 28 Dec 2025 02:00:30 +0800 Subject: [PATCH 03/20] fix: remove unused CSS styles and fix HitTestingPage layout (#30235) --- .../components/datasets/hit-testing/index.tsx | 7 ++- .../datasets/hit-testing/style.module.css | 43 ------------------- 2 files changed, 3 insertions(+), 47 deletions(-) delete mode 100644 web/app/components/datasets/hit-testing/style.module.css diff --git a/web/app/components/datasets/hit-testing/index.tsx b/web/app/components/datasets/hit-testing/index.tsx index e75ef48abf..d810442704 100644 --- a/web/app/components/datasets/hit-testing/index.tsx +++ b/web/app/components/datasets/hit-testing/index.tsx @@ -34,7 +34,6 @@ import Records from './components/records' import ResultItem from './components/result-item' import ResultItemExternal from './components/result-item-external' import ModifyRetrievalModal from './modify-retrieval-modal' -import s from './style.module.css' const limit = 10 @@ -115,8 +114,8 @@ const HitTestingPage: FC = ({ datasetId }: Props) => { }, [isMobile, setShowRightPanel]) return ( -
-
+
+

{t('datasetHitTesting.title')}

{t('datasetHitTesting.desc')}

@@ -161,7 +160,7 @@ const HitTestingPage: FC = ({ datasetId }: Props) => {
         onClose={hideRightPanel}
         footer={null}
       >
-
+
{isRetrievalLoading ? (
diff --git a/web/app/components/datasets/hit-testing/style.module.css b/web/app/components/datasets/hit-testing/style.module.css deleted file mode 100644 index a421962f48..0000000000 --- a/web/app/components/datasets/hit-testing/style.module.css +++ /dev/null @@ -1,43 +0,0 @@ -.container { - @apply flex h-full w-full relative overflow-y-auto; -} - -.container>div { - @apply flex-1 h-full; -} - -.commonIcon { - @apply w-3.5 h-3.5 inline-block align-middle; - background-repeat: no-repeat; - background-position: center center; - background-size: contain; -} - -.app_icon { - background-image: url(./assets/grid.svg); -} - -.hit_testing_icon { - background-image: url(../documents/assets/target.svg); -} - -.plugin_icon { - background-image: url(./assets/plugin.svg); -} - -.cardWrapper { - display: grid; - grid-template-columns: repeat(auto-fill, minmax(284px, auto)); - grid-gap: 16px; - grid-auto-rows: 216px; -} - -.clockWrapper { - border: 0.5px solid #eaecf5; - @apply rounded-lg w-11 h-11 flex justify-center items-center; -} - -.clockIcon { - mask-image: url(./assets/clock.svg); - @apply bg-gray-500; -} From b067ad2f0a4f1aa8e5bedc1fbf12b2b7f1e5af4d Mon Sep 17 00:00:00 2001 From: Sara Rasool <83841462+sarxxt@users.noreply.github.com> Date: Sat, 27 Dec 2025 10:01:57 -0800 Subject: [PATCH 04/20] chore(web): remove unused dev-preview page (#30226) Co-authored-by: Dev --- web/app/dev-preview/page.tsx | 68 ------------------------------------ 1 file changed, 68 deletions(-) delete mode 100644 web/app/dev-preview/page.tsx diff --git a/web/app/dev-preview/page.tsx b/web/app/dev-preview/page.tsx deleted file mode 100644 index 90090acdd0..0000000000 --- a/web/app/dev-preview/page.tsx +++ /dev/null @@ -1,68 +0,0 @@ -'use client' -import BaseForm from '../components/base/form/form-scenarios/base' -import { BaseFieldType } from '../components/base/form/form-scenarios/base/types' - -export default function Page() { - return ( -
-
- { - console.log('onSubmit', value) - }} - /> -
-
- ) -} From d8010a7fbce0e627685c985bd24d1220a9b560a1 Mon Sep 17 00:00:00 2001 From: Novice Date: Sun, 28 Dec 2025 02:02:46 +0800 Subject: [PATCH 05/20] fix: Add JSON RPC request type guard (#30216) --- api/core/mcp/client/streamable_client.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/api/core/mcp/client/streamable_client.py b/api/core/mcp/client/streamable_client.py index f81e7cead8..5c3cd0d8f8 100644 --- a/api/core/mcp/client/streamable_client.py +++ b/api/core/mcp/client/streamable_client.py @@ -313,17 +313,20 @@ class StreamableHTTPTransport: if is_initialization: self._maybe_extract_session_id_from_response(response) - content_type = cast(str, response.headers.get(CONTENT_TYPE, "").lower()) + # Per https://modelcontextprotocol.io/specification/2025-06-18/basic#notifications: + # The server MUST NOT send a response to notifications. + if isinstance(message.root, JSONRPCRequest): + content_type = cast(str, response.headers.get(CONTENT_TYPE, "").lower()) - if content_type.startswith(JSON): - self._handle_json_response(response, ctx.server_to_client_queue) - elif content_type.startswith(SSE): - self._handle_sse_response(response, ctx) - else: - self._handle_unexpected_content_type( - content_type, - ctx.server_to_client_queue, - ) + if content_type.startswith(JSON): + self._handle_json_response(response, ctx.server_to_client_queue) + elif content_type.startswith(SSE): + self._handle_sse_response(response, ctx) + else: + self._handle_unexpected_content_type( + content_type, + ctx.server_to_client_queue, + ) def _handle_json_response( self, From 2b01f85d61fe9966e176ab2ec0fa22a54dc3c044 Mon Sep 17 00:00:00 2001 From: Maries Date: Sun, 28 Dec 2025 02:03:42 +0800 Subject: [PATCH 06/20] fix: consolidate duplicate InvokeRateLimitError definitions (#30229) Co-authored-by: Claude Opus 4.5 --- api/services/app_generate_service.py | 3 ++- api/services/async_workflow_service.py | 4 ++-- api/services/errors/app.py | 4 ++-- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/api/services/app_generate_service.py b/api/services/app_generate_service.py index 4514c86f7c..cc58899dc4 100644 --- a/api/services/app_generate_service.py +++ b/api/services/app_generate_service.py @@ -14,7 +14,8 @@ from enums.quota_type import QuotaType, unlimited from extensions.otel import AppGenerateHandler, trace_span from models.model import Account, App, AppMode, EndUser from models.workflow import Workflow -from services.errors.app import InvokeRateLimitError, QuotaExceededError, WorkflowIdFormatError, WorkflowNotFoundError +from services.errors.app import QuotaExceededError, WorkflowIdFormatError, WorkflowNotFoundError +from services.errors.llm import InvokeRateLimitError from services.workflow_service import WorkflowService diff --git a/api/services/async_workflow_service.py b/api/services/async_workflow_service.py index e100582511..bc73b7c8c2 100644 --- a/api/services/async_workflow_service.py +++ b/api/services/async_workflow_service.py @@ -21,7 +21,7 @@ from models.model import App, EndUser from models.trigger import WorkflowTriggerLog from models.workflow import Workflow from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository -from services.errors.app import InvokeRateLimitError, QuotaExceededError, WorkflowNotFoundError +from services.errors.app import QuotaExceededError, WorkflowNotFoundError, WorkflowQuotaLimitError from services.workflow.entities import AsyncTriggerResponse, TriggerData, WorkflowTaskData 
from services.workflow.queue_dispatcher import QueueDispatcherManager, QueuePriority from services.workflow_service import WorkflowService @@ -141,7 +141,7 @@ class AsyncWorkflowService: trigger_log_repo.update(trigger_log) session.commit() - raise InvokeRateLimitError( + raise WorkflowQuotaLimitError( f"Workflow execution quota limit reached for tenant {trigger_data.tenant_id}" ) from e diff --git a/api/services/errors/app.py b/api/services/errors/app.py index 24e4760acc..60e59e97dc 100644 --- a/api/services/errors/app.py +++ b/api/services/errors/app.py @@ -18,8 +18,8 @@ class WorkflowIdFormatError(Exception): pass -class InvokeRateLimitError(Exception): - """Raised when rate limit is exceeded for workflow invocations.""" +class WorkflowQuotaLimitError(Exception): + """Raised when workflow execution quota is exceeded (for async/background workflows).""" pass From 1f2c85c916a8cfaf4c0bbb4192e0fe71fafe557a Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Sun, 28 Dec 2025 13:47:54 +0800 Subject: [PATCH 07/20] fix: wrong usage of redis lock (#28177) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Maries Co-authored-by: 非法操作 --- api/services/trigger/webhook_service.py | 31 ++++++++++++++++++------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/api/services/trigger/webhook_service.py b/api/services/trigger/webhook_service.py index 5c4607d400..4159f5f8f4 100644 --- a/api/services/trigger/webhook_service.py +++ b/api/services/trigger/webhook_service.py @@ -863,10 +863,18 @@ class WebhookService: not_found_in_cache.append(node_id) continue - with Session(db.engine) as session: - try: - # lock the concurrent webhook trigger creation - redis_client.lock(f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:apps:{app.id}:lock", timeout=10) + lock_key = f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:apps:{app.id}:lock" + lock = redis_client.lock(lock_key, timeout=10) + lock_acquired = False + + try: + # acquire the lock with blocking and timeout + lock_acquired = lock.acquire(blocking=True, blocking_timeout=10) + if not lock_acquired: + logger.warning("Failed to acquire lock for webhook sync, app %s", app.id) + raise RuntimeError("Failed to acquire lock for webhook trigger synchronization") + + with Session(db.engine) as session: # fetch the non-cached nodes from DB all_records = session.scalars( select(WorkflowWebhookTrigger).where( @@ -903,11 +911,16 @@ class WebhookService: session.delete(nodes_id_in_db[node_id]) redis_client.delete(f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:{app.id}:{node_id}") session.commit() - except Exception: - logger.exception("Failed to sync webhook relationships for app %s", app.id) - raise - finally: - redis_client.delete(f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:apps:{app.id}:lock") + except Exception: + logger.exception("Failed to sync webhook relationships for app %s", app.id) + raise + finally: + # release the lock only if it was acquired + if lock_acquired: + try: + lock.release() + except Exception: + logger.exception("Failed to release lock for webhook sync, app %s", app.id) @classmethod def generate_webhook_id(cls) -> str: From 543ce38a6c5d6223f29956c3bfefe4565157965e Mon Sep 17 00:00:00 2001 From: yyh <92089059+lyzno1@users.noreply.github.com> Date: Sun, 28 Dec 2025 17:48:55 +0800 Subject: [PATCH 08/20] chore(claude-code): 
migrate from legacy MCP configuration to official plugin system (#30265) --- .claude/settings.json | 8 ++++++++ .claude/settings.json.example | 19 ------------------- .mcp.json | 34 ---------------------------------- 3 files changed, 8 insertions(+), 53 deletions(-) create mode 100644 .claude/settings.json delete mode 100644 .claude/settings.json.example delete mode 100644 .mcp.json diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 0000000000..7d42234cae --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,8 @@ +{ + "enabledPlugins": { + "feature-dev@claude-plugins-official": true, + "context7@claude-plugins-official": true, + "typescript-lsp@claude-plugins-official": true, + "pyright-lsp@claude-plugins-official": true + } +} diff --git a/.claude/settings.json.example b/.claude/settings.json.example deleted file mode 100644 index 1149895340..0000000000 --- a/.claude/settings.json.example +++ /dev/null @@ -1,19 +0,0 @@ -{ - "permissions": { - "allow": [], - "deny": [] - }, - "env": { - "__comment": "Environment variables for MCP servers. Override in .claude/settings.local.json with actual values.", - "GITHUB_PERSONAL_ACCESS_TOKEN": "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" - }, - "enabledMcpjsonServers": [ - "context7", - "sequential-thinking", - "github", - "fetch", - "playwright", - "ide" - ], - "enableAllProjectMcpServers": true - } \ No newline at end of file diff --git a/.mcp.json b/.mcp.json deleted file mode 100644 index 8eceaf9ead..0000000000 --- a/.mcp.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "mcpServers": { - "context7": { - "type": "http", - "url": "https://mcp.context7.com/mcp" - }, - "sequential-thinking": { - "type": "stdio", - "command": "npx", - "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"], - "env": {} - }, - "github": { - "type": "stdio", - "command": "npx", - "args": ["-y", "@modelcontextprotocol/server-github"], - "env": { - "GITHUB_PERSONAL_ACCESS_TOKEN": "${GITHUB_PERSONAL_ACCESS_TOKEN}" - } - }, - "fetch": { - "type": "stdio", - "command": "uvx", - "args": ["mcp-server-fetch"], - "env": {} - }, - "playwright": { - "type": "stdio", - "command": "npx", - "args": ["-y", "@playwright/mcp@latest"], - "env": {} - } - } - } \ No newline at end of file From bf56c2e9dbdff0426cdfd6c1574ecdc2aef307b6 Mon Sep 17 00:00:00 2001 From: wangxiaolei Date: Sun, 28 Dec 2025 17:50:30 +0800 Subject: [PATCH 09/20] fix: fix custom tool content is not update (#30250) --- .../edit-custom-collection-modal/index.tsx | 11 ++++++- web/app/components/tools/provider/detail.tsx | 33 +++++++++++-------- .../header/version-history-button.tsx | 2 +- 3 files changed, 30 insertions(+), 16 deletions(-) diff --git a/web/app/components/tools/edit-custom-collection-modal/index.tsx b/web/app/components/tools/edit-custom-collection-modal/index.tsx index 474c262010..a468af7257 100644 --- a/web/app/components/tools/edit-custom-collection-modal/index.tsx +++ b/web/app/components/tools/edit-custom-collection-modal/index.tsx @@ -48,6 +48,7 @@ const EditCustomCollectionModal: FC = ({ const [editFirst, setEditFirst] = useState(!isAdd) const [paramsSchemas, setParamsSchemas] = useState(payload?.tools || []) + const [labels, setLabels] = useState(payload?.labels || []) const [customCollection, setCustomCollection, getCustomCollection] = useGetState(isAdd ? { provider: '', @@ -67,6 +68,15 @@ const EditCustomCollectionModal: FC = ({ const originalProvider = isEdit ? 
payload.provider : '' + // Sync customCollection state when payload changes + useEffect(() => { + if (isEdit) { + setCustomCollection(payload) + setParamsSchemas(payload.tools || []) + setLabels(payload.labels || []) + } + }, [isEdit, payload]) + const [showEmojiPicker, setShowEmojiPicker] = useState(false) const emoji = customCollection.icon const setEmoji = (emoji: Emoji) => { @@ -124,7 +134,6 @@ const EditCustomCollectionModal: FC = ({ const [currTool, setCurrTool] = useState(null) const [isShowTestApi, setIsShowTestApi] = useState(false) - const [labels, setLabels] = useState(payload?.labels || []) const handleLabelSelect = (value: string[]) => { setLabels(value) } diff --git a/web/app/components/tools/provider/detail.tsx b/web/app/components/tools/provider/detail.tsx index c4b65f353d..e0a2281696 100644 --- a/web/app/components/tools/provider/detail.tsx +++ b/web/app/components/tools/provider/detail.tsx @@ -100,9 +100,28 @@ const ProviderDetail = ({ const [isShowEditCollectionToolModal, setIsShowEditCustomCollectionModal] = useState(false) const [showConfirmDelete, setShowConfirmDelete] = useState(false) const [deleteAction, setDeleteAction] = useState('') + + const getCustomProvider = useCallback(async () => { + setIsDetailLoading(true) + const res = await fetchCustomCollection(collection.name) + if (res.credentials.auth_type === AuthType.apiKey && !res.credentials.api_key_header_prefix) { + if (res.credentials.api_key_value) + res.credentials.api_key_header_prefix = AuthHeaderPrefix.custom + } + setCustomCollection({ + ...res, + labels: collection.labels, + provider: collection.name, + }) + setIsDetailLoading(false) + }, [collection.labels, collection.name]) + const doUpdateCustomToolCollection = async (data: CustomCollectionBackend) => { await updateCustomCollection(data) onRefreshData() + await getCustomProvider() + // Use fresh data from form submission to avoid race condition with collection.labels + setCustomCollection(prev => prev ? { ...prev, labels: data.labels } : null) Toast.notify({ type: 'success', message: t('common.api.actionSuccess'), @@ -118,20 +137,6 @@ const ProviderDetail = ({ }) setIsShowEditCustomCollectionModal(false) } - const getCustomProvider = useCallback(async () => { - setIsDetailLoading(true) - const res = await fetchCustomCollection(collection.name) - if (res.credentials.auth_type === AuthType.apiKey && !res.credentials.api_key_header_prefix) { - if (res.credentials.api_key_value) - res.credentials.api_key_header_prefix = AuthHeaderPrefix.custom - } - setCustomCollection({ - ...res, - labels: collection.labels, - provider: collection.name, - }) - setIsDetailLoading(false) - }, [collection.labels, collection.name]) // workflow provider const [isShowEditWorkflowToolModal, setIsShowEditWorkflowToolModal] = useState(false) const getWorkflowToolProvider = useCallback(async () => { diff --git a/web/app/components/workflow/header/version-history-button.tsx b/web/app/components/workflow/header/version-history-button.tsx index ae3bd68b48..9ec9e6934e 100644 --- a/web/app/components/workflow/header/version-history-button.tsx +++ b/web/app/components/workflow/header/version-history-button.tsx @@ -61,7 +61,7 @@ const VersionHistoryButton: FC = ({ > )} + {onBatchReIndex && ( + + )}