From f7f9a08fa508e9244dca38e5c709e0cd9092afcc Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Thu, 15 Jan 2026 11:07:02 +0900 Subject: [PATCH 01/25] refactor: port TidbAuthBinding( (#31006) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- api/models/dataset.py | 14 +++++++++++--- api/schedule/create_tidb_serverless_task.py | 3 +++ 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/api/models/dataset.py b/api/models/dataset.py index 445ac6086f..62f11b8c72 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -1149,7 +1149,7 @@ class DatasetCollectionBinding(TypeBase): ) -class TidbAuthBinding(Base): +class TidbAuthBinding(TypeBase): __tablename__ = "tidb_auth_bindings" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="tidb_auth_bindings_pkey"), @@ -1158,7 +1158,13 @@ class TidbAuthBinding(Base): sa.Index("tidb_auth_bindings_created_at_idx", "created_at"), sa.Index("tidb_auth_bindings_status_idx", "status"), ) - id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4())) + id: Mapped[str] = mapped_column( + StringUUID, + primary_key=True, + insert_default=lambda: str(uuid4()), + default_factory=lambda: str(uuid4()), + init=False, + ) tenant_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) cluster_id: Mapped[str] = mapped_column(String(255), nullable=False) cluster_name: Mapped[str] = mapped_column(String(255), nullable=False) @@ -1166,7 +1172,9 @@ class TidbAuthBinding(Base): status: Mapped[str] = mapped_column(sa.String(255), nullable=False, server_default=sa.text("'CREATING'")) account: Mapped[str] = mapped_column(String(255), nullable=False) password: Mapped[str] = mapped_column(String(255), nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) class 
Whitelist(TypeBase): diff --git a/api/schedule/create_tidb_serverless_task.py b/api/schedule/create_tidb_serverless_task.py index c343063fae..ed46c1c70a 100644 --- a/api/schedule/create_tidb_serverless_task.py +++ b/api/schedule/create_tidb_serverless_task.py @@ -50,10 +50,13 @@ def create_clusters(batch_size): ) for new_cluster in new_clusters: tidb_auth_binding = TidbAuthBinding( + tenant_id=None, cluster_id=new_cluster["cluster_id"], cluster_name=new_cluster["cluster_name"], account=new_cluster["account"], password=new_cluster["password"], + active=False, + status="CREATING", ) db.session.add(tidb_auth_binding) db.session.commit() From c6999fb5beccc4b0839de38bbec4c2da2292d476 Mon Sep 17 00:00:00 2001 From: wangxiaolei Date: Thu, 15 Jan 2026 10:09:57 +0800 Subject: [PATCH 02/25] fix: fix plugin edit endpoint app disappear (#30951) --- .../app-selector/app-picker.tsx | 9 ++++- .../app-selector/index.tsx | 34 ++++++++++++++----- 2 files changed, 34 insertions(+), 9 deletions(-) diff --git a/web/app/components/plugins/plugin-detail-panel/app-selector/app-picker.tsx b/web/app/components/plugins/plugin-detail-panel/app-selector/app-picker.tsx index 2bd196b14d..c32e959652 100644 --- a/web/app/components/plugins/plugin-detail-panel/app-selector/app-picker.tsx +++ b/web/app/components/plugins/plugin-detail-panel/app-selector/app-picker.tsx @@ -180,7 +180,14 @@ const AppPicker: FC = ({ background={app.icon_background} imageUrl={app.icon_url} /> -
{app.name}
+
+ {app.name} + + ( + {app.id.slice(0, 8)} + ) + +
{getAppType(app)}
))} diff --git a/web/app/components/plugins/plugin-detail-panel/app-selector/index.tsx b/web/app/components/plugins/plugin-detail-panel/app-selector/index.tsx index c5fdfe17da..40b0ba9205 100644 --- a/web/app/components/plugins/plugin-detail-panel/app-selector/index.tsx +++ b/web/app/components/plugins/plugin-detail-panel/app-selector/index.tsx @@ -16,7 +16,7 @@ import { import AppInputsPanel from '@/app/components/plugins/plugin-detail-panel/app-selector/app-inputs-panel' import AppPicker from '@/app/components/plugins/plugin-detail-panel/app-selector/app-picker' import AppTrigger from '@/app/components/plugins/plugin-detail-panel/app-selector/app-trigger' -import { useInfiniteAppList } from '@/service/use-apps' +import { useAppDetail, useInfiniteAppList } from '@/service/use-apps' const PAGE_SIZE = 20 @@ -70,6 +70,30 @@ const AppSelector: FC = ({ return pages.flatMap(({ data: apps }) => apps) }, [pages]) + // fetch selected app by id to avoid pagination gaps + const { data: selectedAppDetail } = useAppDetail(value?.app_id || '') + + // Ensure the currently selected app is available for display and in the picker options + const currentAppInfo = useMemo(() => { + if (!value?.app_id) + return undefined + return selectedAppDetail || displayedApps.find(app => app.id === value.app_id) + }, [value?.app_id, selectedAppDetail, displayedApps]) + + const appsForPicker = useMemo(() => { + if (!currentAppInfo) + return displayedApps + + const appIndex = displayedApps.findIndex(a => a.id === currentAppInfo.id) + + if (appIndex === -1) + return [currentAppInfo, ...displayedApps] + + const updatedApps = [...displayedApps] + updatedApps[appIndex] = currentAppInfo + return updatedApps + }, [currentAppInfo, displayedApps]) + const hasMore = hasNextPage ?? 
true const handleLoadMore = useCallback(async () => { @@ -127,12 +151,6 @@ const AppSelector: FC = ({ } }, [value]) - const currentAppInfo = useMemo(() => { - if (!displayedApps || !value) - return undefined - return displayedApps.find(app => app.id === value.app_id) - }, [displayedApps, value]) - return ( <> = ({ disabled={false} onSelect={handleSelectApp} scope={scope || 'all'} - apps={displayedApps} + apps={appsForPicker} isLoading={isLoading || isLoadingMore || isFetchingNextPage} hasMore={hasMore} onLoadMore={handleLoadMore} From 98c88cec34e32d86563251e82f38639a8df299c7 Mon Sep 17 00:00:00 2001 From: wangxiaolei Date: Thu, 15 Jan 2026 10:10:10 +0800 Subject: [PATCH 03/25] refactor: delete_endpoint should be idempotent (#30954) --- api/core/plugin/impl/endpoint.py | 32 +- .../core/plugin/test_endpoint_client.py | 279 ++++++++++++++++++ 2 files changed, 300 insertions(+), 11 deletions(-) create mode 100644 api/tests/unit_tests/core/plugin/test_endpoint_client.py diff --git a/api/core/plugin/impl/endpoint.py b/api/core/plugin/impl/endpoint.py index 5b88742be5..2db5185a2c 100644 --- a/api/core/plugin/impl/endpoint.py +++ b/api/core/plugin/impl/endpoint.py @@ -1,5 +1,6 @@ from core.plugin.entities.endpoint import EndpointEntityWithInstance from core.plugin.impl.base import BasePluginClient +from core.plugin.impl.exc import PluginDaemonInternalServerError class PluginEndpointClient(BasePluginClient): @@ -70,18 +71,27 @@ class PluginEndpointClient(BasePluginClient): def delete_endpoint(self, tenant_id: str, user_id: str, endpoint_id: str): """ Delete the given endpoint. + + This operation is idempotent: if the endpoint is already deleted (record not found), + it will return True instead of raising an error. 
""" - return self._request_with_plugin_daemon_response( - "POST", - f"plugin/{tenant_id}/endpoint/remove", - bool, - data={ - "endpoint_id": endpoint_id, - }, - headers={ - "Content-Type": "application/json", - }, - ) + try: + return self._request_with_plugin_daemon_response( + "POST", + f"plugin/{tenant_id}/endpoint/remove", + bool, + data={ + "endpoint_id": endpoint_id, + }, + headers={ + "Content-Type": "application/json", + }, + ) + except PluginDaemonInternalServerError as e: + # Make delete idempotent: if record is not found, consider it a success + if "record not found" in str(e.description).lower(): + return True + raise def enable_endpoint(self, tenant_id: str, user_id: str, endpoint_id: str): """ diff --git a/api/tests/unit_tests/core/plugin/test_endpoint_client.py b/api/tests/unit_tests/core/plugin/test_endpoint_client.py new file mode 100644 index 0000000000..53056ee42a --- /dev/null +++ b/api/tests/unit_tests/core/plugin/test_endpoint_client.py @@ -0,0 +1,279 @@ +"""Unit tests for PluginEndpointClient functionality. + +This test module covers the endpoint client operations including: +- Successful endpoint deletion +- Idempotent delete behavior (record not found) +- Non-idempotent delete behavior (other errors) + +Tests follow the Arrange-Act-Assert pattern for clarity. +""" + +from unittest.mock import MagicMock, patch + +import pytest + +from core.plugin.impl.endpoint import PluginEndpointClient +from core.plugin.impl.exc import PluginDaemonInternalServerError + + +class TestPluginEndpointClientDelete: + """Unit tests for PluginEndpointClient delete_endpoint operation. 
+ + Tests cover: + - Successful endpoint deletion + - Idempotent behavior when endpoint is already deleted (record not found) + - Non-idempotent behavior for other errors + """ + + @pytest.fixture + def endpoint_client(self): + """Create a PluginEndpointClient instance for testing.""" + return PluginEndpointClient() + + @pytest.fixture + def mock_config(self): + """Mock plugin daemon configuration.""" + with ( + patch("core.plugin.impl.base.dify_config.PLUGIN_DAEMON_URL", "http://127.0.0.1:5002"), + patch("core.plugin.impl.base.dify_config.PLUGIN_DAEMON_KEY", "test-api-key"), + ): + yield + + def test_delete_endpoint_success(self, endpoint_client, mock_config): + """Test successful endpoint deletion. + + Given: + - A valid tenant_id, user_id, and endpoint_id + - The plugin daemon returns success response + When: + - delete_endpoint is called + Then: + - The method should return True + - The request should be made with correct parameters + """ + # Arrange + tenant_id = "tenant-123" + user_id = "user-456" + endpoint_id = "endpoint-789" + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = { + "code": 0, + "message": "success", + "data": True, + } + + with patch("httpx.request", return_value=mock_response): + # Act + result = endpoint_client.delete_endpoint( + tenant_id=tenant_id, + user_id=user_id, + endpoint_id=endpoint_id, + ) + + # Assert + assert result is True + + def test_delete_endpoint_idempotent_record_not_found(self, endpoint_client, mock_config): + """Test idempotent delete behavior when endpoint is already deleted. 
+ + Given: + - A valid tenant_id, user_id, and endpoint_id + - The plugin daemon returns "record not found" error + When: + - delete_endpoint is called + Then: + - The method should return True (idempotent behavior) + - No exception should be raised + """ + # Arrange + tenant_id = "tenant-123" + user_id = "user-456" + endpoint_id = "endpoint-789" + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = { + "code": -1, + "message": ( + '{"error_type": "PluginDaemonInternalServerError", ' + '"message": "failed to remove endpoint: record not found"}' + ), + } + + with patch("httpx.request", return_value=mock_response): + # Act + result = endpoint_client.delete_endpoint( + tenant_id=tenant_id, + user_id=user_id, + endpoint_id=endpoint_id, + ) + + # Assert - should return True instead of raising an error + assert result is True + + def test_delete_endpoint_non_idempotent_other_errors(self, endpoint_client, mock_config): + """Test non-idempotent delete behavior for other errors. 
+ + Given: + - A valid tenant_id, user_id, and endpoint_id + - The plugin daemon returns a different error (not "record not found") + When: + - delete_endpoint is called + Then: + - The method should raise PluginDaemonInternalServerError + """ + # Arrange + tenant_id = "tenant-123" + user_id = "user-456" + endpoint_id = "endpoint-789" + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = { + "code": -1, + "message": ( + '{"error_type": "PluginDaemonInternalServerError", ' + '"message": "failed to remove endpoint: internal server error"}' + ), + } + + with patch("httpx.request", return_value=mock_response): + # Act & Assert + with pytest.raises(PluginDaemonInternalServerError) as exc_info: + endpoint_client.delete_endpoint( + tenant_id=tenant_id, + user_id=user_id, + endpoint_id=endpoint_id, + ) + + # Assert - the error message should not be "record not found" + assert "record not found" not in str(exc_info.value.description) + + def test_delete_endpoint_idempotent_case_insensitive(self, endpoint_client, mock_config): + """Test idempotent delete behavior with case-insensitive error message. 
+ + Given: + - A valid tenant_id, user_id, and endpoint_id + - The plugin daemon returns "Record Not Found" error (different case) + When: + - delete_endpoint is called + Then: + - The method should return True (idempotent behavior) + """ + # Arrange + tenant_id = "tenant-123" + user_id = "user-456" + endpoint_id = "endpoint-789" + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = { + "code": -1, + "message": '{"error_type": "PluginDaemonInternalServerError", "message": "Record Not Found"}', + } + + with patch("httpx.request", return_value=mock_response): + # Act + result = endpoint_client.delete_endpoint( + tenant_id=tenant_id, + user_id=user_id, + endpoint_id=endpoint_id, + ) + + # Assert - should still return True + assert result is True + + def test_delete_endpoint_multiple_calls_idempotent(self, endpoint_client, mock_config): + """Test that multiple delete calls are idempotent. + + Given: + - A valid tenant_id, user_id, and endpoint_id + - The first call succeeds + - Subsequent calls return "record not found" + When: + - delete_endpoint is called multiple times + Then: + - All calls should return True + """ + # Arrange + tenant_id = "tenant-123" + user_id = "user-456" + endpoint_id = "endpoint-789" + + # First call - success + mock_response_success = MagicMock() + mock_response_success.status_code = 200 + mock_response_success.json.return_value = { + "code": 0, + "message": "success", + "data": True, + } + + # Second call - record not found + mock_response_not_found = MagicMock() + mock_response_not_found.status_code = 200 + mock_response_not_found.json.return_value = { + "code": -1, + "message": ( + '{"error_type": "PluginDaemonInternalServerError", ' + '"message": "failed to remove endpoint: record not found"}' + ), + } + + with patch("httpx.request") as mock_request: + # Act - first call + mock_request.return_value = mock_response_success + result1 = endpoint_client.delete_endpoint( + tenant_id=tenant_id, + 
user_id=user_id, + endpoint_id=endpoint_id, + ) + + # Act - second call (already deleted) + mock_request.return_value = mock_response_not_found + result2 = endpoint_client.delete_endpoint( + tenant_id=tenant_id, + user_id=user_id, + endpoint_id=endpoint_id, + ) + + # Assert - both should return True + assert result1 is True + assert result2 is True + + def test_delete_endpoint_non_idempotent_unauthorized_error(self, endpoint_client, mock_config): + """Test that authorization errors are not treated as idempotent. + + Given: + - A valid tenant_id, user_id, and endpoint_id + - The plugin daemon returns an unauthorized error + When: + - delete_endpoint is called + Then: + - The method should raise the appropriate error (not return True) + """ + # Arrange + tenant_id = "tenant-123" + user_id = "user-456" + endpoint_id = "endpoint-789" + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = { + "code": -1, + "message": '{"error_type": "PluginDaemonUnauthorizedError", "message": "unauthorized access"}', + } + + with patch("httpx.request", return_value=mock_response): + # Act & Assert + with pytest.raises(Exception) as exc_info: + endpoint_client.delete_endpoint( + tenant_id=tenant_id, + user_id=user_id, + endpoint_id=endpoint_id, + ) + + # Assert - should not return True for unauthorized errors + assert exc_info.value.__class__.__name__ == "PluginDaemonUnauthorizedError" From 2f633de45e01b62dd786b1938518393bda8b50c4 Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Thu, 15 Jan 2026 11:14:15 +0900 Subject: [PATCH 04/25] refactor: port TenantCreditPool (#30926) Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/models/model.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/api/models/model.py b/api/models/model.py index a48f4d34d4..463693cfba 
100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -2083,7 +2083,7 @@ class TraceAppConfig(TypeBase): } -class TenantCreditPool(Base): +class TenantCreditPool(TypeBase): __tablename__ = "tenant_credit_pools" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="tenant_credit_pool_pkey"), @@ -2091,14 +2091,20 @@ class TenantCreditPool(Base): sa.Index("tenant_credit_pool_pool_type_idx", "pool_type"), ) - id = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()")) - tenant_id = mapped_column(StringUUID, nullable=False) - pool_type = mapped_column(String(40), nullable=False, default="trial", server_default="trial") - quota_limit = mapped_column(BigInteger, nullable=False, default=0) - quota_used = mapped_column(BigInteger, nullable=False, default=0) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=text("CURRENT_TIMESTAMP")) - updated_at = mapped_column( - sa.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() + id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()"), init=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + pool_type: Mapped[str] = mapped_column(String(40), nullable=False, default="trial", server_default="trial") + quota_limit: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0) + quota_used: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=text("CURRENT_TIMESTAMP"), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + sa.DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, ) @property From d3923e7b56cb3b4a7384db94e986ba27fbd76a43 Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Thu, 15 Jan 2026 11:14:55 +0900 Subject: [PATCH 05/25] refactor: port 
AppAnnotationHitHistory (#30922) Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- api/models/model.py | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/api/models/model.py b/api/models/model.py index 463693cfba..68903e86eb 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -1447,7 +1447,7 @@ class MessageAnnotation(Base): return account -class AppAnnotationHitHistory(Base): +class AppAnnotationHitHistory(TypeBase): __tablename__ = "app_annotation_hit_histories" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="app_annotation_hit_histories_pkey"), @@ -1457,17 +1457,19 @@ class AppAnnotationHitHistory(Base): sa.Index("app_annotation_hit_histories_message_idx", "message_id"), ) - id = mapped_column(StringUUID, default=lambda: str(uuid4())) - app_id = mapped_column(StringUUID, nullable=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) annotation_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - source = mapped_column(LongText, nullable=False) - question = mapped_column(LongText, nullable=False) - account_id = mapped_column(StringUUID, nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) - score = mapped_column(Float, nullable=False, server_default=sa.text("0")) - message_id = mapped_column(StringUUID, nullable=False) - annotation_question = mapped_column(LongText, nullable=False) - annotation_content = mapped_column(LongText, nullable=False) + source: Mapped[str] = mapped_column(LongText, nullable=False) + question: Mapped[str] = mapped_column(LongText, nullable=False) + account_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + 
score: Mapped[float] = mapped_column(Float, nullable=False, server_default=sa.text("0")) + message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + annotation_question: Mapped[str] = mapped_column(LongText, nullable=False) + annotation_content: Mapped[str] = mapped_column(LongText, nullable=False) @property def account(self): From a33ac77a2206b7e3cc992c26b4953fde825fddb4 Mon Sep 17 00:00:00 2001 From: Coding On Star <447357187@qq.com> Date: Thu, 15 Jan 2026 10:33:48 +0800 Subject: [PATCH 06/25] feat: implement document creation pipeline with multi-step wizard and datasource management (#30843) Co-authored-by: CodingOnStar --- .../create-from-dsl-modal/index.spec.tsx | 2028 +++++++++++++ .../create-from-pipeline/hooks/index.ts | 5 + .../hooks/use-add-documents-steps.ts | 41 + .../hooks/use-datasource-actions.ts | 321 ++ .../hooks/use-datasource-options.ts | 27 + .../use-datasource-store.ts} | 76 +- .../hooks/use-datasource-ui-state.ts | 132 + .../create-from-pipeline/index.spec.tsx | 2698 +++++++++++++++++ .../documents/create-from-pipeline/index.tsx | 683 +---- .../create-from-pipeline/steps/index.ts | 3 + .../steps/preview-panel.tsx | 112 + .../steps/step-one-content.tsx | 110 + .../steps/step-three-content.tsx | 23 + .../steps/step-two-content.tsx | 38 + .../utils/datasource-info-builder.ts | 63 + 15 files changed, 5783 insertions(+), 577 deletions(-) create mode 100644 web/app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/index.spec.tsx create mode 100644 web/app/components/datasets/documents/create-from-pipeline/hooks/index.ts create mode 100644 web/app/components/datasets/documents/create-from-pipeline/hooks/use-add-documents-steps.ts create mode 100644 web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-actions.ts create mode 100644 web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-options.ts rename 
web/app/components/datasets/documents/create-from-pipeline/{hooks.ts => hooks/use-datasource-store.ts} (70%) create mode 100644 web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-ui-state.ts create mode 100644 web/app/components/datasets/documents/create-from-pipeline/index.spec.tsx create mode 100644 web/app/components/datasets/documents/create-from-pipeline/steps/index.ts create mode 100644 web/app/components/datasets/documents/create-from-pipeline/steps/preview-panel.tsx create mode 100644 web/app/components/datasets/documents/create-from-pipeline/steps/step-one-content.tsx create mode 100644 web/app/components/datasets/documents/create-from-pipeline/steps/step-three-content.tsx create mode 100644 web/app/components/datasets/documents/create-from-pipeline/steps/step-two-content.tsx create mode 100644 web/app/components/datasets/documents/create-from-pipeline/utils/datasource-info-builder.ts diff --git a/web/app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/index.spec.tsx b/web/app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/index.spec.tsx new file mode 100644 index 0000000000..8ae7a41e72 --- /dev/null +++ b/web/app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/index.spec.tsx @@ -0,0 +1,2028 @@ +import { QueryClient, QueryClientProvider } from '@tanstack/react-query' +import { act, fireEvent, render, screen, waitFor } from '@testing-library/react' +import DSLConfirmModal from './dsl-confirm-modal' +import Header from './header' +import CreateFromDSLModal, { CreateFromDSLModalTab } from './index' +import Tab from './tab' +import TabItem from './tab/item' +import Uploader from './uploader' + +// Mock next/navigation +const mockPush = vi.fn() +vi.mock('next/navigation', () => ({ + useRouter: () => ({ + push: mockPush, + }), +})) + +// Mock service hooks +const mockImportDSL = vi.fn() +const mockImportDSLConfirm = vi.fn() + 
+vi.mock('@/service/use-pipeline', () => ({ + useImportPipelineDSL: () => ({ + mutateAsync: mockImportDSL, + }), + useImportPipelineDSLConfirm: () => ({ + mutateAsync: mockImportDSLConfirm, + }), +})) + +// Mock plugin dependencies hook +const mockHandleCheckPluginDependencies = vi.fn() + +vi.mock('@/app/components/workflow/plugin-dependency/hooks', () => ({ + usePluginDependencies: () => ({ + handleCheckPluginDependencies: mockHandleCheckPluginDependencies, + }), +})) + +// Mock toast context +const mockNotify = vi.fn() + +vi.mock('use-context-selector', async () => { + const actual = await vi.importActual('use-context-selector') + return { + ...actual, + useContext: vi.fn(() => ({ notify: mockNotify })), + } +}) + +// Test data builders +const createMockFile = (name = 'test.pipeline'): File => { + return new File(['test content'], name, { type: 'application/octet-stream' }) +} + +const createImportDSLResponse = (overrides = {}) => ({ + id: 'import-123', + status: 'completed' as const, + pipeline_id: 'pipeline-456', + dataset_id: 'dataset-789', + current_dsl_version: '1.0.0', + imported_dsl_version: '1.0.0', + ...overrides, +}) + +// Helper function to create QueryClient wrapper +const createWrapper = () => { + const queryClient = new QueryClient({ + defaultOptions: { + queries: { retry: false }, + mutations: { retry: false }, + }, + }) + return ({ children }: { children: React.ReactNode }) => ( + + {children} + + ) +} + +describe('CreateFromDSLModal', () => { + beforeEach(() => { + vi.clearAllMocks() + mockImportDSL.mockReset() + mockImportDSLConfirm.mockReset() + mockPush.mockReset() + mockNotify.mockReset() + mockHandleCheckPluginDependencies.mockReset() + }) + + // ============================================ + // Rendering Tests + // ============================================ + describe('Rendering', () => { + it('should render without crashing when show is true', () => { + render( + , + { wrapper: createWrapper() }, + ) + + 
expect(screen.getByText('app.importFromDSL')).toBeInTheDocument() + }) + + it('should not render modal content when show is false', () => { + render( + , + { wrapper: createWrapper() }, + ) + + // Modal with show=false should not display its content visibly + const modal = screen.queryByText('app.importFromDSL') + expect(modal).toBeNull() + }) + + it('should render file tab by default', () => { + render( + , + { wrapper: createWrapper() }, + ) + + expect(screen.getByText('app.importFromDSLFile')).toBeInTheDocument() + expect(screen.getByText('app.importFromDSLUrl')).toBeInTheDocument() + }) + + it('should render cancel and import buttons', () => { + render( + , + { wrapper: createWrapper() }, + ) + + expect(screen.getByText('app.newApp.Cancel')).toBeInTheDocument() + expect(screen.getByText('app.newApp.import')).toBeInTheDocument() + }) + + it('should render uploader when file tab is active', () => { + render( + , + { wrapper: createWrapper() }, + ) + + expect(screen.getByText('app.dslUploader.button')).toBeInTheDocument() + }) + + it('should render URL input when URL tab is active', () => { + render( + , + { wrapper: createWrapper() }, + ) + + expect(screen.getByText('DSL URL')).toBeInTheDocument() + expect(screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder')).toBeInTheDocument() + }) + }) + + // ============================================ + // Props Testing + // ============================================ + describe('Props', () => { + it('should use FROM_FILE as default activeTab', () => { + render( + , + { wrapper: createWrapper() }, + ) + + // File tab content should be visible + expect(screen.getByText('app.dslUploader.button')).toBeInTheDocument() + }) + + it('should use provided activeTab prop', () => { + render( + , + { wrapper: createWrapper() }, + ) + + expect(screen.getByText('DSL URL')).toBeInTheDocument() + }) + + it('should use provided dslUrl prop', () => { + render( + , + { wrapper: createWrapper() }, + ) + + const input = 
screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + expect(input).toHaveValue('https://example.com/test.pipeline') + }) + + it('should call onClose when cancel button is clicked', () => { + const onClose = vi.fn() + render( + , + { wrapper: createWrapper() }, + ) + + fireEvent.click(screen.getByText('app.newApp.Cancel')) + expect(onClose).toHaveBeenCalled() + }) + }) + + // ============================================ + // State Management Tests + // ============================================ + describe('State Management', () => { + it('should switch between tabs', () => { + render( + , + { wrapper: createWrapper() }, + ) + + // Initially file tab is active + expect(screen.getByText('app.dslUploader.button')).toBeInTheDocument() + + // Click URL tab + fireEvent.click(screen.getByText('app.importFromDSLUrl')) + + // URL input should be visible + expect(screen.getByText('DSL URL')).toBeInTheDocument() + }) + + it('should update URL value when typing', () => { + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + expect(input).toHaveValue('https://example.com/test.pipeline') + }) + + it('should have disabled import button when no file is selected in file tab', () => { + render( + , + { wrapper: createWrapper() }, + ) + + const importButton = screen.getByText('app.newApp.import').closest('button') + expect(importButton).toBeDisabled() + }) + + it('should have disabled import button when no URL is entered in URL tab', () => { + render( + , + { wrapper: createWrapper() }, + ) + + const importButton = screen.getByText('app.newApp.import').closest('button') + expect(importButton).toBeDisabled() + }) + + it('should enable import button when URL is entered', () => { + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + 
fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button') + expect(importButton).not.toBeDisabled() + }) + }) + + // ============================================ + // API Call Tests + // ============================================ + describe('API Calls', () => { + it('should call importDSL with URL mode when URL tab is active', async () => { + mockImportDSL.mockResolvedValue(createImportDSLResponse()) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! + fireEvent.click(importButton) + + await waitFor(() => { + expect(mockImportDSL).toHaveBeenCalledWith({ + mode: 'yaml-url', + yaml_url: 'https://example.com/test.pipeline', + }) + }) + }) + + it('should handle successful import with COMPLETED status', async () => { + const onSuccess = vi.fn() + const onClose = vi.fn() + mockImportDSL.mockResolvedValue(createImportDSLResponse({ status: 'completed' })) + mockHandleCheckPluginDependencies.mockResolvedValue(undefined) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! 
+ fireEvent.click(importButton) + + await waitFor(() => { + expect(onSuccess).toHaveBeenCalled() + expect(onClose).toHaveBeenCalled() + expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ + type: 'success', + })) + expect(mockPush).toHaveBeenCalledWith('/datasets/dataset-789/pipeline') + }) + }) + + it('should handle import with COMPLETED_WITH_WARNINGS status', async () => { + const onSuccess = vi.fn() + const onClose = vi.fn() + mockImportDSL.mockResolvedValue(createImportDSLResponse({ status: 'completed-with-warnings' })) + mockHandleCheckPluginDependencies.mockResolvedValue(undefined) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! + fireEvent.click(importButton) + + await waitFor(() => { + expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ + type: 'warning', + })) + }) + }) + + it('should handle import with PENDING status and show error modal', async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }) + const onClose = vi.fn() + mockImportDSL.mockResolvedValue(createImportDSLResponse({ + status: 'pending', + imported_dsl_version: '0.9.0', + current_dsl_version: '1.0.0', + })) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! 
+ fireEvent.click(importButton) + + await waitFor(() => { + expect(onClose).toHaveBeenCalled() + }) + + // Advance timer to show error modal + await act(async () => { + vi.advanceTimersByTime(400) + }) + + await waitFor(() => { + expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument() + }) + + vi.useRealTimers() + }) + + it('should handle API error', async () => { + mockImportDSL.mockResolvedValue(null) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! + fireEvent.click(importButton) + + await waitFor(() => { + expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ + type: 'error', + })) + }) + }) + + it('should handle FAILED status', async () => { + mockImportDSL.mockResolvedValue(createImportDSLResponse({ status: 'failed' })) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! 
+ fireEvent.click(importButton) + + await waitFor(() => { + expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ + type: 'error', + })) + }) + }) + + it('should check plugin dependencies after successful import', async () => { + mockImportDSL.mockResolvedValue(createImportDSLResponse({ + status: 'completed', + pipeline_id: 'pipeline-123', + })) + mockHandleCheckPluginDependencies.mockResolvedValue(undefined) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! + fireEvent.click(importButton) + + await waitFor(() => { + expect(mockHandleCheckPluginDependencies).toHaveBeenCalledWith('pipeline-123', true) + }) + }) + }) + + // ============================================ + // Event Handler Tests + // ============================================ + describe('Event Handlers', () => { + it('should call onClose when header close button is clicked', () => { + const onClose = vi.fn() + render( + , + { wrapper: createWrapper() }, + ) + + // Find and click the close icon in header + const closeIcon = document.querySelector('[class*="cursor-pointer"]') + + if (closeIcon) { + fireEvent.click(closeIcon) + expect(onClose).toHaveBeenCalled() + } + }) + + it('should close modal on ESC key press', () => { + const onClose = vi.fn() + render( + , + { wrapper: createWrapper() }, + ) + + // Trigger ESC key event - ahooks useKeyPress listens for 'esc' which maps to Escape key + // Need to dispatch on window/document with the correct event properties + const escEvent = new KeyboardEvent('keydown', { + key: 'Escape', + code: 'Escape', + keyCode: 27, + bubbles: true, + }) + document.dispatchEvent(escEvent) + + expect(onClose).toHaveBeenCalled() + }) + + it('should not close on ESC when error modal is shown', async () => { + 
vi.useFakeTimers({ shouldAdvanceTime: true }) + const onClose = vi.fn() + mockImportDSL.mockResolvedValue(createImportDSLResponse({ status: 'pending' })) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! + fireEvent.click(importButton) + + await waitFor(() => { + expect(onClose).toHaveBeenCalled() + }) + + // Clear previous calls + onClose.mockClear() + + // Show error modal + await act(async () => { + vi.advanceTimersByTime(400) + }) + + // Now ESC should not close main modal because error modal is shown + const escEvent = new KeyboardEvent('keydown', { + key: 'Escape', + code: 'Escape', + keyCode: 27, + bubbles: true, + }) + document.dispatchEvent(escEvent) + + // onClose should not be called again when error modal is shown + expect(onClose).not.toHaveBeenCalled() + + vi.useRealTimers() + }) + + it('should prevent duplicate submissions', async () => { + mockImportDSL.mockImplementation(() => new Promise(resolve => + setTimeout(() => resolve(createImportDSLResponse()), 1000), + )) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! 
+ + // Click multiple times rapidly + fireEvent.click(importButton) + fireEvent.click(importButton) + fireEvent.click(importButton) + + // Should only be called once due to isCreatingRef + await waitFor(() => { + expect(mockImportDSL).toHaveBeenCalledTimes(1) + }) + }) + }) + + // ============================================ + // Memoization Tests + // ============================================ + describe('Memoization', () => { + it('should correctly compute buttonDisabled based on currentTab and file/URL', () => { + render( + , + { wrapper: createWrapper() }, + ) + + // File tab with no file - disabled + let importButton = screen.getByText('app.newApp.import').closest('button') + expect(importButton).toBeDisabled() + + // Switch to URL tab by clicking on it + fireEvent.click(screen.getByText('app.importFromDSLUrl')) + + // Still disabled (no URL) + importButton = screen.getByText('app.newApp.import').closest('button') + expect(importButton).toBeDisabled() + + // Add URL value - should enable + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com' } }) + + importButton = screen.getByText('app.newApp.import').closest('button') + expect(importButton).not.toBeDisabled() + }) + }) + + // ============================================ + // Edge Cases Tests + // ============================================ + describe('Edge Cases', () => { + it('should handle empty URL gracefully', () => { + render( + , + { wrapper: createWrapper() }, + ) + + const importButton = screen.getByText('app.newApp.import').closest('button')! 
+ fireEvent.click(importButton) + + // Should not call API with empty URL + expect(mockImportDSL).not.toHaveBeenCalled() + }) + + it('should handle undefined onSuccess gracefully', async () => { + mockImportDSL.mockResolvedValue(createImportDSLResponse()) + mockHandleCheckPluginDependencies.mockResolvedValue(undefined) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! + + // Should not throw + fireEvent.click(importButton) + + await waitFor(() => { + expect(mockPush).toHaveBeenCalled() + }) + }) + + it('should handle response without pipeline_id', async () => { + mockImportDSL.mockResolvedValue(createImportDSLResponse({ + status: 'completed', + pipeline_id: null, + })) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! + fireEvent.click(importButton) + + await waitFor(() => { + // Should not call handleCheckPluginDependencies when pipeline_id is null + expect(mockHandleCheckPluginDependencies).not.toHaveBeenCalled() + }) + }) + + it('should handle empty file in file tab gracefully', () => { + render( + , + { wrapper: createWrapper() }, + ) + + const importButton = screen.getByText('app.newApp.import').closest('button')! 
+ fireEvent.click(importButton) + + // Should not call API with no file + expect(mockImportDSL).not.toHaveBeenCalled() + }) + + it('should return early in onCreate when file tab has no file (direct trigger)', async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }) + + // Test the early return branch by force-triggering the button even when disabled + render( + , + { wrapper: createWrapper() }, + ) + + const importButton = screen.getByText('app.newApp.import').closest('button')! + + // Remove disabled attribute temporarily to test the early return + importButton.removeAttribute('disabled') + + // Dispatch a native click event to bypass any React disabled checks + const clickEvent = new MouseEvent('click', { bubbles: true, cancelable: true }) + importButton.dispatchEvent(clickEvent) + + // Wait for debounce to trigger + await act(async () => { + vi.advanceTimersByTime(400) + }) + + // Should not call API due to early return in onCreate + expect(mockImportDSL).not.toHaveBeenCalled() + + vi.useRealTimers() + }) + + it('should return early in onCreate when URL tab has no URL (direct trigger)', async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }) + + render( + , + { wrapper: createWrapper() }, + ) + + const importButton = screen.getByText('app.newApp.import').closest('button')! 
+ + // Remove disabled attribute to test the early return + importButton.removeAttribute('disabled') + + // Dispatch a native click event + const clickEvent = new MouseEvent('click', { bubbles: true, cancelable: true }) + importButton.dispatchEvent(clickEvent) + + // Wait for debounce + await act(async () => { + vi.advanceTimersByTime(400) + }) + + // Should not call API due to early return + expect(mockImportDSL).not.toHaveBeenCalled() + + vi.useRealTimers() + }) + }) + + // ============================================ + // File Import Tests (covers readFile, handleFile, file mode import) + // ============================================ + describe('File Import', () => { + it('should read file content when file is selected', async () => { + mockImportDSL.mockResolvedValue(createImportDSLResponse()) + mockHandleCheckPluginDependencies.mockResolvedValue(undefined) + + render( + , + { wrapper: createWrapper() }, + ) + + // Create a mock file with content + const fileContent = 'test yaml content' + const mockFile = new File([fileContent], 'test.pipeline', { type: 'application/octet-stream' }) + + // Get the file input and simulate file selection + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement + Object.defineProperty(fileInput, 'files', { + value: [mockFile], + configurable: true, + }) + fireEvent.change(fileInput) + + // Wait for FileReader to complete + await waitFor(() => { + const importButton = screen.getByText('app.newApp.import').closest('button') + expect(importButton).not.toBeDisabled() + }) + + // Click import button + const importButton = screen.getByText('app.newApp.import').closest('button')! 
+ fireEvent.click(importButton) + + // Verify API was called with file content + await waitFor(() => { + expect(mockImportDSL).toHaveBeenCalledWith({ + mode: 'yaml-content', + yaml_content: fileContent, + }) + }) + }) + + it('should clear file content when file is removed', async () => { + render( + , + { wrapper: createWrapper() }, + ) + + // First add a file + const mockFile = new File(['content'], 'test.pipeline', { type: 'application/octet-stream' }) + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement + Object.defineProperty(fileInput, 'files', { + value: [mockFile], + configurable: true, + }) + fireEvent.change(fileInput) + + // Wait for file to be displayed + await waitFor(() => { + expect(screen.getByText('test.pipeline')).toBeInTheDocument() + }) + + // Now remove the file by clicking delete button (inside ActionButton) + const actionButton = document.querySelector('[class*="group-hover"]') + const deleteButton = actionButton?.querySelector('button') + if (deleteButton) { + fireEvent.click(deleteButton) + // File should be removed - uploader prompt should show again + await waitFor(() => { + expect(screen.getByText('app.dslUploader.button')).toBeInTheDocument() + }) + } + }) + }) + + // ============================================ + // DSL Confirm Flow Tests (covers onDSLConfirm) + // ============================================ + describe('DSL Confirm Flow', () => { + it('should handle DSL confirm success', async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }) + const onSuccess = vi.fn() + const onClose = vi.fn() + + mockImportDSL.mockResolvedValue(createImportDSLResponse({ + id: 'import-123', + status: 'pending', + imported_dsl_version: '0.9.0', + current_dsl_version: '1.0.0', + })) + + mockImportDSLConfirm.mockResolvedValue({ + status: 'completed', + pipeline_id: 'pipeline-456', + dataset_id: 'dataset-789', + }) + + mockHandleCheckPluginDependencies.mockResolvedValue(undefined) + + render( + , + { wrapper: 
createWrapper() }, + ) + + // Enter URL and submit + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! + fireEvent.click(importButton) + + // Wait for pending status handling + await waitFor(() => { + expect(onClose).toHaveBeenCalled() + }) + + // Advance timer to show error modal + await act(async () => { + vi.advanceTimersByTime(400) + }) + + // Click confirm button in error modal + await waitFor(() => { + expect(screen.getByText('app.newApp.Confirm')).toBeInTheDocument() + }) + + fireEvent.click(screen.getByText('app.newApp.Confirm')) + + // Verify confirm was called + await waitFor(() => { + expect(mockImportDSLConfirm).toHaveBeenCalledWith('import-123') + }) + + // Verify success handling + await waitFor(() => { + expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ + type: 'success', + })) + }) + + vi.useRealTimers() + }) + + it('should handle DSL confirm with no importId', async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }) + + mockImportDSL.mockResolvedValue(createImportDSLResponse({ + id: '', // Empty id + status: 'pending', + })) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + const importButton = screen.getByText('app.newApp.import').closest('button')! 
+ fireEvent.click(importButton) + + await act(async () => { + vi.advanceTimersByTime(400) + }) + + // Click confirm - should return early since importId is empty + await waitFor(() => { + expect(screen.getByText('app.newApp.Confirm')).toBeInTheDocument() + }) + + fireEvent.click(screen.getByText('app.newApp.Confirm')) + + // Confirm should not be called since importId is empty string (falsy) + expect(mockImportDSLConfirm).not.toHaveBeenCalled() + + vi.useRealTimers() + }) + + it('should handle DSL confirm API error', async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }) + + mockImportDSL.mockResolvedValue(createImportDSLResponse({ + id: 'import-123', + status: 'pending', + })) + + mockImportDSLConfirm.mockResolvedValue(null) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + fireEvent.click(screen.getByText('app.newApp.import').closest('button')!) 
+ + await act(async () => { + vi.advanceTimersByTime(400) + }) + + await waitFor(() => { + expect(screen.getByText('app.newApp.Confirm')).toBeInTheDocument() + }) + + fireEvent.click(screen.getByText('app.newApp.Confirm')) + + await waitFor(() => { + expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ + type: 'error', + })) + }) + + vi.useRealTimers() + }) + + it('should handle DSL confirm with FAILED status', async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }) + + mockImportDSL.mockResolvedValue(createImportDSLResponse({ + id: 'import-123', + status: 'pending', + })) + + mockImportDSLConfirm.mockResolvedValue({ + status: 'failed', + pipeline_id: 'pipeline-456', + dataset_id: 'dataset-789', + }) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + fireEvent.click(screen.getByText('app.newApp.import').closest('button')!) + + await act(async () => { + vi.advanceTimersByTime(400) + }) + + await waitFor(() => { + expect(screen.getByText('app.newApp.Confirm')).toBeInTheDocument() + }) + + fireEvent.click(screen.getByText('app.newApp.Confirm')) + + await waitFor(() => { + expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ + type: 'error', + })) + }) + + vi.useRealTimers() + }) + + it('should close error modal when cancel is clicked', async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }) + + mockImportDSL.mockResolvedValue(createImportDSLResponse({ + status: 'pending', + })) + + render( + , + { wrapper: createWrapper() }, + ) + + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/test.pipeline' } }) + + fireEvent.click(screen.getByText('app.newApp.import').closest('button')!) 
+ + await act(async () => { + vi.advanceTimersByTime(400) + }) + + // Error modal should be visible + await waitFor(() => { + expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument() + }) + + // There are two Cancel buttons now (one in main modal footer, one in error modal) + // Find the Cancel button in the error modal context + const cancelButtons = screen.getAllByText('app.newApp.Cancel') + // Click the last Cancel button (the one in the error modal) + fireEvent.click(cancelButtons[cancelButtons.length - 1]) + + vi.useRealTimers() + }) + }) +}) + +// ============================================ +// Header Component Tests +// ============================================ +describe('Header', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + describe('Rendering', () => { + it('should render title', () => { + render(
) + expect(screen.getByText('app.importFromDSL')).toBeInTheDocument() + }) + + it('should render close icon', () => { + render(
) + // Check for close icon container + const closeButton = document.querySelector('[class*="cursor-pointer"]') + expect(closeButton).toBeInTheDocument() + }) + }) + + describe('Event Handlers', () => { + it('should call onClose when close icon is clicked', () => { + const onClose = vi.fn() + render(
) + + const closeButton = document.querySelector('[class*="cursor-pointer"]')! + fireEvent.click(closeButton) + + expect(onClose).toHaveBeenCalled() + }) + }) +}) + +// ============================================ +// Tab Component Tests +// ============================================ +describe('Tab', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + describe('Rendering', () => { + it('should render both tabs', () => { + render( + , + ) + + expect(screen.getByText('app.importFromDSLFile')).toBeInTheDocument() + expect(screen.getByText('app.importFromDSLUrl')).toBeInTheDocument() + }) + }) + + describe('Event Handlers', () => { + it('should call setCurrentTab when clicking file tab', () => { + const setCurrentTab = vi.fn() + render( + , + ) + + fireEvent.click(screen.getByText('app.importFromDSLFile')) + // Tab uses bind() which passes the key as first argument and event as second + expect(setCurrentTab).toHaveBeenCalled() + expect(setCurrentTab.mock.calls[0][0]).toBe(CreateFromDSLModalTab.FROM_FILE) + }) + + it('should call setCurrentTab when clicking URL tab', () => { + const setCurrentTab = vi.fn() + render( + , + ) + + fireEvent.click(screen.getByText('app.importFromDSLUrl')) + // Tab uses bind() which passes the key as first argument and event as second + expect(setCurrentTab).toHaveBeenCalled() + expect(setCurrentTab.mock.calls[0][0]).toBe(CreateFromDSLModalTab.FROM_URL) + }) + }) +}) + +// ============================================ +// Tab Item Component Tests +// ============================================ +describe('TabItem', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + describe('Rendering', () => { + it('should render label', () => { + render( + , + ) + + expect(screen.getByText('Test Tab')).toBeInTheDocument() + }) + + it('should render active indicator when active', () => { + render( + , + ) + + // Active indicator is the bottom border div + const indicator = document.querySelector('[class*="bg-util-colors-blue"]') + 
expect(indicator).toBeInTheDocument() + }) + + it('should not render active indicator when inactive', () => { + render( + , + ) + + const indicator = document.querySelector('[class*="bg-util-colors-blue"]') + expect(indicator).toBeNull() + }) + + it('should have active text color when active', () => { + render( + , + ) + + const item = screen.getByText('Test Tab') + expect(item.className).toContain('text-text-primary') + }) + + it('should have inactive text color when inactive', () => { + render( + , + ) + + const item = screen.getByText('Test Tab') + expect(item.className).toContain('text-text-tertiary') + }) + }) + + describe('Event Handlers', () => { + it('should call onClick when clicked', () => { + const onClick = vi.fn() + render( + , + ) + + fireEvent.click(screen.getByText('Test Tab')) + expect(onClick).toHaveBeenCalled() + }) + }) +}) + +// ============================================ +// Uploader Component Tests +// ============================================ +describe('Uploader', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + describe('Rendering', () => { + it('should render upload prompt when no file', () => { + render( + , + ) + + expect(screen.getByText('app.dslUploader.button')).toBeInTheDocument() + expect(screen.getByText('app.dslUploader.browse')).toBeInTheDocument() + }) + + it('should render file info when file is selected', () => { + const mockFile = createMockFile('test.pipeline') + + render( + , + ) + + expect(screen.getByText('test.pipeline')).toBeInTheDocument() + expect(screen.getByText('PIPELINE')).toBeInTheDocument() + }) + + it('should apply custom className', () => { + const { container } = render( + , + ) + + expect(container.firstChild).toHaveClass('custom-class') + }) + }) + + describe('Event Handlers', () => { + it('should call updateFile when browse link is clicked and file is selected', async () => { + const updateFile = vi.fn() + render( + , + ) + + // Get the hidden input + const fileInput = 
document.querySelector('input[type="file"]') as HTMLInputElement + + // Create a mock file + const mockFile = createMockFile() + + // Simulate file selection + Object.defineProperty(fileInput, 'files', { + value: [mockFile], + }) + + fireEvent.change(fileInput) + + expect(updateFile).toHaveBeenCalledWith(mockFile) + }) + + it('should call updateFile with undefined when delete button is clicked', () => { + const updateFile = vi.fn() + const mockFile = createMockFile() + + render( + , + ) + + // Find and click delete button - the button contains the delete icon + const deleteButton = document.querySelector('button') + if (deleteButton) { + fireEvent.click(deleteButton) + expect(updateFile).toHaveBeenCalledWith() + } + }) + + it('should handle browse click', () => { + const updateFile = vi.fn() + render( + , + ) + + const browseLink = screen.getByText('app.dslUploader.browse') + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement + + // Mock click on input + const clickSpy = vi.spyOn(fileInput, 'click') + + fireEvent.click(browseLink) + + expect(clickSpy).toHaveBeenCalled() + }) + }) + + describe('Drag and Drop', () => { + it('should show drag state when dragging over', () => { + render( + , + ) + + const dropArea = document.querySelector('[class*="border-dashed"]')! 
+ + // The drag state is triggered when dragEnter fires on something other than the dragRef + // In the component, setDragging(true) happens when e.target !== dragRef.current + fireEvent.dragEnter(dropArea, { + dataTransfer: { files: [] }, + }) + + // The class should be present since dropArea is not dragRef + expect(dropArea.className).toContain('border-components-dropzone') + }) + + it('should handle dragOver event', () => { + render( + , + ) + + const dashedArea = document.querySelector('[class*="border-dashed"]') + const dropArea = dashedArea?.parentElement + if (!dropArea) + return + + // DragOver should prevent default and stop propagation + const dragOverEvent = new Event('dragover', { bubbles: true, cancelable: true }) + dropArea.dispatchEvent(dragOverEvent) + + // Event should be handled without errors + expect(dropArea).toBeInTheDocument() + }) + + it('should handle dragLeave event and reset dragging state when target is dragRef', async () => { + render( + , + ) + + const dropArea = document.querySelector('[class*="border-dashed"]')! 
+ const dropAreaParent = dropArea.parentElement + + if (!dropAreaParent) + return + + // First trigger dragEnter to set dragging state + fireEvent.dragEnter(dropArea, { + dataTransfer: { files: [] }, + }) + + // Verify dragging state is set - the accent class appears when dragging + await waitFor(() => { + expect(dropArea.className).toContain('border-components-dropzone-border-accent') + }) + + // The dragRef div appears when dragging is true + const dragRefDiv = document.querySelector('[class*="absolute left-0 top-0"]') + expect(dragRefDiv).toBeInTheDocument() + + // When dragLeave happens on the dragRef element, setDragging(false) is called + if (dragRefDiv) { + // Fire dragleave directly on the dragRef element + fireEvent.dragLeave(dragRefDiv) + + // After dragLeave on dragRef, dragging should be false and accent class removed + await waitFor(() => { + expect(dropArea.className).not.toContain('border-components-dropzone-border-accent') + }) + } + }) + + it('should not reset dragging when dragLeave target is not dragRef', async () => { + render( + , + ) + + const dropArea = document.querySelector('[class*="border-dashed"]')! 
+ const dropAreaParent = dropArea.parentElement + + if (!dropAreaParent) + return + + // First trigger dragEnter to set dragging state + fireEvent.dragEnter(dropArea, { + dataTransfer: { files: [] }, + }) + + // Verify dragging state is set + await waitFor(() => { + expect(dropArea.className).toContain('border-components-dropzone-border-accent') + }) + + // Trigger dragLeave on the drop area (not dragRef) - should NOT reset dragging + fireEvent.dragLeave(dropArea, { + dataTransfer: { files: [] }, + }) + + // Dragging should still be true (accent class still present) + // because target is not dragRef + expect(dropArea.className).toContain('border-components-dropzone') + }) + + it('should handle file drop', async () => { + const updateFile = vi.fn() + render( + , + ) + + const dashedArea = document.querySelector('[class*="border-dashed"]') + const dropArea = dashedArea?.parentElement + if (!dropArea) + return + + const mockFile = createMockFile() + + fireEvent.drop(dropArea, { + dataTransfer: { + files: [mockFile], + }, + }) + + expect(updateFile).toHaveBeenCalledWith(mockFile) + }) + + it('should reject multiple files', async () => { + const updateFile = vi.fn() + render( + , + ) + + const dashedArea = document.querySelector('[class*="border-dashed"]') + const dropArea = dashedArea?.parentElement + if (!dropArea) + return + + const mockFile1 = createMockFile('file1.pipeline') + const mockFile2 = createMockFile('file2.pipeline') + + fireEvent.drop(dropArea, { + dataTransfer: { + files: [mockFile1, mockFile2], + }, + }) + + expect(updateFile).not.toHaveBeenCalled() + expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ + type: 'error', + })) + }) + }) + + describe('Edge Cases', () => { + it('should handle drop event without dataTransfer', () => { + const updateFile = vi.fn() + render( + , + ) + + const dashedArea = document.querySelector('[class*="border-dashed"]') + const dropArea = dashedArea?.parentElement + if (!dropArea) + return + + 
fireEvent.drop(dropArea, { + dataTransfer: null, + }) + + expect(updateFile).not.toHaveBeenCalled() + }) + + it('should handle file cancel in selectHandle and restore original file', () => { + const updateFile = vi.fn() + + render( + , + ) + + // Get the file input + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement + expect(fileInput).toBeInTheDocument() + + // Spy on input click before triggering selectHandle + const clickSpy = vi.spyOn(fileInput, 'click').mockImplementation(() => { + // After click, oncancel should be set + }) + + // Click browse link to trigger selectHandle + const browseLink = screen.getByText('app.dslUploader.browse') + fireEvent.click(browseLink) + + // selectHandle should have triggered click on input + expect(clickSpy).toHaveBeenCalled() + + // After selectHandle runs, oncancel should be set + // Trigger cancel - should restore original file (undefined in this case) + if (fileInput.oncancel) { + fileInput.oncancel(new Event('cancel')) + // updateFile should be called with undefined (the original file) + expect(updateFile).toHaveBeenCalledWith(undefined) + } + + clickSpy.mockRestore() + }) + + it('should not set dragging when target equals dragRef', () => { + render( + , + ) + + const dropArea = document.querySelector('[class*="border-dashed"]')! 
+ + // First trigger drag to show dragRef div + fireEvent.dragEnter(dropArea, { + dataTransfer: { files: [] }, + }) + + // Now the dragRef div should exist + const dragRefDiv = document.querySelector('[class*="absolute left-0 top-0"]') + + // When dragEnter happens on dragRef itself, setDragging should NOT be called + if (dragRefDiv) { + const dropAreaParent = dropArea.parentElement + if (dropAreaParent) { + // Trigger dragEnter with target = dragRef - this should NOT set dragging + const dragEnterEvent = new Event('dragenter', { bubbles: true }) + Object.defineProperty(dragEnterEvent, 'target', { value: dragRefDiv }) + dropAreaParent.dispatchEvent(dragEnterEvent) + } + } + }) + + it('should handle removeFile when file input exists', () => { + const updateFile = vi.fn() + const mockFile = createMockFile() + + render( + , + ) + + // Find and click delete button + const deleteButton = document.querySelector('button') + expect(deleteButton).toBeInTheDocument() + + if (deleteButton) { + fireEvent.click(deleteButton) + // updateFile should be called without arguments + expect(updateFile).toHaveBeenCalledWith() + } + + // Verify file input value was cleared + const fileInput = document.querySelector('input[type="file"]') as HTMLInputElement + expect(fileInput.value).toBe('') + }) + }) +}) + +// ============================================ +// DSLConfirmModal Component Tests +// ============================================ +describe('DSLConfirmModal', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + describe('Rendering', () => { + it('should render title', () => { + render( + , + ) + + expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument() + }) + + it('should render version information', () => { + render( + , + ) + + expect(screen.getByText('0.9.0')).toBeInTheDocument() + expect(screen.getByText('1.0.0')).toBeInTheDocument() + }) + + it('should render cancel and confirm buttons', () => { + render( + , + ) + + 
expect(screen.getByText('app.newApp.Cancel')).toBeInTheDocument() + expect(screen.getByText('app.newApp.Confirm')).toBeInTheDocument() + }) + + it('should render with default empty versions', () => { + render( + , + ) + + // Should not crash with default empty strings + expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument() + }) + + it('should disable confirm button when confirmDisabled is true', () => { + render( + , + ) + + const confirmButton = screen.getByText('app.newApp.Confirm').closest('button') + expect(confirmButton).toBeDisabled() + }) + }) + + describe('Event Handlers', () => { + it('should call onCancel when cancel button is clicked', () => { + const onCancel = vi.fn() + render( + , + ) + + fireEvent.click(screen.getByText('app.newApp.Cancel')) + expect(onCancel).toHaveBeenCalled() + }) + + it('should call onConfirm when confirm button is clicked', () => { + const onConfirm = vi.fn() + render( + , + ) + + fireEvent.click(screen.getByText('app.newApp.Confirm')) + expect(onConfirm).toHaveBeenCalled() + }) + + it('should bind onClose to onCancel via arrow function', () => { + // This test verifies that the Modal's onClose prop calls onCancel + // The implementation is: onClose={() => onCancel()} + const onCancel = vi.fn() + render( + , + ) + + // Trigger the cancel button which also calls onCancel + // This confirms onCancel is properly wired up + fireEvent.click(screen.getByText('app.newApp.Cancel')) + expect(onCancel).toHaveBeenCalledTimes(1) + }) + + it('should call onCancel when modal is closed via escape key', () => { + const onCancel = vi.fn() + render( + , + ) + + // Pressing Escape triggers Modal's onClose which calls onCancel + const escEvent = new KeyboardEvent('keydown', { + key: 'Escape', + code: 'Escape', + keyCode: 27, + bubbles: true, + }) + document.dispatchEvent(escEvent) + + // onCancel should be called via the onClose={() => onCancel()} callback + expect(onCancel).toHaveBeenCalled() + }) + }) + + 
describe('Props', () => { + it('should use default versions when not provided', () => { + render( + , + ) + + // Component should render without crashing + expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument() + }) + + it('should use default confirmDisabled when not provided', () => { + render( + , + ) + + const confirmButton = screen.getByText('app.newApp.Confirm').closest('button') + expect(confirmButton).not.toBeDisabled() + }) + }) +}) + +// ============================================ +// Integration Tests +// ============================================ +describe('CreateFromDSLModal Integration', () => { + beforeEach(() => { + vi.clearAllMocks() + mockImportDSL.mockReset() + mockImportDSLConfirm.mockReset() + mockPush.mockReset() + mockNotify.mockReset() + mockHandleCheckPluginDependencies.mockReset() + }) + + it('should complete full import flow with URL', async () => { + const onSuccess = vi.fn() + const onClose = vi.fn() + mockImportDSL.mockResolvedValue(createImportDSLResponse()) + mockHandleCheckPluginDependencies.mockResolvedValue(undefined) + + render( + , + { wrapper: createWrapper() }, + ) + + // Switch to URL tab + fireEvent.click(screen.getByText('app.importFromDSLUrl')) + + // Enter URL + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/pipeline.yaml' } }) + + // Click import + const importButton = screen.getByText('app.newApp.import').closest('button')! 
+ fireEvent.click(importButton) + + // Verify API was called + await waitFor(() => { + expect(mockImportDSL).toHaveBeenCalled() + }) + + // Verify success callbacks after API completes + await waitFor(() => { + expect(onSuccess).toHaveBeenCalled() + }) + + await waitFor(() => { + expect(mockPush).toHaveBeenCalledWith('/datasets/dataset-789/pipeline') + }) + }) + + it('should handle version mismatch flow - shows error modal', async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }) + const onClose = vi.fn() + mockImportDSL.mockResolvedValue(createImportDSLResponse({ + status: 'pending', + imported_dsl_version: '0.8.0', + current_dsl_version: '1.0.0', + })) + + render( + , + { wrapper: createWrapper() }, + ) + + // Enter URL + const input = screen.getByPlaceholderText('app.importFromDSLUrlPlaceholder') + fireEvent.change(input, { target: { value: 'https://example.com/old-pipeline.yaml' } }) + + // Click import + const importButton = screen.getByText('app.newApp.import').closest('button')! 
+ fireEvent.click(importButton) + + // Wait for API call + await waitFor(() => { + expect(mockImportDSL).toHaveBeenCalled() + }) + + // Wait for onClose to be called + await waitFor(() => { + expect(onClose).toHaveBeenCalled() + }) + + // Advance timer to show error modal + await act(async () => { + vi.advanceTimersByTime(400) + }) + + // Verify error modal is shown + await waitFor(() => { + expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument() + }) + + vi.useRealTimers() + }) +}) diff --git a/web/app/components/datasets/documents/create-from-pipeline/hooks/index.ts b/web/app/components/datasets/documents/create-from-pipeline/hooks/index.ts new file mode 100644 index 0000000000..0faf3c52f7 --- /dev/null +++ b/web/app/components/datasets/documents/create-from-pipeline/hooks/index.ts @@ -0,0 +1,5 @@ +export { useAddDocumentsSteps } from './use-add-documents-steps' +export { useDatasourceActions } from './use-datasource-actions' +export { useDatasourceOptions } from './use-datasource-options' +export { useLocalFile, useOnlineDocument, useOnlineDrive, useWebsiteCrawl } from './use-datasource-store' +export { useDatasourceUIState } from './use-datasource-ui-state' diff --git a/web/app/components/datasets/documents/create-from-pipeline/hooks/use-add-documents-steps.ts b/web/app/components/datasets/documents/create-from-pipeline/hooks/use-add-documents-steps.ts new file mode 100644 index 0000000000..eba0f5a8c8 --- /dev/null +++ b/web/app/components/datasets/documents/create-from-pipeline/hooks/use-add-documents-steps.ts @@ -0,0 +1,41 @@ +import { useCallback, useState } from 'react' +import { useTranslation } from 'react-i18next' +import { AddDocumentsStep } from '../types' + +/** + * Hook for managing add documents wizard steps + */ +export const useAddDocumentsSteps = () => { + const { t } = useTranslation() + const [currentStep, setCurrentStep] = useState(1) + + const handleNextStep = useCallback(() => { + setCurrentStep(preStep => preStep + 
1) + }, []) + + const handleBackStep = useCallback(() => { + setCurrentStep(preStep => preStep - 1) + }, []) + + const steps = [ + { + label: t('addDocuments.steps.chooseDatasource', { ns: 'datasetPipeline' }), + value: AddDocumentsStep.dataSource, + }, + { + label: t('addDocuments.steps.processDocuments', { ns: 'datasetPipeline' }), + value: AddDocumentsStep.processDocuments, + }, + { + label: t('addDocuments.steps.processingDocuments', { ns: 'datasetPipeline' }), + value: AddDocumentsStep.processingDocuments, + }, + ] + + return { + steps, + currentStep, + handleNextStep, + handleBackStep, + } +} diff --git a/web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-actions.ts b/web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-actions.ts new file mode 100644 index 0000000000..66bd325c33 --- /dev/null +++ b/web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-actions.ts @@ -0,0 +1,321 @@ +import type { StoreApi } from 'zustand' +import type { DataSourceShape } from '@/app/components/datasets/documents/create-from-pipeline/data-source/store' +import type { Datasource } from '@/app/components/rag-pipeline/components/panel/test-run/types' +import type { DataSourceNotionPageMap, NotionPage } from '@/models/common' +import type { CrawlResultItem, DocumentItem, CustomFile as File, FileIndexingEstimateResponse } from '@/models/datasets' +import type { + OnlineDriveFile, + PublishedPipelineRunPreviewResponse, + PublishedPipelineRunResponse, +} from '@/models/pipeline' +import { useCallback, useRef } from 'react' +import { trackEvent } from '@/app/components/base/amplitude' +import { DatasourceType } from '@/models/pipeline' +import { useRunPublishedPipeline } from '@/service/use-pipeline' +import { + buildLocalFileDatasourceInfo, + buildOnlineDocumentDatasourceInfo, + buildOnlineDriveDatasourceInfo, + buildWebsiteCrawlDatasourceInfo, +} from '../utils/datasource-info-builder' + +type 
DatasourceActionsParams = { + datasource: Datasource | undefined + datasourceType: string | undefined + pipelineId: string | undefined + dataSourceStore: StoreApi + setEstimateData: (data: FileIndexingEstimateResponse | undefined) => void + setBatchId: (id: string) => void + setDocuments: (docs: PublishedPipelineRunResponse['documents']) => void + handleNextStep: () => void + PagesMapAndSelectedPagesId: DataSourceNotionPageMap + currentWorkspacePages: { page_id: string }[] | undefined + clearOnlineDocumentData: () => void + clearWebsiteCrawlData: () => void + clearOnlineDriveData: () => void + setDatasource: (ds: Datasource) => void +} + +/** + * Hook for datasource-related actions (preview, process, etc.) + */ +export const useDatasourceActions = ({ + datasource, + datasourceType, + pipelineId, + dataSourceStore, + setEstimateData, + setBatchId, + setDocuments, + handleNextStep, + PagesMapAndSelectedPagesId, + currentWorkspacePages, + clearOnlineDocumentData, + clearWebsiteCrawlData, + clearOnlineDriveData, + setDatasource, +}: DatasourceActionsParams) => { + const isPreview = useRef(false) + const formRef = useRef<{ submit: () => void } | null>(null) + + const { mutateAsync: runPublishedPipeline, isIdle, isPending } = useRunPublishedPipeline() + + // Build datasource info for preview (single item) + const buildPreviewDatasourceInfo = useCallback(() => { + const { + previewLocalFileRef, + previewOnlineDocumentRef, + previewWebsitePageRef, + previewOnlineDriveFileRef, + currentCredentialId, + bucket, + } = dataSourceStore.getState() + + const datasourceInfoList: Record[] = [] + + if (datasourceType === DatasourceType.localFile && previewLocalFileRef.current) { + datasourceInfoList.push(buildLocalFileDatasourceInfo( + previewLocalFileRef.current as File, + currentCredentialId, + )) + } + + if (datasourceType === DatasourceType.onlineDocument && previewOnlineDocumentRef.current) { + datasourceInfoList.push(buildOnlineDocumentDatasourceInfo( + 
previewOnlineDocumentRef.current, + currentCredentialId, + )) + } + + if (datasourceType === DatasourceType.websiteCrawl && previewWebsitePageRef.current) { + datasourceInfoList.push(buildWebsiteCrawlDatasourceInfo( + previewWebsitePageRef.current, + currentCredentialId, + )) + } + + if (datasourceType === DatasourceType.onlineDrive && previewOnlineDriveFileRef.current) { + datasourceInfoList.push(buildOnlineDriveDatasourceInfo( + previewOnlineDriveFileRef.current, + bucket, + currentCredentialId, + )) + } + + return datasourceInfoList + }, [dataSourceStore, datasourceType]) + + // Build datasource info for processing (all items) + const buildProcessDatasourceInfo = useCallback(() => { + const { + currentCredentialId, + localFileList, + onlineDocuments, + websitePages, + bucket, + selectedFileIds, + onlineDriveFileList, + } = dataSourceStore.getState() + + const datasourceInfoList: Record[] = [] + + if (datasourceType === DatasourceType.localFile) { + localFileList.forEach((file) => { + datasourceInfoList.push(buildLocalFileDatasourceInfo(file.file, currentCredentialId)) + }) + } + + if (datasourceType === DatasourceType.onlineDocument) { + onlineDocuments.forEach((page) => { + datasourceInfoList.push(buildOnlineDocumentDatasourceInfo(page, currentCredentialId)) + }) + } + + if (datasourceType === DatasourceType.websiteCrawl) { + websitePages.forEach((page) => { + datasourceInfoList.push(buildWebsiteCrawlDatasourceInfo(page, currentCredentialId)) + }) + } + + if (datasourceType === DatasourceType.onlineDrive) { + selectedFileIds.forEach((id) => { + const file = onlineDriveFileList.find(f => f.id === id) + if (file) + datasourceInfoList.push(buildOnlineDriveDatasourceInfo(file, bucket, currentCredentialId)) + }) + } + + return datasourceInfoList + }, [dataSourceStore, datasourceType]) + + // Handle chunk preview + const handlePreviewChunks = useCallback(async (data: Record) => { + if (!datasource || !pipelineId) + return + + const datasourceInfoList = 
buildPreviewDatasourceInfo() + await runPublishedPipeline({ + pipeline_id: pipelineId, + inputs: data, + start_node_id: datasource.nodeId, + datasource_type: datasourceType as DatasourceType, + datasource_info_list: datasourceInfoList, + is_preview: true, + }, { + onSuccess: (res) => { + setEstimateData((res as PublishedPipelineRunPreviewResponse).data.outputs) + }, + }) + }, [datasource, pipelineId, datasourceType, buildPreviewDatasourceInfo, runPublishedPipeline, setEstimateData]) + + // Handle document processing + const handleProcess = useCallback(async (data: Record) => { + if (!datasource || !pipelineId) + return + + const datasourceInfoList = buildProcessDatasourceInfo() + await runPublishedPipeline({ + pipeline_id: pipelineId, + inputs: data, + start_node_id: datasource.nodeId, + datasource_type: datasourceType as DatasourceType, + datasource_info_list: datasourceInfoList, + is_preview: false, + }, { + onSuccess: (res) => { + setBatchId((res as PublishedPipelineRunResponse).batch || '') + setDocuments((res as PublishedPipelineRunResponse).documents || []) + handleNextStep() + trackEvent('dataset_document_added', { + data_source_type: datasourceType, + indexing_technique: 'pipeline', + }) + }, + }) + }, [datasource, pipelineId, datasourceType, buildProcessDatasourceInfo, runPublishedPipeline, setBatchId, setDocuments, handleNextStep]) + + // Form submission handlers + const onClickProcess = useCallback(() => { + isPreview.current = false + formRef.current?.submit() + }, []) + + const onClickPreview = useCallback(() => { + isPreview.current = true + formRef.current?.submit() + }, []) + + const handleSubmit = useCallback((data: Record) => { + if (isPreview.current) + handlePreviewChunks(data) + else + handleProcess(data) + }, [handlePreviewChunks, handleProcess]) + + // Preview change handlers + const handlePreviewFileChange = useCallback((file: DocumentItem) => { + const { previewLocalFileRef } = dataSourceStore.getState() + previewLocalFileRef.current = file 
+ onClickPreview() + }, [dataSourceStore, onClickPreview]) + + const handlePreviewOnlineDocumentChange = useCallback((page: NotionPage) => { + const { previewOnlineDocumentRef } = dataSourceStore.getState() + previewOnlineDocumentRef.current = page + onClickPreview() + }, [dataSourceStore, onClickPreview]) + + const handlePreviewWebsiteChange = useCallback((website: CrawlResultItem) => { + const { previewWebsitePageRef } = dataSourceStore.getState() + previewWebsitePageRef.current = website + onClickPreview() + }, [dataSourceStore, onClickPreview]) + + const handlePreviewOnlineDriveFileChange = useCallback((file: OnlineDriveFile) => { + const { previewOnlineDriveFileRef } = dataSourceStore.getState() + previewOnlineDriveFileRef.current = file + onClickPreview() + }, [dataSourceStore, onClickPreview]) + + // Select all handler + const handleSelectAll = useCallback(() => { + const { + onlineDocuments, + onlineDriveFileList, + selectedFileIds, + setOnlineDocuments, + setSelectedFileIds, + setSelectedPagesId, + } = dataSourceStore.getState() + + if (datasourceType === DatasourceType.onlineDocument) { + const allIds = currentWorkspacePages?.map(page => page.page_id) || [] + if (onlineDocuments.length < allIds.length) { + const selectedPages = Array.from(allIds).map(pageId => PagesMapAndSelectedPagesId[pageId]) + setOnlineDocuments(selectedPages) + setSelectedPagesId(new Set(allIds)) + } + else { + setOnlineDocuments([]) + setSelectedPagesId(new Set()) + } + } + + if (datasourceType === DatasourceType.onlineDrive) { + const allKeys = onlineDriveFileList.filter(item => item.type !== 'bucket').map(file => file.id) + if (selectedFileIds.length < allKeys.length) + setSelectedFileIds(allKeys) + else + setSelectedFileIds([]) + } + }, [PagesMapAndSelectedPagesId, currentWorkspacePages, dataSourceStore, datasourceType]) + + // Clear datasource data based on type + const clearDataSourceData = useCallback((dataSource: Datasource) => { + const providerType = 
dataSource.nodeData.provider_type + const clearFunctions: Record void> = { + [DatasourceType.onlineDocument]: clearOnlineDocumentData, + [DatasourceType.websiteCrawl]: clearWebsiteCrawlData, + [DatasourceType.onlineDrive]: clearOnlineDriveData, + [DatasourceType.localFile]: () => {}, + } + clearFunctions[providerType]?.() + }, [clearOnlineDocumentData, clearOnlineDriveData, clearWebsiteCrawlData]) + + // Switch datasource handler + const handleSwitchDataSource = useCallback((dataSource: Datasource) => { + const { + setCurrentCredentialId, + currentNodeIdRef, + } = dataSourceStore.getState() + clearDataSourceData(dataSource) + setCurrentCredentialId('') + currentNodeIdRef.current = dataSource.nodeId + setDatasource(dataSource) + }, [clearDataSourceData, dataSourceStore, setDatasource]) + + // Credential change handler + const handleCredentialChange = useCallback((credentialId: string) => { + const { setCurrentCredentialId } = dataSourceStore.getState() + if (datasource) + clearDataSourceData(datasource) + setCurrentCredentialId(credentialId) + }, [clearDataSourceData, dataSourceStore, datasource]) + + return { + isPreview, + formRef, + isIdle, + isPending, + onClickProcess, + onClickPreview, + handleSubmit, + handlePreviewFileChange, + handlePreviewOnlineDocumentChange, + handlePreviewWebsiteChange, + handlePreviewOnlineDriveFileChange, + handleSelectAll, + handleSwitchDataSource, + handleCredentialChange, + } +} diff --git a/web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-options.ts b/web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-options.ts new file mode 100644 index 0000000000..a8b233faba --- /dev/null +++ b/web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-options.ts @@ -0,0 +1,27 @@ +import type { DataSourceOption } from '@/app/components/rag-pipeline/components/panel/test-run/types' +import type { DataSourceNodeType } from 
'@/app/components/workflow/nodes/data-source/types' +import type { Node } from '@/app/components/workflow/types' +import { useMemo } from 'react' +import { BlockEnum } from '@/app/components/workflow/types' + +/** + * Hook for getting datasource options from pipeline nodes + */ +export const useDatasourceOptions = (pipelineNodes: Node[]) => { + const datasourceNodes = pipelineNodes.filter(node => node.data.type === BlockEnum.DataSource) + + const options = useMemo(() => { + const options: DataSourceOption[] = [] + datasourceNodes.forEach((node) => { + const label = node.data.title + options.push({ + label, + value: node.id, + data: node.data, + }) + }) + return options + }, [datasourceNodes]) + + return options +} diff --git a/web/app/components/datasets/documents/create-from-pipeline/hooks.ts b/web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-store.ts similarity index 70% rename from web/app/components/datasets/documents/create-from-pipeline/hooks.ts rename to web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-store.ts index 68d79de031..da620de154 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/hooks.ts +++ b/web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-store.ts @@ -1,69 +1,12 @@ -import type { DataSourceOption } from '@/app/components/rag-pipeline/components/panel/test-run/types' -import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types' -import type { Node } from '@/app/components/workflow/types' import type { DataSourceNotionPageMap, DataSourceNotionWorkspace } from '@/models/common' -import { useCallback, useMemo, useState } from 'react' -import { useTranslation } from 'react-i18next' +import { useCallback, useMemo } from 'react' import { useShallow } from 'zustand/react/shallow' -import { BlockEnum } from '@/app/components/workflow/types' import { CrawlStep } from '@/models/datasets' -import { 
useDataSourceStore, useDataSourceStoreWithSelector } from './data-source/store' -import { AddDocumentsStep } from './types' - -export const useAddDocumentsSteps = () => { - const { t } = useTranslation() - const [currentStep, setCurrentStep] = useState(1) - - const handleNextStep = useCallback(() => { - setCurrentStep(preStep => preStep + 1) - }, []) - - const handleBackStep = useCallback(() => { - setCurrentStep(preStep => preStep - 1) - }, []) - - const steps = [ - { - label: t('addDocuments.steps.chooseDatasource', { ns: 'datasetPipeline' }), - value: AddDocumentsStep.dataSource, - }, - { - label: t('addDocuments.steps.processDocuments', { ns: 'datasetPipeline' }), - value: AddDocumentsStep.processDocuments, - }, - { - label: t('addDocuments.steps.processingDocuments', { ns: 'datasetPipeline' }), - value: AddDocumentsStep.processingDocuments, - }, - ] - - return { - steps, - currentStep, - handleNextStep, - handleBackStep, - } -} - -export const useDatasourceOptions = (pipelineNodes: Node[]) => { - const datasourceNodes = pipelineNodes.filter(node => node.data.type === BlockEnum.DataSource) - - const options = useMemo(() => { - const options: DataSourceOption[] = [] - datasourceNodes.forEach((node) => { - const label = node.data.title - options.push({ - label, - value: node.id, - data: node.data, - }) - }) - return options - }, [datasourceNodes]) - - return options -} +import { useDataSourceStore, useDataSourceStoreWithSelector } from '../data-source/store' +/** + * Hook for local file datasource store operations + */ export const useLocalFile = () => { const { localFileList, @@ -89,6 +32,9 @@ export const useLocalFile = () => { } } +/** + * Hook for online document datasource store operations + */ export const useOnlineDocument = () => { const { documentsData, @@ -147,6 +93,9 @@ export const useOnlineDocument = () => { } } +/** + * Hook for website crawl datasource store operations + */ export const useWebsiteCrawl = () => { const { websitePages, @@ -186,6 
+135,9 @@ export const useWebsiteCrawl = () => { } } +/** + * Hook for online drive datasource store operations + */ export const useOnlineDrive = () => { const { onlineDriveFileList, diff --git a/web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-ui-state.ts b/web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-ui-state.ts new file mode 100644 index 0000000000..e398f90a48 --- /dev/null +++ b/web/app/components/datasets/documents/create-from-pipeline/hooks/use-datasource-ui-state.ts @@ -0,0 +1,132 @@ +import type { Datasource } from '@/app/components/rag-pipeline/components/panel/test-run/types' +import type { OnlineDriveFile } from '@/models/pipeline' +import { useMemo } from 'react' +import { useTranslation } from 'react-i18next' +import { DatasourceType } from '@/models/pipeline' + +type DatasourceUIStateParams = { + datasource: Datasource | undefined + allFileLoaded: boolean + localFileListLength: number + onlineDocumentsLength: number + websitePagesLength: number + selectedFileIdsLength: number + onlineDriveFileList: OnlineDriveFile[] + isVectorSpaceFull: boolean + enableBilling: boolean + currentWorkspacePagesLength: number + fileUploadConfig: { file_size_limit: number, batch_count_limit: number } +} + +/** + * Hook for computing datasource UI state based on datasource type + */ +export const useDatasourceUIState = ({ + datasource, + allFileLoaded, + localFileListLength, + onlineDocumentsLength, + websitePagesLength, + selectedFileIdsLength, + onlineDriveFileList, + isVectorSpaceFull, + enableBilling, + currentWorkspacePagesLength, + fileUploadConfig, +}: DatasourceUIStateParams) => { + const { t } = useTranslation() + const datasourceType = datasource?.nodeData.provider_type + + const isShowVectorSpaceFull = useMemo(() => { + if (!datasource || !datasourceType) + return false + + // Lookup table for vector space full condition check + const vectorSpaceFullConditions: Record = { + 
[DatasourceType.localFile]: allFileLoaded, + [DatasourceType.onlineDocument]: onlineDocumentsLength > 0, + [DatasourceType.websiteCrawl]: websitePagesLength > 0, + [DatasourceType.onlineDrive]: onlineDriveFileList.length > 0, + } + + const condition = vectorSpaceFullConditions[datasourceType] + return condition && isVectorSpaceFull && enableBilling + }, [datasource, datasourceType, allFileLoaded, onlineDocumentsLength, websitePagesLength, onlineDriveFileList.length, isVectorSpaceFull, enableBilling]) + + // Lookup table for next button disabled conditions + const nextBtnDisabled = useMemo(() => { + if (!datasource || !datasourceType) + return true + + const disabledConditions: Record = { + [DatasourceType.localFile]: isShowVectorSpaceFull || localFileListLength === 0 || !allFileLoaded, + [DatasourceType.onlineDocument]: isShowVectorSpaceFull || onlineDocumentsLength === 0, + [DatasourceType.websiteCrawl]: isShowVectorSpaceFull || websitePagesLength === 0, + [DatasourceType.onlineDrive]: isShowVectorSpaceFull || selectedFileIdsLength === 0, + } + + return disabledConditions[datasourceType] ?? 
true + }, [datasource, datasourceType, isShowVectorSpaceFull, localFileListLength, allFileLoaded, onlineDocumentsLength, websitePagesLength, selectedFileIdsLength]) + + // Check if select all should be shown + const showSelect = useMemo(() => { + if (datasourceType === DatasourceType.onlineDocument) + return currentWorkspacePagesLength > 0 + + if (datasourceType === DatasourceType.onlineDrive) { + const nonBucketItems = onlineDriveFileList.filter(item => item.type !== 'bucket') + const isBucketList = onlineDriveFileList.some(file => file.type === 'bucket') + return !isBucketList && nonBucketItems.length > 0 + } + + return false + }, [currentWorkspacePagesLength, datasourceType, onlineDriveFileList]) + + // Total selectable options count + const totalOptions = useMemo(() => { + if (datasourceType === DatasourceType.onlineDocument) + return currentWorkspacePagesLength + + if (datasourceType === DatasourceType.onlineDrive) + return onlineDriveFileList.filter(item => item.type !== 'bucket').length + + return undefined + }, [currentWorkspacePagesLength, datasourceType, onlineDriveFileList]) + + // Selected options count + const selectedOptions = useMemo(() => { + if (datasourceType === DatasourceType.onlineDocument) + return onlineDocumentsLength + + if (datasourceType === DatasourceType.onlineDrive) + return selectedFileIdsLength + + return undefined + }, [datasourceType, onlineDocumentsLength, selectedFileIdsLength]) + + // Tip message for selection + const tip = useMemo(() => { + if (datasourceType === DatasourceType.onlineDocument) + return t('addDocuments.selectOnlineDocumentTip', { ns: 'datasetPipeline', count: 50 }) + + if (datasourceType === DatasourceType.onlineDrive) { + return t('addDocuments.selectOnlineDriveTip', { + ns: 'datasetPipeline', + count: fileUploadConfig.batch_count_limit, + fileSize: fileUploadConfig.file_size_limit, + }) + } + + return '' + }, [datasourceType, fileUploadConfig.batch_count_limit, fileUploadConfig.file_size_limit, t]) + + return 
{ + datasourceType, + isShowVectorSpaceFull, + nextBtnDisabled, + showSelect, + totalOptions, + selectedOptions, + tip, + } +} diff --git a/web/app/components/datasets/documents/create-from-pipeline/index.spec.tsx b/web/app/components/datasets/documents/create-from-pipeline/index.spec.tsx new file mode 100644 index 0000000000..c43678def0 --- /dev/null +++ b/web/app/components/datasets/documents/create-from-pipeline/index.spec.tsx @@ -0,0 +1,2698 @@ +import type { Datasource } from '@/app/components/rag-pipeline/components/panel/test-run/types' +import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types' +import type { Node } from '@/app/components/workflow/types' +import type { NotionPage } from '@/models/common' +import type { CrawlResultItem, CustomFile, DocumentItem, FileItem } from '@/models/datasets' +import type { InitialDocumentDetail, OnlineDriveFile } from '@/models/pipeline' +import { act, fireEvent, render, renderHook, screen } from '@testing-library/react' +import * as React from 'react' +import { BlockEnum } from '@/app/components/workflow/types' +import { DatasourceType } from '@/models/pipeline' +import { TransferMethod } from '@/types/app' +import { + useAddDocumentsSteps, + useDatasourceActions, + useDatasourceOptions, + useDatasourceUIState, + useLocalFile, + useOnlineDocument, + useOnlineDrive, + useWebsiteCrawl, +} from './hooks' +import { StepOneContent, StepThreeContent, StepTwoContent } from './steps' +import { StepOnePreview, StepTwoPreview } from './steps/preview-panel' +import { + buildLocalFileDatasourceInfo, + buildOnlineDocumentDatasourceInfo, + buildOnlineDriveDatasourceInfo, + buildWebsiteCrawlDatasourceInfo, +} from './utils/datasource-info-builder' + +// ========================================== +// Mock External Dependencies Only +// ========================================== + +// Mock context providers +const mockPlan = { + usage: { vectorSpace: 50 }, + total: { vectorSpace: 100 }, + type: 
'professional', +} + +vi.mock('@/context/provider-context', () => ({ + useProviderContextSelector: (selector: (state: { plan: typeof mockPlan, enableBilling: boolean }) => unknown) => + selector({ plan: mockPlan, enableBilling: true }), +})) + +vi.mock('@/context/dataset-detail', () => ({ + useDatasetDetailContextWithSelector: (selector: (state: { dataset: { pipeline_id: string } }) => unknown) => + selector({ dataset: { pipeline_id: 'test-pipeline-id' } }), +})) + +// Mock API services +const mockRunPublishedPipeline = vi.fn() +vi.mock('@/service/use-pipeline', () => ({ + usePublishedPipelineInfo: () => ({ + data: { + graph: { + nodes: [ + { + id: 'node-1', + data: { + type: 'data-source', + title: 'Local File', + provider_type: DatasourceType.localFile, + plugin_id: 'plugin-1', + fileExtensions: ['.txt', '.pdf'], + }, + }, + ], + }, + }, + isFetching: false, + }), + useRunPublishedPipeline: () => ({ + mutateAsync: mockRunPublishedPipeline, + isIdle: true, + isPending: false, + }), +})) + +vi.mock('@/service/use-common', () => ({ + useFileUploadConfig: () => ({ + data: { + file_size_limit: 15, + batch_count_limit: 5, + }, + }), +})) + +// Mock amplitude tracking +vi.mock('@/app/components/base/amplitude', () => ({ + trackEvent: vi.fn(), +})) + +// Mock next/navigation +vi.mock('next/navigation', () => ({ + useParams: () => ({ datasetId: 'test-dataset-id' }), + useRouter: () => ({ + push: vi.fn(), + replace: vi.fn(), + back: vi.fn(), + }), + usePathname: () => '/datasets/test-dataset-id/documents/create-from-pipeline', +})) + +// Mock next/link +vi.mock('next/link', () => ({ + default: ({ children, href }: { children: React.ReactNode, href: string }) => ( + {children} + ), +})) + +// Mock billing components (external dependencies) +vi.mock('@/app/components/billing/vector-space-full', () => ({ + default: () =>
Vector Space Full
, +})) + +vi.mock('@/app/components/billing/plan-upgrade-modal', () => ({ + default: ({ show, onClose }: { show: boolean, onClose: () => void }) => ( + show + ? ( +
+ +
+ ) + : null + ), +})) + +vi.mock('@/app/components/datasets/create/step-one/upgrade-card', () => ({ + default: () =>
Upgrade Card
, +})) + +// Mock zustand store +// eslint-disable-next-line ts/no-explicit-any +type MockDataSourceStore = any + +const mockStoreState = { + localFileList: [] as FileItem[], + currentLocalFile: undefined as CustomFile | undefined, + setCurrentLocalFile: vi.fn(), + documentsData: [] as { workspace_id: string, pages: { page_id: string }[] }[], + onlineDocuments: [] as (NotionPage & { workspace_id: string })[], + currentDocument: undefined as (NotionPage & { workspace_id: string }) | undefined, + setDocumentsData: vi.fn(), + setSearchValue: vi.fn(), + setSelectedPagesId: vi.fn(), + setOnlineDocuments: vi.fn(), + setCurrentDocument: vi.fn(), + websitePages: [] as CrawlResultItem[], + currentWebsite: undefined as CrawlResultItem | undefined, + setCurrentWebsite: vi.fn(), + setPreviewIndex: vi.fn(), + setStep: vi.fn(), + setCrawlResult: vi.fn(), + setWebsitePages: vi.fn(), + onlineDriveFileList: [] as OnlineDriveFile[], + selectedFileIds: [] as string[], + setOnlineDriveFileList: vi.fn(), + setBucket: vi.fn(), + setPrefix: vi.fn(), + setKeywords: vi.fn(), + setSelectedFileIds: vi.fn(), + previewLocalFileRef: { current: undefined }, + previewOnlineDocumentRef: { current: undefined }, + previewWebsitePageRef: { current: undefined }, + previewOnlineDriveFileRef: { current: undefined }, + currentCredentialId: '', + setCurrentCredentialId: vi.fn(), + currentNodeIdRef: { current: '' }, + bucket: '', +} + +vi.mock('./data-source/store', () => ({ + useDataSourceStore: () => ({ + getState: () => mockStoreState, + }), + useDataSourceStoreWithSelector: (selector: (state: typeof mockStoreState) => unknown) => selector(mockStoreState), +})) + +vi.mock('./data-source/store/provider', () => ({ + default: ({ children }: { children: React.ReactNode }) => <>{children}, +})) + +// ========================================== +// Test Data Factories +// ========================================== + +const createMockDatasource = (overrides?: Partial): Datasource => ({ + nodeId: 'node-1', + 
nodeData: { + type: 'data-source', + title: 'Local File', + desc: '', + provider_type: DatasourceType.localFile, + plugin_id: 'plugin-1', + provider_name: 'local', + datasource_name: 'local-file', + datasource_label: 'Local File', + fileExtensions: ['.txt', '.pdf'], + } as unknown as DataSourceNodeType, + ...overrides, +}) + +const createMockFile = (overrides?: Partial): CustomFile => ({ + id: 'file-1', + name: 'test.txt', + type: 'text/plain', + size: 1024, + extension: '.txt', + mime_type: 'text/plain', + ...overrides, +} as CustomFile) + +const createMockFileItem = (overrides?: Partial): FileItem => ({ + file: createMockFile(), + progress: 100, + ...overrides, +} as FileItem) + +const createMockNotionPage = (overrides?: Partial): NotionPage & { workspace_id: string } => ({ + page_id: 'page-1', + page_name: 'Test Page', + page_icon: null, + type: 'page', + workspace_id: 'workspace-1', + ...overrides, +} as NotionPage & { workspace_id: string }) + +const createMockCrawlResult = (overrides?: Partial): CrawlResultItem => ({ + source_url: 'https://example.com', + title: 'Test Page', + markdown: '# Test', + description: 'A test page', + ...overrides, +} as CrawlResultItem) + +const createMockOnlineDriveFile = (overrides?: Partial): OnlineDriveFile => ({ + id: 'drive-file-1', + name: 'test-file.pdf', + type: 'file', + ...overrides, +} as OnlineDriveFile) + +// ========================================== +// Hook Tests - useAddDocumentsSteps +// ========================================== +describe('useAddDocumentsSteps', () => { + it('should initialize with step 1', () => { + const { result } = renderHook(() => useAddDocumentsSteps()) + expect(result.current.currentStep).toBe(1) + }) + + it('should return 3 steps', () => { + const { result } = renderHook(() => useAddDocumentsSteps()) + expect(result.current.steps).toHaveLength(3) + }) + + it('should increment step when handleNextStep is called', () => { + const { result } = renderHook(() => useAddDocumentsSteps()) + + 
act(() => { + result.current.handleNextStep() + }) + + expect(result.current.currentStep).toBe(2) + }) + + it('should decrement step when handleBackStep is called', () => { + const { result } = renderHook(() => useAddDocumentsSteps()) + + act(() => { + result.current.handleNextStep() + result.current.handleBackStep() + }) + + expect(result.current.currentStep).toBe(1) + }) + + it('should maintain callback reference stability (handleNextStep)', () => { + const { result, rerender } = renderHook(() => useAddDocumentsSteps()) + const firstRef = result.current.handleNextStep + rerender() + expect(result.current.handleNextStep).toBe(firstRef) + }) + + it('should maintain callback reference stability (handleBackStep)', () => { + const { result, rerender } = renderHook(() => useAddDocumentsSteps()) + const firstRef = result.current.handleBackStep + rerender() + expect(result.current.handleBackStep).toBe(firstRef) + }) +}) + +// ========================================== +// Hook Tests - useDatasourceUIState +// ========================================== +describe('useDatasourceUIState', () => { + const defaultParams = { + datasource: undefined as Datasource | undefined, + allFileLoaded: false, + localFileListLength: 0, + onlineDocumentsLength: 0, + websitePagesLength: 0, + selectedFileIdsLength: 0, + onlineDriveFileList: [] as OnlineDriveFile[], + isVectorSpaceFull: false, + enableBilling: true, + currentWorkspacePagesLength: 0, + fileUploadConfig: { file_size_limit: 15, batch_count_limit: 5 }, + } + + describe('nextBtnDisabled', () => { + it('should return true when no datasource is selected', () => { + const { result } = renderHook(() => useDatasourceUIState(defaultParams)) + expect(result.current.nextBtnDisabled).toBe(true) + }) + + it('should return true for localFile when no files are loaded', () => { + const { result } = renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: createMockDatasource(), + allFileLoaded: false, + localFileListLength: 0, 
+ })) + expect(result.current.nextBtnDisabled).toBe(true) + }) + + it('should return false for localFile when files are loaded', () => { + const { result } = renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: createMockDatasource(), + allFileLoaded: true, + localFileListLength: 1, + })) + expect(result.current.nextBtnDisabled).toBe(false) + }) + + it('should return true for onlineDocument when no documents are selected', () => { + const { result } = renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDocument, + }, + }), + onlineDocumentsLength: 0, + })) + expect(result.current.nextBtnDisabled).toBe(true) + }) + + it('should return false for onlineDocument when documents are selected', () => { + const { result } = renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDocument, + }, + }), + onlineDocumentsLength: 1, + })) + expect(result.current.nextBtnDisabled).toBe(false) + }) + }) + + describe('isShowVectorSpaceFull', () => { + it('should return false when vector space is not full', () => { + const { result } = renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: createMockDatasource(), + allFileLoaded: true, + isVectorSpaceFull: false, + })) + expect(result.current.isShowVectorSpaceFull).toBe(false) + }) + + it('should return true when vector space is full and billing is enabled', () => { + const { result } = renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: createMockDatasource(), + allFileLoaded: true, + isVectorSpaceFull: true, + enableBilling: true, + })) + expect(result.current.isShowVectorSpaceFull).toBe(true) + }) + + it('should return false when vector space is full but billing is disabled', () => { + const { result } = 
renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: createMockDatasource(), + allFileLoaded: true, + isVectorSpaceFull: true, + enableBilling: false, + })) + expect(result.current.isShowVectorSpaceFull).toBe(false) + }) + }) + + describe('showSelect', () => { + it('should return false for localFile datasource', () => { + const { result } = renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: createMockDatasource(), + })) + expect(result.current.showSelect).toBe(false) + }) + + it('should return true for onlineDocument when pages exist', () => { + const { result } = renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDocument, + }, + }), + currentWorkspacePagesLength: 5, + })) + expect(result.current.showSelect).toBe(true) + }) + + it('should return true for onlineDrive when non-bucket files exist', () => { + const { result } = renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDrive, + }, + }), + onlineDriveFileList: [createMockOnlineDriveFile()], + })) + expect(result.current.showSelect).toBe(true) + }) + + it('should return false for onlineDrive when only buckets exist', () => { + const { result } = renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDrive, + }, + }), + onlineDriveFileList: [createMockOnlineDriveFile({ type: 'bucket' as OnlineDriveFile['type'] })], + })) + expect(result.current.showSelect).toBe(false) + }) + }) + + describe('tip', () => { + it('should return empty string for localFile', () => { + const { result } = renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: 
createMockDatasource(), + })) + expect(result.current.tip).toBe('') + }) + + it('should return translation key for onlineDocument', () => { + const { result } = renderHook(() => useDatasourceUIState({ + ...defaultParams, + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDocument, + }, + }), + })) + expect(result.current.tip).toContain('datasetPipeline.addDocuments.selectOnlineDocumentTip') + }) + }) +}) + +// ========================================== +// Utility Functions Tests - datasource-info-builder +// ========================================== +describe('datasource-info-builder', () => { + describe('buildLocalFileDatasourceInfo', () => { + it('should build correct info for local file', () => { + const file = createMockFile() + const result = buildLocalFileDatasourceInfo(file, 'cred-1') + + expect(result).toEqual({ + related_id: 'file-1', + name: 'test.txt', + type: 'text/plain', + size: 1024, + extension: '.txt', + mime_type: 'text/plain', + url: '', + transfer_method: TransferMethod.local_file, + credential_id: 'cred-1', + }) + }) + + it('should handle file with undefined id', () => { + const file = createMockFile({ id: undefined }) + const result = buildLocalFileDatasourceInfo(file, 'cred-1') + expect(result.related_id).toBeUndefined() + }) + }) + + describe('buildOnlineDocumentDatasourceInfo', () => { + it('should build correct info for online document', () => { + const page = createMockNotionPage() + const result = buildOnlineDocumentDatasourceInfo(page, 'cred-1') + + expect(result.workspace_id).toBe('workspace-1') + expect(result.credential_id).toBe('cred-1') + expect(result.page).toBeDefined() + expect((result.page as NotionPage).page_id).toBe('page-1') + }) + + it('should exclude workspace_id from page object', () => { + const page = createMockNotionPage() + const result = buildOnlineDocumentDatasourceInfo(page, 'cred-1') + + expect((result.page as 
Record).workspace_id).toBeUndefined() + }) + }) + + describe('buildWebsiteCrawlDatasourceInfo', () => { + it('should build correct info for website crawl', () => { + const page = createMockCrawlResult() + const result = buildWebsiteCrawlDatasourceInfo(page, 'cred-1') + + expect(result.source_url).toBe('https://example.com') + expect(result.credential_id).toBe('cred-1') + }) + + it('should spread all page properties', () => { + const page = createMockCrawlResult({ title: 'Custom Title' }) + const result = buildWebsiteCrawlDatasourceInfo(page, 'cred-1') + + expect(result.title).toBe('Custom Title') + }) + }) + + describe('buildOnlineDriveDatasourceInfo', () => { + it('should build correct info for online drive', () => { + const file = createMockOnlineDriveFile() + const result = buildOnlineDriveDatasourceInfo(file, 'my-bucket', 'cred-1') + + expect(result).toEqual({ + bucket: 'my-bucket', + id: 'drive-file-1', + name: 'test-file.pdf', + type: 'file', + credential_id: 'cred-1', + }) + }) + }) +}) + +// ========================================== +// Step Components Tests (with real components) +// ========================================== +describe('StepOneContent', () => { + const defaultProps = { + datasource: undefined as Datasource | undefined, + datasourceType: undefined as string | undefined, + pipelineNodes: [] as Node[], + supportBatchUpload: true, + localFileListLength: 0, + isShowVectorSpaceFull: false, + showSelect: false, + totalOptions: undefined as number | undefined, + selectedOptions: undefined as number | undefined, + tip: '', + nextBtnDisabled: true, + onSelectDataSource: vi.fn(), + onCredentialChange: vi.fn(), + onSelectAll: vi.fn(), + onNextStep: vi.fn(), + } + + beforeEach(() => { + vi.clearAllMocks() + }) + + it('should render VectorSpaceFull when isShowVectorSpaceFull is true', () => { + render() + expect(screen.getByTestId('vector-space-full')).toBeInTheDocument() + }) + + it('should not render VectorSpaceFull when isShowVectorSpaceFull is 
false', () => { + render() + expect(screen.queryByTestId('vector-space-full')).not.toBeInTheDocument() + }) + + it('should render UpgradeCard when conditions are met', () => { + render( + , + ) + expect(screen.getByTestId('upgrade-card')).toBeInTheDocument() + }) + + it('should not render UpgradeCard when supportBatchUpload is true', () => { + render( + , + ) + expect(screen.queryByTestId('upgrade-card')).not.toBeInTheDocument() + }) + + it('should call onNextStep when next button is clicked', () => { + const onNextStep = vi.fn() + render() + + // Find button with translation key text (using regex for flexibility) + const nextButton = screen.getByRole('button', { name: /datasetCreation.stepOne.button/i }) + fireEvent.click(nextButton) + + expect(onNextStep).toHaveBeenCalled() + }) + + it('should disable next button when nextBtnDisabled is true', () => { + render() + + const nextButton = screen.getByRole('button', { name: /datasetCreation.stepOne.button/i }) + expect(nextButton).toBeDisabled() + }) +}) + +describe('StepTwoContent', () => { + // Mock ProcessDocuments since it has complex dependencies + vi.mock('./process-documents', () => ({ + default: React.forwardRef(({ dataSourceNodeId, isRunning, onProcess, onPreview, onSubmit, onBack }: { + dataSourceNodeId: string + isRunning: boolean + onProcess: () => void + onPreview: () => void + onSubmit: (data: Record) => void + onBack: () => void + }, ref: React.Ref<{ submit: () => void }>) => { + React.useImperativeHandle(ref, () => ({ + submit: () => onSubmit({ test: 'data' }), + })) + return ( +
+ {dataSourceNodeId} + {isRunning.toString()} + + + +
+ ) + }), + })) + + const defaultProps = { + formRef: { current: null } as unknown as React.RefObject<{ submit: () => void }>, + dataSourceNodeId: 'node-1', + isRunning: false, + onProcess: vi.fn(), + onPreview: vi.fn(), + onSubmit: vi.fn(), + onBack: vi.fn(), + } + + beforeEach(() => { + vi.clearAllMocks() + }) + + it('should render ProcessDocuments component', () => { + render() + expect(screen.getByTestId('process-documents')).toBeInTheDocument() + }) + + it('should pass dataSourceNodeId to ProcessDocuments', () => { + render() + expect(screen.getByTestId('datasource-node-id')).toHaveTextContent('custom-node') + }) + + it('should pass isRunning to ProcessDocuments', () => { + render() + expect(screen.getByTestId('is-running')).toHaveTextContent('true') + }) + + it('should call onProcess when process button is clicked', () => { + const onProcess = vi.fn() + render() + + fireEvent.click(screen.getByTestId('process-btn')) + + expect(onProcess).toHaveBeenCalled() + }) + + it('should call onBack when back button is clicked', () => { + const onBack = vi.fn() + render() + + fireEvent.click(screen.getByTestId('back-btn')) + + expect(onBack).toHaveBeenCalled() + }) +}) + +describe('StepThreeContent', () => { + // Mock Processing since it has complex dependencies + vi.mock('./processing', () => ({ + default: ({ batchId, documents }: { batchId: string, documents: unknown[] }) => ( +
+ {batchId} + {documents.length} +
+ ), + })) + + it('should render Processing component', () => { + render() + expect(screen.getByTestId('processing')).toBeInTheDocument() + }) + + it('should pass batchId to Processing', () => { + render() + expect(screen.getByTestId('batch-id')).toHaveTextContent('batch-123') + }) + + it('should pass documents count to Processing', () => { + const documents = [{ id: '1' }, { id: '2' }] + render() + expect(screen.getByTestId('documents-count')).toHaveTextContent('2') + }) +}) + +// ========================================== +// Preview Panel Tests +// ========================================== +describe('StepOnePreview', () => { + // Mock preview components + vi.mock('./preview/file-preview', () => ({ + default: ({ file, hidePreview }: { file: CustomFile, hidePreview: () => void }) => ( +
+ {file.name} + +
+ ), + })) + + vi.mock('./preview/online-document-preview', () => ({ + default: ({ datasourceNodeId, currentPage, hidePreview }: { + datasourceNodeId: string + currentPage: NotionPage & { workspace_id: string } + hidePreview: () => void + }) => ( +
+ {datasourceNodeId} + {currentPage.page_id} + +
+ ), + })) + + vi.mock('./preview/web-preview', () => ({ + default: ({ currentWebsite, hidePreview }: { currentWebsite: CrawlResultItem, hidePreview: () => void }) => ( +
+ {currentWebsite.source_url} + +
+ ), + })) + + const defaultProps = { + datasource: undefined as Datasource | undefined, + currentLocalFile: undefined as CustomFile | undefined, + currentDocument: undefined as (NotionPage & { workspace_id: string }) | undefined, + currentWebsite: undefined as CrawlResultItem | undefined, + hidePreviewLocalFile: vi.fn(), + hidePreviewOnlineDocument: vi.fn(), + hideWebsitePreview: vi.fn(), + } + + beforeEach(() => { + vi.clearAllMocks() + }) + + it('should not render any preview when no file is selected', () => { + const { container } = render() + expect(container.querySelector('[data-testid="file-preview"]')).not.toBeInTheDocument() + expect(container.querySelector('[data-testid="online-document-preview"]')).not.toBeInTheDocument() + expect(container.querySelector('[data-testid="web-preview"]')).not.toBeInTheDocument() + }) + + it('should render FilePreview when currentLocalFile is set', () => { + render( + , + ) + expect(screen.getByTestId('file-preview')).toBeInTheDocument() + expect(screen.getByTestId('file-name')).toHaveTextContent('test.txt') + }) + + it('should render OnlineDocumentPreview when currentDocument is set', () => { + render( + , + ) + expect(screen.getByTestId('online-document-preview')).toBeInTheDocument() + }) + + it('should render WebsitePreview when currentWebsite is set', () => { + render( + , + ) + expect(screen.getByTestId('web-preview')).toBeInTheDocument() + }) + + it('should call hidePreviewLocalFile when hide button is clicked', () => { + const hidePreviewLocalFile = vi.fn() + render( + , + ) + + fireEvent.click(screen.getByTestId('hide-preview')) + + expect(hidePreviewLocalFile).toHaveBeenCalled() + }) +}) + +describe('StepTwoPreview', () => { + // Mock ChunkPreview + vi.mock('./preview/chunk-preview', () => ({ + default: ({ dataSourceType, isIdle, isPending, onPreview }: { + dataSourceType: string + isIdle: boolean + isPending: boolean + onPreview: () => void + }) => ( +
+ {dataSourceType} + {isIdle.toString()} + {isPending.toString()} + +
+ ), + })) + + const defaultProps = { + datasourceType: DatasourceType.localFile as string | undefined, + localFileList: [] as FileItem[], + onlineDocuments: [] as (NotionPage & { workspace_id: string })[], + websitePages: [] as CrawlResultItem[], + selectedOnlineDriveFileList: [] as OnlineDriveFile[], + isIdle: true, + isPendingPreview: false, + estimateData: undefined, + onPreview: vi.fn(), + handlePreviewFileChange: vi.fn(), + handlePreviewOnlineDocumentChange: vi.fn(), + handlePreviewWebsitePageChange: vi.fn(), + handlePreviewOnlineDriveFileChange: vi.fn(), + } + + beforeEach(() => { + vi.clearAllMocks() + }) + + it('should render ChunkPreview component', () => { + render() + expect(screen.getByTestId('chunk-preview')).toBeInTheDocument() + }) + + it('should pass datasourceType to ChunkPreview', () => { + render() + expect(screen.getByTestId('datasource-type')).toHaveTextContent(DatasourceType.onlineDocument) + }) + + it('should pass isIdle to ChunkPreview', () => { + render() + expect(screen.getByTestId('is-idle')).toHaveTextContent('false') + }) + + it('should pass isPendingPreview to ChunkPreview', () => { + render() + expect(screen.getByTestId('is-pending')).toHaveTextContent('true') + }) + + it('should call onPreview when preview button is clicked', () => { + const onPreview = vi.fn() + render() + + fireEvent.click(screen.getByTestId('preview-btn')) + + expect(onPreview).toHaveBeenCalled() + }) +}) + +// ========================================== +// Edge Cases Tests +// ========================================== +describe('Edge Cases', () => { + describe('Empty States', () => { + it('should handle undefined datasource in useDatasourceUIState', () => { + const { result } = renderHook(() => useDatasourceUIState({ + datasource: undefined, + allFileLoaded: false, + localFileListLength: 0, + onlineDocumentsLength: 0, + websitePagesLength: 0, + selectedFileIdsLength: 0, + onlineDriveFileList: [], + isVectorSpaceFull: false, + enableBilling: true, + 
currentWorkspacePagesLength: 0, + fileUploadConfig: { file_size_limit: 15, batch_count_limit: 5 }, + })) + + expect(result.current.datasourceType).toBeUndefined() + expect(result.current.nextBtnDisabled).toBe(true) + }) + }) + + describe('Boundary Conditions', () => { + it('should handle zero file size limit', () => { + const { result } = renderHook(() => useDatasourceUIState({ + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDrive, + }, + }), + allFileLoaded: false, + localFileListLength: 0, + onlineDocumentsLength: 0, + websitePagesLength: 0, + selectedFileIdsLength: 0, + onlineDriveFileList: [], + isVectorSpaceFull: false, + enableBilling: true, + currentWorkspacePagesLength: 0, + fileUploadConfig: { file_size_limit: 0, batch_count_limit: 0 }, + })) + + expect(result.current.tip).toContain('datasetPipeline.addDocuments.selectOnlineDriveTip') + }) + + it('should handle very large file counts', () => { + const { result } = renderHook(() => useDatasourceUIState({ + datasource: createMockDatasource(), + allFileLoaded: true, + localFileListLength: 10000, + onlineDocumentsLength: 0, + websitePagesLength: 0, + selectedFileIdsLength: 0, + onlineDriveFileList: [], + isVectorSpaceFull: false, + enableBilling: true, + currentWorkspacePagesLength: 0, + fileUploadConfig: { file_size_limit: 15, batch_count_limit: 5 }, + })) + + expect(result.current.nextBtnDisabled).toBe(false) + }) + }) + + describe('File with special characters', () => { + it('should handle file name with special characters', () => { + const file = createMockFile({ name: 'test<>&"\'file.txt' }) + const result = buildLocalFileDatasourceInfo(file, 'cred-1') + expect(result.name).toBe('test<>&"\'file.txt') + }) + + it('should handle unicode file names', () => { + const file = createMockFile({ name: '测试文件🚀.txt' }) + const result = buildLocalFileDatasourceInfo(file, 'cred-1') + expect(result.name).toBe('测试文件🚀.txt') + }) + }) +}) + +// 
========================================== +// Component Memoization Tests +// ========================================== +describe('Component Memoization', () => { + it('StepOneContent should be memoized', async () => { + const StepOneContentModule = await import('./steps/step-one-content') + expect(StepOneContentModule.default.$$typeof).toBe(Symbol.for('react.memo')) + }) + + it('StepTwoContent should be memoized', async () => { + const StepTwoContentModule = await import('./steps/step-two-content') + expect(StepTwoContentModule.default.$$typeof).toBe(Symbol.for('react.memo')) + }) + + it('StepThreeContent should be memoized', async () => { + const StepThreeContentModule = await import('./steps/step-three-content') + expect(StepThreeContentModule.default.$$typeof).toBe(Symbol.for('react.memo')) + }) + + it('StepOnePreview should be memoized', () => { + expect(StepOnePreview.$$typeof).toBe(Symbol.for('react.memo')) + }) + + it('StepTwoPreview should be memoized', () => { + expect(StepTwoPreview.$$typeof).toBe(Symbol.for('react.memo')) + }) +}) + +// ========================================== +// Hook Callback Stability Tests +// ========================================== +describe('Hook Callback Stability', () => { + describe('useDatasourceUIState memoization', () => { + it('should maintain stable reference for datasourceType when dependencies unchanged', () => { + const params = { + datasource: createMockDatasource(), + allFileLoaded: true, + localFileListLength: 1, + onlineDocumentsLength: 0, + websitePagesLength: 0, + selectedFileIdsLength: 0, + onlineDriveFileList: [] as OnlineDriveFile[], + isVectorSpaceFull: false, + enableBilling: true, + currentWorkspacePagesLength: 0, + fileUploadConfig: { file_size_limit: 15, batch_count_limit: 5 }, + } + + const { result, rerender } = renderHook(() => useDatasourceUIState(params)) + const firstType = result.current.datasourceType + + rerender() + + expect(result.current.datasourceType).toBe(firstType) + }) + }) +}) + 
+// ========================================== +// Store Hooks Tests +// ========================================== +describe('Store Hooks', () => { + describe('useLocalFile', () => { + it('should return localFileList from store', () => { + mockStoreState.localFileList = [createMockFileItem()] + const { result } = renderHook(() => useLocalFile()) + expect(result.current.localFileList).toHaveLength(1) + }) + + it('should compute allFileLoaded correctly when all files have ids', () => { + mockStoreState.localFileList = [createMockFileItem()] + const { result } = renderHook(() => useLocalFile()) + expect(result.current.allFileLoaded).toBe(true) + }) + + it('should compute allFileLoaded as false when no files', () => { + mockStoreState.localFileList = [] + const { result } = renderHook(() => useLocalFile()) + expect(result.current.allFileLoaded).toBe(false) + }) + }) + + describe('useOnlineDocument', () => { + it('should return onlineDocuments from store', () => { + mockStoreState.onlineDocuments = [createMockNotionPage()] + const { result } = renderHook(() => useOnlineDocument()) + expect(result.current.onlineDocuments).toHaveLength(1) + }) + + it('should compute PagesMapAndSelectedPagesId correctly', () => { + mockStoreState.documentsData = [{ + workspace_id: 'ws-1', + pages: [{ page_id: 'page-1' }], + }] + const { result } = renderHook(() => useOnlineDocument()) + expect(result.current.PagesMapAndSelectedPagesId['page-1']).toBeDefined() + }) + }) + + describe('useWebsiteCrawl', () => { + it('should return websitePages from store', () => { + mockStoreState.websitePages = [createMockCrawlResult()] + const { result } = renderHook(() => useWebsiteCrawl()) + expect(result.current.websitePages).toHaveLength(1) + }) + }) + + describe('useOnlineDrive', () => { + it('should return onlineDriveFileList from store', () => { + mockStoreState.onlineDriveFileList = [createMockOnlineDriveFile()] + const { result } = renderHook(() => useOnlineDrive()) + 
expect(result.current.onlineDriveFileList).toHaveLength(1) + }) + + it('should compute selectedOnlineDriveFileList correctly', () => { + mockStoreState.onlineDriveFileList = [ + createMockOnlineDriveFile({ id: 'file-1' }), + createMockOnlineDriveFile({ id: 'file-2' }), + ] + mockStoreState.selectedFileIds = ['file-1'] + const { result } = renderHook(() => useOnlineDrive()) + expect(result.current.selectedOnlineDriveFileList).toHaveLength(1) + expect(result.current.selectedOnlineDriveFileList[0].id).toBe('file-1') + }) + }) +}) + +// ========================================== +// All Datasource Types Tests +// ========================================== +describe('All Datasource Types', () => { + const datasourceTypes = [ + { type: DatasourceType.localFile, name: 'Local File' }, + { type: DatasourceType.onlineDocument, name: 'Online Document' }, + { type: DatasourceType.websiteCrawl, name: 'Website Crawl' }, + { type: DatasourceType.onlineDrive, name: 'Online Drive' }, + ] + + describe.each(datasourceTypes)('$name datasource type', ({ type }) => { + it(`should handle ${type} in useDatasourceUIState`, () => { + const { result } = renderHook(() => useDatasourceUIState({ + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: type, + }, + }), + allFileLoaded: type === DatasourceType.localFile, + localFileListLength: type === DatasourceType.localFile ? 1 : 0, + onlineDocumentsLength: type === DatasourceType.onlineDocument ? 1 : 0, + websitePagesLength: type === DatasourceType.websiteCrawl ? 1 : 0, + selectedFileIdsLength: type === DatasourceType.onlineDrive ? 1 : 0, + onlineDriveFileList: type === DatasourceType.onlineDrive ? [createMockOnlineDriveFile()] : [], + isVectorSpaceFull: false, + enableBilling: true, + currentWorkspacePagesLength: type === DatasourceType.onlineDocument ? 
1 : 0, + fileUploadConfig: { file_size_limit: 15, batch_count_limit: 5 }, + })) + + expect(result.current.datasourceType).toBe(type) + expect(result.current.nextBtnDisabled).toBe(false) + }) + }) +}) + +// ========================================== +// useDatasourceOptions Hook Tests +// ========================================== +describe('useDatasourceOptions', () => { + it('should return empty array when no pipeline nodes', () => { + const { result } = renderHook(() => useDatasourceOptions([])) + expect(result.current).toEqual([]) + }) + + it('should filter and map data source nodes', () => { + const mockNodes: Node[] = [ + { + id: 'node-1', + type: 'data-source', + position: { x: 0, y: 0 }, + data: { + type: BlockEnum.DataSource, + title: 'Local File Source', + provider_type: DatasourceType.localFile, + plugin_id: 'plugin-1', + } as DataSourceNodeType, + }, + { + id: 'node-2', + type: 'other', + position: { x: 0, y: 0 }, + data: { + type: BlockEnum.Start, + title: 'Start Node', + } as unknown as DataSourceNodeType, + }, + ] + + const { result } = renderHook(() => useDatasourceOptions(mockNodes)) + expect(result.current).toHaveLength(1) + expect(result.current[0].label).toBe('Local File Source') + expect(result.current[0].value).toBe('node-1') + }) + + it('should return multiple options for multiple data source nodes', () => { + const mockNodes: Node[] = [ + { + id: 'node-1', + type: 'data-source', + position: { x: 0, y: 0 }, + data: { + type: BlockEnum.DataSource, + title: 'Source 1', + provider_type: DatasourceType.localFile, + plugin_id: 'plugin-1', + } as DataSourceNodeType, + }, + { + id: 'node-2', + type: 'data-source', + position: { x: 0, y: 0 }, + data: { + type: BlockEnum.DataSource, + title: 'Source 2', + provider_type: DatasourceType.onlineDocument, + plugin_id: 'plugin-2', + } as DataSourceNodeType, + }, + ] + + const { result } = renderHook(() => useDatasourceOptions(mockNodes)) + expect(result.current).toHaveLength(2) + }) +}) + +// 
========================================== +// useDatasourceActions Hook Tests +// ========================================== +describe('useDatasourceActions', () => { + const createMockDataSourceStore = () => ({ + getState: () => ({ + ...mockStoreState, + previewLocalFileRef: { current: createMockFile() }, + previewOnlineDocumentRef: { current: createMockNotionPage() }, + previewWebsitePageRef: { current: createMockCrawlResult() }, + previewOnlineDriveFileRef: { current: createMockOnlineDriveFile() }, + currentCredentialId: 'cred-1', + bucket: 'test-bucket', + localFileList: [createMockFileItem()], + onlineDocuments: [createMockNotionPage()], + websitePages: [createMockCrawlResult()], + selectedFileIds: ['file-1'], + onlineDriveFileList: [createMockOnlineDriveFile({ id: 'file-1' })], + setCurrentCredentialId: vi.fn(), + currentNodeIdRef: { current: '' }, + setOnlineDocuments: vi.fn(), + setSelectedFileIds: vi.fn(), + setSelectedPagesId: vi.fn(), + }), + }) + + const defaultParams = { + datasource: createMockDatasource(), + datasourceType: DatasourceType.localFile, + pipelineId: 'pipeline-1', + dataSourceStore: createMockDataSourceStore() as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: { 'page-1': createMockNotionPage() }, + currentWorkspacePages: [{ page_id: 'page-1' }], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + beforeEach(() => { + vi.clearAllMocks() + }) + + it('should return initial state and callbacks', () => { + const { result } = renderHook(() => useDatasourceActions(defaultParams)) + + expect(result.current.isPreview).toBeDefined() + expect(result.current.formRef).toBeDefined() + expect(result.current.isIdle).toBe(true) + expect(result.current.isPending).toBe(false) + expect(typeof result.current.onClickProcess).toBe('function') + expect(typeof 
result.current.onClickPreview).toBe('function') + expect(typeof result.current.handleSubmit).toBe('function') + }) + + it('should set isPreview to false when onClickProcess is called', () => { + const { result } = renderHook(() => useDatasourceActions(defaultParams)) + + act(() => { + result.current.onClickProcess() + }) + + expect(result.current.isPreview.current).toBe(false) + }) + + it('should set isPreview to true when onClickPreview is called', () => { + const { result } = renderHook(() => useDatasourceActions(defaultParams)) + + act(() => { + result.current.onClickPreview() + }) + + expect(result.current.isPreview.current).toBe(true) + }) + + it('should call handlePreviewFileChange and trigger preview', () => { + const { result } = renderHook(() => useDatasourceActions(defaultParams)) + const mockFile = { id: 'file-1', name: 'test.txt' } as unknown as DocumentItem + + act(() => { + result.current.handlePreviewFileChange(mockFile) + }) + + expect(result.current.isPreview.current).toBe(true) + }) + + it('should call handlePreviewOnlineDocumentChange and trigger preview', () => { + const { result } = renderHook(() => useDatasourceActions(defaultParams)) + const mockPage = createMockNotionPage() + + act(() => { + result.current.handlePreviewOnlineDocumentChange(mockPage) + }) + + expect(result.current.isPreview.current).toBe(true) + }) + + it('should call handlePreviewWebsiteChange and trigger preview', () => { + const { result } = renderHook(() => useDatasourceActions(defaultParams)) + const mockWebsite = createMockCrawlResult() + + act(() => { + result.current.handlePreviewWebsiteChange(mockWebsite) + }) + + expect(result.current.isPreview.current).toBe(true) + }) + + it('should call handlePreviewOnlineDriveFileChange and trigger preview', () => { + const { result } = renderHook(() => useDatasourceActions(defaultParams)) + const mockFile = createMockOnlineDriveFile() + + act(() => { + result.current.handlePreviewOnlineDriveFileChange(mockFile) + }) + + 
expect(result.current.isPreview.current).toBe(true) + }) + + it('should handle select all for online document', () => { + const params = { + ...defaultParams, + datasourceType: DatasourceType.onlineDocument, + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + onlineDocuments: [], + setOnlineDocuments: vi.fn(), + setSelectedPagesId: vi.fn(), + }), + } as MockDataSourceStore, + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.handleSelectAll() + }) + + // Verify the callback was executed (no error thrown) + expect(true).toBe(true) + }) + + it('should handle select all for online drive', () => { + const params = { + ...defaultParams, + datasourceType: DatasourceType.onlineDrive, + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + onlineDriveFileList: [createMockOnlineDriveFile({ id: 'file-1' })], + selectedFileIds: [], + setSelectedFileIds: vi.fn(), + }), + } as MockDataSourceStore, + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.handleSelectAll() + }) + + expect(true).toBe(true) + }) + + it('should handle switch data source', () => { + const setDatasource = vi.fn() + const params = { + ...defaultParams, + setDatasource, + } + + const { result } = renderHook(() => useDatasourceActions(params)) + const newDatasource = createMockDatasource({ nodeId: 'node-2' }) + + act(() => { + result.current.handleSwitchDataSource(newDatasource) + }) + + expect(setDatasource).toHaveBeenCalledWith(newDatasource) + }) + + it('should handle credential change', () => { + const { result } = renderHook(() => useDatasourceActions(defaultParams)) + + act(() => { + result.current.handleCredentialChange('new-cred-id') + }) + + // Should not throw error + expect(true).toBe(true) + }) + + it('should clear online document data when switching datasource', () => { + const clearOnlineDocumentData = vi.fn() + const params = { + ...defaultParams, + 
clearOnlineDocumentData, + } + + const { result } = renderHook(() => useDatasourceActions(params)) + const newDatasource = createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDocument, + }, + }) + + act(() => { + result.current.handleSwitchDataSource(newDatasource) + }) + + expect(clearOnlineDocumentData).toHaveBeenCalled() + }) + + it('should clear website crawl data when switching datasource', () => { + const clearWebsiteCrawlData = vi.fn() + const params = { + ...defaultParams, + clearWebsiteCrawlData, + } + + const { result } = renderHook(() => useDatasourceActions(params)) + const newDatasource = createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.websiteCrawl, + }, + }) + + act(() => { + result.current.handleSwitchDataSource(newDatasource) + }) + + expect(clearWebsiteCrawlData).toHaveBeenCalled() + }) + + it('should clear online drive data when switching datasource', () => { + const clearOnlineDriveData = vi.fn() + const params = { + ...defaultParams, + clearOnlineDriveData, + } + + const { result } = renderHook(() => useDatasourceActions(params)) + const newDatasource = createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDrive, + }, + }) + + act(() => { + result.current.handleSwitchDataSource(newDatasource) + }) + + expect(clearOnlineDriveData).toHaveBeenCalled() + }) +}) + +// ========================================== +// Store Hooks - Additional Coverage Tests +// ========================================== +describe('Store Hooks - Callbacks', () => { + beforeEach(() => { + vi.clearAllMocks() + // Reset mock store state + mockStoreState.localFileList = [] + mockStoreState.documentsData = [] + mockStoreState.onlineDocuments = [] + mockStoreState.websitePages = [] + mockStoreState.onlineDriveFileList = [] + mockStoreState.selectedFileIds = [] + }) + + describe('useLocalFile 
callbacks', () => { + it('should call hidePreviewLocalFile callback', () => { + const { result } = renderHook(() => useLocalFile()) + + act(() => { + result.current.hidePreviewLocalFile() + }) + + expect(mockStoreState.setCurrentLocalFile).toHaveBeenCalledWith(undefined) + }) + }) + + describe('useOnlineDocument callbacks', () => { + it('should return currentWorkspace from documentsData', () => { + mockStoreState.documentsData = [{ workspace_id: 'ws-1', pages: [] }] + const { result } = renderHook(() => useOnlineDocument()) + + expect(result.current.currentWorkspace).toBeDefined() + expect(result.current.currentWorkspace?.workspace_id).toBe('ws-1') + }) + + it('should call hidePreviewOnlineDocument callback', () => { + const { result } = renderHook(() => useOnlineDocument()) + + act(() => { + result.current.hidePreviewOnlineDocument() + }) + + expect(mockStoreState.setCurrentDocument).toHaveBeenCalledWith(undefined) + }) + + it('should call clearOnlineDocumentData callback', () => { + const { result } = renderHook(() => useOnlineDocument()) + + act(() => { + result.current.clearOnlineDocumentData() + }) + + expect(mockStoreState.setDocumentsData).toHaveBeenCalledWith([]) + expect(mockStoreState.setSearchValue).toHaveBeenCalledWith('') + expect(mockStoreState.setOnlineDocuments).toHaveBeenCalledWith([]) + expect(mockStoreState.setCurrentDocument).toHaveBeenCalledWith(undefined) + }) + }) + + describe('useWebsiteCrawl callbacks', () => { + it('should call hideWebsitePreview callback', () => { + const { result } = renderHook(() => useWebsiteCrawl()) + + act(() => { + result.current.hideWebsitePreview() + }) + + expect(mockStoreState.setCurrentWebsite).toHaveBeenCalledWith(undefined) + expect(mockStoreState.setPreviewIndex).toHaveBeenCalledWith(-1) + }) + + it('should call clearWebsiteCrawlData callback', () => { + const { result } = renderHook(() => useWebsiteCrawl()) + + act(() => { + result.current.clearWebsiteCrawlData() + }) + + 
expect(mockStoreState.setStep).toHaveBeenCalled() + expect(mockStoreState.setCrawlResult).toHaveBeenCalledWith(undefined) + expect(mockStoreState.setCurrentWebsite).toHaveBeenCalledWith(undefined) + expect(mockStoreState.setWebsitePages).toHaveBeenCalledWith([]) + expect(mockStoreState.setPreviewIndex).toHaveBeenCalledWith(-1) + }) + }) + + describe('useOnlineDrive callbacks', () => { + it('should call clearOnlineDriveData callback', () => { + const { result } = renderHook(() => useOnlineDrive()) + + act(() => { + result.current.clearOnlineDriveData() + }) + + expect(mockStoreState.setOnlineDriveFileList).toHaveBeenCalledWith([]) + expect(mockStoreState.setBucket).toHaveBeenCalledWith('') + expect(mockStoreState.setPrefix).toHaveBeenCalledWith([]) + expect(mockStoreState.setKeywords).toHaveBeenCalledWith('') + expect(mockStoreState.setSelectedFileIds).toHaveBeenCalledWith([]) + }) + }) +}) + +// ========================================== +// StepOneContent - All Datasource Types +// ========================================== +describe('StepOneContent - All Datasource Types', () => { + // Mock data source components + vi.mock('./data-source/local-file', () => ({ + default: () =>
Local File
, + })) + + vi.mock('./data-source/online-documents', () => ({ + default: () =>
Online Documents
, + })) + + vi.mock('./data-source/website-crawl', () => ({ + default: () =>
Website Crawl
, + })) + + vi.mock('./data-source/online-drive', () => ({ + default: () =>
Online Drive
, + })) + + const defaultProps = { + datasource: undefined as Datasource | undefined, + datasourceType: undefined as string | undefined, + pipelineNodes: [] as Node[], + supportBatchUpload: true, + localFileListLength: 0, + isShowVectorSpaceFull: false, + showSelect: false, + totalOptions: undefined as number | undefined, + selectedOptions: undefined as number | undefined, + tip: '', + nextBtnDisabled: true, + onSelectDataSource: vi.fn(), + onCredentialChange: vi.fn(), + onSelectAll: vi.fn(), + onNextStep: vi.fn(), + } + + it('should render OnlineDocuments when datasourceType is onlineDocument', () => { + render( + , + ) + expect(screen.getByTestId('online-documents-component')).toBeInTheDocument() + }) + + it('should render WebsiteCrawl when datasourceType is websiteCrawl', () => { + render( + , + ) + expect(screen.getByTestId('website-crawl-component')).toBeInTheDocument() + }) + + it('should render OnlineDrive when datasourceType is onlineDrive', () => { + render( + , + ) + expect(screen.getByTestId('online-drive-component')).toBeInTheDocument() + }) + + it('should render LocalFile when datasourceType is localFile', () => { + render( + , + ) + expect(screen.getByTestId('local-file-component')).toBeInTheDocument() + }) +}) + +// ========================================== +// StepTwoPreview - with localFileList +// ========================================== +describe('StepTwoPreview - File List Mapping', () => { + it('should correctly map localFileList to localFiles', () => { + const fileList = [ + createMockFileItem({ file: createMockFile({ id: 'f1', name: 'file1.txt' }) }), + createMockFileItem({ file: createMockFile({ id: 'f2', name: 'file2.txt' }) }), + ] + + render( + , + ) + + // ChunkPreview should be rendered + expect(screen.getByTestId('chunk-preview')).toBeInTheDocument() + }) +}) + +// ========================================== +// useDatasourceActions - Additional Coverage +// ========================================== +describe('useDatasourceActions - 
Async Functions', () => { + beforeEach(() => { + vi.clearAllMocks() + mockRunPublishedPipeline.mockReset() + }) + + const createMockDataSourceStoreForAsync = (datasourceType: string) => ({ + getState: () => ({ + previewLocalFileRef: { current: datasourceType === DatasourceType.localFile ? createMockFile() : undefined }, + previewOnlineDocumentRef: { current: datasourceType === DatasourceType.onlineDocument ? createMockNotionPage() : undefined }, + previewWebsitePageRef: { current: datasourceType === DatasourceType.websiteCrawl ? createMockCrawlResult() : undefined }, + previewOnlineDriveFileRef: { current: datasourceType === DatasourceType.onlineDrive ? createMockOnlineDriveFile() : undefined }, + currentCredentialId: 'cred-1', + bucket: 'test-bucket', + localFileList: [createMockFileItem()], + onlineDocuments: [createMockNotionPage()], + websitePages: [createMockCrawlResult()], + selectedFileIds: ['file-1'], + onlineDriveFileList: [createMockOnlineDriveFile({ id: 'file-1' })], + setCurrentCredentialId: vi.fn(), + currentNodeIdRef: { current: '' }, + setOnlineDocuments: vi.fn(), + setSelectedFileIds: vi.fn(), + setSelectedPagesId: vi.fn(), + }), + }) + + it('should call handleSubmit with preview mode', () => { + const setEstimateData = vi.fn() + const params = { + datasource: createMockDatasource(), + datasourceType: DatasourceType.localFile, + pipelineId: 'pipeline-1', + dataSourceStore: createMockDataSourceStoreForAsync(DatasourceType.localFile) as MockDataSourceStore, + setEstimateData, + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.onClickPreview() + result.current.handleSubmit({ test: 'data' }) + }) + + // Should have triggered preview + 
expect(result.current.isPreview.current).toBe(true) + }) + + it('should call handleSubmit with process mode', () => { + const setBatchId = vi.fn() + const setDocuments = vi.fn() + const handleNextStep = vi.fn() + const params = { + datasource: createMockDatasource(), + datasourceType: DatasourceType.localFile, + pipelineId: 'pipeline-1', + dataSourceStore: createMockDataSourceStoreForAsync(DatasourceType.localFile) as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId, + setDocuments, + handleNextStep, + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.onClickProcess() + result.current.handleSubmit({ test: 'data' }) + }) + + // Should have triggered process + expect(result.current.isPreview.current).toBe(false) + }) + + it('should not call API when datasource is undefined', () => { + const params = { + datasource: undefined, + datasourceType: DatasourceType.localFile, + pipelineId: 'pipeline-1', + dataSourceStore: createMockDataSourceStoreForAsync(DatasourceType.localFile) as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.handleSubmit({ test: 'data' }) + }) + + expect(mockRunPublishedPipeline).not.toHaveBeenCalled() + }) + + it('should not call API when pipelineId is undefined', () => { + const params = { + datasource: createMockDatasource(), + datasourceType: DatasourceType.localFile, + pipelineId: undefined, + dataSourceStore: 
createMockDataSourceStoreForAsync(DatasourceType.localFile) as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.handleSubmit({ test: 'data' }) + }) + + expect(mockRunPublishedPipeline).not.toHaveBeenCalled() + }) + + it('should build preview info for online document type', () => { + const params = { + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDocument, + }, + }), + datasourceType: DatasourceType.onlineDocument, + pipelineId: 'pipeline-1', + dataSourceStore: createMockDataSourceStoreForAsync(DatasourceType.onlineDocument) as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.onClickPreview() + result.current.handleSubmit({ test: 'data' }) + }) + + expect(result.current.isPreview.current).toBe(true) + }) + + it('should build preview info for website crawl type', () => { + const params = { + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.websiteCrawl, + }, + }), + datasourceType: DatasourceType.websiteCrawl, + pipelineId: 'pipeline-1', + dataSourceStore: createMockDataSourceStoreForAsync(DatasourceType.websiteCrawl) as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId: 
vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.onClickPreview() + result.current.handleSubmit({ test: 'data' }) + }) + + expect(result.current.isPreview.current).toBe(true) + }) + + it('should build preview info for online drive type', () => { + const params = { + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDrive, + }, + }), + datasourceType: DatasourceType.onlineDrive, + pipelineId: 'pipeline-1', + dataSourceStore: createMockDataSourceStoreForAsync(DatasourceType.onlineDrive) as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.onClickPreview() + result.current.handleSubmit({ test: 'data' }) + }) + + expect(result.current.isPreview.current).toBe(true) + }) + + it('should toggle select all for online document - deselect all when already selected', () => { + const setOnlineDocuments = vi.fn() + const setSelectedPagesId = vi.fn() + const params = { + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDocument, + }, + }), + datasourceType: DatasourceType.onlineDocument, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + onlineDocuments: [createMockNotionPage()], + setOnlineDocuments, + 
setSelectedPagesId, + }), + } as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: { 'page-1': createMockNotionPage() }, + currentWorkspacePages: [{ page_id: 'page-1' }], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.handleSelectAll() + }) + + // Should deselect all since documents.length >= allIds.length + expect(setOnlineDocuments).toHaveBeenCalledWith([]) + }) + + it('should toggle select all for online drive - deselect all when already selected', () => { + const setSelectedFileIds = vi.fn() + const params = { + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDrive, + }, + }), + datasourceType: DatasourceType.onlineDrive, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + onlineDriveFileList: [createMockOnlineDriveFile({ id: 'file-1' })], + selectedFileIds: ['file-1'], + setSelectedFileIds, + }), + } as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.handleSelectAll() + }) + + // Should deselect all since selectedFileIds.length >= allKeys.length + expect(setSelectedFileIds).toHaveBeenCalledWith([]) + }) + + it('should clear data when credential changes with datasource', () => { + const clearOnlineDocumentData = vi.fn() + const params = { + datasource: createMockDatasource({ + nodeData: { + 
...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDocument, + }, + }), + datasourceType: DatasourceType.onlineDocument, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + setCurrentCredentialId: vi.fn(), + }), + } as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData, + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.handleCredentialChange('new-cred') + }) + + expect(clearOnlineDocumentData).toHaveBeenCalled() + }) +}) + +// ========================================== +// useDatasourceActions - onSuccess Callbacks +// ========================================== +describe('useDatasourceActions - API Success Callbacks', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + it('should call setEstimateData on preview success', async () => { + const setEstimateData = vi.fn() + const mockResponse = { + data: { outputs: { chunks: 10, tokens: 100 } }, + } + + // Create a mock that calls onSuccess + const mockMutateAsync = vi.fn().mockImplementation((_params, options) => { + options?.onSuccess?.(mockResponse) + return Promise.resolve(mockResponse) + }) + + vi.mocked(mockRunPublishedPipeline).mockImplementation(mockMutateAsync) + + const params = { + datasource: createMockDatasource(), + datasourceType: DatasourceType.localFile, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + previewLocalFileRef: { current: createMockFile() }, + currentCredentialId: 'cred-1', + localFileList: [createMockFileItem()], + }), + } as MockDataSourceStore, + setEstimateData, + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: 
{}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + await act(async () => { + result.current.isPreview.current = true + await result.current.handleSubmit({ test: 'data' }) + }) + + expect(setEstimateData).toHaveBeenCalledWith(mockResponse.data.outputs) + }) + + it('should call setBatchId, setDocuments, handleNextStep on process success', async () => { + const setBatchId = vi.fn() + const setDocuments = vi.fn() + const handleNextStep = vi.fn() + const mockResponse = { + batch: 'batch-123', + documents: [{ id: 'doc-1' }], + } + + const mockMutateAsync = vi.fn().mockImplementation((_params, options) => { + options?.onSuccess?.(mockResponse) + return Promise.resolve(mockResponse) + }) + + vi.mocked(mockRunPublishedPipeline).mockImplementation(mockMutateAsync) + + const params = { + datasource: createMockDatasource(), + datasourceType: DatasourceType.localFile, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + previewLocalFileRef: { current: createMockFile() }, + currentCredentialId: 'cred-1', + localFileList: [createMockFileItem()], + }), + } as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId, + setDocuments, + handleNextStep, + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + await act(async () => { + result.current.isPreview.current = false + await result.current.handleSubmit({ test: 'data' }) + }) + + expect(setBatchId).toHaveBeenCalledWith('batch-123') + expect(setDocuments).toHaveBeenCalledWith([{ id: 'doc-1' }]) + expect(handleNextStep).toHaveBeenCalled() + }) + + it('should handle empty batch and documents 
in process response', async () => { + const setBatchId = vi.fn() + const setDocuments = vi.fn() + const handleNextStep = vi.fn() + const mockResponse = {} // Empty response + + const mockMutateAsync = vi.fn().mockImplementation((_params, options) => { + options?.onSuccess?.(mockResponse) + return Promise.resolve(mockResponse) + }) + + vi.mocked(mockRunPublishedPipeline).mockImplementation(mockMutateAsync) + + const params = { + datasource: createMockDatasource(), + datasourceType: DatasourceType.localFile, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + previewLocalFileRef: { current: createMockFile() }, + currentCredentialId: 'cred-1', + localFileList: [createMockFileItem()], + }), + } as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId, + setDocuments, + handleNextStep, + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + await act(async () => { + result.current.isPreview.current = false + await result.current.handleSubmit({ test: 'data' }) + }) + + expect(setBatchId).toHaveBeenCalledWith('') + expect(setDocuments).toHaveBeenCalledWith([]) + expect(handleNextStep).toHaveBeenCalled() + }) +}) + +// ========================================== +// useDatasourceActions - buildProcessDatasourceInfo Coverage +// ========================================== +describe('useDatasourceActions - Process Mode for All Datasource Types', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + it('should build process info for onlineDocument type', async () => { + const setBatchId = vi.fn() + const setDocuments = vi.fn() + const handleNextStep = vi.fn() + const mockResponse = { batch: 'batch-1', documents: [] } + + const mockMutateAsync = vi.fn().mockImplementation((_params, options) => { + 
options?.onSuccess?.(mockResponse) + return Promise.resolve(mockResponse) + }) + vi.mocked(mockRunPublishedPipeline).mockImplementation(mockMutateAsync) + + const params = { + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDocument, + }, + }), + datasourceType: DatasourceType.onlineDocument, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + currentCredentialId: 'cred-1', + onlineDocuments: [createMockNotionPage()], + }), + } as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId, + setDocuments, + handleNextStep, + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + await act(async () => { + result.current.isPreview.current = false + await result.current.handleSubmit({ test: 'data' }) + }) + + expect(mockMutateAsync).toHaveBeenCalled() + expect(setBatchId).toHaveBeenCalled() + }) + + it('should build process info for websiteCrawl type', async () => { + const setBatchId = vi.fn() + const setDocuments = vi.fn() + const handleNextStep = vi.fn() + const mockResponse = { batch: 'batch-1', documents: [] } + + const mockMutateAsync = vi.fn().mockImplementation((_params, options) => { + options?.onSuccess?.(mockResponse) + return Promise.resolve(mockResponse) + }) + vi.mocked(mockRunPublishedPipeline).mockImplementation(mockMutateAsync) + + const params = { + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.websiteCrawl, + }, + }), + datasourceType: DatasourceType.websiteCrawl, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + currentCredentialId: 'cred-1', + websitePages: [createMockCrawlResult()], + }), + } as 
MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId, + setDocuments, + handleNextStep, + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + await act(async () => { + result.current.isPreview.current = false + await result.current.handleSubmit({ test: 'data' }) + }) + + expect(mockMutateAsync).toHaveBeenCalled() + expect(setBatchId).toHaveBeenCalled() + }) + + it('should build process info for onlineDrive type', async () => { + const setBatchId = vi.fn() + const setDocuments = vi.fn() + const handleNextStep = vi.fn() + const mockResponse = { batch: 'batch-1', documents: [] } + + const mockMutateAsync = vi.fn().mockImplementation((_params, options) => { + options?.onSuccess?.(mockResponse) + return Promise.resolve(mockResponse) + }) + vi.mocked(mockRunPublishedPipeline).mockImplementation(mockMutateAsync) + + const params = { + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDrive, + }, + }), + datasourceType: DatasourceType.onlineDrive, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + currentCredentialId: 'cred-1', + bucket: 'test-bucket', + selectedFileIds: ['file-1'], + onlineDriveFileList: [createMockOnlineDriveFile({ id: 'file-1' })], + }), + } as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId, + setDocuments, + handleNextStep, + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + await act(async () => { + result.current.isPreview.current = false + await result.current.handleSubmit({ test: 
'data' }) + }) + + expect(mockMutateAsync).toHaveBeenCalled() + expect(setBatchId).toHaveBeenCalled() + }) + + it('should return early in preview mode when datasource is undefined', async () => { + const setEstimateData = vi.fn() + const mockMutateAsync = vi.fn() + vi.mocked(mockRunPublishedPipeline).mockImplementation(mockMutateAsync) + + const params = { + datasource: undefined, // undefined datasource + datasourceType: DatasourceType.localFile, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ ...mockStoreState }), + } as MockDataSourceStore, + setEstimateData, + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + await act(async () => { + result.current.isPreview.current = true + await result.current.handleSubmit({ test: 'data' }) + }) + + // Should not call API when datasource is undefined + expect(mockMutateAsync).not.toHaveBeenCalled() + expect(setEstimateData).not.toHaveBeenCalled() + }) + + it('should return early in preview mode when pipelineId is undefined', async () => { + const setEstimateData = vi.fn() + const mockMutateAsync = vi.fn() + vi.mocked(mockRunPublishedPipeline).mockImplementation(mockMutateAsync) + + const params = { + datasource: createMockDatasource(), + datasourceType: DatasourceType.localFile, + pipelineId: undefined, // undefined pipelineId + dataSourceStore: { + getState: () => ({ ...mockStoreState }), + } as MockDataSourceStore, + setEstimateData, + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { 
result } = renderHook(() => useDatasourceActions(params)) + + await act(async () => { + result.current.isPreview.current = true + await result.current.handleSubmit({ test: 'data' }) + }) + + // Should not call API when pipelineId is undefined + expect(mockMutateAsync).not.toHaveBeenCalled() + expect(setEstimateData).not.toHaveBeenCalled() + }) + + it('should skip file if not found in onlineDriveFileList', async () => { + const setBatchId = vi.fn() + const mockResponse = { batch: 'batch-1', documents: [] } + + const mockMutateAsync = vi.fn().mockImplementation((_params, options) => { + options?.onSuccess?.(mockResponse) + return Promise.resolve(mockResponse) + }) + vi.mocked(mockRunPublishedPipeline).mockImplementation(mockMutateAsync) + + const params = { + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDrive, + }, + }), + datasourceType: DatasourceType.onlineDrive, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + currentCredentialId: 'cred-1', + bucket: 'test-bucket', + selectedFileIds: ['non-existent-file'], + onlineDriveFileList: [createMockOnlineDriveFile({ id: 'file-1' })], + }), + } as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId, + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + await act(async () => { + result.current.isPreview.current = false + await result.current.handleSubmit({ test: 'data' }) + }) + + // Should still call API but with empty datasource_info_list + expect(mockMutateAsync).toHaveBeenCalled() + }) +}) + +// ========================================== +// useDatasourceActions - Edge Case Branches +// 
========================================== +describe('useDatasourceActions - Edge Case Branches', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + it('should handle selectAll when currentWorkspacePages is undefined', () => { + const setOnlineDocuments = vi.fn() + const setSelectedPagesId = vi.fn() + + const params = { + datasource: createMockDatasource({ + nodeData: { + ...createMockDatasource().nodeData, + provider_type: DatasourceType.onlineDocument, + }, + }), + datasourceType: DatasourceType.onlineDocument, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + onlineDocuments: [], + setOnlineDocuments, + setSelectedPagesId, + }), + } as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: undefined, // undefined currentWorkspacePages + clearOnlineDocumentData: vi.fn(), + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.handleSelectAll() + }) + + // Should use empty array when currentWorkspacePages is undefined + // Since allIds.length is 0 and onlineDocuments.length is 0, it should deselect + expect(setOnlineDocuments).toHaveBeenCalledWith([]) + }) + + it('should not clear data when datasource is undefined in handleCredentialChange', () => { + const clearOnlineDocumentData = vi.fn() + + const params = { + datasource: undefined, // undefined datasource + datasourceType: DatasourceType.onlineDocument, + pipelineId: 'pipeline-1', + dataSourceStore: { + getState: () => ({ + ...mockStoreState, + setCurrentCredentialId: vi.fn(), + }), + } as MockDataSourceStore, + setEstimateData: vi.fn(), + setBatchId: vi.fn(), + setDocuments: vi.fn(), + handleNextStep: vi.fn(), + PagesMapAndSelectedPagesId: {}, + currentWorkspacePages: [], + 
clearOnlineDocumentData, + clearWebsiteCrawlData: vi.fn(), + clearOnlineDriveData: vi.fn(), + setDatasource: vi.fn(), + } + + const { result } = renderHook(() => useDatasourceActions(params)) + + act(() => { + result.current.handleCredentialChange('new-cred') + }) + + // Should not call clearOnlineDocumentData when datasource is undefined + expect(clearOnlineDocumentData).not.toHaveBeenCalled() + }) +}) + +// ========================================== +// Hooks Index Re-exports Test +// ========================================== +describe('Hooks Index Re-exports', () => { + it('should export useAddDocumentsSteps', async () => { + const hooksModule = await import('./hooks') + expect(hooksModule.useAddDocumentsSteps).toBeDefined() + }) + + it('should export useDatasourceActions', async () => { + const hooksModule = await import('./hooks') + expect(hooksModule.useDatasourceActions).toBeDefined() + }) + + it('should export useDatasourceOptions', async () => { + const hooksModule = await import('./hooks') + expect(hooksModule.useDatasourceOptions).toBeDefined() + }) + + it('should export useLocalFile', async () => { + const hooksModule = await import('./hooks') + expect(hooksModule.useLocalFile).toBeDefined() + }) + + it('should export useOnlineDocument', async () => { + const hooksModule = await import('./hooks') + expect(hooksModule.useOnlineDocument).toBeDefined() + }) + + it('should export useOnlineDrive', async () => { + const hooksModule = await import('./hooks') + expect(hooksModule.useOnlineDrive).toBeDefined() + }) + + it('should export useWebsiteCrawl', async () => { + const hooksModule = await import('./hooks') + expect(hooksModule.useWebsiteCrawl).toBeDefined() + }) + + it('should export useDatasourceUIState', async () => { + const hooksModule = await import('./hooks') + expect(hooksModule.useDatasourceUIState).toBeDefined() + }) +}) + +// ========================================== +// Steps Index Re-exports Test +// 
========================================== +describe('Steps Index Re-exports', () => { + it('should export StepOneContent', async () => { + const stepsModule = await import('./steps') + expect(stepsModule.StepOneContent).toBeDefined() + }) + + it('should export StepTwoContent', async () => { + const stepsModule = await import('./steps') + expect(stepsModule.StepTwoContent).toBeDefined() + }) + + it('should export StepThreeContent', async () => { + const stepsModule = await import('./steps') + expect(stepsModule.StepThreeContent).toBeDefined() + }) +}) diff --git a/web/app/components/datasets/documents/create-from-pipeline/index.tsx b/web/app/components/datasets/documents/create-from-pipeline/index.tsx index 2b17f97baa..62c1b919fe 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/index.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/index.tsx @@ -2,75 +2,71 @@ import type { Datasource } from '@/app/components/rag-pipeline/components/panel/test-run/types' import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types' import type { Node } from '@/app/components/workflow/types' -import type { NotionPage } from '@/models/common' -import type { CrawlResultItem, DocumentItem, CustomFile as File, FileIndexingEstimateResponse } from '@/models/datasets' -import type { - InitialDocumentDetail, - OnlineDriveFile, - PublishedPipelineRunPreviewResponse, - PublishedPipelineRunResponse, -} from '@/models/pipeline' +import type { FileIndexingEstimateResponse } from '@/models/datasets' +import type { InitialDocumentDetail } from '@/models/pipeline' import { useBoolean } from 'ahooks' -import { useCallback, useMemo, useRef, useState } from 'react' +import { useCallback, useMemo, useState } from 'react' import { useTranslation } from 'react-i18next' -import { trackEvent } from '@/app/components/base/amplitude' -import Divider from '@/app/components/base/divider' import Loading from '@/app/components/base/loading' 
import PlanUpgradeModal from '@/app/components/billing/plan-upgrade-modal' -import VectorSpaceFull from '@/app/components/billing/vector-space-full' -import LocalFile from '@/app/components/datasets/documents/create-from-pipeline/data-source/local-file' -import OnlineDocuments from '@/app/components/datasets/documents/create-from-pipeline/data-source/online-documents' -import OnlineDrive from '@/app/components/datasets/documents/create-from-pipeline/data-source/online-drive' -import WebsiteCrawl from '@/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl' import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail' import { useProviderContextSelector } from '@/context/provider-context' import { DatasourceType } from '@/models/pipeline' import { useFileUploadConfig } from '@/service/use-common' -import { usePublishedPipelineInfo, useRunPublishedPipeline } from '@/service/use-pipeline' -import { TransferMethod } from '@/types/app' -import UpgradeCard from '../../create/step-one/upgrade-card' -import Actions from './actions' -import DataSourceOptions from './data-source-options' +import { usePublishedPipelineInfo } from '@/service/use-pipeline' import { useDataSourceStore } from './data-source/store' import DataSourceProvider from './data-source/store/provider' -import { useAddDocumentsSteps, useLocalFile, useOnlineDocument, useOnlineDrive, useWebsiteCrawl } from './hooks' +import { + useAddDocumentsSteps, + useDatasourceActions, + useDatasourceUIState, + useLocalFile, + useOnlineDocument, + useOnlineDrive, + useWebsiteCrawl, +} from './hooks' import LeftHeader from './left-header' -import ChunkPreview from './preview/chunk-preview' -import FilePreview from './preview/file-preview' -import OnlineDocumentPreview from './preview/online-document-preview' -import WebsitePreview from './preview/web-preview' -import ProcessDocuments from './process-documents' -import Processing from './processing' +import { StepOneContent, 
StepThreeContent, StepTwoContent } from './steps' +import { StepOnePreview, StepTwoPreview } from './steps/preview-panel' const CreateFormPipeline = () => { const { t } = useTranslation() const plan = useProviderContextSelector(state => state.plan) const enableBilling = useProviderContextSelector(state => state.enableBilling) const pipelineId = useDatasetDetailContextWithSelector(s => s.dataset?.pipeline_id) + const dataSourceStore = useDataSourceStore() + + // Core state const [datasource, setDatasource] = useState() const [estimateData, setEstimateData] = useState(undefined) const [batchId, setBatchId] = useState('') const [documents, setDocuments] = useState([]) - const dataSourceStore = useDataSourceStore() - - const isPreview = useRef(false) - const formRef = useRef(null) + // Data fetching const { data: pipelineInfo, isFetching: isFetchingPipelineInfo } = usePublishedPipelineInfo(pipelineId || '') const { data: fileUploadConfigResponse } = useFileUploadConfig() + const fileUploadConfig = useMemo(() => fileUploadConfigResponse ?? 
{ + file_size_limit: 15, + batch_count_limit: 5, + }, [fileUploadConfigResponse]) + + // Steps management const { steps, currentStep, handleNextStep: doHandleNextStep, handleBackStep, } = useAddDocumentsSteps() + + // Datasource-specific hooks const { localFileList, allFileLoaded, currentLocalFile, hidePreviewLocalFile, } = useLocalFile() + const { currentWorkspace, onlineDocuments, @@ -79,12 +75,14 @@ const CreateFormPipeline = () => { hidePreviewOnlineDocument, clearOnlineDocumentData, } = useOnlineDocument() + const { websitePages, currentWebsite, hideWebsitePreview, clearWebsiteCrawlData, } = useWebsiteCrawl() + const { onlineDriveFileList, selectedFileIds, @@ -92,43 +90,50 @@ const CreateFormPipeline = () => { clearOnlineDriveData, } = useOnlineDrive() - const datasourceType = useMemo(() => datasource?.nodeData.provider_type, [datasource]) + // Computed values const isVectorSpaceFull = plan.usage.vectorSpace >= plan.total.vectorSpace - const isShowVectorSpaceFull = useMemo(() => { - if (!datasource) - return false - if (datasourceType === DatasourceType.localFile) - return allFileLoaded && isVectorSpaceFull && enableBilling - if (datasourceType === DatasourceType.onlineDocument) - return onlineDocuments.length > 0 && isVectorSpaceFull && enableBilling - if (datasourceType === DatasourceType.websiteCrawl) - return websitePages.length > 0 && isVectorSpaceFull && enableBilling - if (datasourceType === DatasourceType.onlineDrive) - return onlineDriveFileList.length > 0 && isVectorSpaceFull && enableBilling - return false - }, [allFileLoaded, datasource, datasourceType, enableBilling, isVectorSpaceFull, onlineDocuments.length, onlineDriveFileList.length, websitePages.length]) const supportBatchUpload = !enableBilling || plan.type !== 'sandbox' + // UI state + const { + datasourceType, + isShowVectorSpaceFull, + nextBtnDisabled, + showSelect, + totalOptions, + selectedOptions, + tip, + } = useDatasourceUIState({ + datasource, + allFileLoaded, + localFileListLength: 
localFileList.length, + onlineDocumentsLength: onlineDocuments.length, + websitePagesLength: websitePages.length, + selectedFileIdsLength: selectedFileIds.length, + onlineDriveFileList, + isVectorSpaceFull, + enableBilling, + currentWorkspacePagesLength: currentWorkspace?.pages.length ?? 0, + fileUploadConfig, + }) + + // Plan upgrade modal const [isShowPlanUpgradeModal, { setTrue: showPlanUpgradeModal, setFalse: hidePlanUpgradeModal, }] = useBoolean(false) + + // Next step with batch upload check const handleNextStep = useCallback(() => { if (!supportBatchUpload) { - let isMultiple = false - if (datasourceType === DatasourceType.localFile && localFileList.length > 1) - isMultiple = true - - if (datasourceType === DatasourceType.onlineDocument && onlineDocuments.length > 1) - isMultiple = true - - if (datasourceType === DatasourceType.websiteCrawl && websitePages.length > 1) - isMultiple = true - - if (datasourceType === DatasourceType.onlineDrive && selectedFileIds.length > 1) - isMultiple = true - - if (isMultiple) { + const multipleCheckMap: Record = { + [DatasourceType.localFile]: localFileList.length, + [DatasourceType.onlineDocument]: onlineDocuments.length, + [DatasourceType.websiteCrawl]: websitePages.length, + [DatasourceType.onlineDrive]: selectedFileIds.length, + } + const count = datasourceType ? 
multipleCheckMap[datasourceType] : 0 + if (count > 1) { showPlanUpgradeModal() return } @@ -136,334 +141,44 @@ const CreateFormPipeline = () => { doHandleNextStep() }, [datasourceType, doHandleNextStep, localFileList.length, onlineDocuments.length, selectedFileIds.length, showPlanUpgradeModal, supportBatchUpload, websitePages.length]) - const nextBtnDisabled = useMemo(() => { - if (!datasource) - return true - if (datasourceType === DatasourceType.localFile) - return isShowVectorSpaceFull || !localFileList.length || !allFileLoaded - if (datasourceType === DatasourceType.onlineDocument) - return isShowVectorSpaceFull || !onlineDocuments.length - if (datasourceType === DatasourceType.websiteCrawl) - return isShowVectorSpaceFull || !websitePages.length - if (datasourceType === DatasourceType.onlineDrive) - return isShowVectorSpaceFull || !selectedFileIds.length - return false - }, [datasource, datasourceType, isShowVectorSpaceFull, localFileList.length, allFileLoaded, onlineDocuments.length, websitePages.length, selectedFileIds.length]) + // Datasource actions + const { + isPreview, + formRef, + isIdle, + isPending, + onClickProcess, + onClickPreview, + handleSubmit, + handlePreviewFileChange, + handlePreviewOnlineDocumentChange, + handlePreviewWebsiteChange, + handlePreviewOnlineDriveFileChange, + handleSelectAll, + handleSwitchDataSource, + handleCredentialChange, + } = useDatasourceActions({ + datasource, + datasourceType, + pipelineId, + dataSourceStore, + setEstimateData, + setBatchId, + setDocuments, + handleNextStep, + PagesMapAndSelectedPagesId, + currentWorkspacePages: currentWorkspace?.pages, + clearOnlineDocumentData, + clearWebsiteCrawlData, + clearOnlineDriveData, + setDatasource, + }) - const fileUploadConfig = useMemo(() => fileUploadConfigResponse ?? 
{ - file_size_limit: 15, - batch_count_limit: 5, - }, [fileUploadConfigResponse]) - - const showSelect = useMemo(() => { - if (datasourceType === DatasourceType.onlineDocument) { - const pagesCount = currentWorkspace?.pages.length ?? 0 - return pagesCount > 0 - } - if (datasourceType === DatasourceType.onlineDrive) { - const isBucketList = onlineDriveFileList.some(file => file.type === 'bucket') - return !isBucketList && onlineDriveFileList.filter((item) => { - return item.type !== 'bucket' - }).length > 0 - } - return false - }, [currentWorkspace?.pages.length, datasourceType, onlineDriveFileList]) - - const totalOptions = useMemo(() => { - if (datasourceType === DatasourceType.onlineDocument) - return currentWorkspace?.pages.length - if (datasourceType === DatasourceType.onlineDrive) { - return onlineDriveFileList.filter((item) => { - return item.type !== 'bucket' - }).length - } - }, [currentWorkspace?.pages.length, datasourceType, onlineDriveFileList]) - - const selectedOptions = useMemo(() => { - if (datasourceType === DatasourceType.onlineDocument) - return onlineDocuments.length - if (datasourceType === DatasourceType.onlineDrive) - return selectedFileIds.length - }, [datasourceType, onlineDocuments.length, selectedFileIds.length]) - - const tip = useMemo(() => { - if (datasourceType === DatasourceType.onlineDocument) - return t('addDocuments.selectOnlineDocumentTip', { ns: 'datasetPipeline', count: 50 }) - if (datasourceType === DatasourceType.onlineDrive) { - return t('addDocuments.selectOnlineDriveTip', { - ns: 'datasetPipeline', - count: fileUploadConfig.batch_count_limit, - fileSize: fileUploadConfig.file_size_limit, - }) - } - return '' - }, [datasourceType, fileUploadConfig.batch_count_limit, fileUploadConfig.file_size_limit, t]) - - const { mutateAsync: runPublishedPipeline, isIdle, isPending } = useRunPublishedPipeline() - - const handlePreviewChunks = useCallback(async (data: Record) => { - if (!datasource) - return - const { - previewLocalFileRef, 
- previewOnlineDocumentRef, - previewWebsitePageRef, - previewOnlineDriveFileRef, - currentCredentialId, - } = dataSourceStore.getState() - const datasourceInfoList: Record[] = [] - if (datasourceType === DatasourceType.localFile) { - const { id, name, type, size, extension, mime_type } = previewLocalFileRef.current as File - const documentInfo = { - related_id: id, - name, - type, - size, - extension, - mime_type, - url: '', - transfer_method: TransferMethod.local_file, - credential_id: currentCredentialId, - } - datasourceInfoList.push(documentInfo) - } - if (datasourceType === DatasourceType.onlineDocument) { - const { workspace_id, ...rest } = previewOnlineDocumentRef.current! - const documentInfo = { - workspace_id, - page: rest, - credential_id: currentCredentialId, - } - datasourceInfoList.push(documentInfo) - } - if (datasourceType === DatasourceType.websiteCrawl) { - datasourceInfoList.push({ - ...previewWebsitePageRef.current!, - credential_id: currentCredentialId, - }) - } - if (datasourceType === DatasourceType.onlineDrive) { - const { bucket } = dataSourceStore.getState() - const { id, type, name } = previewOnlineDriveFileRef.current! 
- datasourceInfoList.push({ - bucket, - id, - name, - type, - credential_id: currentCredentialId, - }) - } - await runPublishedPipeline({ - pipeline_id: pipelineId!, - inputs: data, - start_node_id: datasource.nodeId, - datasource_type: datasourceType as DatasourceType, - datasource_info_list: datasourceInfoList, - is_preview: true, - }, { - onSuccess: (res) => { - setEstimateData((res as PublishedPipelineRunPreviewResponse).data.outputs) - }, - }) - }, [datasource, datasourceType, runPublishedPipeline, pipelineId, dataSourceStore]) - - const handleProcess = useCallback(async (data: Record) => { - if (!datasource) - return - const { currentCredentialId } = dataSourceStore.getState() - const datasourceInfoList: Record[] = [] - if (datasourceType === DatasourceType.localFile) { - const { - localFileList, - } = dataSourceStore.getState() - localFileList.forEach((file) => { - const { id, name, type, size, extension, mime_type } = file.file - const documentInfo = { - related_id: id, - name, - type, - size, - extension, - mime_type, - url: '', - transfer_method: TransferMethod.local_file, - credential_id: currentCredentialId, - } - datasourceInfoList.push(documentInfo) - }) - } - if (datasourceType === DatasourceType.onlineDocument) { - const { - onlineDocuments, - } = dataSourceStore.getState() - onlineDocuments.forEach((page) => { - const { workspace_id, ...rest } = page - const documentInfo = { - workspace_id, - page: rest, - credential_id: currentCredentialId, - } - datasourceInfoList.push(documentInfo) - }) - } - if (datasourceType === DatasourceType.websiteCrawl) { - const { - websitePages, - } = dataSourceStore.getState() - websitePages.forEach((websitePage) => { - datasourceInfoList.push({ - ...websitePage, - credential_id: currentCredentialId, - }) - }) - } - if (datasourceType === DatasourceType.onlineDrive) { - const { - bucket, - selectedFileIds, - onlineDriveFileList, - } = dataSourceStore.getState() - selectedFileIds.forEach((id) => { - const file = 
onlineDriveFileList.find(file => file.id === id) - datasourceInfoList.push({ - bucket, - id: file?.id, - name: file?.name, - type: file?.type, - credential_id: currentCredentialId, - }) - }) - } - await runPublishedPipeline({ - pipeline_id: pipelineId!, - inputs: data, - start_node_id: datasource.nodeId, - datasource_type: datasourceType as DatasourceType, - datasource_info_list: datasourceInfoList, - is_preview: false, - }, { - onSuccess: (res) => { - setBatchId((res as PublishedPipelineRunResponse).batch || '') - setDocuments((res as PublishedPipelineRunResponse).documents || []) - handleNextStep() - trackEvent('dataset_document_added', { - data_source_type: datasourceType, - indexing_technique: 'pipeline', - }) - }, - }) - }, [dataSourceStore, datasource, datasourceType, handleNextStep, pipelineId, runPublishedPipeline]) - - const onClickProcess = useCallback(() => { - isPreview.current = false - formRef.current?.submit() - }, []) - - const onClickPreview = useCallback(() => { - isPreview.current = true - formRef.current?.submit() - }, []) - - const handleSubmit = useCallback((data: Record) => { - if (isPreview.current) - handlePreviewChunks(data) - else - handleProcess(data) - }, [handlePreviewChunks, handleProcess]) - - const handlePreviewFileChange = useCallback((file: DocumentItem) => { - const { previewLocalFileRef } = dataSourceStore.getState() - previewLocalFileRef.current = file - onClickPreview() - }, [dataSourceStore, onClickPreview]) - - const handlePreviewOnlineDocumentChange = useCallback((page: NotionPage) => { - const { previewOnlineDocumentRef } = dataSourceStore.getState() - previewOnlineDocumentRef.current = page - onClickPreview() - }, [dataSourceStore, onClickPreview]) - - const handlePreviewWebsiteChange = useCallback((website: CrawlResultItem) => { - const { previewWebsitePageRef } = dataSourceStore.getState() - previewWebsitePageRef.current = website - onClickPreview() - }, [dataSourceStore, onClickPreview]) - - const 
handlePreviewOnlineDriveFileChange = useCallback((file: OnlineDriveFile) => { - const { previewOnlineDriveFileRef } = dataSourceStore.getState() - previewOnlineDriveFileRef.current = file - onClickPreview() - }, [dataSourceStore, onClickPreview]) - - const handleSelectAll = useCallback(() => { - const { - onlineDocuments, - onlineDriveFileList, - selectedFileIds, - setOnlineDocuments, - setSelectedFileIds, - setSelectedPagesId, - } = dataSourceStore.getState() - if (datasourceType === DatasourceType.onlineDocument) { - const allIds = currentWorkspace?.pages.map(page => page.page_id) || [] - if (onlineDocuments.length < allIds.length) { - const selectedPages = Array.from(allIds).map(pageId => PagesMapAndSelectedPagesId[pageId]) - setOnlineDocuments(selectedPages) - setSelectedPagesId(new Set(allIds)) - } - else { - setOnlineDocuments([]) - setSelectedPagesId(new Set()) - } - } - if (datasourceType === DatasourceType.onlineDrive) { - const allKeys = onlineDriveFileList.filter((item) => { - return item.type !== 'bucket' - }).map(file => file.id) - if (selectedFileIds.length < allKeys.length) - setSelectedFileIds(allKeys) - else - setSelectedFileIds([]) - } - }, [PagesMapAndSelectedPagesId, currentWorkspace?.pages, dataSourceStore, datasourceType]) - - const clearDataSourceData = useCallback((dataSource: Datasource) => { - const providerType = dataSource.nodeData.provider_type - if (providerType === DatasourceType.onlineDocument) - clearOnlineDocumentData() - else if (providerType === DatasourceType.websiteCrawl) - clearWebsiteCrawlData() - else if (providerType === DatasourceType.onlineDrive) - clearOnlineDriveData() - }, [clearOnlineDocumentData, clearOnlineDriveData, clearWebsiteCrawlData]) - - const handleSwitchDataSource = useCallback((dataSource: Datasource) => { - const { - setCurrentCredentialId, - currentNodeIdRef, - } = dataSourceStore.getState() - clearDataSourceData(dataSource) - setCurrentCredentialId('') - currentNodeIdRef.current = dataSource.nodeId - 
setDatasource(dataSource) - }, [clearDataSourceData, dataSourceStore]) - - const handleCredentialChange = useCallback((credentialId: string) => { - const { setCurrentCredentialId } = dataSourceStore.getState() - clearDataSourceData(datasource!) - setCurrentCredentialId(credentialId) - }, [clearDataSourceData, dataSourceStore, datasource]) - - if (isFetchingPipelineInfo) { - return ( - - ) - } + if (isFetchingPipelineInfo) + return return ( -
+
{ currentStep={currentStep} />
- { - currentStep === 1 && ( -
- []} - /> - {datasourceType === DatasourceType.localFile && ( - - )} - {datasourceType === DatasourceType.onlineDocument && ( - - )} - {datasourceType === DatasourceType.websiteCrawl && ( - - )} - {datasourceType === DatasourceType.onlineDrive && ( - - )} - {isShowVectorSpaceFull && ( - - )} - - { - !supportBatchUpload && datasourceType === DatasourceType.localFile && localFileList.length > 0 && ( - <> - - - - ) - } -
- ) - } - { - currentStep === 2 && ( - - ) - } - { - currentStep === 3 && ( - - ) - } + {currentStep === 1 && ( + []} + supportBatchUpload={supportBatchUpload} + localFileListLength={localFileList.length} + isShowVectorSpaceFull={isShowVectorSpaceFull} + showSelect={showSelect} + totalOptions={totalOptions} + selectedOptions={selectedOptions} + tip={tip} + nextBtnDisabled={nextBtnDisabled} + onSelectDataSource={handleSwitchDataSource} + onCredentialChange={handleCredentialChange} + onSelectAll={handleSelectAll} + onNextStep={handleNextStep} + /> + )} + {currentStep === 2 && ( + + )} + {currentStep === 3 && ( + + )}
- {/* Preview */} - { - currentStep === 1 && ( -
-
- {currentLocalFile && ( - - )} - {currentDocument && ( - - )} - {currentWebsite && ( - - )} -
-
- ) - } - { - currentStep === 2 && ( -
-
- file.file)} - onlineDocuments={onlineDocuments} - websitePages={websitePages} - onlineDriveFiles={selectedOnlineDriveFileList} - isIdle={isIdle} - isPending={isPending && isPreview.current} - estimateData={estimateData} - onPreview={onClickPreview} - handlePreviewFileChange={handlePreviewFileChange} - handlePreviewOnlineDocumentChange={handlePreviewOnlineDocumentChange} - handlePreviewWebsitePageChange={handlePreviewWebsiteChange} - handlePreviewOnlineDriveFileChange={handlePreviewOnlineDriveFileChange} - /> -
-
- ) - } + + {/* Preview Panel */} + {currentStep === 1 && ( + + )} + {currentStep === 2 && ( + + )} + + {/* Plan Upgrade Modal */} {isShowPlanUpgradeModal && ( void + hidePreviewOnlineDocument: () => void + hideWebsitePreview: () => void +} + +export const StepOnePreview = memo(({ + datasource, + currentLocalFile, + currentDocument, + currentWebsite, + hidePreviewLocalFile, + hidePreviewOnlineDocument, + hideWebsitePreview, +}: StepOnePreviewProps) => { + return ( +
+
+ {currentLocalFile && ( + + )} + {currentDocument && ( + + )} + {currentWebsite && ( + + )} +
+
+ ) +}) +StepOnePreview.displayName = 'StepOnePreview' + +type StepTwoPreviewProps = { + datasourceType: string | undefined + localFileList: FileItem[] + onlineDocuments: (NotionPage & { workspace_id: string })[] + websitePages: CrawlResultItem[] + selectedOnlineDriveFileList: OnlineDriveFile[] + isIdle: boolean + isPendingPreview: boolean + estimateData: FileIndexingEstimateResponse | undefined + onPreview: () => void + handlePreviewFileChange: (file: DocumentItem) => void + handlePreviewOnlineDocumentChange: (page: NotionPage) => void + handlePreviewWebsitePageChange: (website: CrawlResultItem) => void + handlePreviewOnlineDriveFileChange: (file: OnlineDriveFile) => void +} + +export const StepTwoPreview = memo(({ + datasourceType, + localFileList, + onlineDocuments, + websitePages, + selectedOnlineDriveFileList, + isIdle, + isPendingPreview, + estimateData, + onPreview, + handlePreviewFileChange, + handlePreviewOnlineDocumentChange, + handlePreviewWebsitePageChange, + handlePreviewOnlineDriveFileChange, +}: StepTwoPreviewProps) => { + return ( +
+
+ file.file)} + onlineDocuments={onlineDocuments} + websitePages={websitePages} + onlineDriveFiles={selectedOnlineDriveFileList} + isIdle={isIdle} + isPending={isPendingPreview} + estimateData={estimateData} + onPreview={onPreview} + handlePreviewFileChange={handlePreviewFileChange} + handlePreviewOnlineDocumentChange={handlePreviewOnlineDocumentChange} + handlePreviewWebsitePageChange={handlePreviewWebsitePageChange} + handlePreviewOnlineDriveFileChange={handlePreviewOnlineDriveFileChange} + /> +
+
+ ) +}) +StepTwoPreview.displayName = 'StepTwoPreview' diff --git a/web/app/components/datasets/documents/create-from-pipeline/steps/step-one-content.tsx b/web/app/components/datasets/documents/create-from-pipeline/steps/step-one-content.tsx new file mode 100644 index 0000000000..8eed6d00b9 --- /dev/null +++ b/web/app/components/datasets/documents/create-from-pipeline/steps/step-one-content.tsx @@ -0,0 +1,110 @@ +'use client' +import type { Datasource } from '@/app/components/rag-pipeline/components/panel/test-run/types' +import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types' +import type { Node } from '@/app/components/workflow/types' +import { memo } from 'react' +import Divider from '@/app/components/base/divider' +import VectorSpaceFull from '@/app/components/billing/vector-space-full' +import LocalFile from '@/app/components/datasets/documents/create-from-pipeline/data-source/local-file' +import OnlineDocuments from '@/app/components/datasets/documents/create-from-pipeline/data-source/online-documents' +import OnlineDrive from '@/app/components/datasets/documents/create-from-pipeline/data-source/online-drive' +import WebsiteCrawl from '@/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl' +import { DatasourceType } from '@/models/pipeline' +import UpgradeCard from '../../../create/step-one/upgrade-card' +import Actions from '../actions' +import DataSourceOptions from '../data-source-options' + +type StepOneContentProps = { + datasource: Datasource | undefined + datasourceType: string | undefined + pipelineNodes: Node[] + supportBatchUpload: boolean + localFileListLength: number + isShowVectorSpaceFull: boolean + showSelect: boolean + totalOptions: number | undefined + selectedOptions: number | undefined + tip: string + nextBtnDisabled: boolean + onSelectDataSource: (dataSource: Datasource) => void + onCredentialChange: (credentialId: string) => void + onSelectAll: () => void + onNextStep: () => 
void +} + +const StepOneContent = ({ + datasource, + datasourceType, + pipelineNodes, + supportBatchUpload, + localFileListLength, + isShowVectorSpaceFull, + showSelect, + totalOptions, + selectedOptions, + tip, + nextBtnDisabled, + onSelectDataSource, + onCredentialChange, + onSelectAll, + onNextStep, +}: StepOneContentProps) => { + const showUpgradeCard = !supportBatchUpload + && datasourceType === DatasourceType.localFile + && localFileListLength > 0 + + return ( +
+ + {datasourceType === DatasourceType.localFile && ( + + )} + {datasourceType === DatasourceType.onlineDocument && ( + + )} + {datasourceType === DatasourceType.websiteCrawl && ( + + )} + {datasourceType === DatasourceType.onlineDrive && ( + + )} + {isShowVectorSpaceFull && } + + {showUpgradeCard && ( + <> + + + + )} +
+ ) +} + +export default memo(StepOneContent) diff --git a/web/app/components/datasets/documents/create-from-pipeline/steps/step-three-content.tsx b/web/app/components/datasets/documents/create-from-pipeline/steps/step-three-content.tsx new file mode 100644 index 0000000000..f4b15888a9 --- /dev/null +++ b/web/app/components/datasets/documents/create-from-pipeline/steps/step-three-content.tsx @@ -0,0 +1,23 @@ +'use client' +import type { InitialDocumentDetail } from '@/models/pipeline' +import { memo } from 'react' +import Processing from '../processing' + +type StepThreeContentProps = { + batchId: string + documents: InitialDocumentDetail[] +} + +const StepThreeContent = ({ + batchId, + documents, +}: StepThreeContentProps) => { + return ( + + ) +} + +export default memo(StepThreeContent) diff --git a/web/app/components/datasets/documents/create-from-pipeline/steps/step-two-content.tsx b/web/app/components/datasets/documents/create-from-pipeline/steps/step-two-content.tsx new file mode 100644 index 0000000000..ca95c9f354 --- /dev/null +++ b/web/app/components/datasets/documents/create-from-pipeline/steps/step-two-content.tsx @@ -0,0 +1,38 @@ +'use client' +import type { RefObject } from 'react' +import { memo } from 'react' +import ProcessDocuments from '../process-documents' + +type StepTwoContentProps = { + formRef: RefObject<{ submit: () => void } | null> + dataSourceNodeId: string + isRunning: boolean + onProcess: () => void + onPreview: () => void + onSubmit: (data: Record) => void + onBack: () => void +} + +const StepTwoContent = ({ + formRef, + dataSourceNodeId, + isRunning, + onProcess, + onPreview, + onSubmit, + onBack, +}: StepTwoContentProps) => { + return ( + + ) +} + +export default memo(StepTwoContent) diff --git a/web/app/components/datasets/documents/create-from-pipeline/utils/datasource-info-builder.ts b/web/app/components/datasets/documents/create-from-pipeline/utils/datasource-info-builder.ts new file mode 100644 index 0000000000..c9f4808bbc --- 
/dev/null +++ b/web/app/components/datasets/documents/create-from-pipeline/utils/datasource-info-builder.ts @@ -0,0 +1,63 @@ +import type { NotionPage } from '@/models/common' +import type { CrawlResultItem, CustomFile as File } from '@/models/datasets' +import type { OnlineDriveFile } from '@/models/pipeline' +import { TransferMethod } from '@/types/app' + +/** + * Build datasource info for local files + */ +export const buildLocalFileDatasourceInfo = ( + file: File, + credentialId: string, +): Record => ({ + related_id: file.id, + name: file.name, + type: file.type, + size: file.size, + extension: file.extension, + mime_type: file.mime_type, + url: '', + transfer_method: TransferMethod.local_file, + credential_id: credentialId, +}) + +/** + * Build datasource info for online documents + */ +export const buildOnlineDocumentDatasourceInfo = ( + page: NotionPage & { workspace_id: string }, + credentialId: string, +): Record => { + const { workspace_id, ...rest } = page + return { + workspace_id, + page: rest, + credential_id: credentialId, + } +} + +/** + * Build datasource info for website crawl + */ +export const buildWebsiteCrawlDatasourceInfo = ( + page: CrawlResultItem, + credentialId: string, +): Record => ({ + ...page, + credential_id: credentialId, +}) + +/** + * Build datasource info for online drive + */ +export const buildOnlineDriveDatasourceInfo = ( + file: OnlineDriveFile, + bucket: string, + credentialId: string, +): Record => ({ + bucket, + id: file.id, + name: file.name, + type: file.type, + credential_id: credentialId, +}) From ab078380a3e11bdd14411dee476e378e49501e78 Mon Sep 17 00:00:00 2001 From: Coding On Star <447357187@qq.com> Date: Thu, 15 Jan 2026 10:33:58 +0800 Subject: [PATCH 07/25] feat(web): refactor documents component structure and enhance functionality (#30854) Co-authored-by: CodingOnStar --- .../documents/components/documents-header.tsx | 201 ++++++++ .../documents/components/empty-element.tsx | 41 ++ 
.../datasets/documents/components/icons.tsx | 34 ++ .../documents/{ => components}/list.tsx | 12 +- .../documents/{ => components}/operations.tsx | 16 +- .../{ => components}/rename-modal.tsx | 2 +- .../datasets/documents/detail/index.tsx | 2 +- .../hooks/use-documents-page-state.ts | 197 ++++++++ .../components/datasets/documents/index.tsx | 468 +++++------------- 9 files changed, 604 insertions(+), 369 deletions(-) create mode 100644 web/app/components/datasets/documents/components/documents-header.tsx create mode 100644 web/app/components/datasets/documents/components/empty-element.tsx create mode 100644 web/app/components/datasets/documents/components/icons.tsx rename web/app/components/datasets/documents/{ => components}/list.tsx (97%) rename web/app/components/datasets/documents/{ => components}/operations.tsx (96%) rename web/app/components/datasets/documents/{ => components}/rename-modal.tsx (97%) create mode 100644 web/app/components/datasets/documents/hooks/use-documents-page-state.ts diff --git a/web/app/components/datasets/documents/components/documents-header.tsx b/web/app/components/datasets/documents/components/documents-header.tsx new file mode 100644 index 0000000000..ed97742fdd --- /dev/null +++ b/web/app/components/datasets/documents/components/documents-header.tsx @@ -0,0 +1,201 @@ +'use client' +import type { FC } from 'react' +import type { Item } from '@/app/components/base/select' +import type { BuiltInMetadataItem, MetadataItemWithValueLength } from '@/app/components/datasets/metadata/types' +import type { SortType } from '@/service/datasets' +import { PlusIcon } from '@heroicons/react/24/solid' +import { RiDraftLine, RiExternalLinkLine } from '@remixicon/react' +import { useMemo } from 'react' +import { useTranslation } from 'react-i18next' +import Button from '@/app/components/base/button' +import Chip from '@/app/components/base/chip' +import Input from '@/app/components/base/input' +import Sort from '@/app/components/base/sort' +import 
AutoDisabledDocument from '@/app/components/datasets/common/document-status-with-action/auto-disabled-document' +import IndexFailed from '@/app/components/datasets/common/document-status-with-action/index-failed' +import StatusWithAction from '@/app/components/datasets/common/document-status-with-action/status-with-action' +import DatasetMetadataDrawer from '@/app/components/datasets/metadata/metadata-dataset/dataset-metadata-drawer' +import { useDocLink } from '@/context/i18n' +import { DataSourceType } from '@/models/datasets' +import { useIndexStatus } from '../status-item/hooks' + +type DocumentsHeaderProps = { + // Dataset info + datasetId: string + dataSourceType?: DataSourceType + embeddingAvailable: boolean + isFreePlan: boolean + + // Filter & sort + statusFilterValue: string + sortValue: SortType + inputValue: string + onStatusFilterChange: (value: string) => void + onStatusFilterClear: () => void + onSortChange: (value: string) => void + onInputChange: (value: string) => void + + // Metadata modal + isShowEditMetadataModal: boolean + showEditMetadataModal: () => void + hideEditMetadataModal: () => void + datasetMetaData?: MetadataItemWithValueLength[] + builtInMetaData?: BuiltInMetadataItem[] + builtInEnabled: boolean + onAddMetaData: (payload: BuiltInMetadataItem) => Promise + onRenameMetaData: (payload: MetadataItemWithValueLength) => Promise + onDeleteMetaData: (metaDataId: string) => Promise + onBuiltInEnabledChange: (enabled: boolean) => void + + // Actions + onAddDocument: () => void +} + +const DocumentsHeader: FC = ({ + datasetId, + dataSourceType, + embeddingAvailable, + isFreePlan, + statusFilterValue, + sortValue, + inputValue, + onStatusFilterChange, + onStatusFilterClear, + onSortChange, + onInputChange, + isShowEditMetadataModal, + showEditMetadataModal, + hideEditMetadataModal, + datasetMetaData, + builtInMetaData, + builtInEnabled, + onAddMetaData, + onRenameMetaData, + onDeleteMetaData, + onBuiltInEnabledChange, + onAddDocument, +}) => { 
+ const { t } = useTranslation() + const docLink = useDocLink() + const DOC_INDEX_STATUS_MAP = useIndexStatus() + + const isDataSourceNotion = dataSourceType === DataSourceType.NOTION + const isDataSourceWeb = dataSourceType === DataSourceType.WEB + + const statusFilterItems: Item[] = useMemo(() => [ + { value: 'all', name: t('list.index.all', { ns: 'datasetDocuments' }) as string }, + { value: 'queuing', name: DOC_INDEX_STATUS_MAP.queuing.text }, + { value: 'indexing', name: DOC_INDEX_STATUS_MAP.indexing.text }, + { value: 'paused', name: DOC_INDEX_STATUS_MAP.paused.text }, + { value: 'error', name: DOC_INDEX_STATUS_MAP.error.text }, + { value: 'available', name: DOC_INDEX_STATUS_MAP.available.text }, + { value: 'enabled', name: DOC_INDEX_STATUS_MAP.enabled.text }, + { value: 'disabled', name: DOC_INDEX_STATUS_MAP.disabled.text }, + { value: 'archived', name: DOC_INDEX_STATUS_MAP.archived.text }, + ], [DOC_INDEX_STATUS_MAP, t]) + + const sortItems: Item[] = useMemo(() => [ + { value: 'created_at', name: t('list.sort.uploadTime', { ns: 'datasetDocuments' }) as string }, + { value: 'hit_count', name: t('list.sort.hitCount', { ns: 'datasetDocuments' }) as string }, + ], [t]) + + // Determine add button text based on data source type + const addButtonText = useMemo(() => { + if (isDataSourceNotion) + return t('list.addPages', { ns: 'datasetDocuments' }) + if (isDataSourceWeb) + return t('list.addUrl', { ns: 'datasetDocuments' }) + return t('list.addFile', { ns: 'datasetDocuments' }) + }, [isDataSourceNotion, isDataSourceWeb, t]) + + return ( + <> + {/* Title section */} +
+

+ {t('list.title', { ns: 'datasetDocuments' })} +

+
+ {t('list.desc', { ns: 'datasetDocuments' })} + + {t('list.learnMore', { ns: 'datasetDocuments' })} + + +
+
+ + {/* Toolbar section */} +
+ {/* Left: Filters */} +
+ onStatusFilterChange(item?.value ? String(item.value) : '')} + onClear={onStatusFilterClear} + /> + onInputChange(e.target.value)} + onClear={() => onInputChange('')} + /> +
+ onSortChange(String(value))} + /> +
+ + {/* Right: Actions */} +
+ {!isFreePlan && } + + {!embeddingAvailable && ( + + )} + {embeddingAvailable && ( + + )} + {isShowEditMetadataModal && ( + + )} + {embeddingAvailable && ( + + )} +
+
+ + ) +} + +export default DocumentsHeader diff --git a/web/app/components/datasets/documents/components/empty-element.tsx b/web/app/components/datasets/documents/components/empty-element.tsx new file mode 100644 index 0000000000..40c4bbdb9e --- /dev/null +++ b/web/app/components/datasets/documents/components/empty-element.tsx @@ -0,0 +1,41 @@ +'use client' +import type { FC } from 'react' +import { PlusIcon } from '@heroicons/react/24/solid' +import { useTranslation } from 'react-i18next' +import Button from '@/app/components/base/button' +import s from '../style.module.css' +import { FolderPlusIcon, NotionIcon, ThreeDotsIcon } from './icons' + +type EmptyElementProps = { + canAdd: boolean + onClick: () => void + type?: 'upload' | 'sync' +} + +const EmptyElement: FC = ({ canAdd = true, onClick, type = 'upload' }) => { + const { t } = useTranslation() + return ( +
+
+
+ {type === 'upload' ? : } +
+ + {t('list.empty.title', { ns: 'datasetDocuments' })} + + +
+ {t(`list.empty.${type}.tip`, { ns: 'datasetDocuments' })} +
+ {type === 'upload' && canAdd && ( + + )} +
+
+ ) +} + +export default EmptyElement diff --git a/web/app/components/datasets/documents/components/icons.tsx b/web/app/components/datasets/documents/components/icons.tsx new file mode 100644 index 0000000000..6a862f12f0 --- /dev/null +++ b/web/app/components/datasets/documents/components/icons.tsx @@ -0,0 +1,34 @@ +import type * as React from 'react' + +export const FolderPlusIcon = ({ className }: React.SVGProps) => { + return ( + + + + ) +} + +export const ThreeDotsIcon = ({ className }: React.SVGProps) => { + return ( + + + + ) +} + +export const NotionIcon = ({ className }: React.SVGProps) => { + return ( + + + + + + + + + + + + + ) +} diff --git a/web/app/components/datasets/documents/list.tsx b/web/app/components/datasets/documents/components/list.tsx similarity index 97% rename from web/app/components/datasets/documents/list.tsx rename to web/app/components/datasets/documents/components/list.tsx index 5fd6cd3a70..2bf9c278c4 100644 --- a/web/app/components/datasets/documents/list.tsx +++ b/web/app/components/datasets/documents/components/list.tsx @@ -16,13 +16,16 @@ import * as React from 'react' import { useCallback, useEffect, useMemo, useState } from 'react' import { useTranslation } from 'react-i18next' import Checkbox from '@/app/components/base/checkbox' +import FileTypeIcon from '@/app/components/base/file-uploader/file-type-icon' import NotionIcon from '@/app/components/base/notion-icon' import Pagination from '@/app/components/base/pagination' import Toast from '@/app/components/base/toast' import Tooltip from '@/app/components/base/tooltip' +import ChunkingModeLabel from '@/app/components/datasets/common/chunking-mode-label' import { normalizeStatusForQuery } from '@/app/components/datasets/documents/status-filter' import { extensionToFileType } from '@/app/components/datasets/hit-testing/utils/extension-to-file-type' import EditMetadataBatchModal from '@/app/components/datasets/metadata/edit-metadata-batch/modal' +import 
useBatchEditDocumentMetadata from '@/app/components/datasets/metadata/hooks/use-batch-edit-document-metadata' import { useDatasetDetailContextWithSelector as useDatasetDetailContext } from '@/context/dataset-detail' import useTimestamp from '@/hooks/use-timestamp' import { ChunkingMode, DataSourceType, DocumentActionType } from '@/models/datasets' @@ -31,14 +34,11 @@ import { useDocumentArchive, useDocumentBatchRetryIndex, useDocumentDelete, useD import { asyncRunSafe } from '@/utils' import { cn } from '@/utils/classnames' import { formatNumber } from '@/utils/format' -import FileTypeIcon from '../../base/file-uploader/file-type-icon' -import ChunkingModeLabel from '../common/chunking-mode-label' -import useBatchEditDocumentMetadata from '../metadata/hooks/use-batch-edit-document-metadata' -import BatchAction from './detail/completed/common/batch-action' +import BatchAction from '../detail/completed/common/batch-action' +import StatusItem from '../status-item' +import s from '../style.module.css' import Operations from './operations' import RenameModal from './rename-modal' -import StatusItem from './status-item' -import s from './style.module.css' export const renderTdValue = (value: string | number | null, isEmptyStyle = false) => { return ( diff --git a/web/app/components/datasets/documents/operations.tsx b/web/app/components/datasets/documents/components/operations.tsx similarity index 96% rename from web/app/components/datasets/documents/operations.tsx rename to web/app/components/datasets/documents/components/operations.tsx index 93afec6f8e..0d3c40c053 100644 --- a/web/app/components/datasets/documents/operations.tsx +++ b/web/app/components/datasets/documents/components/operations.tsx @@ -1,4 +1,4 @@ -import type { OperationName } from './types' +import type { OperationName } from '../types' import type { CommonResponse } from '@/models/common' import { RiArchive2Line, @@ -17,6 +17,12 @@ import * as React from 'react' import { useCallback, useState } from 
'react' import { useTranslation } from 'react-i18next' import { useContext } from 'use-context-selector' +import Confirm from '@/app/components/base/confirm' +import Divider from '@/app/components/base/divider' +import CustomPopover from '@/app/components/base/popover' +import Switch from '@/app/components/base/switch' +import { ToastContext } from '@/app/components/base/toast' +import Tooltip from '@/app/components/base/tooltip' import { DataSourceType, DocumentActionType } from '@/models/datasets' import { useDocumentArchive, @@ -31,14 +37,8 @@ import { } from '@/service/knowledge/use-document' import { asyncRunSafe } from '@/utils' import { cn } from '@/utils/classnames' -import Confirm from '../../base/confirm' -import Divider from '../../base/divider' -import CustomPopover from '../../base/popover' -import Switch from '../../base/switch' -import { ToastContext } from '../../base/toast' -import Tooltip from '../../base/tooltip' +import s from '../style.module.css' import RenameModal from './rename-modal' -import s from './style.module.css' type OperationsProps = { embeddingAvailable: boolean diff --git a/web/app/components/datasets/documents/rename-modal.tsx b/web/app/components/datasets/documents/components/rename-modal.tsx similarity index 97% rename from web/app/components/datasets/documents/rename-modal.tsx rename to web/app/components/datasets/documents/components/rename-modal.tsx index cf3b5a05a1..a119a2da9e 100644 --- a/web/app/components/datasets/documents/rename-modal.tsx +++ b/web/app/components/datasets/documents/components/rename-modal.tsx @@ -7,8 +7,8 @@ import { useTranslation } from 'react-i18next' import Button from '@/app/components/base/button' import Input from '@/app/components/base/input' import Modal from '@/app/components/base/modal' +import Toast from '@/app/components/base/toast' import { renameDocumentName } from '@/service/datasets' -import Toast from '../../base/toast' type Props = { datasetId: string diff --git 
a/web/app/components/datasets/documents/detail/index.tsx b/web/app/components/datasets/documents/detail/index.tsx index 3ded3f9fd4..ea2c453355 100644 --- a/web/app/components/datasets/documents/detail/index.tsx +++ b/web/app/components/datasets/documents/detail/index.tsx @@ -18,7 +18,7 @@ import { useDocumentDetail, useDocumentMetadata, useInvalidDocumentList } from ' import { useCheckSegmentBatchImportProgress, useChildSegmentListKey, useSegmentBatchImport, useSegmentListKey } from '@/service/knowledge/use-segment' import { useInvalid } from '@/service/use-base' import { cn } from '@/utils/classnames' -import Operations from '../operations' +import Operations from '../components/operations' import StatusItem from '../status-item' import BatchModal from './batch-modal' import Completed from './completed' diff --git a/web/app/components/datasets/documents/hooks/use-documents-page-state.ts b/web/app/components/datasets/documents/hooks/use-documents-page-state.ts new file mode 100644 index 0000000000..4fb227f717 --- /dev/null +++ b/web/app/components/datasets/documents/hooks/use-documents-page-state.ts @@ -0,0 +1,197 @@ +import type { DocumentListResponse } from '@/models/datasets' +import type { SortType } from '@/service/datasets' +import { useDebounce, useDebounceFn } from 'ahooks' +import { useCallback, useEffect, useMemo, useState } from 'react' +import { normalizeStatusForQuery, sanitizeStatusValue } from '../status-filter' +import useDocumentListQueryState from './use-document-list-query-state' + +/** + * Custom hook to manage documents page state including: + * - Search state (input value, debounced search value) + * - Filter state (status filter, sort value) + * - Pagination state (current page, limit) + * - Selection state (selected document ids) + * - Polling state (timer control for auto-refresh) + */ +export function useDocumentsPageState() { + const { query, updateQuery } = useDocumentListQueryState() + + // Search state + const [inputValue, 
setInputValue] = useState('') + const [searchValue, setSearchValue] = useState('') + const debouncedSearchValue = useDebounce(searchValue, { wait: 500 }) + + // Filter & sort state + const [statusFilterValue, setStatusFilterValue] = useState(() => sanitizeStatusValue(query.status)) + const [sortValue, setSortValue] = useState(query.sort) + const normalizedStatusFilterValue = useMemo( + () => normalizeStatusForQuery(statusFilterValue), + [statusFilterValue], + ) + + // Pagination state + const [currPage, setCurrPage] = useState(query.page - 1) + const [limit, setLimit] = useState(query.limit) + + // Selection state + const [selectedIds, setSelectedIds] = useState([]) + + // Polling state + const [timerCanRun, setTimerCanRun] = useState(true) + + // Initialize search value from URL on mount + useEffect(() => { + if (query.keyword) { + setInputValue(query.keyword) + setSearchValue(query.keyword) + } + }, []) // Only run on mount + + // Sync local state with URL query changes + useEffect(() => { + setCurrPage(query.page - 1) + setLimit(query.limit) + if (query.keyword !== searchValue) { + setInputValue(query.keyword) + setSearchValue(query.keyword) + } + setStatusFilterValue((prev) => { + const nextValue = sanitizeStatusValue(query.status) + return prev === nextValue ? 
prev : nextValue + }) + setSortValue(query.sort) + }, [query]) + + // Update URL when search changes + useEffect(() => { + if (debouncedSearchValue !== query.keyword) { + setCurrPage(0) + updateQuery({ keyword: debouncedSearchValue, page: 1 }) + } + }, [debouncedSearchValue, query.keyword, updateQuery]) + + // Clear selection when search changes + useEffect(() => { + if (searchValue !== query.keyword) + setSelectedIds([]) + }, [searchValue, query.keyword]) + + // Clear selection when status filter changes + useEffect(() => { + setSelectedIds([]) + }, [normalizedStatusFilterValue]) + + // Page change handler + const handlePageChange = useCallback((newPage: number) => { + setCurrPage(newPage) + updateQuery({ page: newPage + 1 }) + }, [updateQuery]) + + // Limit change handler + const handleLimitChange = useCallback((newLimit: number) => { + setLimit(newLimit) + setCurrPage(0) + updateQuery({ limit: newLimit, page: 1 }) + }, [updateQuery]) + + // Debounced search handler + const { run: handleSearch } = useDebounceFn(() => { + setSearchValue(inputValue) + }, { wait: 500 }) + + // Input change handler + const handleInputChange = useCallback((value: string) => { + setInputValue(value) + handleSearch() + }, [handleSearch]) + + // Status filter change handler + const handleStatusFilterChange = useCallback((value: string) => { + const selectedValue = sanitizeStatusValue(value) + setStatusFilterValue(selectedValue) + setCurrPage(0) + updateQuery({ status: selectedValue, page: 1 }) + }, [updateQuery]) + + // Status filter clear handler + const handleStatusFilterClear = useCallback(() => { + if (statusFilterValue === 'all') + return + setStatusFilterValue('all') + setCurrPage(0) + updateQuery({ status: 'all', page: 1 }) + }, [statusFilterValue, updateQuery]) + + // Sort change handler + const handleSortChange = useCallback((value: string) => { + const next = value as SortType + if (next === sortValue) + return + setSortValue(next) + setCurrPage(0) + updateQuery({ sort: next, 
page: 1 }) + }, [sortValue, updateQuery]) + + // Update polling state based on documents response + const updatePollingState = useCallback((documentsRes: DocumentListResponse | undefined) => { + if (!documentsRes?.data) + return + + let completedNum = 0 + documentsRes.data.forEach((documentItem) => { + const { indexing_status } = documentItem + const isEmbedded = indexing_status === 'completed' || indexing_status === 'paused' || indexing_status === 'error' + if (isEmbedded) + completedNum++ + }) + + const hasIncompleteDocuments = completedNum !== documentsRes.data.length + const transientStatuses = ['queuing', 'indexing', 'paused'] + const shouldForcePolling = normalizedStatusFilterValue === 'all' + ? false + : transientStatuses.includes(normalizedStatusFilterValue) + setTimerCanRun(shouldForcePolling || hasIncompleteDocuments) + }, [normalizedStatusFilterValue]) + + // Adjust page when total pages change + const adjustPageForTotal = useCallback((documentsRes: DocumentListResponse | undefined) => { + if (!documentsRes) + return + const totalPages = Math.ceil(documentsRes.total / limit) + if (currPage > 0 && currPage + 1 > totalPages) + handlePageChange(totalPages > 0 ? 
totalPages - 1 : 0) + }, [limit, currPage, handlePageChange]) + + return { + // Search state + inputValue, + searchValue, + debouncedSearchValue, + handleInputChange, + + // Filter & sort state + statusFilterValue, + sortValue, + normalizedStatusFilterValue, + handleStatusFilterChange, + handleStatusFilterClear, + handleSortChange, + + // Pagination state + currPage, + limit, + handlePageChange, + handleLimitChange, + + // Selection state + selectedIds, + setSelectedIds, + + // Polling state + timerCanRun, + updatePollingState, + adjustPageForTotal, + } +} + +export default useDocumentsPageState diff --git a/web/app/components/datasets/documents/index.tsx b/web/app/components/datasets/documents/index.tsx index dcfb0c4ab6..676e715f56 100644 --- a/web/app/components/datasets/documents/index.tsx +++ b/web/app/components/datasets/documents/index.tsx @@ -1,185 +1,55 @@ 'use client' import type { FC } from 'react' -import type { Item } from '@/app/components/base/select' -import type { SortType } from '@/service/datasets' -import { PlusIcon } from '@heroicons/react/24/solid' -import { RiDraftLine, RiExternalLinkLine } from '@remixicon/react' -import { useDebounce, useDebounceFn } from 'ahooks' import { useRouter } from 'next/navigation' -import * as React from 'react' -import { useCallback, useEffect, useMemo, useState } from 'react' -import { useTranslation } from 'react-i18next' -import Button from '@/app/components/base/button' -import Input from '@/app/components/base/input' +import { useCallback, useEffect } from 'react' import Loading from '@/app/components/base/loading' -import IndexFailed from '@/app/components/datasets/common/document-status-with-action/index-failed' import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail' -import { useDocLink } from '@/context/i18n' import { useProviderContext } from '@/context/provider-context' import { DataSourceType } from '@/models/datasets' import { useDocumentList, useInvalidDocumentDetail, 
useInvalidDocumentList } from '@/service/knowledge/use-document' import { useChildSegmentListKey, useSegmentListKey } from '@/service/knowledge/use-segment' import { useInvalid } from '@/service/use-base' -import { cn } from '@/utils/classnames' -import Chip from '../../base/chip' -import Sort from '../../base/sort' -import AutoDisabledDocument from '../common/document-status-with-action/auto-disabled-document' -import StatusWithAction from '../common/document-status-with-action/status-with-action' import useEditDocumentMetadata from '../metadata/hooks/use-edit-dataset-metadata' -import DatasetMetadataDrawer from '../metadata/metadata-dataset/dataset-metadata-drawer' -import useDocumentListQueryState from './hooks/use-document-list-query-state' -import List from './list' -import { normalizeStatusForQuery, sanitizeStatusValue } from './status-filter' -import { useIndexStatus } from './status-item/hooks' -import s from './style.module.css' - -const FolderPlusIcon = ({ className }: React.SVGProps) => { - return ( - - - - ) -} - -const ThreeDotsIcon = ({ className }: React.SVGProps) => { - return ( - - - - ) -} - -const NotionIcon = ({ className }: React.SVGProps) => { - return ( - - - - - - - - - - - - - ) -} - -const EmptyElement: FC<{ canAdd: boolean, onClick: () => void, type?: 'upload' | 'sync' }> = ({ canAdd = true, onClick, type = 'upload' }) => { - const { t } = useTranslation() - return ( -
-
-
- {type === 'upload' ? : } -
- - {t('list.empty.title', { ns: 'datasetDocuments' })} - - -
- {t(`list.empty.${type}.tip`, { ns: 'datasetDocuments' })} -
- {type === 'upload' && canAdd && ( - - )} -
-
- ) -} +import DocumentsHeader from './components/documents-header' +import EmptyElement from './components/empty-element' +import List from './components/list' +import useDocumentsPageState from './hooks/use-documents-page-state' type IDocumentsProps = { datasetId: string } const Documents: FC = ({ datasetId }) => { - const { t } = useTranslation() - const docLink = useDocLink() + const router = useRouter() const { plan } = useProviderContext() const isFreePlan = plan.type === 'sandbox' - const { query, updateQuery } = useDocumentListQueryState() - const [inputValue, setInputValue] = useState('') // the input value - const [searchValue, setSearchValue] = useState('') - const [statusFilterValue, setStatusFilterValue] = useState(() => sanitizeStatusValue(query.status)) - const [sortValue, setSortValue] = useState(query.sort) - const DOC_INDEX_STATUS_MAP = useIndexStatus() - const [currPage, setCurrPage] = React.useState(query.page - 1) // Convert to 0-based index - const [limit, setLimit] = useState(query.limit) - const router = useRouter() const dataset = useDatasetDetailContextWithSelector(s => s.dataset) - const [timerCanRun, setTimerCanRun] = useState(true) - const isDataSourceNotion = dataset?.data_source_type === DataSourceType.NOTION - const isDataSourceWeb = dataset?.data_source_type === DataSourceType.WEB - const isDataSourceFile = dataset?.data_source_type === DataSourceType.FILE const embeddingAvailable = !!dataset?.embedding_available - const debouncedSearchValue = useDebounce(searchValue, { wait: 500 }) - const statusFilterItems: Item[] = useMemo(() => [ - { value: 'all', name: t('list.index.all', { ns: 'datasetDocuments' }) as string }, - { value: 'queuing', name: DOC_INDEX_STATUS_MAP.queuing.text }, - { value: 'indexing', name: DOC_INDEX_STATUS_MAP.indexing.text }, - { value: 'paused', name: DOC_INDEX_STATUS_MAP.paused.text }, - { value: 'error', name: DOC_INDEX_STATUS_MAP.error.text }, - { value: 'available', name: DOC_INDEX_STATUS_MAP.available.text 
}, - { value: 'enabled', name: DOC_INDEX_STATUS_MAP.enabled.text }, - { value: 'disabled', name: DOC_INDEX_STATUS_MAP.disabled.text }, - { value: 'archived', name: DOC_INDEX_STATUS_MAP.archived.text }, - ], [DOC_INDEX_STATUS_MAP, t]) - const normalizedStatusFilterValue = useMemo(() => normalizeStatusForQuery(statusFilterValue), [statusFilterValue]) - const sortItems: Item[] = useMemo(() => [ - { value: 'created_at', name: t('list.sort.uploadTime', { ns: 'datasetDocuments' }) as string }, - { value: 'hit_count', name: t('list.sort.hitCount', { ns: 'datasetDocuments' }) as string }, - ], [t]) - - // Initialize search value from URL on mount - useEffect(() => { - if (query.keyword) { - setInputValue(query.keyword) - setSearchValue(query.keyword) - } - }, []) // Only run on mount - - // Sync local state with URL query changes - useEffect(() => { - setCurrPage(query.page - 1) - setLimit(query.limit) - if (query.keyword !== searchValue) { - setInputValue(query.keyword) - setSearchValue(query.keyword) - } - setStatusFilterValue((prev) => { - const nextValue = sanitizeStatusValue(query.status) - return prev === nextValue ? 
prev : nextValue - }) - setSortValue(query.sort) - }, [query]) - - // Update URL when pagination changes - const handlePageChange = (newPage: number) => { - setCurrPage(newPage) - updateQuery({ page: newPage + 1 }) // Pagination emits 0-based page, convert to 1-based for URL - } - - // Update URL when limit changes - const handleLimitChange = (newLimit: number) => { - setLimit(newLimit) - setCurrPage(0) // Reset to first page when limit changes - updateQuery({ limit: newLimit, page: 1 }) - } - - // Update URL when search changes - useEffect(() => { - if (debouncedSearchValue !== query.keyword) { - setCurrPage(0) // Reset to first page when search changes - updateQuery({ keyword: debouncedSearchValue, page: 1 }) - } - }, [debouncedSearchValue, query.keyword, updateQuery]) + // Use custom hook for page state management + const { + inputValue, + debouncedSearchValue, + handleInputChange, + statusFilterValue, + sortValue, + normalizedStatusFilterValue, + handleStatusFilterChange, + handleStatusFilterClear, + handleSortChange, + currPage, + limit, + handlePageChange, + handleLimitChange, + selectedIds, + setSelectedIds, + timerCanRun, + updatePollingState, + adjustPageForTotal, + } = useDocumentsPageState() + // Fetch document list const { data: documentsRes, isLoading: isListLoading } = useDocumentList({ datasetId, query: { @@ -192,16 +62,18 @@ const Documents: FC = ({ datasetId }) => { refetchInterval: timerCanRun ? 2500 : 0, }) - const invalidDocumentList = useInvalidDocumentList(datasetId) - + // Update polling state when documents change useEffect(() => { - if (documentsRes) { - const totalPages = Math.ceil(documentsRes.total / limit) - if (totalPages < currPage + 1) - setCurrPage(totalPages === 0 ? 
0 : totalPages - 1) - } - }, [documentsRes]) + updatePollingState(documentsRes) + }, [documentsRes, updatePollingState]) + // Adjust page when total changes + useEffect(() => { + adjustPageForTotal(documentsRes) + }, [documentsRes, adjustPageForTotal]) + + // Invalidation hooks + const invalidDocumentList = useInvalidDocumentList(datasetId) const invalidDocumentDetail = useInvalidDocumentDetail() const invalidChunkList = useInvalid(useSegmentListKey) const invalidChildChunkList = useInvalid(useChildSegmentListKey) @@ -213,73 +85,9 @@ const Documents: FC = ({ datasetId }) => { invalidChunkList() invalidChildChunkList() }, 5000) - }, []) - - useEffect(() => { - let completedNum = 0 - let percent = 0 - documentsRes?.data?.forEach((documentItem) => { - const { indexing_status, completed_segments, total_segments } = documentItem - const isEmbedded = indexing_status === 'completed' || indexing_status === 'paused' || indexing_status === 'error' - - if (isEmbedded) - completedNum++ - - const completedCount = completed_segments || 0 - const totalCount = total_segments || 0 - if (totalCount === 0 && completedCount === 0) { - percent = isEmbedded ? 100 : 0 - } - else { - const per = Math.round(completedCount * 100 / totalCount) - percent = per > 100 ? 100 : per - } - return { - ...documentItem, - percent, - } - }) - - const hasIncompleteDocuments = completedNum !== documentsRes?.data?.length - const transientStatuses = ['queuing', 'indexing', 'paused'] - const shouldForcePolling = normalizedStatusFilterValue === 'all' - ? 
false - : transientStatuses.includes(normalizedStatusFilterValue) - setTimerCanRun(shouldForcePolling || hasIncompleteDocuments) - }, [documentsRes, normalizedStatusFilterValue]) - const total = documentsRes?.total || 0 - - const routeToDocCreate = () => { - // if dataset is created from pipeline, go to create from pipeline page - if (dataset?.runtime_mode === 'rag_pipeline') { - router.push(`/datasets/${datasetId}/documents/create-from-pipeline`) - return - } - router.push(`/datasets/${datasetId}/documents/create`) - } - - const documentsList = documentsRes?.data - const [selectedIds, setSelectedIds] = useState([]) - - // Clear selection when search changes to avoid confusion - useEffect(() => { - if (searchValue !== query.keyword) - setSelectedIds([]) - }, [searchValue, query.keyword]) - - useEffect(() => { - setSelectedIds([]) - }, [normalizedStatusFilterValue]) - - const { run: handleSearch } = useDebounceFn(() => { - setSearchValue(inputValue) - }, { wait: 500 }) - - const handleInputChange = (value: string) => { - setInputValue(value) - handleSearch() - } + }, [invalidDocumentList, invalidDocumentDetail, invalidChunkList, invalidChildChunkList]) + // Metadata editing hook const { isShowEditModal: isShowEditMetadataModal, showEditModal: showEditMetadataModal, @@ -297,130 +105,84 @@ const Documents: FC = ({ datasetId }) => { onUpdateDocList: invalidDocumentList, }) + // Route to document creation page + const routeToDocCreate = useCallback(() => { + if (dataset?.runtime_mode === 'rag_pipeline') { + router.push(`/datasets/${datasetId}/documents/create-from-pipeline`) + return + } + router.push(`/datasets/${datasetId}/documents/create`) + }, [dataset?.runtime_mode, datasetId, router]) + + const total = documentsRes?.total || 0 + const documentsList = documentsRes?.data + + // Render content based on loading and data state + const renderContent = () => { + if (isListLoading) + return + + if (total > 0) { + return ( + + ) + } + + const isDataSourceNotion = 
dataset?.data_source_type === DataSourceType.NOTION + return ( + + ) + } + return (
-
-

{t('list.title', { ns: 'datasetDocuments' })}

-
- {t('list.desc', { ns: 'datasetDocuments' })} - - {t('list.learnMore', { ns: 'datasetDocuments' })} - - -
-
+
-
-
- { - const selectedValue = sanitizeStatusValue(item?.value ? String(item.value) : '') - setStatusFilterValue(selectedValue) - setCurrPage(0) - updateQuery({ status: selectedValue, page: 1 }) - }} - onClear={() => { - if (statusFilterValue === 'all') - return - setStatusFilterValue('all') - setCurrPage(0) - updateQuery({ status: 'all', page: 1 }) - }} - /> - handleInputChange(e.target.value)} - onClear={() => handleInputChange('')} - /> -
- { - const next = String(value) as SortType - if (next === sortValue) - return - setSortValue(next) - setCurrPage(0) - updateQuery({ sort: next, page: 1 }) - }} - /> -
-
- {!isFreePlan && } - - {!embeddingAvailable && } - {embeddingAvailable && ( - - )} - {isShowEditMetadataModal && ( - - )} - {embeddingAvailable && ( - - )} -
-
- {isListLoading - ? - // eslint-disable-next-line sonarjs/no-nested-conditional - : total > 0 - ? ( - - ) - : ( - - )} + {renderContent()}
) From 328897f81c8fc5651d15829b10d47c0b2356dcc4 Mon Sep 17 00:00:00 2001 From: Stephen Zhou <38493346+hyoban@users.noreply.github.com> Date: Thu, 15 Jan 2026 10:38:55 +0800 Subject: [PATCH 08/25] build: require node 24.13.0 (#30945) --- .github/workflows/style.yml | 2 +- .github/workflows/tool-test-sdks.yaml | 8 ++------ .github/workflows/translate-i18n-claude.yml | 2 +- .github/workflows/web-tests.yml | 2 +- web/.nvmrc | 2 +- web/Dockerfile | 2 +- web/README.md | 4 ++-- web/package.json | 3 --- 8 files changed, 9 insertions(+), 16 deletions(-) diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index 462ece303e..6c5d6f4135 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -90,7 +90,7 @@ jobs: uses: actions/setup-node@v6 if: steps.changed-files.outputs.any_changed == 'true' with: - node-version: 22 + node-version: 24 cache: pnpm cache-dependency-path: ./web/pnpm-lock.yaml diff --git a/.github/workflows/tool-test-sdks.yaml b/.github/workflows/tool-test-sdks.yaml index 0259ef2232..ec392cb3b2 100644 --- a/.github/workflows/tool-test-sdks.yaml +++ b/.github/workflows/tool-test-sdks.yaml @@ -16,10 +16,6 @@ jobs: name: unit test for Node.js SDK runs-on: ubuntu-latest - strategy: - matrix: - node-version: [16, 18, 20, 22] - defaults: run: working-directory: sdks/nodejs-client @@ -29,10 +25,10 @@ jobs: with: persist-credentials: false - - name: Use Node.js ${{ matrix.node-version }} + - name: Use Node.js uses: actions/setup-node@v6 with: - node-version: ${{ matrix.node-version }} + node-version: 24 cache: '' cache-dependency-path: 'pnpm-lock.yaml' diff --git a/.github/workflows/translate-i18n-claude.yml b/.github/workflows/translate-i18n-claude.yml index 003e7ffc6e..8344af9890 100644 --- a/.github/workflows/translate-i18n-claude.yml +++ b/.github/workflows/translate-i18n-claude.yml @@ -57,7 +57,7 @@ jobs: - name: Set up Node.js uses: actions/setup-node@v6 with: - node-version: 'lts/*' + node-version: 24 cache: pnpm 
cache-dependency-path: ./web/pnpm-lock.yaml diff --git a/.github/workflows/web-tests.yml b/.github/workflows/web-tests.yml index 0fd1d5d22b..65c958a453 100644 --- a/.github/workflows/web-tests.yml +++ b/.github/workflows/web-tests.yml @@ -31,7 +31,7 @@ jobs: - name: Setup Node.js uses: actions/setup-node@v6 with: - node-version: 22 + node-version: 24 cache: pnpm cache-dependency-path: ./web/pnpm-lock.yaml diff --git a/web/.nvmrc b/web/.nvmrc index 5767036af0..a45fd52cc5 100644 --- a/web/.nvmrc +++ b/web/.nvmrc @@ -1 +1 @@ -22.21.1 +24 diff --git a/web/Dockerfile b/web/Dockerfile index 9e08910a77..d71b1b6ba6 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -1,5 +1,5 @@ # base image -FROM node:22.21.1-alpine3.23 AS base +FROM node:24-alpine AS base LABEL maintainer="takatost@gmail.com" # if you located in China, you can use aliyun mirror to speed up diff --git a/web/README.md b/web/README.md index ae4338d7be..13780eec6c 100644 --- a/web/README.md +++ b/web/README.md @@ -8,8 +8,8 @@ This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next Before starting the web frontend service, please make sure the following environment is ready. 
-- [Node.js](https://nodejs.org) >= v22.11.x -- [pnpm](https://pnpm.io) v10.x +- [Node.js](https://nodejs.org) +- [pnpm](https://pnpm.io) > [!TIP] > It is recommended to install and enable Corepack to manage package manager versions automatically: diff --git a/web/package.json b/web/package.json index fab33f7608..bdbac2af83 100644 --- a/web/package.json +++ b/web/package.json @@ -10,9 +10,6 @@ "default": "./i18n-config/lib.client.ts" } }, - "engines": { - "node": ">=22.12.0" - }, "browserslist": [ "last 1 Chrome version", "last 1 Firefox version", From 3bee2ee067e4c19d884607bdd37d2f07698bea89 Mon Sep 17 00:00:00 2001 From: yyh <92089059+lyzno1@users.noreply.github.com> Date: Thu, 15 Jan 2026 10:41:18 +0800 Subject: [PATCH 09/25] refactor(contract): restructure console contracts with nested billing module (#30999) --- .../pricing/plans/cloud-plan-item/index.spec.tsx | 12 +++++++----- .../billing/pricing/plans/cloud-plan-item/index.tsx | 2 +- web/contract/{console.ts => console/billing.ts} | 13 ++----------- web/contract/console/system.ts | 11 +++++++++++ web/contract/router.ts | 9 ++++++--- web/service/use-billing.ts | 8 ++++---- 6 files changed, 31 insertions(+), 24 deletions(-) rename web/contract/{console.ts => console/billing.ts} (59%) create mode 100644 web/contract/console/system.ts diff --git a/web/app/components/billing/pricing/plans/cloud-plan-item/index.spec.tsx b/web/app/components/billing/pricing/plans/cloud-plan-item/index.spec.tsx index 680243a474..a7945a7203 100644 --- a/web/app/components/billing/pricing/plans/cloud-plan-item/index.spec.tsx +++ b/web/app/components/billing/pricing/plans/cloud-plan-item/index.spec.tsx @@ -27,7 +27,9 @@ vi.mock('@/service/billing', () => ({ vi.mock('@/service/client', () => ({ consoleClient: { - billingUrl: vi.fn(), + billing: { + invoices: vi.fn(), + }, }, })) @@ -43,7 +45,7 @@ vi.mock('../../assets', () => ({ const mockUseAppContext = useAppContext as Mock const mockUseAsyncWindowOpen = useAsyncWindowOpen as Mock 
-const mockBillingUrl = consoleClient.billingUrl as Mock +const mockBillingInvoices = consoleClient.billing.invoices as Mock const mockFetchSubscriptionUrls = fetchSubscriptionUrls as Mock const mockToastNotify = Toast.notify as Mock @@ -75,7 +77,7 @@ beforeEach(() => { vi.clearAllMocks() mockUseAppContext.mockReturnValue({ isCurrentWorkspaceManager: true }) mockUseAsyncWindowOpen.mockReturnValue(vi.fn(async open => await open())) - mockBillingUrl.mockResolvedValue({ url: 'https://billing.example' }) + mockBillingInvoices.mockResolvedValue({ url: 'https://billing.example' }) mockFetchSubscriptionUrls.mockResolvedValue({ url: 'https://subscription.example' }) assignedHref = '' }) @@ -149,7 +151,7 @@ describe('CloudPlanItem', () => { type: 'error', message: 'billing.buyPermissionDeniedTip', })) - expect(mockBillingUrl).not.toHaveBeenCalled() + expect(mockBillingInvoices).not.toHaveBeenCalled() }) it('should open billing portal when upgrading current paid plan', async () => { @@ -168,7 +170,7 @@ describe('CloudPlanItem', () => { fireEvent.click(screen.getByRole('button', { name: 'billing.plansCommon.currentPlan' })) await waitFor(() => { - expect(mockBillingUrl).toHaveBeenCalledTimes(1) + expect(mockBillingInvoices).toHaveBeenCalledTimes(1) }) expect(openWindow).toHaveBeenCalledTimes(1) }) diff --git a/web/app/components/billing/pricing/plans/cloud-plan-item/index.tsx b/web/app/components/billing/pricing/plans/cloud-plan-item/index.tsx index d9c4d3f75b..0807381bcd 100644 --- a/web/app/components/billing/pricing/plans/cloud-plan-item/index.tsx +++ b/web/app/components/billing/pricing/plans/cloud-plan-item/index.tsx @@ -77,7 +77,7 @@ const CloudPlanItem: FC = ({ try { if (isCurrentPaidPlan) { await openAsyncWindow(async () => { - const res = await consoleClient.billingUrl() + const res = await consoleClient.billing.invoices() if (res.url) return res.url throw new Error('Failed to open billing page') diff --git a/web/contract/console.ts b/web/contract/console/billing.ts 
similarity index 59% rename from web/contract/console.ts rename to web/contract/console/billing.ts index ec929d1357..08e1d0668f 100644 --- a/web/contract/console.ts +++ b/web/contract/console/billing.ts @@ -1,16 +1,7 @@ -import type { SystemFeatures } from '@/types/feature' import { type } from '@orpc/contract' -import { base } from './base' +import { base } from '../base' -export const systemFeaturesContract = base - .route({ - path: '/system-features', - method: 'GET', - }) - .input(type()) - .output(type()) - -export const billingUrlContract = base +export const invoicesContract = base .route({ path: '/billing/invoices', method: 'GET', diff --git a/web/contract/console/system.ts b/web/contract/console/system.ts new file mode 100644 index 0000000000..bce0a8226e --- /dev/null +++ b/web/contract/console/system.ts @@ -0,0 +1,11 @@ +import type { SystemFeatures } from '@/types/feature' +import { type } from '@orpc/contract' +import { base } from '../base' + +export const systemFeaturesContract = base + .route({ + path: '/system-features', + method: 'GET', + }) + .input(type()) + .output(type()) diff --git a/web/contract/router.ts b/web/contract/router.ts index d83cffb7b8..b1c100ab08 100644 --- a/web/contract/router.ts +++ b/web/contract/router.ts @@ -1,5 +1,6 @@ import type { InferContractRouterInputs } from '@orpc/contract' -import { billingUrlContract, bindPartnerStackContract, systemFeaturesContract } from './console' +import { bindPartnerStackContract, invoicesContract } from './console/billing' +import { systemFeaturesContract } from './console/system' import { collectionPluginsContract, collectionsContract, searchAdvancedContract } from './marketplace' export const marketplaceRouterContract = { @@ -12,8 +13,10 @@ export type MarketPlaceInputs = InferContractRouterInputs diff --git a/web/service/use-billing.ts b/web/service/use-billing.ts index 794b192d5c..84af077656 100644 --- a/web/service/use-billing.ts +++ b/web/service/use-billing.ts @@ -3,8 +3,8 @@ import 
{ consoleClient, consoleQuery } from '@/service/client' export const useBindPartnerStackInfo = () => { return useMutation({ - mutationKey: consoleQuery.bindPartnerStack.mutationKey(), - mutationFn: (data: { partnerKey: string, clickId: string }) => consoleClient.bindPartnerStack({ + mutationKey: consoleQuery.billing.bindPartnerStack.mutationKey(), + mutationFn: (data: { partnerKey: string, clickId: string }) => consoleClient.billing.bindPartnerStack({ params: { partnerKey: data.partnerKey }, body: { click_id: data.clickId }, }), @@ -13,10 +13,10 @@ export const useBindPartnerStackInfo = () => { export const useBillingUrl = (enabled: boolean) => { return useQuery({ - queryKey: consoleQuery.billingUrl.queryKey(), + queryKey: consoleQuery.billing.invoices.queryKey(), enabled, queryFn: async () => { - const res = await consoleClient.billingUrl() + const res = await consoleClient.billing.invoices() return res.url }, }) From 4955de59057b5f209cee9fae69ca57d0e67e83a5 Mon Sep 17 00:00:00 2001 From: Joseph Adams <105917501+josephadamsdev@users.noreply.github.com> Date: Thu, 15 Jan 2026 03:54:10 +0100 Subject: [PATCH 10/25] fix: validation error when uploading images with None URL values (#31012) Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> --- api/factories/file_factory.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py index bd71f18af2..0be836c8f1 100644 --- a/api/factories/file_factory.py +++ b/api/factories/file_factory.py @@ -115,7 +115,18 @@ def build_from_mappings( # TODO(QuantumGhost): Performance concern - each mapping triggers a separate database query. # Implement batch processing to reduce database load when handling multiple files. 
# Filter out None/empty mappings to avoid errors - valid_mappings = [m for m in mappings if m and m.get("transfer_method")] + def is_valid_mapping(m: Mapping[str, Any]) -> bool: + if not m or not m.get("transfer_method"): + return False + # For REMOTE_URL transfer method, ensure url or remote_url is provided and not None + transfer_method = m.get("transfer_method") + if transfer_method == FileTransferMethod.REMOTE_URL: + url = m.get("url") or m.get("remote_url") + if not url: + return False + return True + + valid_mappings = [m for m in mappings if is_valid_mapping(m)] files = [ build_from_mapping( mapping=mapping, From bdd8d5b470b9df7683317a9cb12a9add87cb214d Mon Sep 17 00:00:00 2001 From: Coding On Star <447357187@qq.com> Date: Thu, 15 Jan 2026 10:56:02 +0800 Subject: [PATCH 11/25] test: add unit tests for PluginPage and related components (#30908) Co-authored-by: CodingOnStar --- .../components/plugins/card/index.spec.tsx | 52 + .../plugins/plugin-page/context.spec.tsx | 123 ++ .../plugins/plugin-page/index.spec.tsx | 1041 +++++++++++++++++ .../components/plugins/plugin-page/index.tsx | 1 + .../components/plugin-task-list.tsx | 219 ++++ .../components/task-status-indicator.tsx | 96 ++ .../plugin-page/plugin-tasks/index.spec.tsx | 856 ++++++++++++++ .../plugin-page/plugin-tasks/index.tsx | 300 +---- .../plugin-page/use-reference-setting.spec.ts | 388 ++++++ .../plugins/plugin-page/use-uploader.spec.ts | 487 ++++++++ .../components/rag-pipeline/index.spec.tsx | 550 +++++++++ 11 files changed, 3870 insertions(+), 243 deletions(-) create mode 100644 web/app/components/plugins/plugin-page/context.spec.tsx create mode 100644 web/app/components/plugins/plugin-page/index.spec.tsx create mode 100644 web/app/components/plugins/plugin-page/plugin-tasks/components/plugin-task-list.tsx create mode 100644 web/app/components/plugins/plugin-page/plugin-tasks/components/task-status-indicator.tsx create mode 100644 
web/app/components/plugins/plugin-page/plugin-tasks/index.spec.tsx create mode 100644 web/app/components/plugins/plugin-page/use-reference-setting.spec.ts create mode 100644 web/app/components/plugins/plugin-page/use-uploader.spec.ts create mode 100644 web/app/components/rag-pipeline/index.spec.tsx diff --git a/web/app/components/plugins/card/index.spec.tsx b/web/app/components/plugins/card/index.spec.tsx index fd97534ec4..8dd7e67d69 100644 --- a/web/app/components/plugins/card/index.spec.tsx +++ b/web/app/components/plugins/card/index.spec.tsx @@ -897,6 +897,58 @@ describe('Icon', () => { const iconDiv = container.firstChild as HTMLElement expect(iconDiv).toHaveStyle({ backgroundImage: 'url(/icon?name=test&size=large)' }) }) + + it('should not render status indicators when src is object with installed=true', () => { + render() + + // Status indicators should not render for object src + expect(screen.queryByTestId('ri-check-line')).not.toBeInTheDocument() + }) + + it('should not render status indicators when src is object with installFailed=true', () => { + render() + + // Status indicators should not render for object src + expect(screen.queryByTestId('ri-close-line')).not.toBeInTheDocument() + }) + + it('should render object src with all size variants', () => { + const sizes: Array<'xs' | 'tiny' | 'small' | 'medium' | 'large'> = ['xs', 'tiny', 'small', 'medium', 'large'] + + sizes.forEach((size) => { + const { unmount } = render() + expect(screen.getByTestId('app-icon')).toHaveAttribute('data-size', size) + unmount() + }) + }) + + it('should render object src with custom className', () => { + const { container } = render( + , + ) + + expect(container.querySelector('.custom-object-icon')).toBeInTheDocument() + }) + + it('should pass correct props to AppIcon for object src', () => { + render() + + const appIcon = screen.getByTestId('app-icon') + expect(appIcon).toHaveAttribute('data-icon', '😀') + expect(appIcon).toHaveAttribute('data-background', '#123456') + 
expect(appIcon).toHaveAttribute('data-icon-type', 'emoji') + }) + + it('should render inner icon only when shouldUseMcpIcon returns true', () => { + // Test with MCP icon content + const { unmount } = render() + expect(screen.getByTestId('inner-icon')).toBeInTheDocument() + unmount() + + // Test without MCP icon content + render() + expect(screen.queryByTestId('inner-icon')).not.toBeInTheDocument() + }) }) }) diff --git a/web/app/components/plugins/plugin-page/context.spec.tsx b/web/app/components/plugins/plugin-page/context.spec.tsx new file mode 100644 index 0000000000..ea52ae1dbd --- /dev/null +++ b/web/app/components/plugins/plugin-page/context.spec.tsx @@ -0,0 +1,123 @@ +import { render, screen } from '@testing-library/react' +import { beforeEach, describe, expect, it, vi } from 'vitest' +// Import mocks +import { useGlobalPublicStore } from '@/context/global-public-context' + +import { PluginPageContext, PluginPageContextProvider, usePluginPageContext } from './context' + +// Mock dependencies +vi.mock('nuqs', () => ({ + useQueryState: vi.fn(() => ['plugins', vi.fn()]), +})) + +vi.mock('@/context/global-public-context', () => ({ + useGlobalPublicStore: vi.fn(), +})) + +vi.mock('../hooks', () => ({ + PLUGIN_PAGE_TABS_MAP: { + plugins: 'plugins', + marketplace: 'discover', + }, + usePluginPageTabs: () => [ + { value: 'plugins', text: 'Plugins' }, + { value: 'discover', text: 'Explore Marketplace' }, + ], +})) + +// Helper function to mock useGlobalPublicStore with marketplace setting +const mockGlobalPublicStore = (enableMarketplace: boolean) => { + vi.mocked(useGlobalPublicStore).mockImplementation((selector) => { + const state = { systemFeatures: { enable_marketplace: enableMarketplace } } + return selector(state as Parameters[0]) + }) +} + +// Test component that uses the context +const TestConsumer = () => { + const containerRef = usePluginPageContext(v => v.containerRef) + const options = usePluginPageContext(v => v.options) + const activeTab = 
usePluginPageContext(v => v.activeTab) + + return ( +
+ {containerRef ? 'true' : 'false'} + {options.length} + {activeTab} + {options.map((opt: { value: string, text: string }) => ( + {opt.text} + ))} +
+ ) +} + +describe('PluginPageContext', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + describe('PluginPageContextProvider', () => { + it('should provide context values to children', () => { + mockGlobalPublicStore(true) + + render( + + + , + ) + + expect(screen.getByTestId('has-container-ref')).toHaveTextContent('true') + expect(screen.getByTestId('options-count')).toHaveTextContent('2') + }) + + it('should include marketplace tab when enable_marketplace is true', () => { + mockGlobalPublicStore(true) + + render( + + + , + ) + + expect(screen.getByTestId('option-plugins')).toBeInTheDocument() + expect(screen.getByTestId('option-discover')).toBeInTheDocument() + }) + + it('should filter out marketplace tab when enable_marketplace is false', () => { + mockGlobalPublicStore(false) + + render( + + + , + ) + + expect(screen.getByTestId('option-plugins')).toBeInTheDocument() + expect(screen.queryByTestId('option-discover')).not.toBeInTheDocument() + expect(screen.getByTestId('options-count')).toHaveTextContent('1') + }) + }) + + describe('usePluginPageContext', () => { + it('should select specific context values', () => { + mockGlobalPublicStore(true) + + render( + + + , + ) + + // activeTab should be 'plugins' from the mock + expect(screen.getByTestId('active-tab')).toHaveTextContent('plugins') + }) + }) + + describe('Default Context Values', () => { + it('should have empty options by default from context', () => { + // Test that the context has proper default values by checking the exported constant + // The PluginPageContext is created with default values including empty options array + expect(PluginPageContext).toBeDefined() + }) + }) +}) diff --git a/web/app/components/plugins/plugin-page/index.spec.tsx b/web/app/components/plugins/plugin-page/index.spec.tsx new file mode 100644 index 0000000000..a3ea7f7125 --- /dev/null +++ b/web/app/components/plugins/plugin-page/index.spec.tsx @@ -0,0 +1,1041 @@ +import type { PluginPageProps } from './index' +import 
{ act, fireEvent, render, screen, waitFor } from '@testing-library/react' +import { useQueryState } from 'nuqs' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +import { usePluginInstallation } from '@/hooks/use-query-params' +// Import mocked modules for assertions +import { fetchBundleInfoFromMarketPlace, fetchManifestFromMarketPlace } from '@/service/plugins' +import PluginPageWithContext from './index' + +// Mock external dependencies +vi.mock('@/service/plugins', () => ({ + fetchManifestFromMarketPlace: vi.fn(), + fetchBundleInfoFromMarketPlace: vi.fn(), +})) + +vi.mock('@/hooks/use-query-params', () => ({ + usePluginInstallation: vi.fn(() => [{ packageId: null, bundleInfo: null }, vi.fn()]), +})) + +vi.mock('@/hooks/use-document-title', () => ({ + default: vi.fn(), +})) + +vi.mock('@/context/i18n', () => ({ + useLocale: () => 'en-US', +})) + +vi.mock('@/context/global-public-context', () => ({ + useGlobalPublicStore: vi.fn((selector) => { + const state = { + systemFeatures: { + enable_marketplace: true, + }, + } + return selector(state) + }), +})) + +vi.mock('@/context/app-context', () => ({ + useAppContext: () => ({ + isCurrentWorkspaceManager: true, + isCurrentWorkspaceOwner: false, + }), +})) + +vi.mock('@/service/use-plugins', () => ({ + useReferenceSettings: () => ({ + data: { + permission: { + install_permission: 'everyone', + debug_permission: 'admins', + }, + }, + }), + useMutationReferenceSettings: () => ({ + mutate: vi.fn(), + isPending: false, + }), + useInvalidateReferenceSettings: () => vi.fn(), + usePluginTaskList: () => ({ + pluginTasks: [], + handleRefetch: vi.fn(), + }), + useMutationClearTaskPlugin: () => ({ + mutateAsync: vi.fn(), + }), + useInstalledPluginList: () => ({ + data: [], + isLoading: false, + isFetching: false, + isLastPage: true, + loadNextPage: vi.fn(), + }), + useInstalledLatestVersion: () => ({ + data: {}, + }), + useInvalidateInstalledPluginList: () => vi.fn(), +})) + +vi.mock('nuqs', () => ({ + 
useQueryState: vi.fn(() => ['plugins', vi.fn()]), +})) + +vi.mock('./plugin-tasks', () => ({ + default: () =>
PluginTasks
, +})) + +vi.mock('./debug-info', () => ({ + default: () =>
DebugInfo
, +})) + +vi.mock('./install-plugin-dropdown', () => ({ + default: ({ onSwitchToMarketplaceTab }: { onSwitchToMarketplaceTab: () => void }) => ( + + ), +})) + +vi.mock('../install-plugin/install-from-local-package', () => ({ + default: ({ onClose }: { onClose: () => void }) => ( +
+ +
+ ), +})) + +vi.mock('../install-plugin/install-from-marketplace', () => ({ + default: ({ onClose }: { onClose: () => void }) => ( +
+ +
+ ), +})) + +vi.mock('@/app/components/plugins/reference-setting-modal', () => ({ + default: ({ onHide }: { onHide: () => void }) => ( +
+ +
+ ), +})) + +// Helper to create default props +const createDefaultProps = (): PluginPageProps => ({ + plugins:
Plugins Content
, + marketplace:
Marketplace Content
, +}) + +// ============================================================================ +// PluginPage Component Tests +// ============================================================================ +describe('PluginPage Component', () => { + beforeEach(() => { + vi.clearAllMocks() + // Reset to default mock values + vi.mocked(usePluginInstallation).mockReturnValue([ + { packageId: null, bundleInfo: null }, + vi.fn(), + ]) + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + }) + + // ============================================================================ + // Rendering Tests + // ============================================================================ + describe('Rendering', () => { + it('should render without crashing', () => { + render() + expect(document.getElementById('marketplace-container')).toBeInTheDocument() + }) + + it('should render with correct container id', () => { + render() + const container = document.getElementById('marketplace-container') + expect(container).toBeInTheDocument() + }) + + it('should render PluginTasks component', () => { + render() + expect(screen.getByTestId('plugin-tasks')).toBeInTheDocument() + }) + + it('should render plugins content when on plugins tab', () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + expect(screen.getByTestId('plugins-content')).toBeInTheDocument() + }) + + it('should render marketplace content when on marketplace tab', () => { + vi.mocked(useQueryState).mockReturnValue(['discover', vi.fn()]) + + render() + // The marketplace content should be visible when enable_marketplace is true and on discover tab + const container = document.getElementById('marketplace-container') + expect(container).toBeInTheDocument() + // Check that marketplace-specific links are shown + expect(screen.getByText(/requestAPlugin/i)).toBeInTheDocument() + }) + + it('should render TabSlider', () => { + render() + // TabSlider renders tab options + 
expect(document.querySelector('.flex-1')).toBeInTheDocument() + }) + + it('should render drag and drop hint when on plugins tab', () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + expect(screen.getByText(/dropPluginToInstall/i)).toBeInTheDocument() + }) + + it('should render file input for plugin upload', () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + const fileInput = document.getElementById('fileUploader') + expect(fileInput).toBeInTheDocument() + expect(fileInput).toHaveAttribute('type', 'file') + }) + }) + + // ============================================================================ + // Tab Navigation Tests + // ============================================================================ + describe('Tab Navigation', () => { + it('should display plugins tab as active by default', () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + expect(screen.getByTestId('plugins-content')).toBeInTheDocument() + }) + + it('should show marketplace links when on marketplace tab', () => { + vi.mocked(useQueryState).mockReturnValue(['discover', vi.fn()]) + + render() + // Check for marketplace-specific buttons + expect(screen.getByText(/requestAPlugin/i)).toBeInTheDocument() + expect(screen.getByText(/publishPlugins/i)).toBeInTheDocument() + }) + + it('should not show marketplace links when on plugins tab', () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + expect(screen.queryByText(/requestAPlugin/i)).not.toBeInTheDocument() + }) + }) + + // ============================================================================ + // Permission-based Rendering Tests + // ============================================================================ + describe('Permission-based Rendering', () => { + it('should render InstallPluginDropdown when canManagement is true', () => { + render() + 
expect(screen.getByTestId('install-dropdown')).toBeInTheDocument() + }) + + it('should render DebugInfo when canDebugger is true', () => { + render() + expect(screen.getByTestId('debug-info')).toBeInTheDocument() + }) + + it('should render settings button when canSetPermissions is true', () => { + render() + // Settings button with RiEqualizer2Line icon + const settingsButtons = document.querySelectorAll('button') + expect(settingsButtons.length).toBeGreaterThan(0) + }) + + it('should call setActiveTab when onSwitchToMarketplaceTab is called', async () => { + const mockSetActiveTab = vi.fn() + vi.mocked(useQueryState).mockReturnValue(['plugins', mockSetActiveTab]) + + render() + + // Click the install dropdown button which triggers onSwitchToMarketplaceTab + fireEvent.click(screen.getByTestId('install-dropdown')) + + // The mock onSwitchToMarketplaceTab calls setActiveTab('discover') + // Since our mock InstallPluginDropdown calls onSwitchToMarketplaceTab on click + // we verify that setActiveTab was called with 'discover'. 
+ expect(mockSetActiveTab).toHaveBeenCalledWith('discover') + }) + + it('should use noop for file handlers when canManagement is false', () => { + // Override mock to disable management permission + vi.doMock('@/service/use-plugins', () => ({ + useReferenceSettings: () => ({ + data: { + permission: { + install_permission: 'noone', + debug_permission: 'noone', + }, + }, + }), + useMutationReferenceSettings: () => ({ + mutate: vi.fn(), + isPending: false, + }), + useInvalidateReferenceSettings: () => vi.fn(), + usePluginTaskList: () => ({ + pluginTasks: [], + handleRefetch: vi.fn(), + }), + useMutationClearTaskPlugin: () => ({ + mutateAsync: vi.fn(), + }), + useInstalledPluginList: () => ({ + data: [], + isLoading: false, + isFetching: false, + isLastPage: true, + loadNextPage: vi.fn(), + }), + useInstalledLatestVersion: () => ({ + data: {}, + }), + useInvalidateInstalledPluginList: () => vi.fn(), + })) + + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + + // File input should still be in the document (even if handlers are noop) + const fileInput = document.getElementById('fileUploader') + expect(fileInput).toBeInTheDocument() + }) + }) + + // ============================================================================ + // File Upload Tests + // ============================================================================ + describe('File Upload', () => { + it('should have hidden file input', () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + const fileInput = document.getElementById('fileUploader') as HTMLInputElement + expect(fileInput).toHaveClass('hidden') + }) + + it('should accept .difypkg files', () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + const fileInput = document.getElementById('fileUploader') as HTMLInputElement + expect(fileInput.accept).toContain('.difypkg') + }) + + it('should show InstallFromLocalPackage modal when valid file is selected', 
async () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + const fileInput = document.getElementById('fileUploader') as HTMLInputElement + + const file = new File(['content'], 'plugin.difypkg', { type: 'application/octet-stream' }) + Object.defineProperty(fileInput, 'files', { + value: [file], + }) + + fireEvent.change(fileInput) + + await waitFor(() => { + expect(screen.getByTestId('install-local-modal')).toBeInTheDocument() + }) + }) + + it('should not show modal for non-.difypkg files', async () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + const fileInput = document.getElementById('fileUploader') as HTMLInputElement + + const file = new File(['content'], 'plugin.txt', { type: 'text/plain' }) + Object.defineProperty(fileInput, 'files', { + value: [file], + }) + + fireEvent.change(fileInput) + + await waitFor(() => { + expect(screen.queryByTestId('install-local-modal')).not.toBeInTheDocument() + }) + }) + }) + + // ============================================================================ + // Marketplace Installation Tests + // ============================================================================ + describe('Marketplace Installation', () => { + it('should fetch manifest when packageId is provided', async () => { + const mockSetInstallState = vi.fn() + vi.mocked(usePluginInstallation).mockReturnValue([ + { packageId: 'test-package-id', bundleInfo: null }, + mockSetInstallState, + ]) + + vi.mocked(fetchManifestFromMarketPlace).mockResolvedValue({ + data: { + plugin: { org: 'test-org', name: 'test-plugin', category: 'tool' }, + version: { version: '1.0.0' }, + }, + } as Awaited>) + + render() + + await waitFor(() => { + expect(fetchManifestFromMarketPlace).toHaveBeenCalledWith('test-package-id') + }) + }) + + it('should fetch bundle info when bundleInfo is provided', async () => { + const mockSetInstallState = vi.fn() + vi.mocked(usePluginInstallation).mockReturnValue([ + { 
packageId: null, bundleInfo: 'test-bundle-info' as unknown }, + mockSetInstallState, + ] as ReturnType) + + vi.mocked(fetchBundleInfoFromMarketPlace).mockResolvedValue({ + data: { version: { dependencies: [] } }, + } as unknown as Awaited>) + + render() + + await waitFor(() => { + expect(fetchBundleInfoFromMarketPlace).toHaveBeenCalledWith('test-bundle-info') + }) + }) + + it('should show InstallFromMarketplace modal after fetching manifest', async () => { + const mockSetInstallState = vi.fn() + vi.mocked(usePluginInstallation).mockReturnValue([ + { packageId: 'test-package-id', bundleInfo: null }, + mockSetInstallState, + ]) + + vi.mocked(fetchManifestFromMarketPlace).mockResolvedValue({ + data: { + plugin: { org: 'test-org', name: 'test-plugin', category: 'tool' }, + version: { version: '1.0.0' }, + }, + } as Awaited>) + + render() + + await waitFor(() => { + expect(screen.getByTestId('install-marketplace-modal')).toBeInTheDocument() + }, { timeout: 3000 }) + }) + + it('should handle fetch error gracefully', async () => { + const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) + + vi.mocked(usePluginInstallation).mockReturnValue([ + { packageId: null, bundleInfo: 'invalid-bundle' as unknown }, + vi.fn(), + ] as ReturnType) + + vi.mocked(fetchBundleInfoFromMarketPlace).mockRejectedValue(new Error('Network error')) + + render() + + await waitFor(() => { + expect(consoleSpy).toHaveBeenCalledWith('Failed to load bundle info:', expect.any(Error)) + }) + + consoleSpy.mockRestore() + }) + }) + + // ============================================================================ + // Settings Modal Tests + // ============================================================================ + describe('Settings Modal', () => { + it('should open settings modal when settings button is clicked', async () => { + render() + + fireEvent.click(screen.getByTestId('plugin-settings-button')) + + await waitFor(() => { + 
expect(screen.getByTestId('reference-setting-modal')).toBeInTheDocument() + }) + }) + + it('should close settings modal when onHide is called', async () => { + render() + + // Open modal + fireEvent.click(screen.getByTestId('plugin-settings-button')) + + await waitFor(() => { + expect(screen.getByTestId('reference-setting-modal')).toBeInTheDocument() + }) + + // Close modal + fireEvent.click(screen.getByText('Close Settings')) + + await waitFor(() => { + expect(screen.queryByTestId('reference-setting-modal')).not.toBeInTheDocument() + }) + }) + }) + + // ============================================================================ + // Drag and Drop Tests + // ============================================================================ + describe('Drag and Drop', () => { + it('should show dragging overlay when dragging files over container', () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + const container = document.getElementById('marketplace-container')! 
+ + // Simulate drag enter + const dragEnterEvent = new Event('dragenter', { bubbles: true }) + Object.defineProperty(dragEnterEvent, 'dataTransfer', { + value: { types: ['Files'] }, + }) + container.dispatchEvent(dragEnterEvent) + + // Check for dragging overlay styles + expect(container).toBeInTheDocument() + }) + + it('should highlight drop zone text when dragging', () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + + // The drag hint should be visible + const dragHint = screen.getByText(/dropPluginToInstall/i) + expect(dragHint).toBeInTheDocument() + }) + }) + + // ============================================================================ + // Memoization Tests + // ============================================================================ + describe('Memoization', () => { + it('should memoize isPluginsTab correctly', () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + const { rerender } = render() + + // Should show plugins content + expect(screen.getByTestId('plugins-content')).toBeInTheDocument() + + // Rerender with same props - memoized value should be same + rerender() + expect(screen.getByTestId('plugins-content')).toBeInTheDocument() + }) + + it('should memoize isExploringMarketplace correctly', () => { + vi.mocked(useQueryState).mockReturnValue(['discover', vi.fn()]) + + const { rerender } = render() + + // Should show marketplace links when on discover tab + expect(screen.getByText(/requestAPlugin/i)).toBeInTheDocument() + + // Rerender with same props + rerender() + expect(screen.getByText(/requestAPlugin/i)).toBeInTheDocument() + }) + + it('should recognize plugin type tabs as marketplace', () => { + // Test with a plugin type tab like 'tool' + vi.mocked(useQueryState).mockReturnValue(['tool', vi.fn()]) + + render() + + // Should show marketplace links when on a plugin type tab + expect(screen.getByText(/requestAPlugin/i)).toBeInTheDocument() + 
expect(screen.getByText(/publishPlugins/i)).toBeInTheDocument() + }) + + it('should render marketplace content when isExploringMarketplace and enable_marketplace are true', () => { + vi.mocked(useQueryState).mockReturnValue(['discover', vi.fn()]) + + render() + + // The marketplace prop content should be rendered + // Since we mock the marketplace as a div, check it's not hidden + const container = document.getElementById('marketplace-container') + expect(container).toBeInTheDocument() + expect(container).toHaveClass('bg-background-body') + }) + }) + + // ============================================================================ + // Context Provider Tests + // ============================================================================ + describe('Context Provider', () => { + it('should wrap component with PluginPageContextProvider', () => { + render() + + // The component should render, indicating context is working + expect(document.getElementById('marketplace-container')).toBeInTheDocument() + }) + + it('should filter out marketplace tab when enable_marketplace is false', () => { + // This tests line 69 in context.tsx - the false branch of enable_marketplace + // The marketplace tab should be filtered out from options + render() + // Component should still work without marketplace + expect(document.getElementById('marketplace-container')).toBeInTheDocument() + }) + }) + + // ============================================================================ + // Edge Cases and Error Handling + // ============================================================================ + describe('Edge Cases', () => { + it('should handle null plugins prop', () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + expect(document.getElementById('marketplace-container')).toBeInTheDocument() + }) + + it('should handle empty marketplace prop', () => { + vi.mocked(useQueryState).mockReturnValue(['discover', vi.fn()]) + + render() + 
expect(document.getElementById('marketplace-container')).toBeInTheDocument() + }) + + it('should handle rapid tab switches', async () => { + const mockSetActiveTab = vi.fn() + vi.mocked(useQueryState).mockReturnValue(['plugins', mockSetActiveTab]) + + render() + + // Simulate rapid switches by updating state + act(() => { + vi.mocked(useQueryState).mockReturnValue(['discover', mockSetActiveTab]) + }) + + expect(document.getElementById('marketplace-container')).toBeInTheDocument() + }) + + it('should handle marketplace disabled', () => { + // Mock marketplace disabled + vi.mock('@/context/global-public-context', async () => ({ + useGlobalPublicStore: vi.fn((selector) => { + const state = { + systemFeatures: { + enable_marketplace: false, + }, + } + return selector(state) + }), + })) + + vi.mocked(useQueryState).mockReturnValue(['discover', vi.fn()]) + + render() + + // Component should still render but without marketplace content when disabled + expect(document.getElementById('marketplace-container')).toBeInTheDocument() + }) + + it('should handle file with empty name', async () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + const fileInput = document.getElementById('fileUploader') as HTMLInputElement + + const file = new File(['content'], '', { type: 'application/octet-stream' }) + Object.defineProperty(fileInput, 'files', { + value: [file], + }) + + fireEvent.change(fileInput) + + // Should not show modal for file without proper extension + await waitFor(() => { + expect(screen.queryByTestId('install-local-modal')).not.toBeInTheDocument() + }) + }) + + it('should handle no files selected', async () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + const fileInput = document.getElementById('fileUploader') as HTMLInputElement + + Object.defineProperty(fileInput, 'files', { + value: [], + }) + + fireEvent.change(fileInput) + + // Should not show modal + 
expect(screen.queryByTestId('install-local-modal')).not.toBeInTheDocument() + }) + }) + + // ============================================================================ + // Cleanup Tests + // ============================================================================ + describe('Cleanup', () => { + it('should reset install state when hiding marketplace modal', async () => { + const mockSetInstallState = vi.fn() + vi.mocked(usePluginInstallation).mockReturnValue([ + { packageId: 'test-package', bundleInfo: null }, + mockSetInstallState, + ]) + + vi.mocked(fetchManifestFromMarketPlace).mockResolvedValue({ + data: { + plugin: { org: 'test-org', name: 'test-plugin', category: 'tool' }, + version: { version: '1.0.0' }, + }, + } as Awaited>) + + render() + + // Wait for modal to appear + await waitFor(() => { + expect(screen.getByTestId('install-marketplace-modal')).toBeInTheDocument() + }, { timeout: 3000 }) + + // Close modal + fireEvent.click(screen.getByText('Close')) + + await waitFor(() => { + expect(mockSetInstallState).toHaveBeenCalledWith(null) + }) + }) + }) + + // ============================================================================ + // Styling Tests + // ============================================================================ + describe('Styling', () => { + it('should apply correct background for plugins tab', () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + const container = document.getElementById('marketplace-container') + + expect(container).toHaveClass('bg-components-panel-bg') + }) + + it('should apply correct background for marketplace tab', () => { + vi.mocked(useQueryState).mockReturnValue(['discover', vi.fn()]) + + render() + const container = document.getElementById('marketplace-container') + + expect(container).toHaveClass('bg-background-body') + }) + + it('should have scrollbar-gutter stable style', () => { + render() + const container = document.getElementById('marketplace-container') + + 
expect(container).toHaveStyle({ scrollbarGutter: 'stable' }) + }) + }) +}) + +// ============================================================================ +// Uploader Hook Integration Tests +// ============================================================================ +describe('Uploader Hook Integration', () => { + beforeEach(() => { + vi.clearAllMocks() + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + }) + + describe('Drag Events', () => { + it('should handle dragover event', async () => { + render() + const container = document.getElementById('marketplace-container')! + + const dragOverEvent = new Event('dragover', { bubbles: true, cancelable: true }) + Object.defineProperty(dragOverEvent, 'dataTransfer', { + value: { types: ['Files'] }, + }) + + act(() => { + container.dispatchEvent(dragOverEvent) + }) + + expect(container).toBeInTheDocument() + }) + + it('should handle dragleave event when leaving container', async () => { + render() + const container = document.getElementById('marketplace-container')! + + const dragEnterEvent = new Event('dragenter', { bubbles: true }) + Object.defineProperty(dragEnterEvent, 'dataTransfer', { + value: { types: ['Files'] }, + }) + act(() => { + container.dispatchEvent(dragEnterEvent) + }) + + const dragLeaveEvent = new Event('dragleave', { bubbles: true }) + Object.defineProperty(dragLeaveEvent, 'relatedTarget', { + value: null, + }) + act(() => { + container.dispatchEvent(dragLeaveEvent) + }) + + expect(container).toBeInTheDocument() + }) + + it('should handle dragleave event when moving to element outside container', async () => { + render() + const container = document.getElementById('marketplace-container')! 
+ + const dragEnterEvent = new Event('dragenter', { bubbles: true }) + Object.defineProperty(dragEnterEvent, 'dataTransfer', { + value: { types: ['Files'] }, + }) + act(() => { + container.dispatchEvent(dragEnterEvent) + }) + + const outsideElement = document.createElement('div') + document.body.appendChild(outsideElement) + + const dragLeaveEvent = new Event('dragleave', { bubbles: true }) + Object.defineProperty(dragLeaveEvent, 'relatedTarget', { + value: outsideElement, + }) + act(() => { + container.dispatchEvent(dragLeaveEvent) + }) + + expect(container).toBeInTheDocument() + document.body.removeChild(outsideElement) + }) + + it('should handle drop event with files', async () => { + render() + const container = document.getElementById('marketplace-container')! + + const dragEnterEvent = new Event('dragenter', { bubbles: true }) + Object.defineProperty(dragEnterEvent, 'dataTransfer', { + value: { types: ['Files'] }, + }) + act(() => { + container.dispatchEvent(dragEnterEvent) + }) + + const file = new File(['content'], 'test-plugin.difypkg', { type: 'application/octet-stream' }) + const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) + Object.defineProperty(dropEvent, 'dataTransfer', { + value: { files: [file] }, + }) + + act(() => { + container.dispatchEvent(dropEvent) + }) + + await waitFor(() => { + expect(screen.getByTestId('install-local-modal')).toBeInTheDocument() + }) + }) + + it('should handle drop event without dataTransfer', async () => { + render() + const container = document.getElementById('marketplace-container')! + + const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) + + act(() => { + container.dispatchEvent(dropEvent) + }) + + expect(screen.queryByTestId('install-local-modal')).not.toBeInTheDocument() + }) + + it('should handle drop event with empty files array', async () => { + render() + const container = document.getElementById('marketplace-container')! 
+ + const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) + Object.defineProperty(dropEvent, 'dataTransfer', { + value: { files: [] }, + }) + + act(() => { + container.dispatchEvent(dropEvent) + }) + + expect(screen.queryByTestId('install-local-modal')).not.toBeInTheDocument() + }) + }) + + describe('File Change Handler', () => { + it('should handle file change with null file', async () => { + render() + const fileInput = document.getElementById('fileUploader') as HTMLInputElement + + Object.defineProperty(fileInput, 'files', { value: null }) + + fireEvent.change(fileInput) + + expect(screen.queryByTestId('install-local-modal')).not.toBeInTheDocument() + }) + }) + + describe('Remove File', () => { + it('should clear file input when removeFile is called', async () => { + render() + const fileInput = document.getElementById('fileUploader') as HTMLInputElement + + const file = new File(['content'], 'plugin.difypkg', { type: 'application/octet-stream' }) + Object.defineProperty(fileInput, 'files', { value: [file] }) + fireEvent.change(fileInput) + + await waitFor(() => { + expect(screen.getByTestId('install-local-modal')).toBeInTheDocument() + }) + + fireEvent.click(screen.getByText('Close')) + + await waitFor(() => { + expect(screen.queryByTestId('install-local-modal')).not.toBeInTheDocument() + }) + }) + }) +}) + +// ============================================================================ +// Reference Setting Hook Integration Tests +// ============================================================================ +describe('Reference Setting Hook Integration', () => { + describe('Permission Handling', () => { + it('should render InstallPluginDropdown when permission is everyone', () => { + render() + expect(screen.getByTestId('install-dropdown')).toBeInTheDocument() + }) + + it('should render DebugInfo when permission is admins and user is manager', () => { + render() + expect(screen.getByTestId('debug-info')).toBeInTheDocument() + }) + }) +}) + 
+// ============================================================================ +// Marketplace Installation Permission Tests +// ============================================================================ +describe('Marketplace Installation Permission', () => { + it('should show InstallPluginDropdown when marketplace is enabled and has permission', () => { + render() + expect(screen.getByTestId('install-dropdown')).toBeInTheDocument() + }) +}) + +// ============================================================================ +// Integration Tests +// ============================================================================ +describe('PluginPage Integration', () => { + beforeEach(() => { + vi.clearAllMocks() + vi.mocked(usePluginInstallation).mockReturnValue([ + { packageId: null, bundleInfo: null }, + vi.fn(), + ]) + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + }) + + it('should render complete plugin page with all features', () => { + render() + + // Check all major elements are present + expect(document.getElementById('marketplace-container')).toBeInTheDocument() + expect(screen.getByTestId('plugin-tasks')).toBeInTheDocument() + expect(screen.getByTestId('install-dropdown')).toBeInTheDocument() + expect(screen.getByTestId('debug-info')).toBeInTheDocument() + expect(screen.getByTestId('plugins-content')).toBeInTheDocument() + }) + + it('should handle full install from marketplace flow', async () => { + const mockSetInstallState = vi.fn() + vi.mocked(usePluginInstallation).mockReturnValue([ + { packageId: 'test-package', bundleInfo: null }, + mockSetInstallState, + ]) + + vi.mocked(fetchManifestFromMarketPlace).mockResolvedValue({ + data: { + plugin: { org: 'langgenius', name: 'test-plugin', category: 'tool' }, + version: { version: '1.0.0' }, + }, + } as Awaited>) + + render() + + // Wait for API call + await waitFor(() => { + expect(fetchManifestFromMarketPlace).toHaveBeenCalled() + }) + + // Wait for modal + await waitFor(() => { + 
expect(screen.getByTestId('install-marketplace-modal')).toBeInTheDocument() + }, { timeout: 3000 }) + + // Close modal + fireEvent.click(screen.getByText('Close')) + + // Verify state reset + await waitFor(() => { + expect(mockSetInstallState).toHaveBeenCalledWith(null) + }) + }) + + it('should handle full local plugin install flow', async () => { + vi.mocked(useQueryState).mockReturnValue(['plugins', vi.fn()]) + + render() + + const fileInput = document.getElementById('fileUploader') as HTMLInputElement + const file = new File(['plugin content'], 'my-plugin.difypkg', { + type: 'application/octet-stream', + }) + + Object.defineProperty(fileInput, 'files', { value: [file] }) + fireEvent.change(fileInput) + + await waitFor(() => { + expect(screen.getByTestId('install-local-modal')).toBeInTheDocument() + }) + + // Close modal (triggers removeFile via onClose) + fireEvent.click(screen.getByText('Close')) + + await waitFor(() => { + expect(screen.queryByTestId('install-local-modal')).not.toBeInTheDocument() + }) + }) + + it('should render marketplace content only when enable_marketplace is true', () => { + vi.mocked(useQueryState).mockReturnValue(['discover', vi.fn()]) + + const { rerender } = render() + + // With enable_marketplace: true (default mock), marketplace links should show + expect(screen.getByText(/requestAPlugin/i)).toBeInTheDocument() + + // Rerender to verify consistent behavior + rerender() + expect(screen.getByText(/publishPlugins/i)).toBeInTheDocument() + }) +}) diff --git a/web/app/components/plugins/plugin-page/index.tsx b/web/app/components/plugins/plugin-page/index.tsx index 1f88f691ef..d852e4d0b8 100644 --- a/web/app/components/plugins/plugin-page/index.tsx +++ b/web/app/components/plugins/plugin-page/index.tsx @@ -207,6 +207,7 @@ const PluginPage = ({ popupContent={t('privilege.title', { ns: 'plugin' })} > + )} + /> + )} + + {/* Error Plugins Section */} + {errorPlugins.length > 0 && ( + + } + defaultStatusText={t('task.installError', { ns: 
'plugin', errorLength: errorPlugins.length })} + statusClassName="text-text-destructive break-all" + headerAction={( + + )} + renderItemAction={plugin => ( + + )} + /> + )} +
+ ) +} + +export default PluginTaskList diff --git a/web/app/components/plugins/plugin-page/plugin-tasks/components/task-status-indicator.tsx b/web/app/components/plugins/plugin-page/plugin-tasks/components/task-status-indicator.tsx new file mode 100644 index 0000000000..084c8f90f9 --- /dev/null +++ b/web/app/components/plugins/plugin-page/plugin-tasks/components/task-status-indicator.tsx @@ -0,0 +1,96 @@ +import type { FC } from 'react' +import { + RiCheckboxCircleFill, + RiErrorWarningFill, + RiInstallLine, +} from '@remixicon/react' +import ProgressCircle from '@/app/components/base/progress-bar/progress-circle' +import Tooltip from '@/app/components/base/tooltip' +import DownloadingIcon from '@/app/components/header/plugins-nav/downloading-icon' +import { cn } from '@/utils/classnames' + +export type TaskStatusIndicatorProps = { + tip: string + isInstalling: boolean + isInstallingWithSuccess: boolean + isInstallingWithError: boolean + isSuccess: boolean + isFailed: boolean + successPluginsLength: number + runningPluginsLength: number + totalPluginsLength: number + onClick: () => void +} + +const TaskStatusIndicator: FC = ({ + tip, + isInstalling, + isInstallingWithSuccess, + isInstallingWithError, + isSuccess, + isFailed, + successPluginsLength, + runningPluginsLength, + totalPluginsLength, + onClick, +}) => { + const showDownloadingIcon = isInstalling || isInstallingWithError + const showErrorStyle = isInstallingWithError || isFailed + const showSuccessIcon = isSuccess || (successPluginsLength > 0 && runningPluginsLength === 0) + + return ( + +
+ {/* Main Icon */} + {showDownloadingIcon + ? + : ( + + )} + + {/* Status Indicator Badge */} +
+ {(isInstalling || isInstallingWithSuccess) && ( + 0 ? successPluginsLength / totalPluginsLength : 0) * 100} + circleFillColor="fill-components-progress-brand-bg" + /> + )} + {isInstallingWithError && ( + 0 ? runningPluginsLength / totalPluginsLength : 0) * 100} + circleFillColor="fill-components-progress-brand-bg" + sectorFillColor="fill-components-progress-error-border" + circleStrokeColor="stroke-components-progress-error-border" + /> + )} + {showSuccessIcon && !isInstalling && !isInstallingWithSuccess && !isInstallingWithError && ( + + )} + {isFailed && ( + + )} +
+
+
+ ) +} + +export default TaskStatusIndicator diff --git a/web/app/components/plugins/plugin-page/plugin-tasks/index.spec.tsx b/web/app/components/plugins/plugin-page/plugin-tasks/index.spec.tsx new file mode 100644 index 0000000000..32892cbe28 --- /dev/null +++ b/web/app/components/plugins/plugin-page/plugin-tasks/index.spec.tsx @@ -0,0 +1,856 @@ +import type { PluginStatus } from '@/app/components/plugins/types' +import { fireEvent, render, screen, waitFor } from '@testing-library/react' +import { beforeEach, describe, expect, it, vi } from 'vitest' +import { TaskStatus } from '@/app/components/plugins/types' +// Import mocked modules +import { useMutationClearTaskPlugin, usePluginTaskList } from '@/service/use-plugins' +import PluginTaskList from './components/plugin-task-list' +import TaskStatusIndicator from './components/task-status-indicator' +import { usePluginTaskStatus } from './hooks' + +import PluginTasks from './index' + +// Mock external dependencies +vi.mock('@/service/use-plugins', () => ({ + usePluginTaskList: vi.fn(), + useMutationClearTaskPlugin: vi.fn(), +})) + +vi.mock('@/app/components/plugins/install-plugin/base/use-get-icon', () => ({ + default: () => ({ + getIconUrl: (icon: string) => `https://example.com/${icon}`, + }), +})) + +vi.mock('@/context/i18n', () => ({ + useGetLanguage: () => 'en_US', +})) + +// Helper to create mock plugin +const createMockPlugin = (overrides: Partial = {}): PluginStatus => ({ + plugin_unique_identifier: `plugin-${Math.random().toString(36).substr(2, 9)}`, + plugin_id: 'test-plugin', + status: TaskStatus.running, + message: '', + icon: 'test-icon.png', + labels: { + en_US: 'Test Plugin', + zh_Hans: '测试插件', + } as Record, + taskId: 'task-1', + ...overrides, +}) + +// Helper to setup mock hook returns +const setupMocks = (plugins: PluginStatus[] = []) => { + const mockMutateAsync = vi.fn().mockResolvedValue({}) + const mockHandleRefetch = vi.fn() + + vi.mocked(usePluginTaskList).mockReturnValue({ + pluginTasks: 
plugins.length > 0 + ? [{ id: 'task-1', plugins, created_at: '', updated_at: '', status: 'running', total_plugins: plugins.length, completed_plugins: 0 }] + : [], + handleRefetch: mockHandleRefetch, + } as any) + + vi.mocked(useMutationClearTaskPlugin).mockReturnValue({ + mutateAsync: mockMutateAsync, + } as any) + + return { mockMutateAsync, mockHandleRefetch } +} + +// ============================================================================ +// usePluginTaskStatus Hook Tests +// ============================================================================ +describe('usePluginTaskStatus Hook', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + describe('Plugin categorization', () => { + it('should categorize running plugins correctly', () => { + const runningPlugin = createMockPlugin({ status: TaskStatus.running }) + setupMocks([runningPlugin]) + + const TestComponent = () => { + const { runningPlugins, runningPluginsLength } = usePluginTaskStatus() + return ( +
+ {runningPluginsLength} + {runningPlugins[0]?.plugin_unique_identifier} +
+ ) + } + + render() + + expect(screen.getByTestId('running-count')).toHaveTextContent('1') + expect(screen.getByTestId('running-id')).toHaveTextContent(runningPlugin.plugin_unique_identifier) + }) + + it('should categorize success plugins correctly', () => { + const successPlugin = createMockPlugin({ status: TaskStatus.success }) + setupMocks([successPlugin]) + + const TestComponent = () => { + const { successPlugins, successPluginsLength } = usePluginTaskStatus() + return ( +
+ {successPluginsLength} + {successPlugins[0]?.plugin_unique_identifier} +
+ ) + } + + render() + + expect(screen.getByTestId('success-count')).toHaveTextContent('1') + expect(screen.getByTestId('success-id')).toHaveTextContent(successPlugin.plugin_unique_identifier) + }) + + it('should categorize error plugins correctly', () => { + const errorPlugin = createMockPlugin({ status: TaskStatus.failed, message: 'Install failed' }) + setupMocks([errorPlugin]) + + const TestComponent = () => { + const { errorPlugins, errorPluginsLength } = usePluginTaskStatus() + return ( +
+ {errorPluginsLength} + {errorPlugins[0]?.plugin_unique_identifier} +
+ ) + } + + render() + + expect(screen.getByTestId('error-count')).toHaveTextContent('1') + expect(screen.getByTestId('error-id')).toHaveTextContent(errorPlugin.plugin_unique_identifier) + }) + + it('should categorize mixed plugins correctly', () => { + const plugins = [ + createMockPlugin({ status: TaskStatus.running, plugin_unique_identifier: 'running-1' }), + createMockPlugin({ status: TaskStatus.success, plugin_unique_identifier: 'success-1' }), + createMockPlugin({ status: TaskStatus.failed, plugin_unique_identifier: 'error-1' }), + ] + setupMocks(plugins) + + const TestComponent = () => { + const { runningPluginsLength, successPluginsLength, errorPluginsLength, totalPluginsLength } = usePluginTaskStatus() + return ( +
+ {runningPluginsLength} + {successPluginsLength} + {errorPluginsLength} + {totalPluginsLength} +
+ ) + } + + render() + + expect(screen.getByTestId('running')).toHaveTextContent('1') + expect(screen.getByTestId('success')).toHaveTextContent('1') + expect(screen.getByTestId('error')).toHaveTextContent('1') + expect(screen.getByTestId('total')).toHaveTextContent('3') + }) + }) + + describe('Status flags', () => { + it('should set isInstalling when only running plugins exist', () => { + setupMocks([createMockPlugin({ status: TaskStatus.running })]) + + const TestComponent = () => { + const { isInstalling, isInstallingWithSuccess, isInstallingWithError, isSuccess, isFailed } = usePluginTaskStatus() + return ( +
+ {String(isInstalling)} + {String(isInstallingWithSuccess)} + {String(isInstallingWithError)} + {String(isSuccess)} + {String(isFailed)} +
+ ) + } + + render() + + expect(screen.getByTestId('isInstalling')).toHaveTextContent('true') + expect(screen.getByTestId('isInstallingWithSuccess')).toHaveTextContent('false') + expect(screen.getByTestId('isInstallingWithError')).toHaveTextContent('false') + expect(screen.getByTestId('isSuccess')).toHaveTextContent('false') + expect(screen.getByTestId('isFailed')).toHaveTextContent('false') + }) + + it('should set isInstallingWithSuccess when running and success plugins exist', () => { + setupMocks([ + createMockPlugin({ status: TaskStatus.running }), + createMockPlugin({ status: TaskStatus.success }), + ]) + + const TestComponent = () => { + const { isInstallingWithSuccess } = usePluginTaskStatus() + return {String(isInstallingWithSuccess)} + } + + render() + expect(screen.getByTestId('flag')).toHaveTextContent('true') + }) + + it('should set isInstallingWithError when running and error plugins exist', () => { + setupMocks([ + createMockPlugin({ status: TaskStatus.running }), + createMockPlugin({ status: TaskStatus.failed }), + ]) + + const TestComponent = () => { + const { isInstallingWithError } = usePluginTaskStatus() + return {String(isInstallingWithError)} + } + + render() + expect(screen.getByTestId('flag')).toHaveTextContent('true') + }) + + it('should set isSuccess when all plugins succeeded', () => { + setupMocks([ + createMockPlugin({ status: TaskStatus.success }), + createMockPlugin({ status: TaskStatus.success }), + ]) + + const TestComponent = () => { + const { isSuccess } = usePluginTaskStatus() + return {String(isSuccess)} + } + + render() + expect(screen.getByTestId('flag')).toHaveTextContent('true') + }) + + it('should set isFailed when no running plugins and some failed', () => { + setupMocks([ + createMockPlugin({ status: TaskStatus.success }), + createMockPlugin({ status: TaskStatus.failed }), + ]) + + const TestComponent = () => { + const { isFailed } = usePluginTaskStatus() + return {String(isFailed)} + } + + render() + 
expect(screen.getByTestId('flag')).toHaveTextContent('true') + }) + }) + + describe('handleClearErrorPlugin', () => { + it('should call mutateAsync and handleRefetch', async () => { + const { mockMutateAsync, mockHandleRefetch } = setupMocks([ + createMockPlugin({ status: TaskStatus.failed }), + ]) + + const TestComponent = () => { + const { handleClearErrorPlugin } = usePluginTaskStatus() + return ( + + ) + } + + render() + fireEvent.click(screen.getByRole('button')) + + await waitFor(() => { + expect(mockMutateAsync).toHaveBeenCalledWith({ + taskId: 'task-1', + pluginId: 'plugin-1', + }) + expect(mockHandleRefetch).toHaveBeenCalled() + }) + }) + }) +}) + +// ============================================================================ +// TaskStatusIndicator Component Tests +// ============================================================================ +describe('TaskStatusIndicator Component', () => { + const defaultProps = { + tip: 'Test tooltip', + isInstalling: false, + isInstallingWithSuccess: false, + isInstallingWithError: false, + isSuccess: false, + isFailed: false, + successPluginsLength: 0, + runningPluginsLength: 0, + totalPluginsLength: 1, + onClick: vi.fn(), + } + + beforeEach(() => { + vi.clearAllMocks() + }) + + describe('Rendering', () => { + it('should render without crashing', () => { + render() + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should render with correct id', () => { + render() + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + }) + + describe('Icon display', () => { + it('should show downloading icon when installing', () => { + render() + // DownloadingIcon is rendered when isInstalling is true + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should show downloading icon when installing with error', () => { + render() + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should 
show install icon when not installing', () => { + render() + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + }) + + describe('Status badge', () => { + it('should show progress circle when installing', () => { + render( + , + ) + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should show progress circle when installing with success', () => { + render( + , + ) + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should show error progress circle when installing with error', () => { + render( + , + ) + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should show success icon when all completed successfully', () => { + render( + , + ) + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should show error icon when failed', () => { + render() + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + }) + + describe('Styling', () => { + it('should apply error styles when installing with error', () => { + render() + const trigger = document.getElementById('plugin-task-trigger') + expect(trigger).toHaveClass('bg-state-destructive-hover') + }) + + it('should apply error styles when failed', () => { + render() + const trigger = document.getElementById('plugin-task-trigger') + expect(trigger).toHaveClass('bg-state-destructive-hover') + }) + + it('should apply cursor-pointer when clickable', () => { + render() + const trigger = document.getElementById('plugin-task-trigger') + expect(trigger).toHaveClass('cursor-pointer') + }) + }) + + describe('User interactions', () => { + it('should call onClick when clicked', () => { + const handleClick = vi.fn() + render() + + fireEvent.click(document.getElementById('plugin-task-trigger')!) 
+ + expect(handleClick).toHaveBeenCalledTimes(1) + }) + }) +}) + +// ============================================================================ +// PluginTaskList Component Tests +// ============================================================================ +describe('PluginTaskList Component', () => { + const defaultProps = { + runningPlugins: [] as PluginStatus[], + successPlugins: [] as PluginStatus[], + errorPlugins: [] as PluginStatus[], + getIconUrl: (icon: string) => `https://example.com/${icon}`, + onClearAll: vi.fn(), + onClearErrors: vi.fn(), + onClearSingle: vi.fn(), + } + + beforeEach(() => { + vi.clearAllMocks() + }) + + describe('Rendering', () => { + it('should render without crashing with empty lists', () => { + render() + expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument() + }) + + it('should render running plugins section when plugins exist', () => { + const runningPlugins = [createMockPlugin({ status: TaskStatus.running })] + render() + + // Translation key is returned as text in tests, multiple matches expected (title + status) + expect(screen.getAllByText(/task\.installing/i).length).toBeGreaterThan(0) + // Verify section container is rendered + expect(document.querySelector('.max-h-\\[200px\\]')).toBeInTheDocument() + }) + + it('should render success plugins section when plugins exist', () => { + const successPlugins = [createMockPlugin({ status: TaskStatus.success })] + render() + + // Translation key is returned as text in tests, multiple matches expected + expect(screen.getAllByText(/task\.installed/i).length).toBeGreaterThan(0) + }) + + it('should render error plugins section when plugins exist', () => { + const errorPlugins = [createMockPlugin({ status: TaskStatus.failed, message: 'Error occurred' })] + render() + + expect(screen.getByText('Error occurred')).toBeInTheDocument() + }) + + it('should render all sections when all types exist', () => { + render( + , + ) + + // All sections should be present + 
expect(document.querySelectorAll('.max-h-\\[200px\\]').length).toBe(3) + }) + }) + + describe('User interactions', () => { + it('should call onClearAll when clear all button is clicked in success section', () => { + const handleClearAll = vi.fn() + const successPlugins = [createMockPlugin({ status: TaskStatus.success })] + + render( + , + ) + + fireEvent.click(screen.getByRole('button', { name: /task\.clearAll/i })) + + expect(handleClearAll).toHaveBeenCalledTimes(1) + }) + + it('should call onClearErrors when clear all button is clicked in error section', () => { + const handleClearErrors = vi.fn() + const errorPlugins = [createMockPlugin({ status: TaskStatus.failed })] + + render( + , + ) + + const clearButtons = screen.getAllByRole('button') + fireEvent.click(clearButtons.find(btn => btn.textContent?.includes('task.clearAll'))!) + + expect(handleClearErrors).toHaveBeenCalledTimes(1) + }) + + it('should call onClearSingle with correct args when individual clear is clicked', () => { + const handleClearSingle = vi.fn() + const errorPlugin = createMockPlugin({ + status: TaskStatus.failed, + plugin_unique_identifier: 'error-plugin-1', + taskId: 'task-123', + }) + + render( + , + ) + + // The individual clear button has the text 'operation.clear' + fireEvent.click(screen.getByRole('button', { name: /operation\.clear/i })) + + expect(handleClearSingle).toHaveBeenCalledWith('task-123', 'error-plugin-1') + }) + }) + + describe('Plugin display', () => { + it('should display plugin name from labels', () => { + const plugin = createMockPlugin({ + status: TaskStatus.running, + labels: { en_US: 'My Test Plugin' } as Record, + }) + + render() + + expect(screen.getByText('My Test Plugin')).toBeInTheDocument() + }) + + it('should display plugin message when available', () => { + const plugin = createMockPlugin({ + status: TaskStatus.success, + message: 'Successfully installed!', + }) + + render() + + expect(screen.getByText('Successfully installed!')).toBeInTheDocument() + }) + 
+ it('should display multiple plugins in each section', () => { + const runningPlugins = [ + createMockPlugin({ status: TaskStatus.running, labels: { en_US: 'Plugin A' } as Record }), + createMockPlugin({ status: TaskStatus.running, labels: { en_US: 'Plugin B' } as Record }), + ] + + render() + + expect(screen.getByText('Plugin A')).toBeInTheDocument() + expect(screen.getByText('Plugin B')).toBeInTheDocument() + // Count is rendered, verify multiple items are in list + expect(document.querySelectorAll('.hover\\:bg-state-base-hover').length).toBe(2) + }) + }) +}) + +// ============================================================================ +// PluginTasks Main Component Tests +// ============================================================================ +describe('PluginTasks Component', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + describe('Rendering', () => { + it('should return null when no plugins exist', () => { + setupMocks([]) + + const { container } = render() + + expect(container.firstChild).toBeNull() + }) + + it('should render when plugins exist', () => { + setupMocks([createMockPlugin({ status: TaskStatus.running })]) + + render() + + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + }) + + describe('Tooltip text (tip memoization)', () => { + it('should show installing tip when isInstalling', () => { + setupMocks([createMockPlugin({ status: TaskStatus.running })]) + + render() + + // The component renders with a tooltip, we verify it exists + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should show success tip when all succeeded', () => { + setupMocks([createMockPlugin({ status: TaskStatus.success })]) + + render() + + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should show error tip when some failed', () => { + setupMocks([ + createMockPlugin({ status: TaskStatus.success }), + createMockPlugin({ status: 
TaskStatus.failed }), + ]) + + render() + + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + }) + + describe('Popover interaction', () => { + it('should toggle popover when trigger is clicked and status allows', () => { + setupMocks([createMockPlugin({ status: TaskStatus.running })]) + + render() + + // Click to open + fireEvent.click(document.getElementById('plugin-task-trigger')!) + + // The popover content should be visible (PluginTaskList) + expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument() + }) + + it('should not toggle when status does not allow', () => { + // Setup with no actionable status (edge case - should not happen in practice) + setupMocks([createMockPlugin({ status: TaskStatus.running })]) + + render() + + // Component should still render + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + }) + + describe('Clear handlers', () => { + it('should clear all completed plugins when onClearAll is called', async () => { + const { mockMutateAsync } = setupMocks([ + createMockPlugin({ status: TaskStatus.success, plugin_unique_identifier: 'success-1' }), + createMockPlugin({ status: TaskStatus.failed, plugin_unique_identifier: 'error-1' }), + ]) + + render() + + // Open popover + fireEvent.click(document.getElementById('plugin-task-trigger')!) 
+ + // Wait for popover content to render + await waitFor(() => { + expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument() + }) + + // Find and click clear all button + const clearButtons = screen.getAllByRole('button') + const clearAllButton = clearButtons.find(btn => btn.textContent?.includes('clearAll')) + if (clearAllButton) + fireEvent.click(clearAllButton) + + // Verify mutateAsync was called for each completed plugin + await waitFor(() => { + expect(mockMutateAsync).toHaveBeenCalled() + }) + }) + + it('should clear only error plugins when onClearErrors is called', async () => { + const { mockMutateAsync } = setupMocks([ + createMockPlugin({ status: TaskStatus.failed, plugin_unique_identifier: 'error-1' }), + ]) + + render() + + // Open popover + fireEvent.click(document.getElementById('plugin-task-trigger')!) + + await waitFor(() => { + expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument() + }) + + // Find and click the clear all button in error section + const clearButtons = screen.getAllByRole('button') + if (clearButtons.length > 0) + fireEvent.click(clearButtons[0]) + + await waitFor(() => { + expect(mockMutateAsync).toHaveBeenCalled() + }) + }) + + it('should clear single plugin when onClearSingle is called', async () => { + const { mockMutateAsync } = setupMocks([ + createMockPlugin({ + status: TaskStatus.failed, + plugin_unique_identifier: 'error-plugin', + taskId: 'task-1', + }), + ]) + + render() + + // Open popover + fireEvent.click(document.getElementById('plugin-task-trigger')!) 
+ + await waitFor(() => { + expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument() + }) + + // Find and click individual clear button (usually the last one) + const clearButtons = screen.getAllByRole('button') + const individualClearButton = clearButtons[clearButtons.length - 1] + fireEvent.click(individualClearButton) + + await waitFor(() => { + expect(mockMutateAsync).toHaveBeenCalledWith({ + taskId: 'task-1', + pluginId: 'error-plugin', + }) + }) + }) + }) + + describe('Edge cases', () => { + it('should handle empty plugin tasks array', () => { + setupMocks([]) + + const { container } = render() + + expect(container.firstChild).toBeNull() + }) + + it('should handle single running plugin', () => { + setupMocks([createMockPlugin({ status: TaskStatus.running })]) + + render() + + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should handle many plugins', () => { + const manyPlugins = Array.from({ length: 10 }, (_, i) => + createMockPlugin({ + status: i % 3 === 0 ? TaskStatus.running : i % 3 === 1 ? TaskStatus.success : TaskStatus.failed, + plugin_unique_identifier: `plugin-${i}`, + })) + setupMocks(manyPlugins) + + render() + + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should handle plugins with empty labels', () => { + const plugin = createMockPlugin({ + status: TaskStatus.running, + labels: {} as Record, + }) + setupMocks([plugin]) + + render() + + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should handle plugins with long messages', () => { + const plugin = createMockPlugin({ + status: TaskStatus.failed, + message: 'A'.repeat(500), + }) + setupMocks([plugin]) + + render() + + // Open popover + fireEvent.click(document.getElementById('plugin-task-trigger')!) 
+ + expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument() + }) + }) +}) + +// ============================================================================ +// Integration Tests +// ============================================================================ +describe('PluginTasks Integration', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + it('should show correct UI flow from installing to success', async () => { + // Start with installing state + setupMocks([createMockPlugin({ status: TaskStatus.running })]) + + const { rerender } = render() + + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + + // Simulate completion by re-rendering with success + setupMocks([createMockPlugin({ status: TaskStatus.success })]) + rerender() + + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should show correct UI flow from installing to failure', async () => { + // Start with installing state + setupMocks([createMockPlugin({ status: TaskStatus.running })]) + + const { rerender } = render() + + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + + // Simulate failure by re-rendering with failed + setupMocks([createMockPlugin({ status: TaskStatus.failed, message: 'Network error' })]) + rerender() + + expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument() + }) + + it('should handle mixed status during installation', () => { + setupMocks([ + createMockPlugin({ status: TaskStatus.running, plugin_unique_identifier: 'p1' }), + createMockPlugin({ status: TaskStatus.success, plugin_unique_identifier: 'p2' }), + createMockPlugin({ status: TaskStatus.failed, plugin_unique_identifier: 'p3' }), + ]) + + render() + + // Open popover + fireEvent.click(document.getElementById('plugin-task-trigger')!) 
+ + // All sections should be visible + const sections = document.querySelectorAll('.max-h-\\[200px\\]') + expect(sections.length).toBe(3) + }) +}) diff --git a/web/app/components/plugins/plugin-page/plugin-tasks/index.tsx b/web/app/components/plugins/plugin-page/plugin-tasks/index.tsx index 40dd4fedb1..45f1dce86b 100644 --- a/web/app/components/plugins/plugin-page/plugin-tasks/index.tsx +++ b/web/app/components/plugins/plugin-page/plugin-tasks/index.tsx @@ -1,33 +1,21 @@ -import { - RiCheckboxCircleFill, - RiErrorWarningFill, - RiInstallLine, - RiLoaderLine, -} from '@remixicon/react' import { useCallback, useMemo, useState, } from 'react' import { useTranslation } from 'react-i18next' -import Button from '@/app/components/base/button' import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger, } from '@/app/components/base/portal-to-follow-elem' -import ProgressCircle from '@/app/components/base/progress-bar/progress-circle' -import Tooltip from '@/app/components/base/tooltip' -import DownloadingIcon from '@/app/components/header/plugins-nav/downloading-icon' -import CardIcon from '@/app/components/plugins/card/base/card-icon' import useGetIcon from '@/app/components/plugins/install-plugin/base/use-get-icon' -import { useGetLanguage } from '@/context/i18n' -import { cn } from '@/utils/classnames' +import PluginTaskList from './components/plugin-task-list' +import TaskStatusIndicator from './components/task-status-indicator' import { usePluginTaskStatus } from './hooks' const PluginTasks = () => { const { t } = useTranslation() - const language = useGetLanguage() const [open, setOpen] = useState(false) const { errorPlugins, @@ -46,35 +34,7 @@ const PluginTasks = () => { } = usePluginTaskStatus() const { getIconUrl } = useGetIcon() - const handleClearAllWithModal = useCallback(async () => { - // Clear all completed plugins (success and error) but keep running ones - const completedPlugins = [...successPlugins, ...errorPlugins] - - // Clear 
all completed plugins individually - for (const plugin of completedPlugins) - await handleClearErrorPlugin(plugin.taskId, plugin.plugin_unique_identifier) - - // Only close modal if no plugins are still installing - if (runningPluginsLength === 0) - setOpen(false) - }, [successPlugins, errorPlugins, handleClearErrorPlugin, runningPluginsLength]) - - const handleClearErrorsWithModal = useCallback(async () => { - // Clear only error plugins, not all plugins - for (const plugin of errorPlugins) - await handleClearErrorPlugin(plugin.taskId, plugin.plugin_unique_identifier) - // Only close modal if no plugins are still installing - if (runningPluginsLength === 0) - setOpen(false) - }, [errorPlugins, handleClearErrorPlugin, runningPluginsLength]) - - const handleClearSingleWithModal = useCallback(async (taskId: string, pluginId: string) => { - await handleClearErrorPlugin(taskId, pluginId) - // Only close modal if no plugins are still installing - if (runningPluginsLength === 0) - setOpen(false) - }, [handleClearErrorPlugin, runningPluginsLength]) - + // Generate tooltip text based on status const tip = useMemo(() => { if (isInstallingWithError) return t('task.installingWithError', { ns: 'plugin', installingLength: runningPluginsLength, successLength: successPluginsLength, errorLength: errorPluginsLength }) @@ -99,8 +59,38 @@ const PluginTasks = () => { t, ]) - // Show icon if there are any plugin tasks (completed, running, or failed) - // Only hide when there are absolutely no plugin tasks + // Generic clear function that handles clearing and modal closing + const clearPluginsAndClose = useCallback(async ( + plugins: Array<{ taskId: string, plugin_unique_identifier: string }>, + ) => { + for (const plugin of plugins) + await handleClearErrorPlugin(plugin.taskId, plugin.plugin_unique_identifier) + if (runningPluginsLength === 0) + setOpen(false) + }, [handleClearErrorPlugin, runningPluginsLength]) + + // Clear handlers using the generic function + const handleClearAll = 
useCallback( + () => clearPluginsAndClose([...successPlugins, ...errorPlugins]), + [clearPluginsAndClose, successPlugins, errorPlugins], + ) + + const handleClearErrors = useCallback( + () => clearPluginsAndClose(errorPlugins), + [clearPluginsAndClose, errorPlugins], + ) + + const handleClearSingle = useCallback( + (taskId: string, pluginId: string) => clearPluginsAndClose([{ taskId, plugin_unique_identifier: pluginId }]), + [clearPluginsAndClose], + ) + + const handleTriggerClick = useCallback(() => { + if (isFailed || isInstalling || isInstallingWithSuccess || isInstallingWithError || isSuccess) + setOpen(v => !v) + }, [isFailed, isInstalling, isInstallingWithSuccess, isInstallingWithError, isSuccess]) + + // Hide when no plugin tasks if (totalPluginsLength === 0) return null @@ -115,206 +105,30 @@ const PluginTasks = () => { crossAxis: 79, }} > - { - if (isFailed || isInstalling || isInstallingWithSuccess || isInstallingWithError || isSuccess) - setOpen(v => !v) - }} - > - -
- { - (isInstalling || isInstallingWithError) && ( - - ) - } - { - !(isInstalling || isInstallingWithError) && ( - - ) - } -
- { - (isInstalling || isInstallingWithSuccess) && ( - - ) - } - { - isInstallingWithError && ( - - ) - } - { - (isSuccess || (successPluginsLength > 0 && runningPluginsLength === 0 && errorPluginsLength === 0)) && ( - - ) - } - { - isFailed && ( - - ) - } -
-
-
+ + {}} + /> -
- {/* Running Plugins */} - {runningPlugins.length > 0 && ( - <> -
- {t('task.installing', { ns: 'plugin' })} - {' '} - ( - {runningPlugins.length} - ) -
-
- {runningPlugins.map(runningPlugin => ( -
-
- - -
-
-
- {runningPlugin.labels[language]} -
-
- {t('task.installing', { ns: 'plugin' })} -
-
-
- ))} -
- - )} - - {/* Success Plugins */} - {successPlugins.length > 0 && ( - <> -
- {t('task.installed', { ns: 'plugin' })} - {' '} - ( - {successPlugins.length} - ) - -
-
- {successPlugins.map(successPlugin => ( -
-
- - -
-
-
- {successPlugin.labels[language]} -
-
- {successPlugin.message || t('task.installed', { ns: 'plugin' })} -
-
-
- ))} -
- - )} - - {/* Error Plugins */} - {errorPlugins.length > 0 && ( - <> -
- {t('task.installError', { ns: 'plugin', errorLength: errorPlugins.length })} - -
-
- {errorPlugins.map(errorPlugin => ( -
-
- - -
-
-
- {errorPlugin.labels[language]} -
-
- {errorPlugin.message} -
-
- -
- ))} -
- - )} -
+
diff --git a/web/app/components/plugins/plugin-page/use-reference-setting.spec.ts b/web/app/components/plugins/plugin-page/use-reference-setting.spec.ts new file mode 100644 index 0000000000..9f64d3fac5 --- /dev/null +++ b/web/app/components/plugins/plugin-page/use-reference-setting.spec.ts @@ -0,0 +1,388 @@ +import { renderHook, waitFor } from '@testing-library/react' +import { beforeEach, describe, expect, it, vi } from 'vitest' +// Import mocks for assertions +import { useAppContext } from '@/context/app-context' +import { useGlobalPublicStore } from '@/context/global-public-context' + +import { useInvalidateReferenceSettings, useMutationReferenceSettings, useReferenceSettings } from '@/service/use-plugins' +import Toast from '../../base/toast' +import { PermissionType } from '../types' +import useReferenceSetting, { useCanInstallPluginFromMarketplace } from './use-reference-setting' + +// Mock dependencies +vi.mock('react-i18next', () => ({ + useTranslation: () => ({ + t: (key: string) => key, + }), +})) + +vi.mock('@/context/app-context', () => ({ + useAppContext: vi.fn(), +})) + +vi.mock('@/context/global-public-context', () => ({ + useGlobalPublicStore: vi.fn(), +})) + +vi.mock('@/service/use-plugins', () => ({ + useReferenceSettings: vi.fn(), + useMutationReferenceSettings: vi.fn(), + useInvalidateReferenceSettings: vi.fn(), +})) + +vi.mock('../../base/toast', () => ({ + default: { + notify: vi.fn(), + }, +})) + +describe('useReferenceSetting Hook', () => { + beforeEach(() => { + vi.clearAllMocks() + + // Default mocks + vi.mocked(useAppContext).mockReturnValue({ + isCurrentWorkspaceManager: false, + isCurrentWorkspaceOwner: false, + } as ReturnType) + + vi.mocked(useReferenceSettings).mockReturnValue({ + data: { + permission: { + install_permission: PermissionType.everyone, + debug_permission: PermissionType.everyone, + }, + }, + } as ReturnType) + + vi.mocked(useMutationReferenceSettings).mockReturnValue({ + mutate: vi.fn(), + isPending: false, + } as 
unknown as ReturnType) + + vi.mocked(useInvalidateReferenceSettings).mockReturnValue(vi.fn()) + }) + + describe('hasPermission logic', () => { + it('should return false when permission is undefined', () => { + vi.mocked(useReferenceSettings).mockReturnValue({ + data: { + permission: { + install_permission: undefined, + debug_permission: undefined, + }, + }, + } as unknown as ReturnType) + + const { result } = renderHook(() => useReferenceSetting()) + + expect(result.current.canManagement).toBe(false) + expect(result.current.canDebugger).toBe(false) + }) + + it('should return false when permission is noOne', () => { + vi.mocked(useReferenceSettings).mockReturnValue({ + data: { + permission: { + install_permission: PermissionType.noOne, + debug_permission: PermissionType.noOne, + }, + }, + } as ReturnType) + + const { result } = renderHook(() => useReferenceSetting()) + + expect(result.current.canManagement).toBe(false) + expect(result.current.canDebugger).toBe(false) + }) + + it('should return true when permission is everyone', () => { + vi.mocked(useReferenceSettings).mockReturnValue({ + data: { + permission: { + install_permission: PermissionType.everyone, + debug_permission: PermissionType.everyone, + }, + }, + } as ReturnType) + + const { result } = renderHook(() => useReferenceSetting()) + + expect(result.current.canManagement).toBe(true) + expect(result.current.canDebugger).toBe(true) + }) + + it('should return isAdmin when permission is admin and user is manager', () => { + vi.mocked(useAppContext).mockReturnValue({ + isCurrentWorkspaceManager: true, + isCurrentWorkspaceOwner: false, + } as ReturnType) + + vi.mocked(useReferenceSettings).mockReturnValue({ + data: { + permission: { + install_permission: PermissionType.admin, + debug_permission: PermissionType.admin, + }, + }, + } as ReturnType) + + const { result } = renderHook(() => useReferenceSetting()) + + expect(result.current.canManagement).toBe(true) + expect(result.current.canDebugger).toBe(true) + }) 
+ + it('should return isAdmin when permission is admin and user is owner', () => { + vi.mocked(useAppContext).mockReturnValue({ + isCurrentWorkspaceManager: false, + isCurrentWorkspaceOwner: true, + } as ReturnType) + + vi.mocked(useReferenceSettings).mockReturnValue({ + data: { + permission: { + install_permission: PermissionType.admin, + debug_permission: PermissionType.admin, + }, + }, + } as ReturnType) + + const { result } = renderHook(() => useReferenceSetting()) + + expect(result.current.canManagement).toBe(true) + expect(result.current.canDebugger).toBe(true) + }) + + it('should return false when permission is admin and user is not admin', () => { + vi.mocked(useAppContext).mockReturnValue({ + isCurrentWorkspaceManager: false, + isCurrentWorkspaceOwner: false, + } as ReturnType) + + vi.mocked(useReferenceSettings).mockReturnValue({ + data: { + permission: { + install_permission: PermissionType.admin, + debug_permission: PermissionType.admin, + }, + }, + } as ReturnType) + + const { result } = renderHook(() => useReferenceSetting()) + + expect(result.current.canManagement).toBe(false) + expect(result.current.canDebugger).toBe(false) + }) + }) + + describe('canSetPermissions', () => { + it('should be true when user is workspace manager', () => { + vi.mocked(useAppContext).mockReturnValue({ + isCurrentWorkspaceManager: true, + isCurrentWorkspaceOwner: false, + } as ReturnType) + + const { result } = renderHook(() => useReferenceSetting()) + + expect(result.current.canSetPermissions).toBe(true) + }) + + it('should be true when user is workspace owner', () => { + vi.mocked(useAppContext).mockReturnValue({ + isCurrentWorkspaceManager: false, + isCurrentWorkspaceOwner: true, + } as ReturnType) + + const { result } = renderHook(() => useReferenceSetting()) + + expect(result.current.canSetPermissions).toBe(true) + }) + + it('should be false when user is neither manager nor owner', () => { + vi.mocked(useAppContext).mockReturnValue({ + isCurrentWorkspaceManager: 
false, + isCurrentWorkspaceOwner: false, + } as ReturnType) + + const { result } = renderHook(() => useReferenceSetting()) + + expect(result.current.canSetPermissions).toBe(false) + }) + }) + + describe('setReferenceSettings callback', () => { + it('should call invalidateReferenceSettings and show toast on success', async () => { + const mockInvalidate = vi.fn() + vi.mocked(useInvalidateReferenceSettings).mockReturnValue(mockInvalidate) + + let onSuccessCallback: (() => void) | undefined + vi.mocked(useMutationReferenceSettings).mockImplementation((options) => { + onSuccessCallback = options?.onSuccess as () => void + return { + mutate: vi.fn(), + isPending: false, + } as unknown as ReturnType + }) + + renderHook(() => useReferenceSetting()) + + // Trigger the onSuccess callback + if (onSuccessCallback) + onSuccessCallback() + + await waitFor(() => { + expect(mockInvalidate).toHaveBeenCalled() + expect(Toast.notify).toHaveBeenCalledWith({ + type: 'success', + message: 'api.actionSuccess', + }) + }) + }) + }) + + describe('returned values', () => { + it('should return referenceSetting data', () => { + const mockData = { + permission: { + install_permission: PermissionType.everyone, + debug_permission: PermissionType.everyone, + }, + } + vi.mocked(useReferenceSettings).mockReturnValue({ + data: mockData, + } as ReturnType) + + const { result } = renderHook(() => useReferenceSetting()) + + expect(result.current.referenceSetting).toEqual(mockData) + }) + + it('should return isUpdatePending from mutation', () => { + vi.mocked(useMutationReferenceSettings).mockReturnValue({ + mutate: vi.fn(), + isPending: true, + } as unknown as ReturnType) + + const { result } = renderHook(() => useReferenceSetting()) + + expect(result.current.isUpdatePending).toBe(true) + }) + + it('should handle null data', () => { + vi.mocked(useReferenceSettings).mockReturnValue({ + data: null, + } as unknown as ReturnType) + + const { result } = renderHook(() => useReferenceSetting()) + + 
expect(result.current.canManagement).toBe(false) + expect(result.current.canDebugger).toBe(false) + }) + }) +}) + +describe('useCanInstallPluginFromMarketplace Hook', () => { + beforeEach(() => { + vi.clearAllMocks() + + vi.mocked(useAppContext).mockReturnValue({ + isCurrentWorkspaceManager: true, + isCurrentWorkspaceOwner: false, + } as ReturnType) + + vi.mocked(useReferenceSettings).mockReturnValue({ + data: { + permission: { + install_permission: PermissionType.everyone, + debug_permission: PermissionType.everyone, + }, + }, + } as ReturnType) + + vi.mocked(useMutationReferenceSettings).mockReturnValue({ + mutate: vi.fn(), + isPending: false, + } as unknown as ReturnType) + + vi.mocked(useInvalidateReferenceSettings).mockReturnValue(vi.fn()) + }) + + it('should return true when marketplace is enabled and canManagement is true', () => { + vi.mocked(useGlobalPublicStore).mockImplementation((selector) => { + const state = { + systemFeatures: { + enable_marketplace: true, + }, + } + return selector(state as Parameters[0]) + }) + + const { result } = renderHook(() => useCanInstallPluginFromMarketplace()) + + expect(result.current.canInstallPluginFromMarketplace).toBe(true) + }) + + it('should return false when marketplace is disabled', () => { + vi.mocked(useGlobalPublicStore).mockImplementation((selector) => { + const state = { + systemFeatures: { + enable_marketplace: false, + }, + } + return selector(state as Parameters[0]) + }) + + const { result } = renderHook(() => useCanInstallPluginFromMarketplace()) + + expect(result.current.canInstallPluginFromMarketplace).toBe(false) + }) + + it('should return false when canManagement is false', () => { + vi.mocked(useGlobalPublicStore).mockImplementation((selector) => { + const state = { + systemFeatures: { + enable_marketplace: true, + }, + } + return selector(state as Parameters[0]) + }) + + vi.mocked(useReferenceSettings).mockReturnValue({ + data: { + permission: { + install_permission: PermissionType.noOne, + 
debug_permission: PermissionType.noOne, + }, + }, + } as ReturnType) + + const { result } = renderHook(() => useCanInstallPluginFromMarketplace()) + + expect(result.current.canInstallPluginFromMarketplace).toBe(false) + }) + + it('should return false when both marketplace is disabled and canManagement is false', () => { + vi.mocked(useGlobalPublicStore).mockImplementation((selector) => { + const state = { + systemFeatures: { + enable_marketplace: false, + }, + } + return selector(state as Parameters[0]) + }) + + vi.mocked(useReferenceSettings).mockReturnValue({ + data: { + permission: { + install_permission: PermissionType.noOne, + debug_permission: PermissionType.noOne, + }, + }, + } as ReturnType) + + const { result } = renderHook(() => useCanInstallPluginFromMarketplace()) + + expect(result.current.canInstallPluginFromMarketplace).toBe(false) + }) +}) diff --git a/web/app/components/plugins/plugin-page/use-uploader.spec.ts b/web/app/components/plugins/plugin-page/use-uploader.spec.ts new file mode 100644 index 0000000000..fa9463b7c0 --- /dev/null +++ b/web/app/components/plugins/plugin-page/use-uploader.spec.ts @@ -0,0 +1,487 @@ +import type { RefObject } from 'react' +import { act, renderHook } from '@testing-library/react' +import { beforeEach, describe, expect, it, vi } from 'vitest' +import { useUploader } from './use-uploader' + +describe('useUploader Hook', () => { + let mockContainerRef: RefObject + let mockOnFileChange: (file: File | null) => void + let mockContainer: HTMLDivElement + + beforeEach(() => { + vi.clearAllMocks() + + mockContainer = document.createElement('div') + document.body.appendChild(mockContainer) + + mockContainerRef = { current: mockContainer } + mockOnFileChange = vi.fn() + }) + + afterEach(() => { + if (mockContainer.parentNode) + document.body.removeChild(mockContainer) + }) + + describe('Initial State', () => { + it('should return initial state with dragging false', () => { + const { result } = renderHook(() => + useUploader({ + 
onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + expect(result.current.dragging).toBe(false) + expect(result.current.fileUploader.current).toBeNull() + expect(result.current.fileChangeHandle).not.toBeNull() + expect(result.current.removeFile).not.toBeNull() + }) + + it('should return null handlers when disabled', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + enabled: false, + }), + ) + + expect(result.current.dragging).toBe(false) + expect(result.current.fileChangeHandle).toBeNull() + expect(result.current.removeFile).toBeNull() + }) + }) + + describe('Drag Events', () => { + it('should handle dragenter and set dragging to true', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true }) + Object.defineProperty(dragEnterEvent, 'dataTransfer', { + value: { types: ['Files'] }, + }) + + act(() => { + mockContainer.dispatchEvent(dragEnterEvent) + }) + + expect(result.current.dragging).toBe(true) + }) + + it('should not set dragging when dragenter without Files type', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true }) + Object.defineProperty(dragEnterEvent, 'dataTransfer', { + value: { types: ['text/plain'] }, + }) + + act(() => { + mockContainer.dispatchEvent(dragEnterEvent) + }) + + expect(result.current.dragging).toBe(false) + }) + + it('should handle dragover event', () => { + renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + const dragOverEvent = new Event('dragover', { bubbles: true, cancelable: true }) + + act(() => { + 
mockContainer.dispatchEvent(dragOverEvent) + }) + + // dragover should prevent default and stop propagation + expect(mockContainer).toBeInTheDocument() + }) + + it('should handle dragleave when relatedTarget is null', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + // First set dragging to true + const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true }) + Object.defineProperty(dragEnterEvent, 'dataTransfer', { + value: { types: ['Files'] }, + }) + act(() => { + mockContainer.dispatchEvent(dragEnterEvent) + }) + expect(result.current.dragging).toBe(true) + + // Then trigger dragleave with null relatedTarget + const dragLeaveEvent = new Event('dragleave', { bubbles: true, cancelable: true }) + Object.defineProperty(dragLeaveEvent, 'relatedTarget', { + value: null, + }) + + act(() => { + mockContainer.dispatchEvent(dragLeaveEvent) + }) + + expect(result.current.dragging).toBe(false) + }) + + it('should handle dragleave when relatedTarget is outside container', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + // First set dragging to true + const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true }) + Object.defineProperty(dragEnterEvent, 'dataTransfer', { + value: { types: ['Files'] }, + }) + act(() => { + mockContainer.dispatchEvent(dragEnterEvent) + }) + expect(result.current.dragging).toBe(true) + + // Create element outside container + const outsideElement = document.createElement('div') + document.body.appendChild(outsideElement) + + // Trigger dragleave with relatedTarget outside container + const dragLeaveEvent = new Event('dragleave', { bubbles: true, cancelable: true }) + Object.defineProperty(dragLeaveEvent, 'relatedTarget', { + value: outsideElement, + }) + + act(() => { + mockContainer.dispatchEvent(dragLeaveEvent) + }) + 
+ expect(result.current.dragging).toBe(false) + document.body.removeChild(outsideElement) + }) + + it('should not set dragging to false when relatedTarget is inside container', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + // First set dragging to true + const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true }) + Object.defineProperty(dragEnterEvent, 'dataTransfer', { + value: { types: ['Files'] }, + }) + act(() => { + mockContainer.dispatchEvent(dragEnterEvent) + }) + expect(result.current.dragging).toBe(true) + + // Create element inside container + const insideElement = document.createElement('div') + mockContainer.appendChild(insideElement) + + // Trigger dragleave with relatedTarget inside container + const dragLeaveEvent = new Event('dragleave', { bubbles: true, cancelable: true }) + Object.defineProperty(dragLeaveEvent, 'relatedTarget', { + value: insideElement, + }) + + act(() => { + mockContainer.dispatchEvent(dragLeaveEvent) + }) + + // Should still be dragging since relatedTarget is inside container + expect(result.current.dragging).toBe(true) + }) + }) + + describe('Drop Events', () => { + it('should handle drop event with files', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + // First set dragging to true + const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true }) + Object.defineProperty(dragEnterEvent, 'dataTransfer', { + value: { types: ['Files'] }, + }) + act(() => { + mockContainer.dispatchEvent(dragEnterEvent) + }) + + // Create mock file + const file = new File(['content'], 'test.difypkg', { type: 'application/octet-stream' }) + + // Trigger drop event + const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) + Object.defineProperty(dropEvent, 'dataTransfer', { + value: { files: [file] }, 
+ }) + + act(() => { + mockContainer.dispatchEvent(dropEvent) + }) + + expect(result.current.dragging).toBe(false) + expect(mockOnFileChange).toHaveBeenCalledWith(file) + }) + + it('should not call onFileChange when drop has no dataTransfer', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + // Set dragging first + const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true }) + Object.defineProperty(dragEnterEvent, 'dataTransfer', { + value: { types: ['Files'] }, + }) + act(() => { + mockContainer.dispatchEvent(dragEnterEvent) + }) + + // Drop without dataTransfer + const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) + // No dataTransfer property + + act(() => { + mockContainer.dispatchEvent(dropEvent) + }) + + expect(result.current.dragging).toBe(false) + expect(mockOnFileChange).not.toHaveBeenCalled() + }) + + it('should not call onFileChange when drop has empty files array', () => { + renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) + Object.defineProperty(dropEvent, 'dataTransfer', { + value: { files: [] }, + }) + + act(() => { + mockContainer.dispatchEvent(dropEvent) + }) + + expect(mockOnFileChange).not.toHaveBeenCalled() + }) + }) + + describe('File Change Handler', () => { + it('should call onFileChange with file from input', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + const file = new File(['content'], 'test.difypkg', { type: 'application/octet-stream' }) + const mockEvent = { + target: { + files: [file], + }, + } as unknown as React.ChangeEvent + + act(() => { + result.current.fileChangeHandle?.(mockEvent) + }) + + expect(mockOnFileChange).toHaveBeenCalledWith(file) + }) + + it('should 
call onFileChange with null when no files', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + const mockEvent = { + target: { + files: null, + }, + } as unknown as React.ChangeEvent + + act(() => { + result.current.fileChangeHandle?.(mockEvent) + }) + + expect(mockOnFileChange).toHaveBeenCalledWith(null) + }) + }) + + describe('Remove File', () => { + it('should call onFileChange with null', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + act(() => { + result.current.removeFile?.() + }) + + expect(mockOnFileChange).toHaveBeenCalledWith(null) + }) + + it('should handle removeFile when fileUploader has a value', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + // Create a mock input element with value property + const mockInput = { + value: 'test.difypkg', + } + + // Override the fileUploader ref + Object.defineProperty(result.current.fileUploader, 'current', { + value: mockInput, + writable: true, + }) + + act(() => { + result.current.removeFile?.() + }) + + expect(mockOnFileChange).toHaveBeenCalledWith(null) + expect(mockInput.value).toBe('') + }) + + it('should handle removeFile when fileUploader is null', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + }), + ) + + // fileUploader.current is null by default + act(() => { + result.current.removeFile?.() + }) + + expect(mockOnFileChange).toHaveBeenCalledWith(null) + }) + }) + + describe('Enabled/Disabled State', () => { + it('should not add event listeners when disabled', () => { + const addEventListenerSpy = vi.spyOn(mockContainer, 'addEventListener') + + renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: 
mockContainerRef, + enabled: false, + }), + ) + + expect(addEventListenerSpy).not.toHaveBeenCalled() + }) + + it('should add event listeners when enabled', () => { + const addEventListenerSpy = vi.spyOn(mockContainer, 'addEventListener') + + renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + enabled: true, + }), + ) + + expect(addEventListenerSpy).toHaveBeenCalledWith('dragenter', expect.any(Function)) + expect(addEventListenerSpy).toHaveBeenCalledWith('dragover', expect.any(Function)) + expect(addEventListenerSpy).toHaveBeenCalledWith('dragleave', expect.any(Function)) + expect(addEventListenerSpy).toHaveBeenCalledWith('drop', expect.any(Function)) + }) + + it('should remove event listeners on cleanup', () => { + const removeEventListenerSpy = vi.spyOn(mockContainer, 'removeEventListener') + + const { unmount } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + enabled: true, + }), + ) + + unmount() + + expect(removeEventListenerSpy).toHaveBeenCalledWith('dragenter', expect.any(Function)) + expect(removeEventListenerSpy).toHaveBeenCalledWith('dragover', expect.any(Function)) + expect(removeEventListenerSpy).toHaveBeenCalledWith('dragleave', expect.any(Function)) + expect(removeEventListenerSpy).toHaveBeenCalledWith('drop', expect.any(Function)) + }) + + it('should return false for dragging when disabled', () => { + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: mockContainerRef, + enabled: false, + }), + ) + + expect(result.current.dragging).toBe(false) + }) + }) + + describe('Container Ref Edge Cases', () => { + it('should handle null containerRef.current', () => { + const nullRef: RefObject = { current: null } + + const { result } = renderHook(() => + useUploader({ + onFileChange: mockOnFileChange, + containerRef: nullRef, + }), + ) + + expect(result.current.dragging).toBe(false) + }) + }) +}) diff --git 
a/web/app/components/rag-pipeline/index.spec.tsx b/web/app/components/rag-pipeline/index.spec.tsx new file mode 100644 index 0000000000..5adfc828cf --- /dev/null +++ b/web/app/components/rag-pipeline/index.spec.tsx @@ -0,0 +1,550 @@ +import type { FetchWorkflowDraftResponse } from '@/types/workflow' +import { cleanup, render, screen } from '@testing-library/react' +import * as React from 'react' +import { BlockEnum } from '@/app/components/workflow/types' + +// Import real utility functions (pure functions, no side effects) + +// Import mocked modules for manipulation +import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail' +import { usePipelineInit } from './hooks' +import RagPipelineWrapper from './index' +import { processNodesWithoutDataSource } from './utils' + +// Mock: Context - need to control return values +vi.mock('@/context/dataset-detail', () => ({ + useDatasetDetailContextWithSelector: vi.fn(), +})) + +// Mock: Hook with API calls +vi.mock('./hooks', () => ({ + usePipelineInit: vi.fn(), +})) + +// Mock: Store creator +vi.mock('./store', () => ({ + createRagPipelineSliceSlice: vi.fn(() => ({})), +})) + +// Mock: Utility with complex workflow dependencies (generateNewNode, etc.) +vi.mock('./utils', () => ({ + processNodesWithoutDataSource: vi.fn((nodes, viewport) => ({ + nodes, + viewport, + })), +})) + +// Mock: Complex component with useParams, Toast, API calls +vi.mock('./components/conversion', () => ({ + default: () =>
Conversion Component
, +})) + +// Mock: Complex component with many hooks and workflow dependencies +vi.mock('./components/rag-pipeline-main', () => ({ + default: ({ nodes, edges, viewport }: any) => ( +
+ {nodes?.length ?? 0} + {edges?.length ?? 0} + {viewport?.zoom ?? 'none'} +
+ ), +})) + +// Mock: Complex component with ReactFlow and many providers +vi.mock('@/app/components/workflow', () => ({ + default: ({ children }: { children: React.ReactNode }) => ( +
{children}
+ ), +})) + +// Mock: Context provider +vi.mock('@/app/components/workflow/context', () => ({ + WorkflowContextProvider: ({ children }: { children: React.ReactNode }) => ( +
{children}
+ ), +})) + +// Type assertions for mocked functions +const mockUseDatasetDetailContextWithSelector = vi.mocked(useDatasetDetailContextWithSelector) +const mockUsePipelineInit = vi.mocked(usePipelineInit) +const mockProcessNodesWithoutDataSource = vi.mocked(processNodesWithoutDataSource) + +// Helper to mock selector with actual execution (increases function coverage) +// This executes the real selector function: s => s.dataset?.pipeline_id +const mockSelectorWithDataset = (pipelineId: string | null | undefined) => { + mockUseDatasetDetailContextWithSelector.mockImplementation((selector: (state: any) => any) => { + const mockState = { dataset: pipelineId ? { pipeline_id: pipelineId } : null } + return selector(mockState) + }) +} + +// Test data factory +const createMockWorkflowData = (overrides?: Partial): FetchWorkflowDraftResponse => ({ + graph: { + nodes: [ + { id: 'node-1', type: 'custom', data: { type: BlockEnum.Start, title: 'Start' }, position: { x: 100, y: 100 } }, + { id: 'node-2', type: 'custom', data: { type: BlockEnum.End, title: 'End' }, position: { x: 300, y: 100 } }, + ], + edges: [ + { id: 'edge-1', source: 'node-1', target: 'node-2', type: 'custom' }, + ], + viewport: { x: 0, y: 0, zoom: 1 }, + }, + hash: 'test-hash-123', + updated_at: 1234567890, + tool_published: false, + environment_variables: [], + ...overrides, +} as FetchWorkflowDraftResponse) + +afterEach(() => { + cleanup() + vi.clearAllMocks() +}) + +describe('RagPipelineWrapper', () => { + describe('Rendering', () => { + it('should render Conversion component when pipelineId is null', () => { + mockSelectorWithDataset(null) + mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: false }) + + render() + + expect(screen.getByTestId('conversion-component')).toBeInTheDocument() + expect(screen.queryByTestId('workflow-context-provider')).not.toBeInTheDocument() + }) + + it('should render Conversion component when pipelineId is undefined', () => { + 
mockSelectorWithDataset(undefined) + mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: false }) + + render() + + expect(screen.getByTestId('conversion-component')).toBeInTheDocument() + }) + + it('should render Conversion component when pipelineId is empty string', () => { + mockSelectorWithDataset('') + mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: false }) + + render() + + expect(screen.getByTestId('conversion-component')).toBeInTheDocument() + }) + + it('should render WorkflowContextProvider when pipelineId exists', () => { + mockSelectorWithDataset('pipeline-123') + mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: true }) + + render() + + expect(screen.getByTestId('workflow-context-provider')).toBeInTheDocument() + expect(screen.queryByTestId('conversion-component')).not.toBeInTheDocument() + }) + }) + + describe('Props Variations', () => { + it('should pass injectWorkflowStoreSliceFn to WorkflowContextProvider', () => { + mockSelectorWithDataset('pipeline-456') + mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: true }) + + render() + + expect(screen.getByTestId('workflow-context-provider')).toBeInTheDocument() + }) + }) +}) + +describe('RagPipeline', () => { + beforeEach(() => { + // Default setup for RagPipeline tests - execute real selector function + mockSelectorWithDataset('pipeline-123') + }) + + describe('Loading State', () => { + it('should render Loading component when isLoading is true', () => { + mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: true }) + + render() + + // Real Loading component has role="status" + expect(screen.getByRole('status')).toBeInTheDocument() + }) + + it('should render Loading component when data is undefined', () => { + mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: false }) + + render() + + expect(screen.getByRole('status')).toBeInTheDocument() + }) + + it('should render Loading component when both data is undefined 
and isLoading is true', () => { + mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: true }) + + render() + + expect(screen.getByRole('status')).toBeInTheDocument() + }) + }) + + describe('Data Loaded State', () => { + it('should render RagPipelineMain when data is loaded', () => { + const mockData = createMockWorkflowData() + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + expect(screen.getByTestId('rag-pipeline-main')).toBeInTheDocument() + expect(screen.queryByTestId('loading-component')).not.toBeInTheDocument() + }) + + it('should pass processed nodes to RagPipelineMain', () => { + const mockData = createMockWorkflowData() + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + expect(screen.getByTestId('nodes-count').textContent).toBe('2') + }) + + it('should pass edges to RagPipelineMain', () => { + const mockData = createMockWorkflowData() + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + expect(screen.getByTestId('edges-count').textContent).toBe('1') + }) + + it('should pass viewport to RagPipelineMain', () => { + const mockData = createMockWorkflowData({ + graph: { + nodes: [], + edges: [], + viewport: { x: 100, y: 200, zoom: 1.5 }, + }, + }) + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + expect(screen.getByTestId('viewport-zoom').textContent).toBe('1.5') + }) + }) + + describe('Memoization Logic', () => { + it('should process nodes through initialNodes when data is loaded', () => { + const mockData = createMockWorkflowData() + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + // initialNodes is a real function - verify nodes are rendered + // The real initialNodes processes nodes and adds position data + expect(screen.getByTestId('rag-pipeline-main')).toBeInTheDocument() + }) + + it('should process edges through initialEdges when data 
is loaded', () => { + const mockData = createMockWorkflowData() + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + // initialEdges is a real function - verify component renders with edges + expect(screen.getByTestId('edges-count').textContent).toBe('1') + }) + + it('should call processNodesWithoutDataSource with nodesData and viewport', () => { + const mockData = createMockWorkflowData() + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + expect(mockProcessNodesWithoutDataSource).toHaveBeenCalled() + }) + + it('should not process nodes when data is undefined', () => { + mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: false }) + + render() + + // When data is undefined, Loading is shown, processNodesWithoutDataSource is not called + expect(mockProcessNodesWithoutDataSource).not.toHaveBeenCalled() + }) + + it('should use memoized values when data reference is same', () => { + const mockData = createMockWorkflowData() + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + const { rerender } = render() + + // Clear mock call count after initial render + mockProcessNodesWithoutDataSource.mockClear() + + // Rerender with same data reference (no change to mockUsePipelineInit) + rerender() + + // processNodesWithoutDataSource should not be called again due to useMemo + // Note: React strict mode may cause double render, so we check it's not excessive + expect(mockProcessNodesWithoutDataSource.mock.calls.length).toBeLessThanOrEqual(1) + }) + }) + + describe('Edge Cases', () => { + it('should handle empty nodes array', () => { + const mockData = createMockWorkflowData({ + graph: { + nodes: [], + edges: [], + viewport: { x: 0, y: 0, zoom: 1 }, + }, + }) + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + expect(screen.getByTestId('nodes-count').textContent).toBe('0') + }) + + it('should handle empty edges 
array', () => { + const mockData = createMockWorkflowData({ + graph: { + nodes: [{ id: 'node-1', type: 'custom', data: { type: BlockEnum.Start, title: 'Start', desc: '' }, position: { x: 0, y: 0 } }], + edges: [], + viewport: { x: 0, y: 0, zoom: 1 }, + }, + }) + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + expect(screen.getByTestId('edges-count').textContent).toBe('0') + }) + + it('should handle undefined viewport', () => { + const mockData = createMockWorkflowData({ + graph: { + nodes: [], + edges: [], + viewport: undefined as any, + }, + }) + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + expect(screen.getByTestId('rag-pipeline-main')).toBeInTheDocument() + }) + + it('should handle null viewport', () => { + const mockData = createMockWorkflowData({ + graph: { + nodes: [], + edges: [], + viewport: null as any, + }, + }) + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + expect(screen.getByTestId('rag-pipeline-main')).toBeInTheDocument() + }) + + it('should handle large number of nodes', () => { + const largeNodesArray = Array.from({ length: 100 }, (_, i) => ({ + id: `node-${i}`, + type: 'custom', + data: { type: BlockEnum.Start, title: `Node ${i}`, desc: '' }, + position: { x: i * 100, y: 0 }, + })) + + const mockData = createMockWorkflowData({ + graph: { + nodes: largeNodesArray, + edges: [], + viewport: { x: 0, y: 0, zoom: 1 }, + }, + }) + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + expect(screen.getByTestId('nodes-count').textContent).toBe('100') + }) + + it('should handle viewport with edge case zoom values', () => { + const mockData = createMockWorkflowData({ + graph: { + nodes: [], + edges: [], + viewport: { x: -1000, y: -1000, zoom: 0.25 }, + }, + }) + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + 
expect(screen.getByTestId('viewport-zoom').textContent).toBe('0.25') + }) + + it('should handle viewport with maximum zoom', () => { + const mockData = createMockWorkflowData({ + graph: { + nodes: [], + edges: [], + viewport: { x: 0, y: 0, zoom: 4 }, + }, + }) + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + expect(screen.getByTestId('viewport-zoom').textContent).toBe('4') + }) + }) + + describe('Component Integration', () => { + it('should render WorkflowWithDefaultContext as wrapper', () => { + const mockData = createMockWorkflowData() + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + expect(screen.getByTestId('workflow-default-context')).toBeInTheDocument() + }) + + it('should nest RagPipelineMain inside WorkflowWithDefaultContext', () => { + const mockData = createMockWorkflowData() + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + render() + + const workflowContext = screen.getByTestId('workflow-default-context') + const ragPipelineMain = screen.getByTestId('rag-pipeline-main') + + expect(workflowContext).toContainElement(ragPipelineMain) + }) + }) +}) + +describe('processNodesWithoutDataSource utility integration', () => { + beforeEach(() => { + mockSelectorWithDataset('pipeline-123') + }) + + it('should process nodes through processNodesWithoutDataSource', () => { + const mockData = createMockWorkflowData() + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + mockProcessNodesWithoutDataSource.mockReturnValue({ + nodes: [{ id: 'processed-node', type: 'custom', data: { type: BlockEnum.Start, title: 'Processed', desc: '' }, position: { x: 0, y: 0 } }] as any, + viewport: { x: 0, y: 0, zoom: 2 }, + }) + + render() + + expect(mockProcessNodesWithoutDataSource).toHaveBeenCalled() + expect(screen.getByTestId('nodes-count').textContent).toBe('1') + expect(screen.getByTestId('viewport-zoom').textContent).toBe('2') + }) + + 
it('should handle processNodesWithoutDataSource returning modified viewport', () => { + const mockData = createMockWorkflowData() + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + mockProcessNodesWithoutDataSource.mockReturnValue({ + nodes: [], + viewport: { x: 500, y: 500, zoom: 0.5 }, + }) + + render() + + expect(screen.getByTestId('viewport-zoom').textContent).toBe('0.5') + }) +}) + +describe('Conditional Rendering Flow', () => { + it('should transition from loading to loaded state', () => { + mockSelectorWithDataset('pipeline-123') + + // Start with loading state + mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: true }) + const { rerender } = render() + + // Real Loading component has role="status" + expect(screen.getByRole('status')).toBeInTheDocument() + + // Transition to loaded state + const mockData = createMockWorkflowData() + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + rerender() + + expect(screen.getByTestId('rag-pipeline-main')).toBeInTheDocument() + }) + + it('should switch from Conversion to Pipeline when pipelineId becomes available', () => { + // Start without pipelineId + mockSelectorWithDataset(null) + mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: false }) + + const { rerender } = render() + + expect(screen.getByTestId('conversion-component')).toBeInTheDocument() + + // PipelineId becomes available + mockSelectorWithDataset('new-pipeline-id') + mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: true }) + rerender() + + expect(screen.queryByTestId('conversion-component')).not.toBeInTheDocument() + // Real Loading component has role="status" + expect(screen.getByRole('status')).toBeInTheDocument() + }) +}) + +describe('Error Handling', () => { + beforeEach(() => { + mockSelectorWithDataset('pipeline-123') + }) + + it('should throw when graph nodes is null', () => { + const mockData = { + graph: { + nodes: null as any, + edges: null as 
any, + viewport: { x: 0, y: 0, zoom: 1 }, + }, + hash: 'test', + updated_at: 123, + } as FetchWorkflowDraftResponse + + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + // Suppress console.error for expected error + const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) + + // Real initialNodes will throw when nodes is null + // This documents the component's current behavior - it requires valid nodes array + expect(() => render()).toThrow() + + consoleSpy.mockRestore() + }) + + it('should throw when graph property is missing', () => { + const mockData = { + hash: 'test', + updated_at: 123, + } as unknown as FetchWorkflowDraftResponse + + mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false }) + + // Suppress console.error for expected error + const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) + + // When graph is undefined, component throws because data.graph.nodes is accessed + // This documents the component's current behavior - it requires graph to be present + expect(() => render()).toThrow() + + consoleSpy.mockRestore() + }) +}) From 2bfc54314eeede0352b2abe017a1138cd488e56a Mon Sep 17 00:00:00 2001 From: wangxiaolei Date: Thu, 15 Jan 2026 11:10:55 +0800 Subject: [PATCH 12/25] feat: single run add opentelemetry (#31020) --- api/core/workflow/workflow_entry.py | 29 +++++++++++++++++++++++++---- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py index fd3fc02f62..ee37314721 100644 --- a/api/core/workflow/workflow_entry.py +++ b/api/core/workflow/workflow_entry.py @@ -189,8 +189,7 @@ class WorkflowEntry: ) try: - # run node - generator = node.run() + generator = cls._traced_node_run(node) except Exception as e: logger.exception( "error while running node, workflow_id=%s, node_id=%s, node_type=%s, node_version=%s", @@ -323,8 +322,7 @@ class WorkflowEntry: tenant_id=tenant_id, ) - # run node 
- generator = node.run() + generator = cls._traced_node_run(node) return node, generator except Exception as e: @@ -430,3 +428,26 @@ class WorkflowEntry: input_value = current_variable.value | input_value variable_pool.add([variable_node_id] + variable_key_list, input_value) + + @staticmethod + def _traced_node_run(node: Node) -> Generator[GraphNodeEventBase, None, None]: + """ + Wraps a node's run method with OpenTelemetry tracing and returns a generator. + """ + # Wrap node.run() with ObservabilityLayer hooks to produce node-level spans + layer = ObservabilityLayer() + layer.on_graph_start() + node.ensure_execution_id() + + def _gen(): + error: Exception | None = None + layer.on_node_run_start(node) + try: + yield from node.run() + except Exception as exc: + error = exc + raise + finally: + layer.on_node_run_end(node, error) + + return _gen() From 0ef8b5a0ca664ab2be14d4aa74c2b4caa13011c8 Mon Sep 17 00:00:00 2001 From: -LAN- Date: Thu, 15 Jan 2026 11:36:15 +0800 Subject: [PATCH 13/25] chore: bump version to 1.11.4 (#30961) --- api/pyproject.toml | 2 +- api/uv.lock | 2 +- docker/docker-compose-template.yaml | 8 ++++---- docker/docker-compose.yaml | 8 ++++---- web/package.json | 2 +- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/api/pyproject.toml b/api/pyproject.toml index 28bd591d17..d025a92846 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "dify-api" -version = "1.11.3" +version = "1.11.4" requires-python = ">=3.11,<3.13" dependencies = [ diff --git a/api/uv.lock b/api/uv.lock index 792340599d..83aa89072c 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1368,7 +1368,7 @@ wheels = [ [[package]] name = "dify-api" -version = "1.11.3" +version = "1.11.4" source = { virtual = "." 
} dependencies = [ { name = "aliyun-log-python-sdk" }, diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index aada39569e..9659990383 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -21,7 +21,7 @@ services: # API service api: - image: langgenius/dify-api:1.11.3 + image: langgenius/dify-api:1.11.4 restart: always environment: # Use the shared environment variables. @@ -63,7 +63,7 @@ services: # worker service # The Celery worker for processing all queues (dataset, workflow, mail, etc.) worker: - image: langgenius/dify-api:1.11.3 + image: langgenius/dify-api:1.11.4 restart: always environment: # Use the shared environment variables. @@ -102,7 +102,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.11.3 + image: langgenius/dify-api:1.11.4 restart: always environment: # Use the shared environment variables. @@ -132,7 +132,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.11.3 + image: langgenius/dify-web:1.11.4 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 6439cccf47..429667e75f 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -705,7 +705,7 @@ services: # API service api: - image: langgenius/dify-api:1.11.3 + image: langgenius/dify-api:1.11.4 restart: always environment: # Use the shared environment variables. @@ -747,7 +747,7 @@ services: # worker service # The Celery worker for processing all queues (dataset, workflow, mail, etc.) worker: - image: langgenius/dify-api:1.11.3 + image: langgenius/dify-api:1.11.4 restart: always environment: # Use the shared environment variables. @@ -786,7 +786,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. 
worker_beat: - image: langgenius/dify-api:1.11.3 + image: langgenius/dify-api:1.11.4 restart: always environment: # Use the shared environment variables. @@ -816,7 +816,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.11.3 + image: langgenius/dify-web:1.11.4 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} diff --git a/web/package.json b/web/package.json index bdbac2af83..000862204b 100644 --- a/web/package.json +++ b/web/package.json @@ -1,7 +1,7 @@ { "name": "dify-web", "type": "module", - "version": "1.11.3", + "version": "1.11.4", "private": true, "packageManager": "pnpm@10.27.0+sha512.72d699da16b1179c14ba9e64dc71c9a40988cbdc65c264cb0e489db7de917f20dcf4d64d8723625f2969ba52d4b7e2a1170682d9ac2a5dcaeaab732b7e16f04a", "imports": { From 52af829f1fadf2afeb3c62e57137c160e8179050 Mon Sep 17 00:00:00 2001 From: hj24 Date: Thu, 15 Jan 2026 14:03:17 +0800 Subject: [PATCH 14/25] refactor: enhance clean messages task (#29638) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: 非法操作 Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- api/commands.py | 79 ++ api/extensions/ext_commands.py | 2 + ...eat_add_created_at_id_index_to_messages.py | 33 + api/models/model.py | 1 + api/schedule/clean_messages.py | 126 +- .../conversation/messages_clean_policy.py | 216 ++++ .../conversation/messages_clean_service.py | 334 +++++ .../services/test_messages_clean_service.py | 1070 +++++++++++++++++ .../services/test_messages_clean_service.py | 627 ++++++++++ 9 files changed, 2411 insertions(+), 77 deletions(-) create mode 100644 api/migrations/versions/2026_01_12_1729-3334862ee907_feat_add_created_at_id_index_to_messages.py create mode 100644 api/services/retention/conversation/messages_clean_policy.py create mode 100644 
api/services/retention/conversation/messages_clean_service.py create mode 100644 api/tests/test_containers_integration_tests/services/test_messages_clean_service.py create mode 100644 api/tests/unit_tests/services/test_messages_clean_service.py diff --git a/api/commands.py b/api/commands.py index 20ce22a6c7..e223df74d4 100644 --- a/api/commands.py +++ b/api/commands.py @@ -3,6 +3,7 @@ import datetime import json import logging import secrets +import time from typing import Any import click @@ -46,6 +47,8 @@ from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpi from services.plugin.data_migration import PluginDataMigration from services.plugin.plugin_migration import PluginMigration from services.plugin.plugin_service import PluginService +from services.retention.conversation.messages_clean_policy import create_message_clean_policy +from services.retention.conversation.messages_clean_service import MessagesCleanService from services.retention.workflow_run.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup from tasks.remove_app_and_related_data_task import delete_draft_variables_batch @@ -2172,3 +2175,79 @@ def migrate_oss( except Exception as e: db.session.rollback() click.echo(click.style(f"Failed to update DB storage_type: {str(e)}", fg="red")) + + +@click.command("clean-expired-messages", help="Clean expired messages.") +@click.option( + "--start-from", + type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]), + required=True, + help="Lower bound (inclusive) for created_at.", +) +@click.option( + "--end-before", + type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]), + required=True, + help="Upper bound (exclusive) for created_at.", +) +@click.option("--batch-size", default=1000, show_default=True, help="Batch size for selecting messages.") +@click.option( + "--graceful-period", + default=21, + show_default=True, + help="Graceful period in days after subscription expiration, will be ignored when 
billing is disabled.", +) +@click.option("--dry-run", is_flag=True, default=False, help="Show messages logs would be cleaned without deleting") +def clean_expired_messages( + batch_size: int, + graceful_period: int, + start_from: datetime.datetime, + end_before: datetime.datetime, + dry_run: bool, +): + """ + Clean expired messages and related data for tenants based on clean policy. + """ + click.echo(click.style("clean_messages: start clean messages.", fg="green")) + + start_at = time.perf_counter() + + try: + # Create policy based on billing configuration + # NOTE: graceful_period will be ignored when billing is disabled. + policy = create_message_clean_policy(graceful_period_days=graceful_period) + + # Create and run the cleanup service + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=start_from, + end_before=end_before, + batch_size=batch_size, + dry_run=dry_run, + ) + stats = service.run() + + end_at = time.perf_counter() + click.echo( + click.style( + f"clean_messages: completed successfully\n" + f" - Latency: {end_at - start_at:.2f}s\n" + f" - Batches processed: {stats['batches']}\n" + f" - Total messages scanned: {stats['total_messages']}\n" + f" - Messages filtered: {stats['filtered_messages']}\n" + f" - Messages deleted: {stats['total_deleted']}", + fg="green", + ) + ) + except Exception as e: + end_at = time.perf_counter() + logger.exception("clean_messages failed") + click.echo( + click.style( + f"clean_messages: failed after {end_at - start_at:.2f}s - {str(e)}", + fg="red", + ) + ) + raise + + click.echo(click.style("messages cleanup completed.", fg="green")) diff --git a/api/extensions/ext_commands.py b/api/extensions/ext_commands.py index c32130d377..51e2c6cdd5 100644 --- a/api/extensions/ext_commands.py +++ b/api/extensions/ext_commands.py @@ -4,6 +4,7 @@ from dify_app import DifyApp def init_app(app: DifyApp): from commands import ( add_qdrant_index, + clean_expired_messages, clean_workflow_runs, 
cleanup_orphaned_draft_variables, clear_free_plan_tenant_expired_logs, @@ -58,6 +59,7 @@ def init_app(app: DifyApp): transform_datasource_credentials, install_rag_pipeline_plugins, clean_workflow_runs, + clean_expired_messages, ] for cmd in cmds_to_register: app.cli.add_command(cmd) diff --git a/api/migrations/versions/2026_01_12_1729-3334862ee907_feat_add_created_at_id_index_to_messages.py b/api/migrations/versions/2026_01_12_1729-3334862ee907_feat_add_created_at_id_index_to_messages.py new file mode 100644 index 0000000000..758369ba99 --- /dev/null +++ b/api/migrations/versions/2026_01_12_1729-3334862ee907_feat_add_created_at_id_index_to_messages.py @@ -0,0 +1,33 @@ +"""feat: add created_at id index to messages + +Revision ID: 3334862ee907 +Revises: 905527cc8fd3 +Create Date: 2026-01-12 17:29:44.846544 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '3334862ee907' +down_revision = '905527cc8fd3' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.create_index('message_created_at_id_idx', ['created_at', 'id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.drop_index('message_created_at_id_idx') + + # ### end Alembic commands ### diff --git a/api/models/model.py b/api/models/model.py index 68903e86eb..d6a0aa3bb3 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -968,6 +968,7 @@ class Message(Base): Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"), Index("message_created_at_idx", "created_at"), Index("message_app_mode_idx", "app_mode"), + Index("message_created_at_id_idx", "created_at", "id"), ) id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) diff --git a/api/schedule/clean_messages.py b/api/schedule/clean_messages.py index 352a84b592..e85bba8823 100644 --- a/api/schedule/clean_messages.py +++ b/api/schedule/clean_messages.py @@ -1,90 +1,62 @@ -import datetime import logging import time import click -from sqlalchemy.exc import SQLAlchemyError import app from configs import dify_config -from enums.cloud_plan import CloudPlan -from extensions.ext_database import db -from extensions.ext_redis import redis_client -from models.model import ( - App, - Message, - MessageAgentThought, - MessageAnnotation, - MessageChain, - MessageFeedback, - MessageFile, -) -from models.web import SavedMessage -from services.feature_service import FeatureService +from services.retention.conversation.messages_clean_policy import create_message_clean_policy +from services.retention.conversation.messages_clean_service import MessagesCleanService logger = logging.getLogger(__name__) -@app.celery.task(queue="dataset") +@app.celery.task(queue="retention") def clean_messages(): - click.echo(click.style("Start clean messages.", fg="green")) - start_at = time.perf_counter() - plan_sandbox_clean_message_day = datetime.datetime.now() - datetime.timedelta( - days=dify_config.PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING - ) - while True: - try: - # Main query with join and filter - messages = ( - 
db.session.query(Message) - .where(Message.created_at < plan_sandbox_clean_message_day) - .order_by(Message.created_at.desc()) - .limit(100) - .all() - ) + """ + Clean expired messages based on clean policy. - except SQLAlchemyError: - raise - if not messages: - break - for message in messages: - app = db.session.query(App).filter_by(id=message.app_id).first() - if not app: - logger.warning( - "Expected App record to exist, but none was found, app_id=%s, message_id=%s", - message.app_id, - message.id, - ) - continue - features_cache_key = f"features:{app.tenant_id}" - plan_cache = redis_client.get(features_cache_key) - if plan_cache is None: - features = FeatureService.get_features(app.tenant_id) - redis_client.setex(features_cache_key, 600, features.billing.subscription.plan) - plan = features.billing.subscription.plan - else: - plan = plan_cache.decode() - if plan == CloudPlan.SANDBOX: - # clean related message - db.session.query(MessageFeedback).where(MessageFeedback.message_id == message.id).delete( - synchronize_session=False - ) - db.session.query(MessageAnnotation).where(MessageAnnotation.message_id == message.id).delete( - synchronize_session=False - ) - db.session.query(MessageChain).where(MessageChain.message_id == message.id).delete( - synchronize_session=False - ) - db.session.query(MessageAgentThought).where(MessageAgentThought.message_id == message.id).delete( - synchronize_session=False - ) - db.session.query(MessageFile).where(MessageFile.message_id == message.id).delete( - synchronize_session=False - ) - db.session.query(SavedMessage).where(SavedMessage.message_id == message.id).delete( - synchronize_session=False - ) - db.session.query(Message).where(Message.id == message.id).delete() - db.session.commit() - end_at = time.perf_counter() - click.echo(click.style(f"Cleaned messages from db success latency: {end_at - start_at}", fg="green")) + This task uses MessagesCleanService to efficiently clean messages in batches. 
+ The behavior depends on BILLING_ENABLED configuration: + - BILLING_ENABLED=True: only delete messages from sandbox tenants (with whitelist/grace period) + - BILLING_ENABLED=False: delete all messages within the time range + """ + click.echo(click.style("clean_messages: start clean messages.", fg="green")) + start_at = time.perf_counter() + + try: + # Create policy based on billing configuration + policy = create_message_clean_policy( + graceful_period_days=dify_config.SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD, + ) + + # Create and run the cleanup service + service = MessagesCleanService.from_days( + policy=policy, + days=dify_config.SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS, + batch_size=dify_config.SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE, + ) + stats = service.run() + + end_at = time.perf_counter() + click.echo( + click.style( + f"clean_messages: completed successfully\n" + f" - Latency: {end_at - start_at:.2f}s\n" + f" - Batches processed: {stats['batches']}\n" + f" - Total messages scanned: {stats['total_messages']}\n" + f" - Messages filtered: {stats['filtered_messages']}\n" + f" - Messages deleted: {stats['total_deleted']}", + fg="green", + ) + ) + except Exception as e: + end_at = time.perf_counter() + logger.exception("clean_messages failed") + click.echo( + click.style( + f"clean_messages: failed after {end_at - start_at:.2f}s - {str(e)}", + fg="red", + ) + ) + raise diff --git a/api/services/retention/conversation/messages_clean_policy.py b/api/services/retention/conversation/messages_clean_policy.py new file mode 100644 index 0000000000..6e647b983b --- /dev/null +++ b/api/services/retention/conversation/messages_clean_policy.py @@ -0,0 +1,216 @@ +import datetime +import logging +from abc import ABC, abstractmethod +from collections.abc import Callable, Sequence +from dataclasses import dataclass + +from configs import dify_config +from enums.cloud_plan import CloudPlan +from services.billing_service import BillingService, SubscriptionPlan + +logger = 
logging.getLogger(__name__)
+
+
+@dataclass
+class SimpleMessage:
+    id: str
+    app_id: str
+    created_at: datetime.datetime
+
+
+class MessagesCleanPolicy(ABC):
+    """
+    Abstract base class for message cleanup policies.
+
+    A policy determines which messages from a batch should be deleted.
+    """
+
+    @abstractmethod
+    def filter_message_ids(
+        self,
+        messages: Sequence[SimpleMessage],
+        app_to_tenant: dict[str, str],
+    ) -> Sequence[str]:
+        """
+        Filter messages and return IDs of messages that should be deleted.
+
+        Args:
+            messages: Batch of messages to evaluate
+            app_to_tenant: Mapping from app_id to tenant_id
+
+        Returns:
+            List of message IDs that should be deleted
+        """
+        ...
+
+
+class BillingDisabledPolicy(MessagesCleanPolicy):
+    """
+    Policy for community or enterprise edition (billing disabled).
+
+    No special filter logic, just return all message ids.
+    """
+
+    def filter_message_ids(
+        self,
+        messages: Sequence[SimpleMessage],
+        app_to_tenant: dict[str, str],
+    ) -> Sequence[str]:
+        return [msg.id for msg in messages]
+
+
+class BillingSandboxPolicy(MessagesCleanPolicy):
+    """
+    Policy for sandbox plan tenants in cloud edition (billing enabled).
+ + Filters messages based on sandbox plan expiration rules: + - Skip tenants in the whitelist + - Only delete messages from sandbox plan tenants + - Respect grace period after subscription expiration + - Safe default: if tenant mapping or plan is missing, do NOT delete + """ + + def __init__( + self, + plan_provider: Callable[[Sequence[str]], dict[str, SubscriptionPlan]], + graceful_period_days: int = 21, + tenant_whitelist: Sequence[str] | None = None, + current_timestamp: int | None = None, + ) -> None: + self._graceful_period_days = graceful_period_days + self._tenant_whitelist: Sequence[str] = tenant_whitelist or [] + self._plan_provider = plan_provider + self._current_timestamp = current_timestamp + + def filter_message_ids( + self, + messages: Sequence[SimpleMessage], + app_to_tenant: dict[str, str], + ) -> Sequence[str]: + """ + Filter messages based on sandbox plan expiration rules. + + Args: + messages: Batch of messages to evaluate + app_to_tenant: Mapping from app_id to tenant_id + + Returns: + List of message IDs that should be deleted + """ + if not messages or not app_to_tenant: + return [] + + # Get unique tenant_ids and fetch subscription plans + tenant_ids = list(set(app_to_tenant.values())) + tenant_plans = self._plan_provider(tenant_ids) + + if not tenant_plans: + return [] + + # Apply sandbox deletion rules + return self._filter_expired_sandbox_messages( + messages=messages, + app_to_tenant=app_to_tenant, + tenant_plans=tenant_plans, + ) + + def _filter_expired_sandbox_messages( + self, + messages: Sequence[SimpleMessage], + app_to_tenant: dict[str, str], + tenant_plans: dict[str, SubscriptionPlan], + ) -> list[str]: + """ + Filter messages that should be deleted based on sandbox plan expiration. + + A message should be deleted if: + 1. It belongs to a sandbox tenant AND + 2. 
Either: + a) The tenant has no previous subscription (expiration_date == -1), OR + b) The subscription expired more than graceful_period_days ago + + Args: + messages: List of message objects with id and app_id attributes + app_to_tenant: Mapping from app_id to tenant_id + tenant_plans: Mapping from tenant_id to subscription plan info + + Returns: + List of message IDs that should be deleted + """ + current_timestamp = self._current_timestamp + if current_timestamp is None: + current_timestamp = int(datetime.datetime.now(datetime.UTC).timestamp()) + + sandbox_message_ids: list[str] = [] + graceful_period_seconds = self._graceful_period_days * 24 * 60 * 60 + + for msg in messages: + # Get tenant_id for this message's app + tenant_id = app_to_tenant.get(msg.app_id) + if not tenant_id: + continue + + # Skip tenant messages in whitelist + if tenant_id in self._tenant_whitelist: + continue + + # Get subscription plan for this tenant + tenant_plan = tenant_plans.get(tenant_id) + if not tenant_plan: + continue + + plan = str(tenant_plan["plan"]) + expiration_date = int(tenant_plan["expiration_date"]) + + # Only process sandbox plans + if plan != CloudPlan.SANDBOX: + continue + + # Case 1: No previous subscription (-1 means never had a paid subscription) + if expiration_date == -1: + sandbox_message_ids.append(msg.id) + continue + + # Case 2: Subscription expired beyond grace period + if current_timestamp - expiration_date > graceful_period_seconds: + sandbox_message_ids.append(msg.id) + + return sandbox_message_ids + + +def create_message_clean_policy( + graceful_period_days: int = 21, + current_timestamp: int | None = None, +) -> MessagesCleanPolicy: + """ + Factory function to create the appropriate message clean policy. 
+ + Determines which policy to use based on BILLING_ENABLED configuration: + - If BILLING_ENABLED is True: returns BillingSandboxPolicy + - If BILLING_ENABLED is False: returns BillingDisabledPolicy + + Args: + graceful_period_days: Grace period in days after subscription expiration (default: 21) + current_timestamp: Current Unix timestamp for testing (default: None, uses current time) + """ + if not dify_config.BILLING_ENABLED: + logger.info("create_message_clean_policy: billing disabled, using BillingDisabledPolicy") + return BillingDisabledPolicy() + + # Billing enabled - fetch whitelist from BillingService + tenant_whitelist = BillingService.get_expired_subscription_cleanup_whitelist() + plan_provider = BillingService.get_plan_bulk_with_cache + + logger.info( + "create_message_clean_policy: billing enabled, using BillingSandboxPolicy " + "(graceful_period_days=%s, whitelist=%s)", + graceful_period_days, + tenant_whitelist, + ) + + return BillingSandboxPolicy( + plan_provider=plan_provider, + graceful_period_days=graceful_period_days, + tenant_whitelist=tenant_whitelist, + current_timestamp=current_timestamp, + ) diff --git a/api/services/retention/conversation/messages_clean_service.py b/api/services/retention/conversation/messages_clean_service.py new file mode 100644 index 0000000000..3ca5d82860 --- /dev/null +++ b/api/services/retention/conversation/messages_clean_service.py @@ -0,0 +1,334 @@ +import datetime +import logging +import random +from collections.abc import Sequence +from typing import cast + +from sqlalchemy import delete, select +from sqlalchemy.engine import CursorResult +from sqlalchemy.orm import Session + +from extensions.ext_database import db +from models.model import ( + App, + AppAnnotationHitHistory, + DatasetRetrieverResource, + Message, + MessageAgentThought, + MessageAnnotation, + MessageChain, + MessageFeedback, + MessageFile, +) +from models.web import SavedMessage +from services.retention.conversation.messages_clean_policy import 
( + MessagesCleanPolicy, + SimpleMessage, +) + +logger = logging.getLogger(__name__) + + +class MessagesCleanService: + """ + Service for cleaning expired messages based on retention policies. + + Compatible with non cloud edition (billing disabled): all messages in the time range will be deleted. + If billing is enabled: only sandbox plan tenant messages are deleted (with whitelist and grace period support). + """ + + def __init__( + self, + policy: MessagesCleanPolicy, + end_before: datetime.datetime, + start_from: datetime.datetime | None = None, + batch_size: int = 1000, + dry_run: bool = False, + ) -> None: + """ + Initialize the service with cleanup parameters. + + Args: + policy: The policy that determines which messages to delete + end_before: End time (exclusive) of the range + start_from: Optional start time (inclusive) of the range + batch_size: Number of messages to process per batch + dry_run: Whether to perform a dry run (no actual deletion) + """ + self._policy = policy + self._end_before = end_before + self._start_from = start_from + self._batch_size = batch_size + self._dry_run = dry_run + + @classmethod + def from_time_range( + cls, + policy: MessagesCleanPolicy, + start_from: datetime.datetime, + end_before: datetime.datetime, + batch_size: int = 1000, + dry_run: bool = False, + ) -> "MessagesCleanService": + """ + Create a service instance for cleaning messages within a specific time range. + + Time range is [start_from, end_before). 
+ + Args: + policy: The policy that determines which messages to delete + start_from: Start time (inclusive) of the range + end_before: End time (exclusive) of the range + batch_size: Number of messages to process per batch + dry_run: Whether to perform a dry run (no actual deletion) + + Returns: + MessagesCleanService instance + + Raises: + ValueError: If start_from >= end_before or invalid parameters + """ + if start_from >= end_before: + raise ValueError(f"start_from ({start_from}) must be less than end_before ({end_before})") + + if batch_size <= 0: + raise ValueError(f"batch_size ({batch_size}) must be greater than 0") + + logger.info( + "clean_messages: start_from=%s, end_before=%s, batch_size=%s, policy=%s", + start_from, + end_before, + batch_size, + policy.__class__.__name__, + ) + + return cls( + policy=policy, + end_before=end_before, + start_from=start_from, + batch_size=batch_size, + dry_run=dry_run, + ) + + @classmethod + def from_days( + cls, + policy: MessagesCleanPolicy, + days: int = 30, + batch_size: int = 1000, + dry_run: bool = False, + ) -> "MessagesCleanService": + """ + Create a service instance for cleaning messages older than specified days. 
+ + Args: + policy: The policy that determines which messages to delete + days: Number of days to look back from now + batch_size: Number of messages to process per batch + dry_run: Whether to perform a dry run (no actual deletion) + + Returns: + MessagesCleanService instance + + Raises: + ValueError: If invalid parameters + """ + if days < 0: + raise ValueError(f"days ({days}) must be greater than or equal to 0") + + if batch_size <= 0: + raise ValueError(f"batch_size ({batch_size}) must be greater than 0") + + end_before = datetime.datetime.now() - datetime.timedelta(days=days) + + logger.info( + "clean_messages: days=%s, end_before=%s, batch_size=%s, policy=%s", + days, + end_before, + batch_size, + policy.__class__.__name__, + ) + + return cls(policy=policy, end_before=end_before, start_from=None, batch_size=batch_size, dry_run=dry_run) + + def run(self) -> dict[str, int]: + """ + Execute the message cleanup operation. + + Returns: + Dict with statistics: batches, filtered_messages, total_deleted + """ + return self._clean_messages_by_time_range() + + def _clean_messages_by_time_range(self) -> dict[str, int]: + """ + Clean messages within a time range using cursor-based pagination. + + Time range is [start_from, end_before) + + Steps: + 1. Iterate messages using cursor pagination (by created_at, id) + 2. Query app_id -> tenant_id mapping + 3. Delegate to policy to determine which messages to delete + 4. 
Batch delete messages and their relations + + Returns: + Dict with statistics: batches, filtered_messages, total_deleted + """ + stats = { + "batches": 0, + "total_messages": 0, + "filtered_messages": 0, + "total_deleted": 0, + } + + # Cursor-based pagination using (created_at, id) to avoid infinite loops + # and ensure proper ordering with time-based filtering + _cursor: tuple[datetime.datetime, str] | None = None + + logger.info( + "clean_messages: start cleaning messages (dry_run=%s), start_from=%s, end_before=%s", + self._dry_run, + self._start_from, + self._end_before, + ) + + while True: + stats["batches"] += 1 + + # Step 1: Fetch a batch of messages using cursor + with Session(db.engine, expire_on_commit=False) as session: + msg_stmt = ( + select(Message.id, Message.app_id, Message.created_at) + .where(Message.created_at < self._end_before) + .order_by(Message.created_at, Message.id) + .limit(self._batch_size) + ) + + if self._start_from: + msg_stmt = msg_stmt.where(Message.created_at >= self._start_from) + + # Apply cursor condition: (created_at, id) > (last_created_at, last_message_id) + # This translates to: + # created_at > last_created_at OR (created_at = last_created_at AND id > last_message_id) + if _cursor: + # Continuing from previous batch + msg_stmt = msg_stmt.where( + (Message.created_at > _cursor[0]) + | ((Message.created_at == _cursor[0]) & (Message.id > _cursor[1])) + ) + + raw_messages = list(session.execute(msg_stmt).all()) + messages = [ + SimpleMessage(id=msg_id, app_id=app_id, created_at=msg_created_at) + for msg_id, app_id, msg_created_at in raw_messages + ] + + # Track total messages fetched across all batches + stats["total_messages"] += len(messages) + + if not messages: + logger.info("clean_messages (batch %s): no more messages to process", stats["batches"]) + break + + # Update cursor to the last message's (created_at, id) + _cursor = (messages[-1].created_at, messages[-1].id) + + # Step 2: Extract app_ids and query tenant_ids + 
app_ids = list({msg.app_id for msg in messages}) + + if not app_ids: + logger.info("clean_messages (batch %s): no app_ids found, skip", stats["batches"]) + continue + + app_stmt = select(App.id, App.tenant_id).where(App.id.in_(app_ids)) + apps = list(session.execute(app_stmt).all()) + + if not apps: + logger.info("clean_messages (batch %s): no apps found, skip", stats["batches"]) + continue + + # Build app_id -> tenant_id mapping + app_to_tenant: dict[str, str] = {app.id: app.tenant_id for app in apps} + + # Step 3: Delegate to policy to determine which messages to delete + message_ids_to_delete = self._policy.filter_message_ids(messages, app_to_tenant) + + if not message_ids_to_delete: + logger.info("clean_messages (batch %s): no messages to delete, skip", stats["batches"]) + continue + + stats["filtered_messages"] += len(message_ids_to_delete) + + # Step 4: Batch delete messages and their relations + if not self._dry_run: + with Session(db.engine, expire_on_commit=False) as session: + # Delete related records first + self._batch_delete_message_relations(session, message_ids_to_delete) + + # Delete messages + delete_stmt = delete(Message).where(Message.id.in_(message_ids_to_delete)) + delete_result = cast(CursorResult, session.execute(delete_stmt)) + messages_deleted = delete_result.rowcount + session.commit() + + stats["total_deleted"] += messages_deleted + + logger.info( + "clean_messages (batch %s): processed %s messages, deleted %s messages", + stats["batches"], + len(messages), + messages_deleted, + ) + else: + # Log random sample of message IDs that would be deleted (up to 10) + sample_size = min(10, len(message_ids_to_delete)) + sampled_ids = random.sample(list(message_ids_to_delete), sample_size) + + logger.info( + "clean_messages (batch %s, dry_run): would delete %s messages, sampling %s ids:", + stats["batches"], + len(message_ids_to_delete), + sample_size, + ) + for msg_id in sampled_ids: + logger.info("clean_messages (batch %s, dry_run) sample: 
message_id=%s", stats["batches"], msg_id) + + logger.info( + "clean_messages completed: total batches: %s, total messages: %s, filtered messages: %s, total deleted: %s", + stats["batches"], + stats["total_messages"], + stats["filtered_messages"], + stats["total_deleted"], + ) + + return stats + + @staticmethod + def _batch_delete_message_relations(session: Session, message_ids: Sequence[str]) -> None: + """ + Batch delete all related records for given message IDs. + + Args: + session: Database session + message_ids: List of message IDs to delete relations for + """ + if not message_ids: + return + + # Delete all related records in batch + session.execute(delete(MessageFeedback).where(MessageFeedback.message_id.in_(message_ids))) + + session.execute(delete(MessageAnnotation).where(MessageAnnotation.message_id.in_(message_ids))) + + session.execute(delete(MessageChain).where(MessageChain.message_id.in_(message_ids))) + + session.execute(delete(MessageAgentThought).where(MessageAgentThought.message_id.in_(message_ids))) + + session.execute(delete(MessageFile).where(MessageFile.message_id.in_(message_ids))) + + session.execute(delete(SavedMessage).where(SavedMessage.message_id.in_(message_ids))) + + session.execute(delete(AppAnnotationHitHistory).where(AppAnnotationHitHistory.message_id.in_(message_ids))) + + session.execute(delete(DatasetRetrieverResource).where(DatasetRetrieverResource.message_id.in_(message_ids))) diff --git a/api/tests/test_containers_integration_tests/services/test_messages_clean_service.py b/api/tests/test_containers_integration_tests/services/test_messages_clean_service.py new file mode 100644 index 0000000000..29baa4d94f --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_messages_clean_service.py @@ -0,0 +1,1070 @@ +import datetime +import json +import uuid +from decimal import Decimal +from unittest.mock import patch + +import pytest +from faker import Faker + +from enums.cloud_plan import CloudPlan +from 
extensions.ext_database import db +from extensions.ext_redis import redis_client +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models.model import ( + App, + AppAnnotationHitHistory, + Conversation, + DatasetRetrieverResource, + Message, + MessageAgentThought, + MessageAnnotation, + MessageChain, + MessageFeedback, + MessageFile, +) +from models.web import SavedMessage +from services.billing_service import BillingService +from services.retention.conversation.messages_clean_policy import ( + BillingDisabledPolicy, + BillingSandboxPolicy, + create_message_clean_policy, +) +from services.retention.conversation.messages_clean_service import MessagesCleanService + + +class TestMessagesCleanServiceIntegration: + """Integration tests for MessagesCleanService.run() and _clean_messages_by_time_range().""" + + # Redis cache key prefix from BillingService + PLAN_CACHE_KEY_PREFIX = BillingService._PLAN_CACHE_KEY_PREFIX # "tenant_plan:" + + @pytest.fixture(autouse=True) + def cleanup_database(self, db_session_with_containers): + """Clean up database before and after each test to ensure isolation.""" + yield + # Clear all test data in correct order (respecting foreign key constraints) + db.session.query(DatasetRetrieverResource).delete() + db.session.query(AppAnnotationHitHistory).delete() + db.session.query(SavedMessage).delete() + db.session.query(MessageFile).delete() + db.session.query(MessageAgentThought).delete() + db.session.query(MessageChain).delete() + db.session.query(MessageAnnotation).delete() + db.session.query(MessageFeedback).delete() + db.session.query(Message).delete() + db.session.query(Conversation).delete() + db.session.query(App).delete() + db.session.query(TenantAccountJoin).delete() + db.session.query(Tenant).delete() + db.session.query(Account).delete() + db.session.commit() + + @pytest.fixture(autouse=True) + def cleanup_redis(self): + """Clean up Redis cache before each test.""" + # Clear tenant plan cache 
using BillingService key prefix + try: + keys = redis_client.keys(f"{self.PLAN_CACHE_KEY_PREFIX}*") + if keys: + redis_client.delete(*keys) + except Exception: + pass # Redis might not be available in some test environments + yield + # Clean up after test + try: + keys = redis_client.keys(f"{self.PLAN_CACHE_KEY_PREFIX}*") + if keys: + redis_client.delete(*keys) + except Exception: + pass + + @pytest.fixture + def mock_whitelist(self): + """Mock whitelist to return empty list by default.""" + with patch( + "services.retention.conversation.messages_clean_policy.BillingService.get_expired_subscription_cleanup_whitelist" + ) as mock: + mock.return_value = [] + yield mock + + @pytest.fixture + def mock_billing_enabled(self): + """Mock BILLING_ENABLED to be True.""" + with patch("services.retention.conversation.messages_clean_policy.dify_config.BILLING_ENABLED", True): + yield + + @pytest.fixture + def mock_billing_disabled(self): + """Mock BILLING_ENABLED to be False.""" + with patch("services.retention.conversation.messages_clean_policy.dify_config.BILLING_ENABLED", False): + yield + + def _create_account_and_tenant(self, plan: str = CloudPlan.SANDBOX): + """Helper to create account and tenant.""" + fake = Faker() + + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + db.session.add(account) + db.session.flush() + + tenant = Tenant( + name=fake.company(), + plan=str(plan), + status="normal", + ) + db.session.add(tenant) + db.session.flush() + + tenant_account_join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER, + ) + db.session.add(tenant_account_join) + db.session.commit() + + return account, tenant + + def _create_app(self, tenant, account): + """Helper to create an app.""" + fake = Faker() + + app = App( + tenant_id=tenant.id, + name=fake.company(), + description="Test app", + mode="chat", + enable_site=True, + enable_api=True, + api_rpm=60, + 
api_rph=3600, + is_demo=False, + is_public=False, + created_by=account.id, + updated_by=account.id, + ) + db.session.add(app) + db.session.commit() + + return app + + def _create_conversation(self, app): + """Helper to create a conversation.""" + conversation = Conversation( + app_id=app.id, + app_model_config_id=str(uuid.uuid4()), + model_provider="openai", + model_id="gpt-3.5-turbo", + mode="chat", + name="Test conversation", + inputs={}, + status="normal", + from_source="api", + from_end_user_id=str(uuid.uuid4()), + ) + db.session.add(conversation) + db.session.commit() + + return conversation + + def _create_message(self, app, conversation, created_at=None, with_relations=True): + """Helper to create a message with optional related records.""" + if created_at is None: + created_at = datetime.datetime.now() + + message = Message( + app_id=app.id, + conversation_id=conversation.id, + model_provider="openai", + model_id="gpt-3.5-turbo", + inputs={}, + query="Test query", + answer="Test answer", + message=[{"role": "user", "text": "Test message"}], + message_tokens=10, + message_unit_price=Decimal("0.001"), + answer_tokens=20, + answer_unit_price=Decimal("0.002"), + total_price=Decimal("0.003"), + currency="USD", + from_source="api", + from_account_id=conversation.from_end_user_id, + created_at=created_at, + ) + db.session.add(message) + db.session.flush() + + if with_relations: + self._create_message_relations(message) + + db.session.commit() + return message + + def _create_message_relations(self, message): + """Helper to create all message-related records.""" + # MessageFeedback + feedback = MessageFeedback( + app_id=message.app_id, + conversation_id=message.conversation_id, + message_id=message.id, + rating="like", + from_source="api", + from_end_user_id=str(uuid.uuid4()), + ) + db.session.add(feedback) + + # MessageAnnotation + annotation = MessageAnnotation( + app_id=message.app_id, + conversation_id=message.conversation_id, + message_id=message.id, + 
question="Test question", + content="Test annotation", + account_id=message.from_account_id, + ) + db.session.add(annotation) + + # MessageChain + chain = MessageChain( + message_id=message.id, + type="system", + input=json.dumps({"test": "input"}), + output=json.dumps({"test": "output"}), + ) + db.session.add(chain) + db.session.flush() + + # MessageFile + file = MessageFile( + message_id=message.id, + type="image", + transfer_method="local_file", + url="http://example.com/test.jpg", + belongs_to="user", + created_by_role="end_user", + created_by=str(uuid.uuid4()), + ) + db.session.add(file) + + # SavedMessage + saved = SavedMessage( + app_id=message.app_id, + message_id=message.id, + created_by_role="end_user", + created_by=str(uuid.uuid4()), + ) + db.session.add(saved) + + db.session.flush() + + # AppAnnotationHitHistory + hit = AppAnnotationHitHistory( + app_id=message.app_id, + annotation_id=annotation.id, + message_id=message.id, + source="annotation", + question="Test question", + account_id=message.from_account_id, + annotation_question="Test annotation question", + annotation_content="Test annotation content", + ) + db.session.add(hit) + + # DatasetRetrieverResource + resource = DatasetRetrieverResource( + message_id=message.id, + position=1, + dataset_id=str(uuid.uuid4()), + dataset_name="Test dataset", + document_id=str(uuid.uuid4()), + document_name="Test document", + data_source_type="upload_file", + segment_id=str(uuid.uuid4()), + score=0.9, + content="Test content", + hit_count=1, + word_count=10, + segment_position=1, + index_node_hash="test_hash", + retriever_from="dataset", + created_by=message.from_account_id, + ) + db.session.add(resource) + + def test_billing_disabled_deletes_all_messages_in_time_range( + self, db_session_with_containers, mock_billing_disabled + ): + """Test that BillingDisabledPolicy deletes all messages within time range regardless of tenant plan.""" + # Arrange - Create tenant with messages (plan doesn't matter for billing 
disabled) + account, tenant = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app = self._create_app(tenant, account) + conv = self._create_conversation(app) + + # Create messages: in-range (should be deleted) and out-of-range (should be kept) + in_range_date = datetime.datetime(2024, 1, 15, 12, 0, 0) + out_of_range_date = datetime.datetime(2024, 1, 25, 12, 0, 0) + + in_range_msg = self._create_message(app, conv, created_at=in_range_date, with_relations=True) + in_range_msg_id = in_range_msg.id + + out_of_range_msg = self._create_message(app, conv, created_at=out_of_range_date, with_relations=True) + out_of_range_msg_id = out_of_range_msg.id + + # Act - create_message_clean_policy should return BillingDisabledPolicy + policy = create_message_clean_policy() + + assert isinstance(policy, BillingDisabledPolicy) + + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=datetime.datetime(2024, 1, 10, 0, 0, 0), + end_before=datetime.datetime(2024, 1, 20, 0, 0, 0), + batch_size=100, + ) + stats = service.run() + + # Assert + assert stats["total_messages"] == 1 # Only in-range message fetched + assert stats["filtered_messages"] == 1 + assert stats["total_deleted"] == 1 + + # In-range message deleted + assert db.session.query(Message).where(Message.id == in_range_msg_id).count() == 0 + # Out-of-range message kept + assert db.session.query(Message).where(Message.id == out_of_range_msg_id).count() == 1 + + # Related records of in-range message deleted + assert db.session.query(MessageFeedback).where(MessageFeedback.message_id == in_range_msg_id).count() == 0 + assert db.session.query(MessageAnnotation).where(MessageAnnotation.message_id == in_range_msg_id).count() == 0 + # Related records of out-of-range message kept + assert db.session.query(MessageFeedback).where(MessageFeedback.message_id == out_of_range_msg_id).count() == 1 + + def test_no_messages_returns_empty_stats(self, db_session_with_containers, mock_billing_enabled, 
mock_whitelist): + """Test cleaning when there are no messages to delete (B1).""" + # Arrange + end_before = datetime.datetime.now() - datetime.timedelta(days=30) + start_from = datetime.datetime.now() - datetime.timedelta(days=60) + + with patch("services.billing_service.BillingService.get_plan_bulk") as mock_billing: + mock_billing.return_value = {} + + # Act + policy = create_message_clean_policy(graceful_period_days=21) + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=start_from, + end_before=end_before, + batch_size=100, + ) + stats = service.run() + + # Assert - loop runs once to check, finds nothing + assert stats["batches"] == 1 + assert stats["total_messages"] == 0 + assert stats["filtered_messages"] == 0 + assert stats["total_deleted"] == 0 + + def test_mixed_sandbox_and_paid_tenants(self, db_session_with_containers, mock_billing_enabled, mock_whitelist): + """Test cleaning with mixed sandbox and paid tenants (B2).""" + # Arrange - Create sandbox tenants with expired messages + sandbox_tenants = [] + sandbox_message_ids = [] + for i in range(2): + account, tenant = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + sandbox_tenants.append(tenant) + app = self._create_app(tenant, account) + conv = self._create_conversation(app) + + # Create 3 expired messages per sandbox tenant + expired_date = datetime.datetime.now() - datetime.timedelta(days=35) + for j in range(3): + msg = self._create_message(app, conv, created_at=expired_date - datetime.timedelta(hours=j)) + sandbox_message_ids.append(msg.id) + + # Create paid tenants with expired messages (should NOT be deleted) + paid_tenants = [] + paid_message_ids = [] + for i in range(2): + account, tenant = self._create_account_and_tenant(plan=CloudPlan.PROFESSIONAL) + paid_tenants.append(tenant) + app = self._create_app(tenant, account) + conv = self._create_conversation(app) + + # Create 2 expired messages per paid tenant + expired_date = datetime.datetime.now() - 
datetime.timedelta(days=35) + for j in range(2): + msg = self._create_message(app, conv, created_at=expired_date - datetime.timedelta(hours=j)) + paid_message_ids.append(msg.id) + + # Mock billing service - return plan and expiration_date + now_timestamp = int(datetime.datetime.now(datetime.UTC).timestamp()) + expired_15_days_ago = now_timestamp - (15 * 24 * 60 * 60) # Beyond 7-day grace period + + plan_map = {} + for tenant in sandbox_tenants: + plan_map[tenant.id] = { + "plan": CloudPlan.SANDBOX, + "expiration_date": expired_15_days_ago, + } + for tenant in paid_tenants: + plan_map[tenant.id] = { + "plan": CloudPlan.PROFESSIONAL, + "expiration_date": now_timestamp + (365 * 24 * 60 * 60), # Active for 1 year + } + + with patch("services.billing_service.BillingService.get_plan_bulk") as mock_billing: + mock_billing.return_value = plan_map + + # Act + end_before = datetime.datetime.now() - datetime.timedelta(days=30) + policy = create_message_clean_policy(graceful_period_days=7) + + assert isinstance(policy, BillingSandboxPolicy) + + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=datetime.datetime.now() - datetime.timedelta(days=60), + end_before=end_before, + batch_size=100, + ) + stats = service.run() + + # Assert + assert stats["total_messages"] == 10 # 2 sandbox * 3 + 2 paid * 2 + assert stats["filtered_messages"] == 6 # 2 sandbox tenants * 3 messages + assert stats["total_deleted"] == 6 + + # Only sandbox messages should be deleted + assert db.session.query(Message).where(Message.id.in_(sandbox_message_ids)).count() == 0 + # Paid messages should remain + assert db.session.query(Message).where(Message.id.in_(paid_message_ids)).count() == 4 + + # Related records of sandbox messages should be deleted + assert db.session.query(MessageFeedback).where(MessageFeedback.message_id.in_(sandbox_message_ids)).count() == 0 + assert ( + db.session.query(MessageAnnotation).where(MessageAnnotation.message_id.in_(sandbox_message_ids)).count() + == 
0 + ) + + def test_cursor_pagination_multiple_batches(self, db_session_with_containers, mock_billing_enabled, mock_whitelist): + """Test cursor pagination works correctly across multiple batches (B3).""" + # Arrange - Create sandbox tenant with messages that will span multiple batches + account, tenant = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app = self._create_app(tenant, account) + conv = self._create_conversation(app) + + # Create 10 expired messages with different timestamps + base_date = datetime.datetime.now() - datetime.timedelta(days=35) + message_ids = [] + for i in range(10): + msg = self._create_message( + app, + conv, + created_at=base_date + datetime.timedelta(hours=i), + with_relations=False, # Skip relations for speed + ) + message_ids.append(msg.id) + + with patch("services.billing_service.BillingService.get_plan_bulk") as mock_billing: + mock_billing.return_value = { + tenant.id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": -1, # No previous subscription + } + } + + # Act - Use small batch size to trigger multiple batches + end_before = datetime.datetime.now() - datetime.timedelta(days=30) + policy = create_message_clean_policy(graceful_period_days=21) + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=datetime.datetime.now() - datetime.timedelta(days=60), + end_before=end_before, + batch_size=3, # Small batch size to test pagination + ) + stats = service.run() + + # 5 batches for 10 messages with batch_size=3, the last batch is empty + assert stats["batches"] == 5 + assert stats["total_messages"] == 10 + assert stats["filtered_messages"] == 10 + assert stats["total_deleted"] == 10 + + # All messages should be deleted + assert db.session.query(Message).where(Message.id.in_(message_ids)).count() == 0 + + def test_dry_run_does_not_delete(self, db_session_with_containers, mock_billing_enabled, mock_whitelist): + """Test dry_run mode does not delete messages (B4).""" + # Arrange + account, tenant = 
self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app = self._create_app(tenant, account) + conv = self._create_conversation(app) + + # Create expired messages + expired_date = datetime.datetime.now() - datetime.timedelta(days=35) + message_ids = [] + for i in range(3): + msg = self._create_message(app, conv, created_at=expired_date - datetime.timedelta(hours=i)) + message_ids.append(msg.id) + + with patch("services.billing_service.BillingService.get_plan_bulk") as mock_billing: + mock_billing.return_value = { + tenant.id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": -1, # No previous subscription + } + } + + # Act + end_before = datetime.datetime.now() - datetime.timedelta(days=30) + policy = create_message_clean_policy(graceful_period_days=21) + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=datetime.datetime.now() - datetime.timedelta(days=60), + end_before=end_before, + batch_size=100, + dry_run=True, # Dry run mode + ) + stats = service.run() + + # Assert + assert stats["total_messages"] == 3 + assert stats["filtered_messages"] == 3 # Messages identified + assert stats["total_deleted"] == 0 # But NOT deleted + + # All messages should still exist + assert db.session.query(Message).where(Message.id.in_(message_ids)).count() == 3 + # Related records should also still exist + assert db.session.query(MessageFeedback).where(MessageFeedback.message_id.in_(message_ids)).count() == 3 + + def test_partial_plan_data_safe_default(self, db_session_with_containers, mock_billing_enabled, mock_whitelist): + """Test when billing returns partial data, unknown tenants are preserved (B5).""" + # Arrange - Create 3 tenants + tenants_data = [] + for i in range(3): + account, tenant = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app = self._create_app(tenant, account) + conv = self._create_conversation(app) + + expired_date = datetime.datetime.now() - datetime.timedelta(days=35) + msg = self._create_message(app, conv, 
created_at=expired_date) + + tenants_data.append( + { + "tenant": tenant, + "message_id": msg.id, + } + ) + + # Mock billing service to return partial data + now_timestamp = int(datetime.datetime.now(datetime.UTC).timestamp()) + + # Only tenant[0] is confirmed as sandbox, tenant[1] is professional, tenant[2] is missing + partial_plan_map = { + tenants_data[0]["tenant"].id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": -1, # No previous subscription + }, + tenants_data[1]["tenant"].id: { + "plan": CloudPlan.PROFESSIONAL, + "expiration_date": now_timestamp + (365 * 24 * 60 * 60), # Active for 1 year + }, + # tenants_data[2] is missing from response + } + + with patch("services.billing_service.BillingService.get_plan_bulk") as mock_billing: + mock_billing.return_value = partial_plan_map + + # Act + end_before = datetime.datetime.now() - datetime.timedelta(days=30) + policy = create_message_clean_policy(graceful_period_days=21) + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=datetime.datetime.now() - datetime.timedelta(days=60), + end_before=end_before, + batch_size=100, + ) + stats = service.run() + + # Assert - Only tenant[0]'s message should be deleted + assert stats["total_messages"] == 3 # 3 tenants * 1 message + assert stats["filtered_messages"] == 1 + assert stats["total_deleted"] == 1 + + # Check which messages were deleted + assert ( + db.session.query(Message).where(Message.id == tenants_data[0]["message_id"]).count() == 0 + ) # Sandbox tenant's message deleted + + assert ( + db.session.query(Message).where(Message.id == tenants_data[1]["message_id"]).count() == 1 + ) # Professional tenant's message preserved + + assert ( + db.session.query(Message).where(Message.id == tenants_data[2]["message_id"]).count() == 1 + ) # Unknown tenant's message preserved (safe default) + + def test_empty_plan_data_skips_deletion(self, db_session_with_containers, mock_billing_enabled, mock_whitelist): + """Test when billing returns empty 
data, skip deletion entirely (B6).""" + # Arrange + account, tenant = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app = self._create_app(tenant, account) + conv = self._create_conversation(app) + + expired_date = datetime.datetime.now() - datetime.timedelta(days=35) + msg = self._create_message(app, conv, created_at=expired_date) + msg_id = msg.id + db.session.commit() + + # Mock billing service to return empty data (simulating failure/no data scenario) + with patch("services.billing_service.BillingService.get_plan_bulk") as mock_billing: + mock_billing.return_value = {} # Empty response, tenant plan unknown + + # Act - Should not raise exception, just skip deletion + end_before = datetime.datetime.now() - datetime.timedelta(days=30) + policy = create_message_clean_policy(graceful_period_days=21) + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=datetime.datetime.now() - datetime.timedelta(days=60), + end_before=end_before, + batch_size=100, + ) + stats = service.run() + + # Assert - No messages should be deleted when plan is unknown + assert stats["total_messages"] == 1 + assert stats["filtered_messages"] == 0 # Cannot determine sandbox messages + assert stats["total_deleted"] == 0 + + # Message should still exist (safe default - don't delete if plan is unknown) + assert db.session.query(Message).where(Message.id == msg_id).count() == 1 + + def test_time_range_boundary_behavior(self, db_session_with_containers, mock_billing_enabled, mock_whitelist): + """Test that messages are correctly filtered by [start_from, end_before) time range (B7).""" + # Arrange + account, tenant = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app = self._create_app(tenant, account) + conv = self._create_conversation(app) + + # Create messages: before range, in range, after range + msg_before = self._create_message( + app, + conv, + created_at=datetime.datetime(2024, 1, 1, 12, 0, 0), # Before start_from + with_relations=False, + ) + 
msg_before_id = msg_before.id + + msg_at_start = self._create_message( + app, + conv, + created_at=datetime.datetime(2024, 1, 10, 12, 0, 0), # At start_from (inclusive) + with_relations=False, + ) + msg_at_start_id = msg_at_start.id + + msg_in_range = self._create_message( + app, + conv, + created_at=datetime.datetime(2024, 1, 15, 12, 0, 0), # In range + with_relations=False, + ) + msg_in_range_id = msg_in_range.id + + msg_at_end = self._create_message( + app, + conv, + created_at=datetime.datetime(2024, 1, 20, 12, 0, 0), # At end_before (exclusive) + with_relations=False, + ) + msg_at_end_id = msg_at_end.id + + msg_after = self._create_message( + app, + conv, + created_at=datetime.datetime(2024, 1, 25, 12, 0, 0), # After end_before + with_relations=False, + ) + msg_after_id = msg_after.id + + db.session.commit() + + # Mock billing service + with patch("services.billing_service.BillingService.get_plan_bulk") as mock_billing: + mock_billing.return_value = { + tenant.id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": -1, # No previous subscription + } + } + + # Act - Clean with specific time range [2024-01-10, 2024-01-20) + policy = create_message_clean_policy(graceful_period_days=21) + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=datetime.datetime(2024, 1, 10, 12, 0, 0), + end_before=datetime.datetime(2024, 1, 20, 12, 0, 0), + batch_size=100, + ) + stats = service.run() + + # Assert - Only messages in [start_from, end_before) should be deleted + assert stats["total_messages"] == 2 # Only in-range messages fetched + assert stats["filtered_messages"] == 2 # msg_at_start and msg_in_range + assert stats["total_deleted"] == 2 + + # Verify specific messages using stored IDs + # Before range, kept + assert db.session.query(Message).where(Message.id == msg_before_id).count() == 1 + # At start (inclusive), deleted + assert db.session.query(Message).where(Message.id == msg_at_start_id).count() == 0 + # In range, deleted + assert 
db.session.query(Message).where(Message.id == msg_in_range_id).count() == 0 + # At end (exclusive), kept + assert db.session.query(Message).where(Message.id == msg_at_end_id).count() == 1 + # After range, kept + assert db.session.query(Message).where(Message.id == msg_after_id).count() == 1 + + def test_grace_period_scenarios(self, db_session_with_containers, mock_billing_enabled, mock_whitelist): + """Test cleaning with different graceful period scenarios (B8).""" + # Arrange - Create 5 different tenants with different plan and expiration scenarios + now_timestamp = int(datetime.datetime.now(datetime.UTC).timestamp()) + graceful_period = 8 # Use 8 days for this test + + # Scenario 1: Sandbox plan with expiration within graceful period (5 days ago) + # Should NOT be deleted + account1, tenant1 = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app1 = self._create_app(tenant1, account1) + conv1 = self._create_conversation(app1) + expired_date = datetime.datetime.now() - datetime.timedelta(days=35) + msg1 = self._create_message(app1, conv1, created_at=expired_date, with_relations=False) + msg1_id = msg1.id + expired_5_days_ago = now_timestamp - (5 * 24 * 60 * 60) # Within grace period + + # Scenario 2: Sandbox plan with expiration beyond graceful period (10 days ago) + # Should be deleted + account2, tenant2 = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app2 = self._create_app(tenant2, account2) + conv2 = self._create_conversation(app2) + msg2 = self._create_message(app2, conv2, created_at=expired_date, with_relations=False) + msg2_id = msg2.id + expired_10_days_ago = now_timestamp - (10 * 24 * 60 * 60) # Beyond grace period + + # Scenario 3: Sandbox plan with expiration_date = -1 (no previous subscription) + # Should be deleted + account3, tenant3 = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app3 = self._create_app(tenant3, account3) + conv3 = self._create_conversation(app3) + msg3 = self._create_message(app3, conv3, 
created_at=expired_date, with_relations=False) + msg3_id = msg3.id + + # Scenario 4: Non-sandbox plan (professional) with no expiration (future date) + # Should NOT be deleted + account4, tenant4 = self._create_account_and_tenant(plan=CloudPlan.PROFESSIONAL) + app4 = self._create_app(tenant4, account4) + conv4 = self._create_conversation(app4) + msg4 = self._create_message(app4, conv4, created_at=expired_date, with_relations=False) + msg4_id = msg4.id + future_expiration = now_timestamp + (365 * 24 * 60 * 60) # Active for 1 year + + # Scenario 5: Sandbox plan with expiration exactly at grace period boundary (8 days ago) + # Should NOT be deleted (boundary is exclusive: > graceful_period) + account5, tenant5 = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app5 = self._create_app(tenant5, account5) + conv5 = self._create_conversation(app5) + msg5 = self._create_message(app5, conv5, created_at=expired_date, with_relations=False) + msg5_id = msg5.id + expired_exactly_8_days_ago = now_timestamp - (8 * 24 * 60 * 60) # Exactly at boundary + + db.session.commit() + + # Mock billing service with all scenarios + plan_map = { + tenant1.id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": expired_5_days_ago, + }, + tenant2.id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": expired_10_days_ago, + }, + tenant3.id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": -1, + }, + tenant4.id: { + "plan": CloudPlan.PROFESSIONAL, + "expiration_date": future_expiration, + }, + tenant5.id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": expired_exactly_8_days_ago, + }, + } + + with patch("services.billing_service.BillingService.get_plan_bulk") as mock_billing: + mock_billing.return_value = plan_map + + # Act + end_before = datetime.datetime.now() - datetime.timedelta(days=30) + policy = create_message_clean_policy( + graceful_period_days=graceful_period, + current_timestamp=now_timestamp, # Use fixed timestamp for deterministic behavior + ) + service = 
MessagesCleanService.from_time_range( + policy=policy, + start_from=datetime.datetime.now() - datetime.timedelta(days=60), + end_before=end_before, + batch_size=100, + ) + stats = service.run() + + # Assert - Only messages from scenario 2 and 3 should be deleted + assert stats["total_messages"] == 5 # 5 tenants * 1 message + assert stats["filtered_messages"] == 2 + assert stats["total_deleted"] == 2 + + # Verify each scenario using saved IDs + assert db.session.query(Message).where(Message.id == msg1_id).count() == 1 # Within grace, kept + assert db.session.query(Message).where(Message.id == msg2_id).count() == 0 # Beyond grace, deleted + assert db.session.query(Message).where(Message.id == msg3_id).count() == 0 # No subscription, deleted + assert db.session.query(Message).where(Message.id == msg4_id).count() == 1 # Professional plan, kept + assert db.session.query(Message).where(Message.id == msg5_id).count() == 1 # At boundary, kept + + def test_tenant_whitelist(self, db_session_with_containers, mock_billing_enabled, mock_whitelist): + """Test that whitelisted tenants' messages are not deleted (B9).""" + # Arrange - Create 3 sandbox tenants with expired messages + tenants_data = [] + for i in range(3): + account, tenant = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app = self._create_app(tenant, account) + conv = self._create_conversation(app) + + expired_date = datetime.datetime.now() - datetime.timedelta(days=35) + msg = self._create_message(app, conv, created_at=expired_date, with_relations=False) + + tenants_data.append( + { + "tenant": tenant, + "message_id": msg.id, + } + ) + + # Mock billing service - all tenants are sandbox with no subscription + plan_map = { + tenants_data[0]["tenant"].id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": -1, + }, + tenants_data[1]["tenant"].id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": -1, + }, + tenants_data[2]["tenant"].id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": -1, + }, + } + + 
# Setup whitelist - tenant0 and tenant1 are whitelisted, tenant2 is not + whitelist = [tenants_data[0]["tenant"].id, tenants_data[1]["tenant"].id] + mock_whitelist.return_value = whitelist + + with patch("services.billing_service.BillingService.get_plan_bulk") as mock_billing: + mock_billing.return_value = plan_map + + # Act + end_before = datetime.datetime.now() - datetime.timedelta(days=30) + policy = create_message_clean_policy(graceful_period_days=21) + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=datetime.datetime.now() - datetime.timedelta(days=60), + end_before=end_before, + batch_size=100, + ) + stats = service.run() + + # Assert - Only tenant2's message should be deleted (not whitelisted) + assert stats["total_messages"] == 3 # 3 tenants * 1 message + assert stats["filtered_messages"] == 1 + assert stats["total_deleted"] == 1 + + # Verify tenant0's message still exists (whitelisted) + assert db.session.query(Message).where(Message.id == tenants_data[0]["message_id"]).count() == 1 + + # Verify tenant1's message still exists (whitelisted) + assert db.session.query(Message).where(Message.id == tenants_data[1]["message_id"]).count() == 1 + + # Verify tenant2's message was deleted (not whitelisted) + assert db.session.query(Message).where(Message.id == tenants_data[2]["message_id"]).count() == 0 + + def test_from_days_cleans_old_messages(self, db_session_with_containers, mock_billing_enabled, mock_whitelist): + """Test from_days correctly cleans messages older than N days (B11).""" + # Arrange + account, tenant = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app = self._create_app(tenant, account) + conv = self._create_conversation(app) + + # Create old messages (should be deleted - older than 30 days) + old_date = datetime.datetime.now() - datetime.timedelta(days=45) + old_msg_ids = [] + for i in range(3): + msg = self._create_message( + app, conv, created_at=old_date - datetime.timedelta(hours=i), 
with_relations=False + ) + old_msg_ids.append(msg.id) + + # Create recent messages (should be kept - newer than 30 days) + recent_date = datetime.datetime.now() - datetime.timedelta(days=15) + recent_msg_ids = [] + for i in range(2): + msg = self._create_message( + app, conv, created_at=recent_date - datetime.timedelta(hours=i), with_relations=False + ) + recent_msg_ids.append(msg.id) + + db.session.commit() + + with patch("services.billing_service.BillingService.get_plan_bulk") as mock_billing: + mock_billing.return_value = { + tenant.id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": -1, + } + } + + # Act - Use from_days to clean messages older than 30 days + policy = create_message_clean_policy(graceful_period_days=21) + service = MessagesCleanService.from_days( + policy=policy, + days=30, + batch_size=100, + ) + stats = service.run() + + # Assert + assert stats["total_messages"] == 3 # Only old messages in range + assert stats["filtered_messages"] == 3 # Only old messages + assert stats["total_deleted"] == 3 + + # Old messages deleted + assert db.session.query(Message).where(Message.id.in_(old_msg_ids)).count() == 0 + # Recent messages kept + assert db.session.query(Message).where(Message.id.in_(recent_msg_ids)).count() == 2 + + def test_whitelist_precedence_over_grace_period( + self, db_session_with_containers, mock_billing_enabled, mock_whitelist + ): + """Test that whitelist takes precedence over grace period logic.""" + # Arrange - Create 2 sandbox tenants + now_timestamp = int(datetime.datetime.now(datetime.UTC).timestamp()) + + # Tenant1: whitelisted, expired beyond grace period + account1, tenant1 = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app1 = self._create_app(tenant1, account1) + conv1 = self._create_conversation(app1) + expired_date = datetime.datetime.now() - datetime.timedelta(days=35) + msg1 = self._create_message(app1, conv1, created_at=expired_date, with_relations=False) + expired_30_days_ago = now_timestamp - (30 * 24 * 
60 * 60) # Well beyond 21-day grace + + # Tenant2: not whitelisted, within grace period + account2, tenant2 = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app2 = self._create_app(tenant2, account2) + conv2 = self._create_conversation(app2) + msg2 = self._create_message(app2, conv2, created_at=expired_date, with_relations=False) + expired_10_days_ago = now_timestamp - (10 * 24 * 60 * 60) # Within 21-day grace + + # Mock billing service + plan_map = { + tenant1.id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": expired_30_days_ago, # Beyond grace period + }, + tenant2.id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": expired_10_days_ago, # Within grace period + }, + } + + # Setup whitelist - only tenant1 is whitelisted + whitelist = [tenant1.id] + mock_whitelist.return_value = whitelist + + with patch("services.billing_service.BillingService.get_plan_bulk") as mock_billing: + mock_billing.return_value = plan_map + + # Act + end_before = datetime.datetime.now() - datetime.timedelta(days=30) + policy = create_message_clean_policy(graceful_period_days=21) + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=datetime.datetime.now() - datetime.timedelta(days=60), + end_before=end_before, + batch_size=100, + ) + stats = service.run() + + # Assert - No messages should be deleted + # tenant1: whitelisted (protected even though beyond grace period) + # tenant2: within grace period (not eligible for deletion) + assert stats["total_messages"] == 2 # 2 tenants * 1 message + assert stats["filtered_messages"] == 0 + assert stats["total_deleted"] == 0 + + # Verify both messages still exist + assert db.session.query(Message).where(Message.id == msg1.id).count() == 1 # Whitelisted + assert db.session.query(Message).where(Message.id == msg2.id).count() == 1 # Within grace period + + def test_empty_whitelist_deletes_eligible_messages( + self, db_session_with_containers, mock_billing_enabled, mock_whitelist + ): + """Test that empty 
whitelist behaves as no whitelist (all eligible messages deleted).""" + # Arrange - Create sandbox tenant with expired messages + account, tenant = self._create_account_and_tenant(plan=CloudPlan.SANDBOX) + app = self._create_app(tenant, account) + conv = self._create_conversation(app) + + expired_date = datetime.datetime.now() - datetime.timedelta(days=35) + msg_ids = [] + for i in range(3): + msg = self._create_message(app, conv, created_at=expired_date - datetime.timedelta(hours=i)) + msg_ids.append(msg.id) + + # Mock billing service + plan_map = { + tenant.id: { + "plan": CloudPlan.SANDBOX, + "expiration_date": -1, + } + } + + # Setup empty whitelist (default behavior from fixture) + mock_whitelist.return_value = [] + + with patch("services.billing_service.BillingService.get_plan_bulk") as mock_billing: + mock_billing.return_value = plan_map + + # Act + end_before = datetime.datetime.now() - datetime.timedelta(days=30) + policy = create_message_clean_policy(graceful_period_days=21) + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=datetime.datetime.now() - datetime.timedelta(days=60), + end_before=end_before, + batch_size=100, + ) + stats = service.run() + + # Assert - All messages should be deleted (no whitelist protection) + assert stats["total_messages"] == 3 + assert stats["filtered_messages"] == 3 + assert stats["total_deleted"] == 3 + + # Verify all messages were deleted + assert db.session.query(Message).where(Message.id.in_(msg_ids)).count() == 0 diff --git a/api/tests/unit_tests/services/test_messages_clean_service.py b/api/tests/unit_tests/services/test_messages_clean_service.py new file mode 100644 index 0000000000..3b619195c7 --- /dev/null +++ b/api/tests/unit_tests/services/test_messages_clean_service.py @@ -0,0 +1,627 @@ +import datetime +from unittest.mock import MagicMock, patch + +import pytest + +from enums.cloud_plan import CloudPlan +from services.retention.conversation.messages_clean_policy import ( + 
BillingDisabledPolicy, + BillingSandboxPolicy, + SimpleMessage, + create_message_clean_policy, +) +from services.retention.conversation.messages_clean_service import MessagesCleanService + + +def make_simple_message(msg_id: str, app_id: str) -> SimpleMessage: + """Helper to create a SimpleMessage with a fixed created_at timestamp.""" + return SimpleMessage(id=msg_id, app_id=app_id, created_at=datetime.datetime(2024, 1, 1)) + + +def make_plan_provider(tenant_plans: dict) -> MagicMock: + """Helper to create a mock plan_provider that returns the given tenant_plans.""" + provider = MagicMock() + provider.return_value = tenant_plans + return provider + + +class TestBillingSandboxPolicyFilterMessageIds: + """Unit tests for BillingSandboxPolicy.filter_message_ids method.""" + + # Fixed timestamp for deterministic tests + CURRENT_TIMESTAMP = 1000000 + GRACEFUL_PERIOD_DAYS = 8 + GRACEFUL_PERIOD_SECONDS = GRACEFUL_PERIOD_DAYS * 24 * 60 * 60 + + def test_missing_tenant_mapping_excluded(self): + """Test that messages with missing app-to-tenant mapping are excluded.""" + # Arrange + messages = [ + make_simple_message("msg1", "app1"), + make_simple_message("msg2", "app2"), + ] + app_to_tenant = {} # No mapping + tenant_plans = {"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}} + plan_provider = make_plan_provider(tenant_plans) + + policy = BillingSandboxPolicy( + plan_provider=plan_provider, + graceful_period_days=self.GRACEFUL_PERIOD_DAYS, + current_timestamp=self.CURRENT_TIMESTAMP, + ) + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert + assert list(result) == [] + + def test_missing_tenant_plan_excluded(self): + """Test that messages with missing tenant plan are excluded (safe default).""" + # Arrange + messages = [ + make_simple_message("msg1", "app1"), + make_simple_message("msg2", "app2"), + ] + app_to_tenant = {"app1": "tenant1", "app2": "tenant2"} + tenant_plans = {} # No plans + plan_provider = 
make_plan_provider(tenant_plans) + + policy = BillingSandboxPolicy( + plan_provider=plan_provider, + graceful_period_days=self.GRACEFUL_PERIOD_DAYS, + current_timestamp=self.CURRENT_TIMESTAMP, + ) + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert + assert list(result) == [] + + def test_non_sandbox_plan_excluded(self): + """Test that messages from non-sandbox plans (PROFESSIONAL/TEAM) are excluded.""" + # Arrange + messages = [ + make_simple_message("msg1", "app1"), + make_simple_message("msg2", "app2"), + make_simple_message("msg3", "app3"), + ] + app_to_tenant = {"app1": "tenant1", "app2": "tenant2", "app3": "tenant3"} + tenant_plans = { + "tenant1": {"plan": CloudPlan.PROFESSIONAL, "expiration_date": -1}, + "tenant2": {"plan": CloudPlan.TEAM, "expiration_date": -1}, + "tenant3": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}, # Only this one + } + plan_provider = make_plan_provider(tenant_plans) + + policy = BillingSandboxPolicy( + plan_provider=plan_provider, + graceful_period_days=self.GRACEFUL_PERIOD_DAYS, + current_timestamp=self.CURRENT_TIMESTAMP, + ) + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert - only msg3 (sandbox tenant) should be included + assert set(result) == {"msg3"} + + def test_whitelist_skip(self): + """Test that whitelisted tenants are excluded even if sandbox + expired.""" + # Arrange + messages = [ + make_simple_message("msg1", "app1"), # Whitelisted - excluded + make_simple_message("msg2", "app2"), # Not whitelisted - included + make_simple_message("msg3", "app3"), # Whitelisted - excluded + ] + app_to_tenant = {"app1": "tenant1", "app2": "tenant2", "app3": "tenant3"} + tenant_plans = { + "tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}, + "tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}, + "tenant3": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}, + } + plan_provider = make_plan_provider(tenant_plans) + tenant_whitelist = ["tenant1", 
"tenant3"] + + policy = BillingSandboxPolicy( + plan_provider=plan_provider, + graceful_period_days=self.GRACEFUL_PERIOD_DAYS, + tenant_whitelist=tenant_whitelist, + current_timestamp=self.CURRENT_TIMESTAMP, + ) + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert - only msg2 should be included + assert set(result) == {"msg2"} + + def test_no_previous_subscription_included(self): + """Test that messages with expiration_date=-1 (no previous subscription) are included.""" + # Arrange + messages = [ + make_simple_message("msg1", "app1"), + make_simple_message("msg2", "app2"), + ] + app_to_tenant = {"app1": "tenant1", "app2": "tenant2"} + tenant_plans = { + "tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}, + "tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}, + } + plan_provider = make_plan_provider(tenant_plans) + + policy = BillingSandboxPolicy( + plan_provider=plan_provider, + graceful_period_days=self.GRACEFUL_PERIOD_DAYS, + current_timestamp=self.CURRENT_TIMESTAMP, + ) + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert - all messages should be included + assert set(result) == {"msg1", "msg2"} + + def test_within_grace_period_excluded(self): + """Test that messages within grace period are excluded.""" + # Arrange + now = self.CURRENT_TIMESTAMP + expired_1_day_ago = now - (1 * 24 * 60 * 60) + expired_5_days_ago = now - (5 * 24 * 60 * 60) + expired_7_days_ago = now - (7 * 24 * 60 * 60) + + messages = [ + make_simple_message("msg1", "app1"), + make_simple_message("msg2", "app2"), + make_simple_message("msg3", "app3"), + ] + app_to_tenant = {"app1": "tenant1", "app2": "tenant2", "app3": "tenant3"} + tenant_plans = { + "tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_1_day_ago}, + "tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_5_days_ago}, + "tenant3": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_7_days_ago}, + } + plan_provider = 
make_plan_provider(tenant_plans) + + policy = BillingSandboxPolicy( + plan_provider=plan_provider, + graceful_period_days=self.GRACEFUL_PERIOD_DAYS, # 8 days + current_timestamp=now, + ) + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert - all within 8-day grace period, none should be included + assert list(result) == [] + + def test_exactly_at_boundary_excluded(self): + """Test that messages exactly at grace period boundary are excluded (code uses >).""" + # Arrange + now = self.CURRENT_TIMESTAMP + expired_exactly_8_days_ago = now - self.GRACEFUL_PERIOD_SECONDS # Exactly at boundary + + messages = [make_simple_message("msg1", "app1")] + app_to_tenant = {"app1": "tenant1"} + tenant_plans = { + "tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_exactly_8_days_ago}, + } + plan_provider = make_plan_provider(tenant_plans) + + policy = BillingSandboxPolicy( + plan_provider=plan_provider, + graceful_period_days=self.GRACEFUL_PERIOD_DAYS, + current_timestamp=now, + ) + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert - exactly at boundary (==) should be excluded (code uses >) + assert list(result) == [] + + def test_beyond_grace_period_included(self): + """Test that messages beyond grace period are included.""" + # Arrange + now = self.CURRENT_TIMESTAMP + expired_9_days_ago = now - (9 * 24 * 60 * 60) # Just beyond 8-day grace + expired_30_days_ago = now - (30 * 24 * 60 * 60) # Well beyond + + messages = [ + make_simple_message("msg1", "app1"), + make_simple_message("msg2", "app2"), + ] + app_to_tenant = {"app1": "tenant1", "app2": "tenant2"} + tenant_plans = { + "tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_9_days_ago}, + "tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_30_days_ago}, + } + plan_provider = make_plan_provider(tenant_plans) + + policy = BillingSandboxPolicy( + plan_provider=plan_provider, + graceful_period_days=self.GRACEFUL_PERIOD_DAYS, + 
current_timestamp=now, + ) + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert - both beyond grace period, should be included + assert set(result) == {"msg1", "msg2"} + + def test_empty_messages_returns_empty(self): + """Test that empty messages returns empty list.""" + # Arrange + messages: list[SimpleMessage] = [] + app_to_tenant = {"app1": "tenant1"} + plan_provider = make_plan_provider({"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}}) + + policy = BillingSandboxPolicy( + plan_provider=plan_provider, + graceful_period_days=self.GRACEFUL_PERIOD_DAYS, + current_timestamp=self.CURRENT_TIMESTAMP, + ) + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert + assert list(result) == [] + + def test_plan_provider_called_with_correct_tenant_ids(self): + """Test that plan_provider is called with correct tenant_ids.""" + # Arrange + messages = [ + make_simple_message("msg1", "app1"), + make_simple_message("msg2", "app2"), + make_simple_message("msg3", "app3"), + ] + app_to_tenant = {"app1": "tenant1", "app2": "tenant2", "app3": "tenant1"} # tenant1 appears twice + plan_provider = make_plan_provider({}) + + policy = BillingSandboxPolicy( + plan_provider=plan_provider, + graceful_period_days=self.GRACEFUL_PERIOD_DAYS, + current_timestamp=self.CURRENT_TIMESTAMP, + ) + + # Act + policy.filter_message_ids(messages, app_to_tenant) + + # Assert - plan_provider should be called once with unique tenant_ids + plan_provider.assert_called_once() + called_tenant_ids = set(plan_provider.call_args[0][0]) + assert called_tenant_ids == {"tenant1", "tenant2"} + + def test_complex_mixed_scenario(self): + """Test complex scenario with mixed plans, expirations, whitelist, and missing mappings.""" + # Arrange + now = self.CURRENT_TIMESTAMP + sandbox_expired_old = now - (15 * 24 * 60 * 60) # Beyond grace + sandbox_expired_recent = now - (3 * 24 * 60 * 60) # Within grace + future_expiration = now + (30 * 24 * 60 * 60) + 
+ messages = [ + make_simple_message("msg1", "app1"), # Sandbox, no subscription - included + make_simple_message("msg2", "app2"), # Sandbox, expired old - included + make_simple_message("msg3", "app3"), # Sandbox, within grace - excluded + make_simple_message("msg4", "app4"), # Team plan, active - excluded + make_simple_message("msg5", "app5"), # No tenant mapping - excluded + make_simple_message("msg6", "app6"), # No plan info - excluded + make_simple_message("msg7", "app7"), # Sandbox, expired old, whitelisted - excluded + ] + app_to_tenant = { + "app1": "tenant1", + "app2": "tenant2", + "app3": "tenant3", + "app4": "tenant4", + "app6": "tenant6", # Has mapping but no plan + "app7": "tenant7", + # app5 has no mapping + } + tenant_plans = { + "tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}, + "tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": sandbox_expired_old}, + "tenant3": {"plan": CloudPlan.SANDBOX, "expiration_date": sandbox_expired_recent}, + "tenant4": {"plan": CloudPlan.TEAM, "expiration_date": future_expiration}, + "tenant7": {"plan": CloudPlan.SANDBOX, "expiration_date": sandbox_expired_old}, + # tenant6 has no plan + } + plan_provider = make_plan_provider(tenant_plans) + tenant_whitelist = ["tenant7"] + + policy = BillingSandboxPolicy( + plan_provider=plan_provider, + graceful_period_days=self.GRACEFUL_PERIOD_DAYS, + tenant_whitelist=tenant_whitelist, + current_timestamp=now, + ) + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert - only msg1 and msg2 should be included + assert set(result) == {"msg1", "msg2"} + + +class TestBillingDisabledPolicyFilterMessageIds: + """Unit tests for BillingDisabledPolicy.filter_message_ids method.""" + + def test_returns_all_message_ids(self): + """Test that all message IDs are returned (order-preserving).""" + # Arrange + messages = [ + make_simple_message("msg1", "app1"), + make_simple_message("msg2", "app2"), + make_simple_message("msg3", "app3"), + ] + 
app_to_tenant = {"app1": "tenant1", "app2": "tenant2"} + + policy = BillingDisabledPolicy() + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert - all message IDs returned in order + assert list(result) == ["msg1", "msg2", "msg3"] + + def test_ignores_app_to_tenant(self): + """Test that app_to_tenant mapping is ignored.""" + # Arrange + messages = [ + make_simple_message("msg1", "app1"), + make_simple_message("msg2", "app2"), + ] + app_to_tenant: dict[str, str] = {} # Empty - should be ignored + + policy = BillingDisabledPolicy() + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert - all message IDs still returned + assert list(result) == ["msg1", "msg2"] + + def test_empty_messages_returns_empty(self): + """Test that empty messages returns empty list.""" + # Arrange + messages: list[SimpleMessage] = [] + app_to_tenant = {"app1": "tenant1"} + + policy = BillingDisabledPolicy() + + # Act + result = policy.filter_message_ids(messages, app_to_tenant) + + # Assert + assert list(result) == [] + + +class TestCreateMessageCleanPolicy: + """Unit tests for create_message_clean_policy factory function.""" + + @patch("services.retention.conversation.messages_clean_policy.dify_config") + def test_billing_disabled_returns_billing_disabled_policy(self, mock_config): + """Test that BILLING_ENABLED=False returns BillingDisabledPolicy.""" + # Arrange + mock_config.BILLING_ENABLED = False + + # Act + policy = create_message_clean_policy(graceful_period_days=21) + + # Assert + assert isinstance(policy, BillingDisabledPolicy) + + @patch("services.retention.conversation.messages_clean_policy.BillingService") + @patch("services.retention.conversation.messages_clean_policy.dify_config") + def test_billing_enabled_policy_has_correct_internals(self, mock_config, mock_billing_service): + """Test that BillingSandboxPolicy is created with correct internal values.""" + # Arrange + mock_config.BILLING_ENABLED = True + whitelist = 
["tenant1", "tenant2"] + mock_billing_service.get_expired_subscription_cleanup_whitelist.return_value = whitelist + mock_plan_provider = MagicMock() + mock_billing_service.get_plan_bulk_with_cache = mock_plan_provider + + # Act + policy = create_message_clean_policy(graceful_period_days=14, current_timestamp=1234567) + + # Assert + mock_billing_service.get_expired_subscription_cleanup_whitelist.assert_called_once() + assert isinstance(policy, BillingSandboxPolicy) + assert policy._graceful_period_days == 14 + assert list(policy._tenant_whitelist) == whitelist + assert policy._plan_provider == mock_plan_provider + assert policy._current_timestamp == 1234567 + + +class TestMessagesCleanServiceFromTimeRange: + """Unit tests for MessagesCleanService.from_time_range factory method.""" + + def test_start_from_end_before_raises_value_error(self): + """Test that start_from == end_before raises ValueError.""" + policy = BillingDisabledPolicy() + + # Arrange + same_time = datetime.datetime(2024, 1, 1, 12, 0, 0) + + # Act & Assert + with pytest.raises(ValueError, match="start_from .* must be less than end_before"): + MessagesCleanService.from_time_range( + policy=policy, + start_from=same_time, + end_before=same_time, + ) + + # Arrange + start_from = datetime.datetime(2024, 12, 31) + end_before = datetime.datetime(2024, 1, 1) + + # Act & Assert + with pytest.raises(ValueError, match="start_from .* must be less than end_before"): + MessagesCleanService.from_time_range( + policy=policy, + start_from=start_from, + end_before=end_before, + ) + + def test_batch_size_raises_value_error(self): + """Test that batch_size=0 raises ValueError.""" + # Arrange + start_from = datetime.datetime(2024, 1, 1) + end_before = datetime.datetime(2024, 2, 1) + policy = BillingDisabledPolicy() + + # Act & Assert + with pytest.raises(ValueError, match="batch_size .* must be greater than 0"): + MessagesCleanService.from_time_range( + policy=policy, + start_from=start_from, + end_before=end_before, + 
batch_size=0, + ) + + start_from = datetime.datetime(2024, 1, 1) + end_before = datetime.datetime(2024, 2, 1) + policy = BillingDisabledPolicy() + + # Act & Assert + with pytest.raises(ValueError, match="batch_size .* must be greater than 0"): + MessagesCleanService.from_time_range( + policy=policy, + start_from=start_from, + end_before=end_before, + batch_size=-100, + ) + + def test_valid_params_creates_instance(self): + """Test that valid parameters create a correctly configured instance.""" + # Arrange + start_from = datetime.datetime(2024, 1, 1, 0, 0, 0) + end_before = datetime.datetime(2024, 12, 31, 23, 59, 59) + policy = BillingDisabledPolicy() + batch_size = 500 + dry_run = True + + # Act + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=start_from, + end_before=end_before, + batch_size=batch_size, + dry_run=dry_run, + ) + + # Assert + assert isinstance(service, MessagesCleanService) + assert service._policy is policy + assert service._start_from == start_from + assert service._end_before == end_before + assert service._batch_size == batch_size + assert service._dry_run == dry_run + + def test_default_params(self): + """Test that default parameters are applied correctly.""" + # Arrange + start_from = datetime.datetime(2024, 1, 1) + end_before = datetime.datetime(2024, 2, 1) + policy = BillingDisabledPolicy() + + # Act + service = MessagesCleanService.from_time_range( + policy=policy, + start_from=start_from, + end_before=end_before, + ) + + # Assert + assert service._batch_size == 1000 # default + assert service._dry_run is False # default + + +class TestMessagesCleanServiceFromDays: + """Unit tests for MessagesCleanService.from_days factory method.""" + + def test_days_raises_value_error(self): + """Test that days < 0 raises ValueError.""" + # Arrange + policy = BillingDisabledPolicy() + + # Act & Assert + with pytest.raises(ValueError, match="days .* must be greater than or equal to 0"): + 
MessagesCleanService.from_days(policy=policy, days=-1) + + # Act + with patch("services.retention.conversation.messages_clean_service.datetime") as mock_datetime: + fixed_now = datetime.datetime(2024, 6, 15, 14, 0, 0) + mock_datetime.datetime.now.return_value = fixed_now + mock_datetime.timedelta = datetime.timedelta + + service = MessagesCleanService.from_days(policy=policy, days=0) + + # Assert + assert service._end_before == fixed_now + + def test_batch_size_raises_value_error(self): + """Test that batch_size=0 raises ValueError.""" + # Arrange + policy = BillingDisabledPolicy() + + # Act & Assert + with pytest.raises(ValueError, match="batch_size .* must be greater than 0"): + MessagesCleanService.from_days(policy=policy, days=30, batch_size=0) + + # Act & Assert + with pytest.raises(ValueError, match="batch_size .* must be greater than 0"): + MessagesCleanService.from_days(policy=policy, days=30, batch_size=-500) + + def test_valid_params_creates_instance(self): + """Test that valid parameters create a correctly configured instance.""" + # Arrange + policy = BillingDisabledPolicy() + days = 90 + batch_size = 500 + dry_run = True + + # Act + with patch("services.retention.conversation.messages_clean_service.datetime") as mock_datetime: + fixed_now = datetime.datetime(2024, 6, 15, 10, 30, 0) + mock_datetime.datetime.now.return_value = fixed_now + mock_datetime.timedelta = datetime.timedelta + + service = MessagesCleanService.from_days( + policy=policy, + days=days, + batch_size=batch_size, + dry_run=dry_run, + ) + + # Assert + expected_end_before = fixed_now - datetime.timedelta(days=days) + assert isinstance(service, MessagesCleanService) + assert service._policy is policy + assert service._start_from is None + assert service._end_before == expected_end_before + assert service._batch_size == batch_size + assert service._dry_run == dry_run + + def test_default_params(self): + """Test that default parameters are applied correctly.""" + # Arrange + policy = 
BillingDisabledPolicy() + + # Act + with patch("services.retention.conversation.messages_clean_service.datetime") as mock_datetime: + fixed_now = datetime.datetime(2024, 6, 15, 10, 30, 0) + mock_datetime.datetime.now.return_value = fixed_now + mock_datetime.timedelta = datetime.timedelta + + service = MessagesCleanService.from_days(policy=policy) + + # Assert + expected_end_before = fixed_now - datetime.timedelta(days=30) # default days=30 + assert service._end_before == expected_end_before + assert service._batch_size == 1000 # default + assert service._dry_run is False # default From 33e99f069bec8dfe6338597e47045bac7975d511 Mon Sep 17 00:00:00 2001 From: hj24 Date: Thu, 15 Jan 2026 15:13:25 +0800 Subject: [PATCH 15/25] fix: message clean service ut (#31038) --- .../services/test_messages_clean_service.py | 1 + 1 file changed, 1 insertion(+) diff --git a/api/tests/test_containers_integration_tests/services/test_messages_clean_service.py b/api/tests/test_containers_integration_tests/services/test_messages_clean_service.py index 29baa4d94f..5b6db64c09 100644 --- a/api/tests/test_containers_integration_tests/services/test_messages_clean_service.py +++ b/api/tests/test_containers_integration_tests/services/test_messages_clean_service.py @@ -271,6 +271,7 @@ class TestMessagesCleanServiceIntegration: source="annotation", question="Test question", account_id=message.from_account_id, + score=0.9, annotation_question="Test annotation question", annotation_content="Test annotation content", ) From ab1c5a202737b997878f7b6451543e9d768aa52f Mon Sep 17 00:00:00 2001 From: Stephen Zhou <38493346+hyoban@users.noreply.github.com> Date: Thu, 15 Jan 2026 15:25:43 +0800 Subject: [PATCH 16/25] refactor: remove manual set query logic (#31039) --- web/app/components/apps/list.tsx | 30 ------------------------- web/app/components/header/nav/index.tsx | 14 +++--------- 2 files changed, 3 insertions(+), 41 deletions(-) diff --git a/web/app/components/apps/list.tsx 
b/web/app/components/apps/list.tsx index 095ed3f696..84150ad480 100644 --- a/web/app/components/apps/list.tsx +++ b/web/app/components/apps/list.tsx @@ -12,7 +12,6 @@ import { useDebounceFn } from 'ahooks' import dynamic from 'next/dynamic' import { useRouter, - useSearchParams, } from 'next/navigation' import { parseAsString, useQueryState } from 'nuqs' import { useCallback, useEffect, useRef, useState } from 'react' @@ -29,7 +28,6 @@ import { CheckModal } from '@/hooks/use-pay' import { useInfiniteAppList } from '@/service/use-apps' import { AppModeEnum } from '@/types/app' import { cn } from '@/utils/classnames' -import { isServer } from '@/utils/client' import AppCard from './app-card' import { AppCardSkeleton } from './app-card-skeleton' import Empty from './empty' @@ -59,7 +57,6 @@ const List = () => { const { t } = useTranslation() const { systemFeatures } = useGlobalPublicStore() const router = useRouter() - const searchParams = useSearchParams() const { isCurrentWorkspaceEditor, isCurrentWorkspaceDatasetOperator, isLoadingCurrentWorkspace } = useAppContext() const showTagManagementModal = useTagStore(s => s.showTagManagementModal) const [activeTab, setActiveTab] = useQueryState( @@ -67,33 +64,6 @@ const List = () => { parseAsString.withDefault('all').withOptions({ history: 'push' }), ) - // valid tabs for apps list; anything else should fallback to 'all' - - // 1) Normalize legacy/incorrect query params like ?mode=discover -> ?category=all - useEffect(() => { - // avoid running on server - if (isServer) - return - const mode = searchParams.get('mode') - if (!mode) - return - const url = new URL(window.location.href) - url.searchParams.delete('mode') - if (validTabs.has(mode)) { - // migrate to category key - url.searchParams.set('category', mode) - } - else { - url.searchParams.set('category', 'all') - } - router.replace(url.pathname + url.search) - }, [router, searchParams]) - - // 2) If category has an invalid value (e.g., 'discover'), reset to 'all' - 
useEffect(() => { - if (!validTabs.has(activeTab)) - setActiveTab('all') - }, [activeTab, setActiveTab]) const { query: { tagIDs = [], keywords = '', isCreatedByMe: queryIsCreatedByMe = false }, setQuery } = useAppsQueryState() const [isCreatedByMe, setIsCreatedByMe] = useState(queryIsCreatedByMe) const [tagFilterValue, setTagFilterValue] = useState(tagIDs) diff --git a/web/app/components/header/nav/index.tsx b/web/app/components/header/nav/index.tsx index 83e75b8513..2edc64486e 100644 --- a/web/app/components/header/nav/index.tsx +++ b/web/app/components/header/nav/index.tsx @@ -2,9 +2,9 @@ import type { INavSelectorProps } from './nav-selector' import Link from 'next/link' -import { usePathname, useSearchParams, useSelectedLayoutSegment } from 'next/navigation' +import { useSelectedLayoutSegment } from 'next/navigation' import * as React from 'react' -import { useEffect, useState } from 'react' +import { useState } from 'react' import { useStore as useAppStore } from '@/app/components/app/store' import { ArrowNarrowLeft } from '@/app/components/base/icons/src/vender/line/arrows' import { cn } from '@/utils/classnames' @@ -36,14 +36,6 @@ const Nav = ({ const [hovered, setHovered] = useState(false) const segment = useSelectedLayoutSegment() const isActivated = Array.isArray(activeSegment) ? activeSegment.includes(segment!) : segment === activeSegment - const pathname = usePathname() - const searchParams = useSearchParams() - const [linkLastSearchParams, setLinkLastSearchParams] = useState('') - - useEffect(() => { - if (pathname === link) - setLinkLastSearchParams(searchParams.toString()) - }, [pathname, searchParams]) return (
- +
{ // Don't clear state if opening in new tab/window From 772ff636ec92b5b35635769a17ab0e49c3576df9 Mon Sep 17 00:00:00 2001 From: Xiyuan Chen <52963600+GareArc@users.noreply.github.com> Date: Wed, 14 Jan 2026 23:33:24 -0800 Subject: [PATCH 17/25] feat: credential sync fix for enterprise edition (#30626) --- api/events/event_handlers/__init__.py | 2 + ...eue_credential_sync_when_tenant_created.py | 19 ++++++ api/services/enterprise/workspace_sync.py | 58 +++++++++++++++++++ 3 files changed, 79 insertions(+) create mode 100644 api/events/event_handlers/queue_credential_sync_when_tenant_created.py create mode 100644 api/services/enterprise/workspace_sync.py diff --git a/api/events/event_handlers/__init__.py b/api/events/event_handlers/__init__.py index c79764983b..d37217e168 100644 --- a/api/events/event_handlers/__init__.py +++ b/api/events/event_handlers/__init__.py @@ -6,6 +6,7 @@ from .create_site_record_when_app_created import handle as handle_create_site_re from .delete_tool_parameters_cache_when_sync_draft_workflow import ( handle as handle_delete_tool_parameters_cache_when_sync_draft_workflow, ) +from .queue_credential_sync_when_tenant_created import handle as handle_queue_credential_sync_when_tenant_created from .sync_plugin_trigger_when_app_created import handle as handle_sync_plugin_trigger_when_app_created from .sync_webhook_when_app_created import handle as handle_sync_webhook_when_app_created from .sync_workflow_schedule_when_app_published import handle as handle_sync_workflow_schedule_when_app_published @@ -30,6 +31,7 @@ __all__ = [ "handle_create_installed_app_when_app_created", "handle_create_site_record_when_app_created", "handle_delete_tool_parameters_cache_when_sync_draft_workflow", + "handle_queue_credential_sync_when_tenant_created", "handle_sync_plugin_trigger_when_app_created", "handle_sync_webhook_when_app_created", "handle_sync_workflow_schedule_when_app_published", diff --git 
a/api/events/event_handlers/queue_credential_sync_when_tenant_created.py b/api/events/event_handlers/queue_credential_sync_when_tenant_created.py new file mode 100644 index 0000000000..6566c214b0 --- /dev/null +++ b/api/events/event_handlers/queue_credential_sync_when_tenant_created.py @@ -0,0 +1,19 @@ +from configs import dify_config +from events.tenant_event import tenant_was_created +from services.enterprise.workspace_sync import WorkspaceSyncService + + +@tenant_was_created.connect +def handle(sender, **kwargs): + """Queue credential sync when a tenant/workspace is created.""" + # Only queue sync tasks if plugin manager (enterprise feature) is enabled + if not dify_config.ENTERPRISE_ENABLED: + return + + tenant = sender + + # Determine source from kwargs if available, otherwise use generic + source = kwargs.get("source", "tenant_created") + + # Queue credential sync task to Redis for enterprise backend to process + WorkspaceSyncService.queue_credential_sync(tenant.id, source=source) diff --git a/api/services/enterprise/workspace_sync.py b/api/services/enterprise/workspace_sync.py new file mode 100644 index 0000000000..acfe325397 --- /dev/null +++ b/api/services/enterprise/workspace_sync.py @@ -0,0 +1,58 @@ +import json +import logging +import uuid +from datetime import UTC, datetime + +from redis import RedisError + +from extensions.ext_redis import redis_client + +logger = logging.getLogger(__name__) + +WORKSPACE_SYNC_QUEUE = "enterprise:workspace:sync:queue" +WORKSPACE_SYNC_PROCESSING = "enterprise:workspace:sync:processing" + + +class WorkspaceSyncService: + """Service to publish workspace sync tasks to Redis queue for enterprise backend consumption""" + + @staticmethod + def queue_credential_sync(workspace_id: str, *, source: str) -> bool: + """ + Queue a credential sync task for a newly created workspace. + + This publishes a task to Redis that will be consumed by the enterprise backend + worker to sync credentials with the plugin-manager. 
+ + Args: + workspace_id: The workspace/tenant ID to sync credentials for + source: Source of the sync request (for debugging/tracking) + + Returns: + bool: True if task was queued successfully, False otherwise + """ + try: + task = { + "task_id": str(uuid.uuid4()), + "workspace_id": workspace_id, + "retry_count": 0, + "created_at": datetime.now(UTC).isoformat(), + "source": source, + } + + # Push to Redis list (queue) - LPUSH adds to the head, worker consumes from tail with RPOP + redis_client.lpush(WORKSPACE_SYNC_QUEUE, json.dumps(task)) + + logger.info( + "Queued credential sync task for workspace %s, task_id: %s, source: %s", + workspace_id, + task["task_id"], + source, + ) + return True + + except (RedisError, TypeError) as e: + logger.error("Failed to queue credential sync for workspace %s: %s", workspace_id, str(e), exc_info=True) + # Don't raise - we don't want to fail workspace creation if queueing fails + # The scheduled task will catch it later + return False From 2b021e8752a9b8e18867634f04144859b2f6c8f4 Mon Sep 17 00:00:00 2001 From: lif <1835304752@qq.com> Date: Thu, 15 Jan 2026 17:43:00 +0800 Subject: [PATCH 19/25] fix: remove hardcoded 48-character limit from text inputs (#30156) Signed-off-by: majiayu000 <1835304752@qq.com> Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> --- .../config-var/config-modal/index.tsx | 3 -- .../app/configuration/config-var/index.tsx | 3 -- .../configuration/debug/chat-user-input.tsx | 5 +-- .../prompt-value-panel/index.tsx | 5 +-- .../panel/input-field/editor/form/hooks.ts | 3 -- .../input-field/editor/form/index.spec.tsx | 21 ---------- .../share/text-generation/index.tsx | 9 ++-- .../text-generation/run-once/index.spec.tsx | 42 +++++++++++++++++++ .../share/text-generation/run-once/index.tsx | 3 +- web/config/index.ts | 3 -- web/utils/var.ts | 2 +- 11 files changed, 52 insertions(+), 47 deletions(-) diff --git a/web/app/components/app/configuration/config-var/config-modal/index.tsx 
b/web/app/components/app/configuration/config-var/config-modal/index.tsx index 5ffa87375c..7ea784baa3 100644 --- a/web/app/components/app/configuration/config-var/config-modal/index.tsx +++ b/web/app/components/app/configuration/config-var/config-modal/index.tsx @@ -21,7 +21,6 @@ import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/ import FileUploadSetting from '@/app/components/workflow/nodes/_base/components/file-upload-setting' import { CodeLanguage } from '@/app/components/workflow/nodes/code/types' import { ChangeType, InputVarType, SupportUploadFileTypes } from '@/app/components/workflow/types' -import { DEFAULT_VALUE_MAX_LEN } from '@/config' import ConfigContext from '@/context/debug-configuration' import { AppModeEnum, TransferMethod } from '@/types/app' import { checkKeys, getNewVarInWorkflow, replaceSpaceWithUnderscoreInVarNameInput } from '@/utils/var' @@ -198,8 +197,6 @@ const ConfigModal: FC = ({ if (type === InputVarType.multiFiles) draft.max_length = DEFAULT_FILE_UPLOAD_SETTING.max_length } - if (type === InputVarType.paragraph) - draft.max_length = DEFAULT_VALUE_MAX_LEN }) setTempPayload(newPayload) }, [tempPayload]) diff --git a/web/app/components/app/configuration/config-var/index.tsx b/web/app/components/app/configuration/config-var/index.tsx index 4a38fc92a6..1a8810f7cd 100644 --- a/web/app/components/app/configuration/config-var/index.tsx +++ b/web/app/components/app/configuration/config-var/index.tsx @@ -15,7 +15,6 @@ import Confirm from '@/app/components/base/confirm' import Toast from '@/app/components/base/toast' import Tooltip from '@/app/components/base/tooltip' import { InputVarType } from '@/app/components/workflow/types' -import { DEFAULT_VALUE_MAX_LEN } from '@/config' import ConfigContext from '@/context/debug-configuration' import { useEventEmitterContextContext } from '@/context/event-emitter' import { useModalContext } from '@/context/modal-context' @@ -58,8 +57,6 @@ const buildPromptVariableFromInput 
= (payload: InputVar): PromptVariable => { key: variable, name: label as string, } - if (payload.type === InputVarType.textInput) - nextItem.max_length = nextItem.max_length || DEFAULT_VALUE_MAX_LEN if (payload.type !== InputVarType.select) delete nextItem.options diff --git a/web/app/components/app/configuration/debug/chat-user-input.tsx b/web/app/components/app/configuration/debug/chat-user-input.tsx index 11189751e0..3f9fdc32be 100644 --- a/web/app/components/app/configuration/debug/chat-user-input.tsx +++ b/web/app/components/app/configuration/debug/chat-user-input.tsx @@ -7,7 +7,6 @@ import Input from '@/app/components/base/input' import Select from '@/app/components/base/select' import Textarea from '@/app/components/base/textarea' import BoolInput from '@/app/components/workflow/nodes/_base/components/before-run-form/bool-input' -import { DEFAULT_VALUE_MAX_LEN } from '@/config' import ConfigContext from '@/context/debug-configuration' import { cn } from '@/utils/classnames' @@ -88,7 +87,7 @@ const ChatUserInput = ({ onChange={(e) => { handleInputValueChange(key, e.target.value) }} placeholder={name} autoFocus={index === 0} - maxLength={max_length || DEFAULT_VALUE_MAX_LEN} + maxLength={max_length} /> )} {type === 'paragraph' && ( @@ -115,7 +114,7 @@ const ChatUserInput = ({ onChange={(e) => { handleInputValueChange(key, e.target.value) }} placeholder={name} autoFocus={index === 0} - maxLength={max_length || DEFAULT_VALUE_MAX_LEN} + maxLength={max_length} /> )} {type === 'checkbox' && ( diff --git a/web/app/components/app/configuration/prompt-value-panel/index.tsx b/web/app/components/app/configuration/prompt-value-panel/index.tsx index 9b61b3c7aa..613efb8710 100644 --- a/web/app/components/app/configuration/prompt-value-panel/index.tsx +++ b/web/app/components/app/configuration/prompt-value-panel/index.tsx @@ -20,7 +20,6 @@ import Select from '@/app/components/base/select' import Textarea from '@/app/components/base/textarea' import Tooltip from 
'@/app/components/base/tooltip' import BoolInput from '@/app/components/workflow/nodes/_base/components/before-run-form/bool-input' -import { DEFAULT_VALUE_MAX_LEN } from '@/config' import ConfigContext from '@/context/debug-configuration' import { AppModeEnum, ModelModeType } from '@/types/app' import { cn } from '@/utils/classnames' @@ -142,7 +141,7 @@ const PromptValuePanel: FC = ({ onChange={(e) => { handleInputValueChange(key, e.target.value) }} placeholder={name} autoFocus={index === 0} - maxLength={max_length || DEFAULT_VALUE_MAX_LEN} + maxLength={max_length} /> )} {type === 'paragraph' && ( @@ -170,7 +169,7 @@ const PromptValuePanel: FC = ({ onChange={(e) => { handleInputValueChange(key, e.target.value) }} placeholder={name} autoFocus={index === 0} - maxLength={max_length || DEFAULT_VALUE_MAX_LEN} + maxLength={max_length} /> )} {type === 'checkbox' && ( diff --git a/web/app/components/rag-pipeline/components/panel/input-field/editor/form/hooks.ts b/web/app/components/rag-pipeline/components/panel/input-field/editor/form/hooks.ts index 3820d5f1b8..80aa879b8f 100644 --- a/web/app/components/rag-pipeline/components/panel/input-field/editor/form/hooks.ts +++ b/web/app/components/rag-pipeline/components/panel/input-field/editor/form/hooks.ts @@ -6,7 +6,6 @@ import { useTranslation } from 'react-i18next' import { useFileSizeLimit } from '@/app/components/base/file-uploader/hooks' import { InputFieldType } from '@/app/components/base/form/form-scenarios/input-field/types' import { DEFAULT_FILE_UPLOAD_SETTING } from '@/app/components/workflow/constants' -import { DEFAULT_VALUE_MAX_LEN } from '@/config' import { PipelineInputVarType } from '@/models/pipeline' import { useFileUploadConfig } from '@/service/use-common' import { formatFileSize } from '@/utils/format' @@ -87,8 +86,6 @@ export const useConfigurations = (props: { if (type === PipelineInputVarType.multiFiles) setFieldValue('maxLength', DEFAULT_FILE_UPLOAD_SETTING.max_length) } - if (type === 
PipelineInputVarType.paragraph) - setFieldValue('maxLength', DEFAULT_VALUE_MAX_LEN) }, [setFieldValue]) const handleVariableNameBlur = useCallback((value: string) => { diff --git a/web/app/components/rag-pipeline/components/panel/input-field/editor/form/index.spec.tsx b/web/app/components/rag-pipeline/components/panel/input-field/editor/form/index.spec.tsx index 0470bd4c68..48df13acb2 100644 --- a/web/app/components/rag-pipeline/components/panel/input-field/editor/form/index.spec.tsx +++ b/web/app/components/rag-pipeline/components/panel/input-field/editor/form/index.spec.tsx @@ -779,27 +779,6 @@ describe('useConfigurations', () => { expect(mockSetFieldValue).toHaveBeenCalledWith('maxLength', expect.any(Number)) }) - it('should call setFieldValue when type changes to paragraph', () => { - // Arrange - const mockGetFieldValue = vi.fn() - const mockSetFieldValue = vi.fn() - - const { result } = renderHookWithProviders(() => - useConfigurations({ - getFieldValue: mockGetFieldValue, - setFieldValue: mockSetFieldValue, - supportFile: false, - }), - ) - - // Act - const typeConfig = result.current.find(config => config.variable === 'type') - typeConfig?.listeners?.onChange?.(createMockEvent(PipelineInputVarType.paragraph)) - - // Assert - expect(mockSetFieldValue).toHaveBeenCalledWith('maxLength', 48) // DEFAULT_VALUE_MAX_LEN - }) - it('should set label from variable name on blur when label is empty', () => { // Arrange const mockGetFieldValue = vi.fn().mockReturnValue('') diff --git a/web/app/components/share/text-generation/index.tsx b/web/app/components/share/text-generation/index.tsx index b793a03ce7..509687e245 100644 --- a/web/app/components/share/text-generation/index.tsx +++ b/web/app/components/share/text-generation/index.tsx @@ -26,7 +26,7 @@ import DifyLogo from '@/app/components/base/logo/dify-logo' import Toast from '@/app/components/base/toast' import Res from '@/app/components/share/text-generation/result' import RunOnce from 
'@/app/components/share/text-generation/run-once' -import { appDefaultIconBackground, BATCH_CONCURRENCY, DEFAULT_VALUE_MAX_LEN } from '@/config' +import { appDefaultIconBackground, BATCH_CONCURRENCY } from '@/config' import { useGlobalPublicStore } from '@/context/global-public-context' import { useWebAppStore } from '@/context/web-app-context' import { useAppFavicon } from '@/hooks/use-app-favicon' @@ -256,11 +256,10 @@ const TextGeneration: FC = ({ promptConfig?.prompt_variables.forEach((varItem, varIndex) => { if (errorRowIndex !== 0) return - if (varItem.type === 'string') { - const maxLen = varItem.max_length || DEFAULT_VALUE_MAX_LEN - if (item[varIndex].length > maxLen) { + if (varItem.type === 'string' && varItem.max_length) { + if (item[varIndex].length > varItem.max_length) { moreThanMaxLengthVarName = varItem.name - maxLength = maxLen + maxLength = varItem.max_length errorRowIndex = index + 1 return } diff --git a/web/app/components/share/text-generation/run-once/index.spec.tsx b/web/app/components/share/text-generation/run-once/index.spec.tsx index 8882253d0e..ea5ce3c902 100644 --- a/web/app/components/share/text-generation/run-once/index.spec.tsx +++ b/web/app/components/share/text-generation/run-once/index.spec.tsx @@ -236,4 +236,46 @@ describe('RunOnce', () => { const stopButton = screen.getByTestId('stop-button') expect(stopButton).toBeDisabled() }) + + describe('maxLength behavior', () => { + it('should not have maxLength attribute when max_length is not set', async () => { + const promptConfig: PromptConfig = { + prompt_template: 'template', + prompt_variables: [ + createPromptVariable({ + key: 'textInput', + name: 'Text Input', + type: 'string', + // max_length is not set + }), + ], + } + const { onInputsChange } = setup({ promptConfig, visionConfig: { ...baseVisionConfig, enabled: false } }) + await waitFor(() => { + expect(onInputsChange).toHaveBeenCalled() + }) + const input = screen.getByPlaceholderText('Text Input') + 
expect(input).not.toHaveAttribute('maxLength') + }) + + it('should have maxLength attribute when max_length is set', async () => { + const promptConfig: PromptConfig = { + prompt_template: 'template', + prompt_variables: [ + createPromptVariable({ + key: 'textInput', + name: 'Text Input', + type: 'string', + max_length: 100, + }), + ], + } + const { onInputsChange } = setup({ promptConfig, visionConfig: { ...baseVisionConfig, enabled: false } }) + await waitFor(() => { + expect(onInputsChange).toHaveBeenCalled() + }) + const input = screen.getByPlaceholderText('Text Input') + expect(input).toHaveAttribute('maxLength', '100') + }) + }) }) diff --git a/web/app/components/share/text-generation/run-once/index.tsx b/web/app/components/share/text-generation/run-once/index.tsx index b8193fd944..ca29ce1a98 100644 --- a/web/app/components/share/text-generation/run-once/index.tsx +++ b/web/app/components/share/text-generation/run-once/index.tsx @@ -19,7 +19,6 @@ import Textarea from '@/app/components/base/textarea' import BoolInput from '@/app/components/workflow/nodes/_base/components/before-run-form/bool-input' import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor' import { CodeLanguage } from '@/app/components/workflow/nodes/code/types' -import { DEFAULT_VALUE_MAX_LEN } from '@/config' import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints' import { cn } from '@/utils/classnames' @@ -140,7 +139,7 @@ const RunOnce: FC = ({ placeholder={item.name} value={inputs[item.key]} onChange={(e: ChangeEvent) => { handleInputsChange({ ...inputsRef.current, [item.key]: e.target.value }) }} - maxLength={item.max_length || DEFAULT_VALUE_MAX_LEN} + maxLength={item.max_length} /> )} {item.type === 'paragraph' && ( diff --git a/web/config/index.ts b/web/config/index.ts index b804629048..08ce14b264 100644 --- a/web/config/index.ts +++ b/web/config/index.ts @@ -208,7 +208,6 @@ export const VAR_ITEM_TEMPLATE = { key: '', name: '', type: 
'string', - max_length: DEFAULT_VALUE_MAX_LEN, required: true, } @@ -216,7 +215,6 @@ export const VAR_ITEM_TEMPLATE_IN_WORKFLOW = { variable: '', label: '', type: InputVarType.textInput, - max_length: DEFAULT_VALUE_MAX_LEN, required: true, options: [], } @@ -225,7 +223,6 @@ export const VAR_ITEM_TEMPLATE_IN_PIPELINE = { variable: '', label: '', type: PipelineInputVarType.textInput, - max_length: DEFAULT_VALUE_MAX_LEN, required: true, options: [], } diff --git a/web/utils/var.ts b/web/utils/var.ts index 4f572d7768..1851084b2e 100644 --- a/web/utils/var.ts +++ b/web/utils/var.ts @@ -30,7 +30,7 @@ export const getNewVar = (key: string, type: string) => { } export const getNewVarInWorkflow = (key: string, type = InputVarType.textInput): InputVar => { - const { max_length: _maxLength, ...rest } = VAR_ITEM_TEMPLATE_IN_WORKFLOW + const { ...rest } = VAR_ITEM_TEMPLATE_IN_WORKFLOW if (type !== InputVarType.textInput) { return { ...rest, From 1a2fce7055830ac8690bf807b01073c4b4b1dea3 Mon Sep 17 00:00:00 2001 From: Stephen Zhou <38493346+hyoban@users.noreply.github.com> Date: Thu, 15 Jan 2026 21:49:46 +0800 Subject: [PATCH 20/25] ci: eslint annotation (#31056) --- .github/workflows/style.yml | 13 ++++++++++++- web/package.json | 7 ++++--- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index 6c5d6f4135..b96db5a390 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -65,6 +65,9 @@ jobs: defaults: run: working-directory: ./web + permissions: + checks: write + pull-requests: read steps: - name: Checkout code @@ -103,7 +106,15 @@ jobs: if: steps.changed-files.outputs.any_changed == 'true' working-directory: ./web run: | - pnpm run lint + pnpm run lint:report + continue-on-error: true + + - name: Annotate Code + if: steps.changed-files.outputs.any_changed == 'true' + uses: DerLev/eslint-annotations@51347b3a0abfb503fc8734d5ae31c4b151297fae + with: + eslint-report: 
web/eslint_report.json + github-token: ${{ secrets.GITHUB_TOKEN }} - name: Web type check if: steps.changed-files.outputs.any_changed == 'true' diff --git a/web/package.json b/web/package.json index 000862204b..5ca90c75ea 100644 --- a/web/package.json +++ b/web/package.json @@ -28,9 +28,10 @@ "build:docker": "next build && node scripts/optimize-standalone.js", "start": "node ./scripts/copy-and-start.mjs", "lint": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache", - "lint:fix": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix", - "lint:quiet": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --quiet", - "lint:complexity": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --rule 'complexity: [error, {max: 15}]' --quiet", + "lint:fix": "pnpm lint --fix", + "lint:quiet": "pnpm lint --quiet", + "lint:complexity": "pnpm lint --rule 'complexity: [error, {max: 15}]' --quiet", + "lint:report": "pnpm lint --output-file eslint_report.json --format json", "type-check": "tsc --noEmit", "type-check:tsgo": "tsgo --noEmit", "prepare": "cd ../ && node -e \"if (process.env.NODE_ENV !== 'production'){process.exit(1)} \" || husky ./web/.husky", From b06c7c8f33c76f4803978c659e5e56b0529d676c Mon Sep 17 00:00:00 2001 From: Stephen Zhou <38493346+hyoban@users.noreply.github.com> Date: Thu, 15 Jan 2026 23:04:26 +0800 Subject: [PATCH 21/25] ci: disable limit annotation (#31072) --- .github/workflows/style.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index b96db5a390..86b66bf9df 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -109,12 +109,12 @@ jobs: pnpm run lint:report continue-on-error: true - - name: Annotate Code - if: steps.changed-files.outputs.any_changed == 'true' - uses: DerLev/eslint-annotations@51347b3a0abfb503fc8734d5ae31c4b151297fae - with: - 
eslint-report: web/eslint_report.json - github-token: ${{ secrets.GITHUB_TOKEN }} + # - name: Annotate Code + # if: steps.changed-files.outputs.any_changed == 'true' && github.event_name == 'pull_request' + # uses: DerLev/eslint-annotations@51347b3a0abfb503fc8734d5ae31c4b151297fae + # with: + # eslint-report: web/eslint_report.json + # github-token: ${{ secrets.GITHUB_TOKEN }} - name: Web type check if: steps.changed-files.outputs.any_changed == 'true' From c98870c3f457a94e7cfa39857ccf7ee2a8d8f7a4 Mon Sep 17 00:00:00 2001 From: byteforge Date: Thu, 15 Jan 2026 18:52:53 -0500 Subject: [PATCH 22/25] refactor: always preserve marketplace search state in URL (#31069) Co-authored-by: Stephen Zhou <38493346+hyoban@users.noreply.github.com> --- .../components/plugins/marketplace/atoms.ts | 25 +++---------------- .../plugins/marketplace/hydration-client.tsx | 15 ----------- .../components/plugins/marketplace/index.tsx | 20 ++++++--------- .../plugins/plugin-page/context.tsx | 2 +- 4 files changed, 12 insertions(+), 50 deletions(-) delete mode 100644 web/app/components/plugins/marketplace/hydration-client.tsx diff --git a/web/app/components/plugins/marketplace/atoms.ts b/web/app/components/plugins/marketplace/atoms.ts index 6ca9bd1c05..b13d30407e 100644 --- a/web/app/components/plugins/marketplace/atoms.ts +++ b/web/app/components/plugins/marketplace/atoms.ts @@ -1,4 +1,3 @@ -import type { ActivePluginType } from './constants' import type { PluginsSort, SearchParamsFromCollection } from './types' import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai' import { useQueryState } from 'nuqs' @@ -17,32 +16,14 @@ export function useSetMarketplaceSort() { return useSetAtom(marketplaceSortAtom) } -/** - * Preserve the state for marketplace - */ -export const preserveSearchStateInQueryAtom = atom(false) - -const searchPluginTextAtom = atom('') -const activePluginTypeAtom = atom('all') -const filterPluginTagsAtom = atom([]) - export function useSearchPluginText() { - const 
preserveSearchStateInQuery = useAtomValue(preserveSearchStateInQueryAtom) - const queryState = useQueryState('q', marketplaceSearchParamsParsers.q) - const atomState = useAtom(searchPluginTextAtom) - return preserveSearchStateInQuery ? queryState : atomState + return useQueryState('q', marketplaceSearchParamsParsers.q) } export function useActivePluginType() { - const preserveSearchStateInQuery = useAtomValue(preserveSearchStateInQueryAtom) - const queryState = useQueryState('category', marketplaceSearchParamsParsers.category) - const atomState = useAtom(activePluginTypeAtom) - return preserveSearchStateInQuery ? queryState : atomState + return useQueryState('category', marketplaceSearchParamsParsers.category) } export function useFilterPluginTags() { - const preserveSearchStateInQuery = useAtomValue(preserveSearchStateInQueryAtom) - const queryState = useQueryState('tags', marketplaceSearchParamsParsers.tags) - const atomState = useAtom(filterPluginTagsAtom) - return preserveSearchStateInQuery ? 
queryState : atomState + return useQueryState('tags', marketplaceSearchParamsParsers.tags) } /** diff --git a/web/app/components/plugins/marketplace/hydration-client.tsx b/web/app/components/plugins/marketplace/hydration-client.tsx deleted file mode 100644 index 5698db711f..0000000000 --- a/web/app/components/plugins/marketplace/hydration-client.tsx +++ /dev/null @@ -1,15 +0,0 @@ -'use client' - -import { useHydrateAtoms } from 'jotai/utils' -import { preserveSearchStateInQueryAtom } from './atoms' - -export function HydrateMarketplaceAtoms({ - preserveSearchStateInQuery, - children, -}: { - preserveSearchStateInQuery: boolean - children: React.ReactNode -}) { - useHydrateAtoms([[preserveSearchStateInQueryAtom, preserveSearchStateInQuery]]) - return <>{children} -} diff --git a/web/app/components/plugins/marketplace/index.tsx b/web/app/components/plugins/marketplace/index.tsx index 1f32ee4d29..0eb2488cef 100644 --- a/web/app/components/plugins/marketplace/index.tsx +++ b/web/app/components/plugins/marketplace/index.tsx @@ -1,7 +1,6 @@ import type { SearchParams } from 'nuqs' import { TanstackQueryInitializer } from '@/context/query-client' import Description from './description' -import { HydrateMarketplaceAtoms } from './hydration-client' import { HydrateQueryClient } from './hydration-server' import ListWrapper from './list/list-wrapper' import StickySearchAndSwitchWrapper from './sticky-search-and-switch-wrapper' @@ -10,8 +9,7 @@ type MarketplaceProps = { showInstallButton?: boolean pluginTypeSwitchClassName?: string /** - * Pass the search params from the request to prefetch data on the server - * and preserve the search params in the URL. + * Pass the search params from the request to prefetch data on the server. 
*/ searchParams?: Promise } @@ -24,15 +22,13 @@ const Marketplace = async ({ return ( - - - - - + + + ) diff --git a/web/app/components/plugins/plugin-page/context.tsx b/web/app/components/plugins/plugin-page/context.tsx index fea78ae181..abc4408d62 100644 --- a/web/app/components/plugins/plugin-page/context.tsx +++ b/web/app/components/plugins/plugin-page/context.tsx @@ -68,7 +68,7 @@ export const PluginPageContextProvider = ({ const options = useMemo(() => { return enable_marketplace ? tabs : tabs.filter(tab => tab.value !== PLUGIN_PAGE_TABS_MAP.marketplace) }, [tabs, enable_marketplace]) - const [activeTab, setActiveTab] = useQueryState('category', { + const [activeTab, setActiveTab] = useQueryState('tab', { defaultValue: options[0].value, }) From 4bff0cd0aba459f36068d99a4392b4935aec462c Mon Sep 17 00:00:00 2001 From: "E.G" <146701565+GlobalStar117@users.noreply.github.com> Date: Fri, 16 Jan 2026 14:34:42 +1100 Subject: [PATCH 23/25] fix: resolve 'Expand all chunks' button not working (#31074) Co-authored-by: GlobalStar117 Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> Co-authored-by: crazywoola <427733928@qq.com> --- .../documents/detail/completed/index.tsx | 23 +++++++++++-------- .../completed/segment-card/chunk-content.tsx | 5 +++- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/web/app/components/datasets/documents/detail/completed/index.tsx b/web/app/components/datasets/documents/detail/completed/index.tsx index 40c70e34f6..78cf0e1178 100644 --- a/web/app/components/datasets/documents/detail/completed/index.tsx +++ b/web/app/components/datasets/documents/detail/completed/index.tsx @@ -442,6 +442,10 @@ const Completed: FC = ({ setFullScreen(!fullScreen) }, [fullScreen]) + const toggleCollapsed = useCallback(() => { + setIsCollapsed(prev => !prev) + }, []) + const viewNewlyAddedChunk = useCallback(async () => { 
const totalPages = segmentListData?.total_pages || 0 const total = segmentListData?.total || 0 @@ -578,15 +582,16 @@ const Completed: FC = ({ return selectedStatus ? 1 : 0 }, [selectedStatus]) + const contextValue = useMemo(() => ({ + isCollapsed, + fullScreen, + toggleFullScreen, + currSegment, + currChildChunk, + }), [isCollapsed, fullScreen, toggleFullScreen, currSegment, currChildChunk]) + return ( - + {/* Menu Bar */} {!isFullDocMode && (
@@ -618,7 +623,7 @@ const Completed: FC = ({ onClear={() => handleInputChange('')} /> - setIsCollapsed(!isCollapsed)} /> +
)} {/* Segment list */} diff --git a/web/app/components/datasets/documents/detail/completed/segment-card/chunk-content.tsx b/web/app/components/datasets/documents/detail/completed/segment-card/chunk-content.tsx index dda2d9bf80..cd329345a1 100644 --- a/web/app/components/datasets/documents/detail/completed/segment-card/chunk-content.tsx +++ b/web/app/components/datasets/documents/detail/completed/segment-card/chunk-content.tsx @@ -1,4 +1,5 @@ import type { FC } from 'react' +import type { SegmentListContextValue } from '..' import * as React from 'react' import { Markdown } from '@/app/components/base/markdown' import { cn } from '@/utils/classnames' @@ -14,13 +15,15 @@ type ChunkContentProps = { className?: string } +const selectIsCollapsed = (s: SegmentListContextValue) => s.isCollapsed + const ChunkContent: FC = ({ detail, isFullDocMode, className, }) => { const { answer, content, sign_content } = detail - const isCollapsed = useSegmentListContext(s => s.isCollapsed) + const isCollapsed = useSegmentListContext(selectIsCollapsed) if (answer) { return ( From 7aab4529e6c7ef71c63b837ada4eb1a5cb737a86 Mon Sep 17 00:00:00 2001 From: Stephen Zhou <38493346+hyoban@users.noreply.github.com> Date: Fri, 16 Jan 2026 11:58:28 +0800 Subject: [PATCH 24/25] chore: lint for state hooks (#31088) --- web/eslint.config.mjs | 11 +- web/package.json | 10 +- web/pnpm-lock.yaml | 803 +++++++++++++++++++----------------------- 3 files changed, 385 insertions(+), 439 deletions(-) diff --git a/web/eslint.config.mjs b/web/eslint.config.mjs index b8191a5eea..fc9f6c16b9 100644 --- a/web/eslint.config.mjs +++ b/web/eslint.config.mjs @@ -26,7 +26,8 @@ export default antfu( 'react-hooks/preserve-manual-memoization': 'warn', 'react-hooks/purity': 'warn', 'react-hooks/refs': 'warn', - 'react-hooks/set-state-in-effect': 'warn', + // prefer react-hooks-extra/no-direct-set-state-in-use-effect + 'react-hooks/set-state-in-effect': 'off', 'react-hooks/set-state-in-render': 'warn', 
'react-hooks/static-components': 'warn', 'react-hooks/unsupported-syntax': 'warn', @@ -53,6 +54,14 @@ export default antfu( }, }, }, + { + files: ['**/*.ts', '**/*.tsx'], + settings: { + 'react-x': { + additionalStateHooks: '/^use\\w*State(?:s)?|useAtom$/u', + }, + }, + }, // downgrade some rules from error to warn for gradual adoption // we should fix these in following pull requests { diff --git a/web/package.json b/web/package.json index 5ca90c75ea..a4d6da1532 100644 --- a/web/package.json +++ b/web/package.json @@ -153,9 +153,9 @@ "zustand": "^5.0.9" }, "devDependencies": { - "@antfu/eslint-config": "^6.7.3", + "@antfu/eslint-config": "^7.0.1", "@chromatic-com/storybook": "^4.1.1", - "@eslint-react/eslint-plugin": "^2.3.13", + "@eslint-react/eslint-plugin": "^2.7.0", "@mdx-js/loader": "^3.1.1", "@mdx-js/react": "^3.1.1", "@next/bundle-analyzer": "15.5.9", @@ -190,7 +190,7 @@ "@types/semver": "^7.7.1", "@types/sortablejs": "^1.15.8", "@types/uuid": "^10.0.0", - "@typescript-eslint/parser": "^8.50.0", + "@typescript-eslint/parser": "^8.53.0", "@typescript/native-preview": "^7.0.0-dev", "@vitejs/plugin-react": "^5.1.2", "@vitest/coverage-v8": "4.0.16", @@ -202,7 +202,7 @@ "eslint-plugin-react-hooks": "^7.0.1", "eslint-plugin-react-refresh": "^0.4.26", "eslint-plugin-sonarjs": "^3.0.5", - "eslint-plugin-storybook": "^10.1.10", + "eslint-plugin-storybook": "^10.1.11", "eslint-plugin-tailwindcss": "^3.18.2", "husky": "^9.1.7", "jsdom": "^27.3.0", @@ -225,7 +225,6 @@ }, "pnpm": { "overrides": { - "@eslint/plugin-kit@<0.3.4": "0.3.4", "@monaco-editor/loader": "1.5.0", "@nolyfill/safe-buffer": "npm:safe-buffer@^5.2.1", "array-includes": "npm:@nolyfill/array-includes@^1", @@ -276,7 +275,6 @@ ] }, "resolutions": { - "@eslint/plugin-kit": "~0.3", "@types/react": "~19.2.7", "@types/react-dom": "~19.2.3", "brace-expansion": "~2.0", diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index c8797e3d65..50e2bd543a 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ 
-5,7 +5,6 @@ settings: excludeLinksFromLockfile: false overrides: - '@eslint/plugin-kit': ~0.3 '@types/react': ~19.2.7 '@types/react-dom': ~19.2.3 brace-expansion: ~2.0 @@ -13,7 +12,6 @@ overrides: pbkdf2: ~3.1.3 prismjs: ~1.30 string-width: ~4.2.3 - '@eslint/plugin-kit@<0.3.4': 0.3.4 '@monaco-editor/loader': 1.5.0 '@nolyfill/safe-buffer': npm:safe-buffer@^5.2.1 array-includes: npm:@nolyfill/array-includes@^1 @@ -362,14 +360,14 @@ importers: version: 5.0.9(@types/react@19.2.7)(immer@11.1.0)(react@19.2.3)(use-sync-external-store@1.6.0(react@19.2.3)) devDependencies: '@antfu/eslint-config': - specifier: ^6.7.3 - version: 6.7.3(@eslint-react/eslint-plugin@2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(@next/eslint-plugin-next@15.5.9)(@vue/compiler-sfc@3.5.25)(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@1.21.7)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@1.21.7)))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)(vitest@4.0.16(@types/node@18.15.0)(happy-dom@20.0.11)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.0))(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)) + specifier: ^7.0.1 + version: 7.0.1(@eslint-react/eslint-plugin@2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(@next/eslint-plugin-next@15.5.9)(@vue/compiler-sfc@3.5.25)(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@1.21.7)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@1.21.7)))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)(vitest@4.0.16(@types/node@18.15.0)(happy-dom@20.0.11)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.0))(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)) '@chromatic-com/storybook': specifier: ^4.1.1 version: 4.1.3(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2))) '@eslint-react/eslint-plugin': - specifier: ^2.3.13 - version: 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + specifier: ^2.7.0 + version: 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) 
'@mdx-js/loader': specifier: ^3.1.1 version: 3.1.1(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) @@ -473,8 +471,8 @@ importers: specifier: ^10.0.0 version: 10.0.0 '@typescript-eslint/parser': - specifier: ^8.50.0 - version: 8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + specifier: ^8.53.0 + version: 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) '@typescript/native-preview': specifier: ^7.0.0-dev version: 7.0.0-dev.20251209.1 @@ -509,8 +507,8 @@ importers: specifier: ^3.0.5 version: 3.0.5(eslint@9.39.2(jiti@1.21.7)) eslint-plugin-storybook: - specifier: ^10.1.10 - version: 10.1.10(eslint@9.39.2(jiti@1.21.7))(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3) + specifier: ^10.1.11 + version: 10.1.11(eslint@9.39.2(jiti@1.21.7))(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3) eslint-plugin-tailwindcss: specifier: ^3.18.2 version: 3.18.2(tailwindcss@3.4.18(tsx@4.21.0)(yaml@2.8.2)) @@ -658,8 +656,8 @@ packages: '@amplitude/targeting@0.2.0': resolution: {integrity: sha512-/50ywTrC4hfcfJVBbh5DFbqMPPfaIOivZeb5Gb+OGM03QrA+lsUqdvtnKLNuWtceD4H6QQ2KFzPJ5aAJLyzVDA==} - '@antfu/eslint-config@6.7.3': - resolution: {integrity: sha512-0tYYzY59uLnxWgbP9xpuxpvodTcWDacj439kTAJZB3sn7O0BnPfVxTnRvleGYaKCEALBZkzdC/wCho9FD7ICLw==} + '@antfu/eslint-config@7.0.1': + resolution: {integrity: sha512-QbCDrLPo2Bpn9/W5PnpGvUuD/EIKhiCmLBuIj9ylxeMvl47XSkXy3MZyinqUVsBJzk196B7BcJQByDZRr5TbZQ==} hasBin: true peerDependencies: '@eslint-react/eslint-plugin': ^2.0.1 @@ -1411,14 +1409,14 @@ packages: '@epic-web/invariant@1.0.0': resolution: {integrity: sha512-lrTPqgvfFQtR/eY/qkIzp98OGdNJu0m5ji3q/nJI8v3SXkRKEnWiOxMmbvcSoAIzv/cGiuvRy57k4suKQSAdwA==} - '@es-joy/jsdoccomment@0.76.0': - resolution: {integrity: 
sha512-g+RihtzFgGTx2WYCuTHbdOXJeAlGnROws0TeALx9ow/ZmOROOZkVg5wp/B44n0WJgI4SQFP1eWM2iRPlU2Y14w==} - engines: {node: '>=20.11.0'} - '@es-joy/jsdoccomment@0.78.0': resolution: {integrity: sha512-rQkU5u8hNAq2NVRzHnIUUvR6arbO0b6AOlvpTNS48CkiKSn/xtNfOzBK23JE4SiW89DgvU7GtxLVgV4Vn2HBAw==} engines: {node: '>=20.11.0'} + '@es-joy/jsdoccomment@0.79.0': + resolution: {integrity: sha512-q/Nc241VsVRC5b1dgbsOI0fnWfrb1S9sdceFewpDHto4+4r2o6SSCpcY+Z+EdLdMPN6Nsj/PjlPcKag6WbU6XQ==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} + '@es-joy/resolve.exports@1.2.0': resolution: {integrity: sha512-Q9hjxWI5xBM+qW2enxfe8wDKdFWMfd0Z29k5ZJnuBqD/CasY5Zryj09aCA6owbGATWz+39p5uIdaHXpopOcG8g==} engines: {node: '>=10'} @@ -1579,8 +1577,8 @@ packages: cpu: [x64] os: [win32] - '@eslint-community/eslint-plugin-eslint-comments@4.5.0': - resolution: {integrity: sha512-MAhuTKlr4y/CE3WYX26raZjy+I/kS2PLKSzvfmDCGrBLTFHOYwqROZdr4XwPgXwX3K9rjzMr4pSmUWGnzsUyMg==} + '@eslint-community/eslint-plugin-eslint-comments@4.6.0': + resolution: {integrity: sha512-2EX2bBQq1ez++xz2o9tEeEQkyvfieWgUFMH4rtJJri2q0Azvhja3hZGXsjPXs31R4fQkZDtWzNDDK2zQn5UE5g==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} peerDependencies: eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 || ^9.0.0 @@ -1591,6 +1589,12 @@ packages: peerDependencies: eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + '@eslint-community/eslint-utils@4.9.1': + resolution: {integrity: sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + '@eslint-community/regexpp@4.12.1': resolution: {integrity: sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==} engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} @@ -1599,40 +1603,40 @@ packages: resolution: {integrity: sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==} engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} 
- '@eslint-react/ast@2.3.13': - resolution: {integrity: sha512-OP2rOhHYLx2nfd9uA9uACKZJN9z9rX9uuAMx4PjT75JNOdYr1GgqWQZcYCepyJ+gmVNCyiXcLXuyhavqxCSM8Q==} + '@eslint-react/ast@2.7.0': + resolution: {integrity: sha512-GGrvel9+kR++wK7orcS2kS1xtHpY0o0rh6hbHbiGVWsSiZmg0X8jZfK1nSf8a3FLJR2WLtQlUsrrtJ4hObaqeQ==} engines: {node: '>=20.19.0'} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@eslint-react/core@2.3.13': - resolution: {integrity: sha512-4bWBE+1kApuxJKIrLJH2FuFtCbM4fXfDs6Ou8MNamGoX6hdynlntssvaMZTd/lk/L8dt01H/3btr7xBX4+4BNA==} + '@eslint-react/core@2.7.0': + resolution: {integrity: sha512-xeRSnzLI35Msr2lnGjH4vxgOwohODy2FaXRmXUS1IpmMRDp1Ct+7I3SDknfeW/YExjGZXvpxR0uD2P9dSjU6NA==} engines: {node: '>=20.19.0'} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@eslint-react/eff@2.3.13': - resolution: {integrity: sha512-byXsssozwh3VaiqcOonAKQgLXgpMVNSxBWFjdfbNhW7+NttorSt950qtiw+P7A9JoRab1OuGYk4MDY5UVBno8Q==} + '@eslint-react/eff@2.7.0': + resolution: {integrity: sha512-+uUI53LkS6EDU0ysVUeM2SdyZQwt/xEfh4OSJ0JMLT8fJbseZY8c0hyev7X5arifcLs0PVPHwUP1IPcNhSLOFw==} engines: {node: '>=20.19.0'} - '@eslint-react/eslint-plugin@2.3.13': - resolution: {integrity: sha512-gq0Z0wADAXvJS8Y/Wk3isK7WIEcfrQGGGdWvorAv0T7MxPd3d32TVwdc1Gx3hVLka3fYq1BBlQ5Fr8e1VgNuIg==} + '@eslint-react/eslint-plugin@2.7.0': + resolution: {integrity: sha512-Bog14dOrsG/jBA9B8URZPJMI6dZuEwqHdkPcTuIkJe92EjFj8NwyziNGFXKY3j7o9AU9ILCBbjfC4JFq56lwjQ==} engines: {node: '>=20.19.0'} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@eslint-react/shared@2.3.13': - resolution: {integrity: sha512-ESE7dVeOXtem3K6BD6k2wJaFt35kPtTT9SWCL99LFk7pym4OEGoMxPcyB2R7PMWiVudwl63BmiOgQOdaFYPONg==} + '@eslint-react/shared@2.7.0': + resolution: {integrity: sha512-/lF5uiGYd+XIfO5t2YMC5RdbQ9lxLkxfL4icZgrbiJIPndirAKjFNl1cdXd+C/qqRCYDACrTPqI8HEL1T4N1Iw==} engines: {node: '>=20.19.0'} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - 
'@eslint-react/var@2.3.13': - resolution: {integrity: sha512-BozBfUZkzzobD6x/M8XERAnZQ3UvZPsD49zTGFKKU9M/bgsM78HwzxAPLkiu88W55v3sO/Kqf8fQTXT4VEeZ/g==} + '@eslint-react/var@2.7.0': + resolution: {integrity: sha512-EFztHstOAYYCrFFNUOPZ7+J3o/X/zawqPKgLL7b5/271rhL6/DMxUmTcKtJIHO7hCdFPMcGT+vPxe+omq62Ukg==} engines: {node: '>=20.19.0'} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -1655,14 +1659,14 @@ packages: resolution: {integrity: sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/core@0.15.2': - resolution: {integrity: sha512-78Md3/Rrxh83gCxoUc0EiciuOHsIITzLy53m3d9UyiW8y9Dj2D29FeETqyKA+BRK76tnTp6RXWb3pCay8Oyomg==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/core@0.17.0': resolution: {integrity: sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@eslint/core@1.0.1': + resolution: {integrity: sha512-r18fEAj9uCk+VjzGt2thsbOmychS+4kxI14spVNibUO2vqKX7obOG+ymZljAwuPZl+S3clPGwCwTDtrdqTiY6Q==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} + '@eslint/eslintrc@3.3.3': resolution: {integrity: sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -1679,10 +1683,14 @@ packages: resolution: {integrity: sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/plugin-kit@0.3.5': - resolution: {integrity: sha512-Z5kJ+wU3oA7MMIqVR9tyZRtjYPr4OC004Q4Rw7pgOKUOKkJfZ3O24nz3WYfGRpMDNmcOi3TwQOmgm7B7Tpii0w==} + '@eslint/plugin-kit@0.4.1': + resolution: {integrity: sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@eslint/plugin-kit@0.5.1': + resolution: {integrity: 
sha512-hZ2uC1jbf6JMSsF2ZklhRQqf6GLpYyux6DlzegnW/aFlpu6qJj5GO7ub7WOETCrEl6pl6DAX7RgTgj/fyG+6BQ==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} + '@floating-ui/core@1.7.3': resolution: {integrity: sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==} @@ -3206,8 +3214,8 @@ packages: typescript: optional: true - '@stylistic/eslint-plugin@5.6.1': - resolution: {integrity: sha512-JCs+MqoXfXrRPGbGmho/zGS/jMcn3ieKl/A8YImqib76C8kjgZwq5uUFzc30lJkMvcchuRn6/v8IApLxli3Jyw==} + '@stylistic/eslint-plugin@5.7.0': + resolution: {integrity: sha512-PsSugIf9ip1H/mWKj4bi/BlEoerxXAda9ByRFsYuwsmr6af9NxJL0AaiNXs8Le7R21QR5KMiD/KdxZZ71LjAxQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: '>=9.0.0' @@ -3702,62 +3710,40 @@ packages: '@types/zen-observable@0.8.3': resolution: {integrity: sha512-fbF6oTd4sGGy0xjHPKAt+eS2CrxJ3+6gQ3FGcBoIJR2TLAyCkCyI8JqZNy+FeON0AhVgNJoUumVoZQjBFUqHkw==} - '@typescript-eslint/eslint-plugin@8.50.0': - resolution: {integrity: sha512-O7QnmOXYKVtPrfYzMolrCTfkezCJS9+ljLdKW/+DCvRsc3UAz+sbH6Xcsv7p30+0OwUbeWfUDAQE0vpabZ3QLg==} + '@typescript-eslint/eslint-plugin@8.53.0': + resolution: {integrity: sha512-eEXsVvLPu8Z4PkFibtuFJLJOTAV/nPdgtSjkGoPpddpFk3/ym2oy97jynY6ic2m6+nc5M8SE1e9v/mHKsulcJg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - '@typescript-eslint/parser': ^8.50.0 + '@typescript-eslint/parser': ^8.53.0 eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/parser@8.50.0': - resolution: {integrity: sha512-6/cmF2piao+f6wSxUsJLZjck7OQsYyRtcOZS02k7XINSNlz93v6emM8WutDQSXnroG2xwYlEVHJI+cPA7CPM3Q==} + '@typescript-eslint/parser@8.53.0': + resolution: {integrity: sha512-npiaib8XzbjtzS2N4HlqPvlpxpmZ14FjSJrteZpPxGUaYPlvhzlzUZ4mZyABo0EFrOWnvyd0Xxroq//hKhtAWg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/project-service@8.49.0': - resolution: {integrity: 
sha512-/wJN0/DKkmRUMXjZUXYZpD1NEQzQAAn9QWfGwo+Ai8gnzqH7tvqS7oNVdTjKqOcPyVIdZdyCMoqN66Ia789e7g==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - typescript: '>=4.8.4 <6.0.0' - - '@typescript-eslint/project-service@8.50.0': - resolution: {integrity: sha512-Cg/nQcL1BcoTijEWyx4mkVC56r8dj44bFDvBdygifuS20f3OZCHmFbjF34DPSi07kwlFvqfv/xOLnJ5DquxSGQ==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/project-service@8.50.1': resolution: {integrity: sha512-E1ur1MCVf+YiP89+o4Les/oBAVzmSbeRB0MQLfSlYtbWU17HPxZ6Bhs5iYmKZRALvEuBoXIZMOIRRc/P++Ortg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/scope-manager@8.49.0': - resolution: {integrity: sha512-npgS3zi+/30KSOkXNs0LQXtsg9ekZ8OISAOLGWA/ZOEn0ZH74Ginfl7foziV8DT+D98WfQ5Kopwqb/PZOaIJGg==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - - '@typescript-eslint/scope-manager@8.50.0': - resolution: {integrity: sha512-xCwfuCZjhIqy7+HKxBLrDVT5q/iq7XBVBXLn57RTIIpelLtEIZHXAF/Upa3+gaCpeV1NNS5Z9A+ID6jn50VD4A==} + '@typescript-eslint/project-service@8.53.0': + resolution: {integrity: sha512-Bl6Gdr7NqkqIP5yP9z1JU///Nmes4Eose6L1HwpuVHwScgDPPuEWbUVhvlZmb8hy0vX9syLk5EGNL700WcBlbg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' '@typescript-eslint/scope-manager@8.50.1': resolution: {integrity: sha512-mfRx06Myt3T4vuoHaKi8ZWNTPdzKPNBhiblze5N50//TSHOAQQevl/aolqA/BcqqbJ88GUnLqjjcBc8EWdBcVw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/tsconfig-utils@8.49.0': - resolution: {integrity: sha512-8prixNi1/6nawsRYxet4YOhnbW+W9FK/bQPxsGB1D3ZrDzbJ5FXw5XmzxZv82X3B+ZccuSxo/X8q9nQ+mFecWA==} + '@typescript-eslint/scope-manager@8.53.0': + resolution: {integrity: sha512-kWNj3l01eOGSdVBnfAF2K1BTh06WS0Yet6JUgb9Cmkqaz3Jlu0fdVUjj9UI8gPidBWSMqDIglmEXifSgDT/D0g==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - 
peerDependencies: - typescript: '>=4.8.4 <6.0.0' - - '@typescript-eslint/tsconfig-utils@8.50.0': - resolution: {integrity: sha512-vxd3G/ybKTSlm31MOA96gqvrRGv9RJ7LGtZCn2Vrc5htA0zCDvcMqUkifcjrWNNKXHUU3WCkYOzzVSFBd0wa2w==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - typescript: '>=4.8.4 <6.0.0' '@typescript-eslint/tsconfig-utils@8.50.1': resolution: {integrity: sha512-ooHmotT/lCWLXi55G4mvaUF60aJa012QzvLK0Y+Mp4WdSt17QhMhWOaBWeGTFVkb2gDgBe19Cxy1elPXylslDw==} @@ -3765,43 +3751,26 @@ packages: peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/type-utils@8.49.0': - resolution: {integrity: sha512-KTExJfQ+svY8I10P4HdxKzWsvtVnsuCifU5MvXrRwoP2KOlNZ9ADNEWWsQTJgMxLzS5VLQKDjkCT/YzgsnqmZg==} + '@typescript-eslint/tsconfig-utils@8.53.0': + resolution: {integrity: sha512-K6Sc0R5GIG6dNoPdOooQ+KtvT5KCKAvTcY8h2rIuul19vxH5OTQk7ArKkd4yTzkw66WnNY0kPPzzcmWA+XRmiA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/type-utils@8.53.0': + resolution: {integrity: sha512-BBAUhlx7g4SmcLhn8cnbxoxtmS7hcq39xKCgiutL3oNx1TaIp+cny51s8ewnKMpVUKQUGb41RAUWZ9kxYdovuw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/type-utils@8.50.0': - resolution: {integrity: sha512-7OciHT2lKCewR0mFoBrvZJ4AXTMe/sYOe87289WAViOocEmDjjv8MvIOT2XESuKj9jp8u3SZYUSh89QA4S1kQw==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - eslint: ^8.57.0 || ^9.0.0 - typescript: '>=4.8.4 <6.0.0' - - '@typescript-eslint/types@8.49.0': - resolution: {integrity: sha512-e9k/fneezorUo6WShlQpMxXh8/8wfyc+biu6tnAqA81oWrEic0k21RHzP9uqqpyBBeBKu4T+Bsjy9/b8u7obXQ==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - - '@typescript-eslint/types@8.50.0': - resolution: {integrity: sha512-iX1mgmGrXdANhhITbpp2QQM2fGehBse9LbTf0sidWK6yg/NE+uhV5dfU1g6EYPlcReYmkE9QLPq/2irKAmtS9w==} - engines: {node: ^18.18.0 || 
^20.9.0 || >=21.1.0} - '@typescript-eslint/types@8.50.1': resolution: {integrity: sha512-v5lFIS2feTkNyMhd7AucE/9j/4V9v5iIbpVRncjk/K0sQ6Sb+Np9fgYS/63n6nwqahHQvbmujeBL7mp07Q9mlA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/typescript-estree@8.49.0': - resolution: {integrity: sha512-jrLdRuAbPfPIdYNppHJ/D0wN+wwNfJ32YTAm10eJVsFmrVpXQnDWBn8niCSMlWjvml8jsce5E/O+86IQtTbJWA==} + '@typescript-eslint/types@8.53.0': + resolution: {integrity: sha512-Bmh9KX31Vlxa13+PqPvt4RzKRN1XORYSLlAE+sO1i28NkisGbTtSLFVB3l7PWdHtR3E0mVMuC7JilWJ99m2HxQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - typescript: '>=4.8.4 <6.0.0' - - '@typescript-eslint/typescript-estree@8.50.0': - resolution: {integrity: sha512-W7SVAGBR/IX7zm1t70Yujpbk+zdPq/u4soeFSknWFdXIFuWsBGBOUu/Tn/I6KHSKvSh91OiMuaSnYp3mtPt5IQ==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - typescript: '>=4.8.4 <6.0.0' '@typescript-eslint/typescript-estree@8.50.1': resolution: {integrity: sha512-woHPdW+0gj53aM+cxchymJCrh0cyS7BTIdcDxWUNsclr9VDkOSbqC13juHzxOmQ22dDkMZEpZB+3X1WpUvzgVQ==} @@ -3809,18 +3778,10 @@ packages: peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/utils@8.49.0': - resolution: {integrity: sha512-N3W7rJw7Rw+z1tRsHZbK395TWSYvufBXumYtEGzypgMUthlg0/hmCImeA8hgO2d2G4pd7ftpxxul2J8OdtdaFA==} + '@typescript-eslint/typescript-estree@8.53.0': + resolution: {integrity: sha512-pw0c0Gdo7Z4xOG987u3nJ8akL9093yEEKv8QTJ+Bhkghj1xyj8cgPaavlr9rq8h7+s6plUJ4QJYw2gCZodqmGw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - eslint: ^8.57.0 || ^9.0.0 - typescript: '>=4.8.4 <6.0.0' - - '@typescript-eslint/utils@8.50.0': - resolution: {integrity: sha512-87KgUXET09CRjGCi2Ejxy3PULXna63/bMYv72tCAlDJC3Yqwln0HiFJ3VJMst2+mEtNtZu5oFvX4qJGjKsnAgg==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' '@typescript-eslint/utils@8.50.1': @@ -3830,18 +3791,21 @@ 
packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/visitor-keys@8.49.0': - resolution: {integrity: sha512-LlKaciDe3GmZFphXIc79THF/YYBugZ7FS1pO581E/edlVVNbZKDy93evqmrfQ9/Y4uN0vVhX4iuchq26mK/iiA==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - - '@typescript-eslint/visitor-keys@8.50.0': - resolution: {integrity: sha512-Xzmnb58+Db78gT/CCj/PVCvK+zxbnsw6F+O1oheYszJbBSdEjVhQi3C/Xttzxgi/GLmpvOggRs1RFpiJ8+c34Q==} + '@typescript-eslint/utils@8.53.0': + resolution: {integrity: sha512-XDY4mXTez3Z1iRDI5mbRhH4DFSt46oaIFsLg+Zn97+sYrXACziXSQcSelMybnVZ5pa1P6xYkPr5cMJyunM1ZDA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <6.0.0' '@typescript-eslint/visitor-keys@8.50.1': resolution: {integrity: sha512-IrDKrw7pCRUR94zeuCSUWQ+w8JEf5ZX5jl/e6AHGSLi1/zIr0lgutfn/7JpfCey+urpgQEdrZVYzCaVVKiTwhQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/visitor-keys@8.53.0': + resolution: {integrity: sha512-LZ2NqIHFhvFwxG0qZeLL9DvdNAHPGCY5dIRwBhyYeU+LfLhcStE1ImjsuTG/WaVh3XysGaeLW8Rqq7cGkPCFvw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript/native-preview-darwin-arm64@7.0.0-dev.20251209.1': resolution: {integrity: sha512-F1cnYi+ZeinYQnaTQKKIsbuoq8vip5iepBkSZXlB8PjbG62LW1edUdktd/nVEc+Q+SEysSQ3jRdk9eU766s5iw==} cpu: [arm64] @@ -3899,8 +3863,8 @@ packages: '@vitest/browser': optional: true - '@vitest/eslint-plugin@1.6.1': - resolution: {integrity: sha512-q4ZCihsURDxhJm6bEUtJjciXtT5k3ijWR4U+0f9XdCRAzAfML5NUUSwulsFoK1AFohBieh52akKWJEIFFMLn/g==} + '@vitest/eslint-plugin@1.6.6': + resolution: {integrity: sha512-bwgQxQWRtnTVzsUHK824tBmHzjV0iTx3tZaiQIYDjX3SA7TsQS8CuDVqxXrRY3FaOUMgbGavesCxI9MOfFLm7Q==} engines: {node: '>=18'} peerDependencies: eslint: '>=8.57.0' @@ -5119,8 +5083,8 @@ packages: peerDependencies: eslint: '*' - eslint-plugin-antfu@3.1.1: - resolution: {integrity: 
sha512-7Q+NhwLfHJFvopI2HBZbSxWXngTwBLKxW1AGXLr2lEGxcEIK/AsDs8pn8fvIizl5aZjBbVbVK5ujmMpBe4Tvdg==} + eslint-plugin-antfu@3.1.3: + resolution: {integrity: sha512-Az1QuqQJ/c2efWCxVxF249u3D4AcAu1Y3VCGAlJm+x4cgnn1ybUAnCT5DWVcogeaWduQKeVw07YFydVTOF4xDw==} peerDependencies: eslint: '*' @@ -5135,19 +5099,15 @@ packages: peerDependencies: eslint: '>=8' - eslint-plugin-import-lite@0.4.0: - resolution: {integrity: sha512-My0ReAg8WbHXYECIHVJkWB8UxrinZn3m72yonOYH6MFj40ZN1vHYQj16iq2Fd8Wrt/vRZJwDX2xm/BzDk1FzTg==} + eslint-plugin-import-lite@0.5.0: + resolution: {integrity: sha512-7uBvxuQj+VlYmZSYSHcm33QgmZnvMLP2nQiWaLtjhJ5x1zKcskOqjolL+dJC13XY+ktQqBgidAnnQMELfRaXQg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: '>=9.0.0' - typescript: '>=4.5' - peerDependenciesMeta: - typescript: - optional: true - eslint-plugin-jsdoc@61.5.0: - resolution: {integrity: sha512-PR81eOGq4S7diVnV9xzFSBE4CDENRQGP0Lckkek8AdHtbj+6Bm0cItwlFnxsLFriJHspiE3mpu8U20eODyToIg==} - engines: {node: '>=20.11.0'} + eslint-plugin-jsdoc@62.0.0: + resolution: {integrity: sha512-sNdIGLAvjFK3pB0SYFW74iXODZ4ifF8Ax13Wgq8jKepKnrCFzGo7+jRZfLf70h81SD7lPYnTE7MR2nhYSvaLTA==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} peerDependencies: eslint: ^7.0.0 || ^8.0.0 || ^9.0.0 @@ -5157,8 +5117,8 @@ packages: peerDependencies: eslint: '>=6.0.0' - eslint-plugin-n@17.23.1: - resolution: {integrity: sha512-68PealUpYoHOBh332JLLD9Sj7OQUDkFpmcfqt8R9sySfFSeuGJjMTJQvCRRB96zO3A/PELRLkPrzsHmzEFQQ5A==} + eslint-plugin-n@17.23.2: + resolution: {integrity: sha512-RhWBeb7YVPmNa2eggvJooiuehdL76/bbfj/OJewyoGT80qn5PXdz8zMOTO6YHOsI7byPt7+Ighh/i/4a5/v7hw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: '>=8.23.0' @@ -5167,9 +5127,9 @@ packages: resolution: {integrity: sha512-brcKcxGnISN2CcVhXJ/kEQlNa0MEfGRtwKtWA16SkqXHKitaKIMrfemJKLKX1YqDU5C/5JY3PvZXd5jEW04e0Q==} engines: {node: '>=5.0.0'} - eslint-plugin-perfectionist@4.15.1: - resolution: {integrity: 
sha512-MHF0cBoOG0XyBf7G0EAFCuJJu4I18wy0zAoT1OHfx2o6EOx1EFTIzr2HGeuZa1kDcusoX0xJ9V7oZmaeFd773Q==} - engines: {node: ^18.0.0 || >=20.0.0} + eslint-plugin-perfectionist@5.3.1: + resolution: {integrity: sha512-v8kAP8TarQYqDC4kxr343ZNi++/oOlBnmWovsUZpbJ7A/pq1VHGlgsf/fDh4CdEvEstzkrc8NLvoVKtfpsC4oA==} + engines: {node: ^20.0.0 || >=22.0.0} peerDependencies: eslint: '>=8.45.0' @@ -5178,16 +5138,16 @@ packages: peerDependencies: eslint: ^9.0.0 - eslint-plugin-react-dom@2.3.13: - resolution: {integrity: sha512-O9jglTOnnuyfJcSxjeVc8lqIp5kuS9/0MLLCHlOTH8ZjIifHHxUr6GZ2fd4la9y0FsoEYXEO7DBIMjWx2vCwjg==} + eslint-plugin-react-dom@2.7.0: + resolution: {integrity: sha512-9dvpfaAG3dC14jkDx5c9yXK9mQkYvxAUphQYfzorCntumQi5iOPsWNhITO+M1P+uIEpoc4HwuWkX42E/395AGQ==} engines: {node: '>=20.19.0'} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - eslint-plugin-react-hooks-extra@2.3.13: - resolution: {integrity: sha512-NSnY8yvtrvu2FAALLuvc2xesIAkMqGyJgilpy8wEi1w/Nw6v0IwBEffoNKLq9OHW4v3nikud3aBTqWfWKOx67Q==} - engines: {node: '>=20.0.0'} + eslint-plugin-react-hooks-extra@2.7.0: + resolution: {integrity: sha512-pvjuFvUJkmmHLRjWgJcuRKI+UUq8DddyVU5PrMJY2G3LTYewr4kMHRGaFQ6qg+mbVZWovfxy+VjZjJ8PTfJTDg==} + engines: {node: '>=20.19.0'} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' @@ -5198,8 +5158,8 @@ packages: peerDependencies: eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0 - eslint-plugin-react-naming-convention@2.3.13: - resolution: {integrity: sha512-2iler1ldFpB/PaNpN8WAVk6dKYKwKcoGm1j0JAAjdCrsfOTJ007ol2xTAyoHKAbMOvkZSi7qq90q+Q//RuhWwA==} + eslint-plugin-react-naming-convention@2.7.0: + resolution: {integrity: sha512-BENL2tUVW/PSpFjLyfS0WloG5Buh76rvBM1hG/dCEyWDpHA6s4oJpF2Th9J92eKfim48/uprIPkKCB520Ev2nQ==} engines: {node: '>=20.19.0'} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -5210,15 +5170,15 @@ packages: peerDependencies: eslint: '>=8.40' - eslint-plugin-react-web-api@2.3.13: - resolution: {integrity: 
sha512-+UypRPHP9GFMulIENpsC/J+TygWywiyz2mb4qyUP6y/IwdcSilk1MyF9WquNYKB/4/FN4Rl1oRm6WMbfkbpMnQ==} + eslint-plugin-react-web-api@2.7.0: + resolution: {integrity: sha512-vIuYyHbn2H337YZR8tKqUbzSNAiH6+9jk3atQBEgISJT0NTuwd80nhEPm3oPHfbgB3Sc4+rEhchVTnG+4BsFfg==} engines: {node: '>=20.19.0'} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - eslint-plugin-react-x@2.3.13: - resolution: {integrity: sha512-+m+V/5VLMxgx0VsFUUyflMNLQG0WFYspsfv0XJFqx7me3A2b3P20QatNDHQCYswz0PRbRFqinTPukPRhZh68ag==} + eslint-plugin-react-x@2.7.0: + resolution: {integrity: sha512-/za228LsbKt1OlZ2XxP3R4xouG0rXeeuLyEnpHfKsAcY0mKPklempmQ5s0E9+SqcpQ/Jd+O4Jg9/30RU+vCqfw==} engines: {node: '>=20.19.0'} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -5235,11 +5195,11 @@ packages: peerDependencies: eslint: ^8.0.0 || ^9.0.0 - eslint-plugin-storybook@10.1.10: - resolution: {integrity: sha512-ITr6Aq3buR/DuDATkq1BafUVJLybyo676fY+tj9Zjd1Ak+UXBAMQcQ++tiBVVHm1RqADwM3b1o6bnWHK2fPPKw==} + eslint-plugin-storybook@10.1.11: + resolution: {integrity: sha512-mbq2r2kK5+AcLl0XDJ3to91JOgzCbHOqj+J3n+FRw6drk+M1boRqMShSoMMm0HdzXPLmlr7iur+qJ5ZuhH6ayQ==} peerDependencies: eslint: '>=8' - storybook: ^10.1.10 + storybook: ^10.1.11 eslint-plugin-tailwindcss@3.18.2: resolution: {integrity: sha512-QbkMLDC/OkkjFQ1iz/5jkMdHfiMu/uwujUHLAJK5iwNHD8RTxVTlsUezE0toTZ6VhybNBsk+gYGPDq2agfeRNA==} @@ -5247,11 +5207,11 @@ packages: peerDependencies: tailwindcss: ^3.4.0 - eslint-plugin-toml@0.12.0: - resolution: {integrity: sha512-+/wVObA9DVhwZB1nG83D2OAQRrcQZXy+drqUnFJKymqnmbnbfg/UPmEMCKrJNcEboUGxUjYrJlgy+/Y930mURQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + eslint-plugin-toml@1.0.0: + resolution: {integrity: sha512-ACotflJMZ9CKCZlc0nznBxRNbiOYcBqWmXUSoKsGf6cyDV7EN1kGoD/WKnMo/lEsIF0WnzaYXcOU1HBOoyxRrg==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} peerDependencies: - eslint: '>=6.0.0' + eslint: '>=9.38.0' eslint-plugin-unicorn@62.0.0: resolution: {integrity: 
sha512-HIlIkGLkvf29YEiS/ImuDZQbP12gWyx5i3C6XrRxMvVdqMroCI9qoVYCoIl17ChN+U89pn9sVwLxhIWj5nEc7g==} @@ -5310,6 +5270,10 @@ packages: resolution: {integrity: sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + eslint-visitor-keys@5.0.0: + resolution: {integrity: sha512-A0XeIi7CXU7nPlfHS9loMYEKxUaONu/hTEzHTGba9Huu94Cq1hPivf+DE5erJozZOky0LfvXAyrV/tcswpLI0Q==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} + eslint@9.39.2: resolution: {integrity: sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -5324,6 +5288,10 @@ packages: resolution: {integrity: sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + espree@11.0.0: + resolution: {integrity: sha512-+gMeWRrIh/NsG+3NaLeWHuyeyk70p2tbvZIWBYcqQ4/7Xvars6GYTZNhF1sIeLcc6Wb11He5ffz3hsHyXFrw5A==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} + espree@9.6.1: resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -5337,6 +5305,10 @@ packages: resolution: {integrity: sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==} engines: {node: '>=0.10'} + esquery@1.7.0: + resolution: {integrity: sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==} + engines: {node: '>=0.10'} + esrecurse@4.3.0: resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} engines: {node: '>=4.0'} @@ -5619,6 +5591,10 @@ packages: resolution: {integrity: sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==} engines: {node: '>=18'} + globals@17.0.0: + resolution: {integrity: 
sha512-gv5BeD2EssA793rlFWVPMMCqefTlpusw6/2TbAVMy0FzcG8wKJn4O+NqJ4+XWmmwrayJgw5TzrmWjFgmz1XPqw==} + engines: {node: '>=18'} + globrex@0.1.2: resolution: {integrity: sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==} @@ -6057,10 +6033,6 @@ packages: resolution: {integrity: sha512-iZ8Bdb84lWRuGHamRXFyML07r21pcwBrLkHEuHgEY5UbCouBwv7ECknDRKzsQIXMiqpPymqtIf8TC/shYKB5rw==} engines: {node: '>=12.0.0'} - jsdoc-type-pratt-parser@6.10.0: - resolution: {integrity: sha512-+LexoTRyYui5iOhJGn13N9ZazL23nAHGkXsa1p/C8yeq79WRfLBag6ZZ0FQG2aRoc9yfo59JT9EYCQonOkHKkQ==} - engines: {node: '>=20.0.0'} - jsdoc-type-pratt-parser@7.0.0: resolution: {integrity: sha512-c7YbokssPOSHmqTbSAmTtnVgAVa/7lumWNYqomgd5KOMyPrRve2anx6lonfOsXEQacqF9FKVUj7bLg4vRSvdYA==} engines: {node: '>=20.0.0'} @@ -7983,9 +7955,9 @@ packages: toggle-selection@1.0.6: resolution: {integrity: sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==} - toml-eslint-parser@0.10.1: - resolution: {integrity: sha512-9mjy3frhioGIVGcwamlVlUyJ9x+WHw/TXiz9R4YOlmsIuBN43r9Dp8HZ35SF9EKjHrn3BUZj04CF+YqZ2oJ+7w==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + toml-eslint-parser@1.0.2: + resolution: {integrity: sha512-ZI3t5mJiCt+1jQei8iNvKacpoPg9Qc9LumWZBJpWpHKbezA2df0nIXl16HjgwCr44qxpVm7azTYpJ5rylcbsNg==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} totalist@3.0.1: resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==} @@ -8014,6 +7986,12 @@ packages: peerDependencies: typescript: '>=4.8.4' + ts-api-utils@2.4.0: + resolution: {integrity: sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==} + engines: {node: '>=18.12'} + peerDependencies: + typescript: '>=4.8.4' + ts-debounce@4.0.0: resolution: {integrity: sha512-+1iDGY6NmOGidq7i7xZGA4cm8DAa6fqdYcvO5Z6yBevH++Bdo9Qt/mN0TzHUgcCcKv1gmh9+W5dHqz8pMWbCbg==} @@ -8811,47 +8789,47 @@ snapshots: 
idb: 8.0.0 tslib: 2.8.1 - '@antfu/eslint-config@6.7.3(@eslint-react/eslint-plugin@2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(@next/eslint-plugin-next@15.5.9)(@vue/compiler-sfc@3.5.25)(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@1.21.7)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@1.21.7)))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)(vitest@4.0.16(@types/node@18.15.0)(happy-dom@20.0.11)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.0))(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2))': + '@antfu/eslint-config@7.0.1(@eslint-react/eslint-plugin@2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(@next/eslint-plugin-next@15.5.9)(@vue/compiler-sfc@3.5.25)(eslint-plugin-react-hooks@7.0.1(eslint@9.39.2(jiti@1.21.7)))(eslint-plugin-react-refresh@0.4.26(eslint@9.39.2(jiti@1.21.7)))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)(vitest@4.0.16(@types/node@18.15.0)(happy-dom@20.0.11)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.0))(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2))': dependencies: '@antfu/install-pkg': 1.1.0 '@clack/prompts': 0.11.0 - '@eslint-community/eslint-plugin-eslint-comments': 4.5.0(eslint@9.39.2(jiti@1.21.7)) + '@eslint-community/eslint-plugin-eslint-comments': 4.6.0(eslint@9.39.2(jiti@1.21.7)) '@eslint/markdown': 7.5.1 - '@stylistic/eslint-plugin': 5.6.1(eslint@9.39.2(jiti@1.21.7)) - '@typescript-eslint/eslint-plugin': 8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@vitest/eslint-plugin': 1.6.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)(vitest@4.0.16(@types/node@18.15.0)(happy-dom@20.0.11)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.0))(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)) + '@stylistic/eslint-plugin': 5.7.0(eslint@9.39.2(jiti@1.21.7)) + '@typescript-eslint/eslint-plugin': 
8.53.0(@typescript-eslint/parser@8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/parser': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@vitest/eslint-plugin': 1.6.6(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)(vitest@4.0.16(@types/node@18.15.0)(happy-dom@20.0.11)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.0))(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)) ansis: 4.2.0 cac: 6.7.14 eslint: 9.39.2(jiti@1.21.7) eslint-config-flat-gitignore: 2.1.0(eslint@9.39.2(jiti@1.21.7)) eslint-flat-config-utils: 2.1.4 eslint-merge-processors: 2.0.0(eslint@9.39.2(jiti@1.21.7)) - eslint-plugin-antfu: 3.1.1(eslint@9.39.2(jiti@1.21.7)) + eslint-plugin-antfu: 3.1.3(eslint@9.39.2(jiti@1.21.7)) eslint-plugin-command: 3.4.0(eslint@9.39.2(jiti@1.21.7)) - eslint-plugin-import-lite: 0.4.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - eslint-plugin-jsdoc: 61.5.0(eslint@9.39.2(jiti@1.21.7)) + eslint-plugin-import-lite: 0.5.0(eslint@9.39.2(jiti@1.21.7)) + eslint-plugin-jsdoc: 62.0.0(eslint@9.39.2(jiti@1.21.7)) eslint-plugin-jsonc: 2.21.0(eslint@9.39.2(jiti@1.21.7)) - eslint-plugin-n: 17.23.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + eslint-plugin-n: 17.23.2(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint-plugin-no-only-tests: 3.3.0 - eslint-plugin-perfectionist: 4.15.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + eslint-plugin-perfectionist: 5.3.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint-plugin-pnpm: 1.4.3(eslint@9.39.2(jiti@1.21.7)) eslint-plugin-regexp: 2.10.0(eslint@9.39.2(jiti@1.21.7)) - eslint-plugin-toml: 0.12.0(eslint@9.39.2(jiti@1.21.7)) + eslint-plugin-toml: 1.0.0(eslint@9.39.2(jiti@1.21.7)) eslint-plugin-unicorn: 62.0.0(eslint@9.39.2(jiti@1.21.7)) - eslint-plugin-unused-imports: 
4.3.0(@typescript-eslint/eslint-plugin@8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7)) - eslint-plugin-vue: 10.6.2(@stylistic/eslint-plugin@5.6.1(eslint@9.39.2(jiti@1.21.7)))(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@1.21.7))) + eslint-plugin-unused-imports: 4.3.0(@typescript-eslint/eslint-plugin@8.53.0(@typescript-eslint/parser@8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7)) + eslint-plugin-vue: 10.6.2(@stylistic/eslint-plugin@5.7.0(eslint@9.39.2(jiti@1.21.7)))(@typescript-eslint/parser@8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@1.21.7))) eslint-plugin-yml: 1.19.1(eslint@9.39.2(jiti@1.21.7)) eslint-processor-vue-blocks: 2.0.0(@vue/compiler-sfc@3.5.25)(eslint@9.39.2(jiti@1.21.7)) - globals: 16.5.0 + globals: 17.0.0 jsonc-eslint-parser: 2.4.2 local-pkg: 1.1.2 parse-gitignore: 2.0.0 - toml-eslint-parser: 0.10.1 + toml-eslint-parser: 1.0.2 vue-eslint-parser: 10.2.0(eslint@9.39.2(jiti@1.21.7)) yaml-eslint-parser: 1.3.2 optionalDependencies: - '@eslint-react/eslint-plugin': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/eslint-plugin': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) '@next/eslint-plugin-next': 15.5.9 eslint-plugin-react-hooks: 7.0.1(eslint@9.39.2(jiti@1.21.7)) eslint-plugin-react-refresh: 0.4.26(eslint@9.39.2(jiti@1.21.7)) @@ -9785,14 +9763,6 @@ snapshots: '@epic-web/invariant@1.0.0': {} - '@es-joy/jsdoccomment@0.76.0': - dependencies: - '@types/estree': 1.0.8 - '@typescript-eslint/types': 8.50.1 - comment-parser: 1.4.1 - esquery: 1.6.0 - jsdoc-type-pratt-parser: 6.10.0 - '@es-joy/jsdoccomment@0.78.0': dependencies: '@types/estree': 1.0.8 @@ 
-9801,6 +9771,14 @@ snapshots: esquery: 1.6.0 jsdoc-type-pratt-parser: 7.0.0 + '@es-joy/jsdoccomment@0.79.0': + dependencies: + '@types/estree': 1.0.8 + '@typescript-eslint/types': 8.53.0 + comment-parser: 1.4.1 + esquery: 1.7.0 + jsdoc-type-pratt-parser: 7.0.0 + '@es-joy/resolve.exports@1.2.0': {} '@esbuild/aix-ppc64@0.27.2': @@ -9881,42 +9859,47 @@ snapshots: '@esbuild/win32-x64@0.27.2': optional: true - '@eslint-community/eslint-plugin-eslint-comments@4.5.0(eslint@9.39.2(jiti@1.21.7))': + '@eslint-community/eslint-plugin-eslint-comments@4.6.0(eslint@9.39.2(jiti@1.21.7))': dependencies: escape-string-regexp: 4.0.0 eslint: 9.39.2(jiti@1.21.7) - ignore: 5.3.2 + ignore: 7.0.5 '@eslint-community/eslint-utils@4.9.0(eslint@9.39.2(jiti@1.21.7))': dependencies: eslint: 9.39.2(jiti@1.21.7) eslint-visitor-keys: 3.4.3 + '@eslint-community/eslint-utils@4.9.1(eslint@9.39.2(jiti@1.21.7))': + dependencies: + eslint: 9.39.2(jiti@1.21.7) + eslint-visitor-keys: 3.4.3 + '@eslint-community/regexpp@4.12.1': {} '@eslint-community/regexpp@4.12.2': {} - '@eslint-react/ast@2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@eslint-react/ast@2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@eslint-react/eff': 2.3.13 - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/eff': 2.7.0 + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/typescript-estree': 8.53.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) string-ts: 2.3.1 typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@eslint-react/core@2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@eslint-react/core@2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@eslint-react/ast': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - 
'@eslint-react/eff': 2.3.13 - '@eslint-react/shared': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/var': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.50.0 - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/ast': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/eff': 2.7.0 + '@eslint-react/shared': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/var': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) birecord: 0.1.1 eslint: 9.39.2(jiti@1.21.7) ts-pattern: 5.9.0 @@ -9924,45 +9907,45 @@ snapshots: transitivePeerDependencies: - supports-color - '@eslint-react/eff@2.3.13': {} + '@eslint-react/eff@2.7.0': {} - '@eslint-react/eslint-plugin@2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@eslint-react/eslint-plugin@2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@eslint-react/eff': 2.3.13 - '@eslint-react/shared': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.49.0 - '@typescript-eslint/type-utils': 8.49.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/types': 8.49.0 - '@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/eff': 2.7.0 + '@eslint-react/shared': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/type-utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) - eslint-plugin-react-dom: 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - 
eslint-plugin-react-hooks-extra: 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - eslint-plugin-react-naming-convention: 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - eslint-plugin-react-web-api: 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - eslint-plugin-react-x: 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - ts-api-utils: 2.1.0(typescript@5.9.3) + eslint-plugin-react-dom: 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + eslint-plugin-react-hooks-extra: 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + eslint-plugin-react-naming-convention: 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + eslint-plugin-react-web-api: 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + eslint-plugin-react-x: 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + ts-api-utils: 2.4.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@eslint-react/shared@2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@eslint-react/shared@2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@eslint-react/eff': 2.3.13 - '@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/eff': 2.7.0 + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) ts-pattern: 5.9.0 typescript: 5.9.3 - zod: 4.1.13 + zod: 3.25.76 transitivePeerDependencies: - supports-color - '@eslint-react/var@2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@eslint-react/var@2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@eslint-react/ast': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/eff': 2.3.13 - '@typescript-eslint/scope-manager': 8.50.0 - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/ast': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/eff': 2.7.0 + 
'@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) ts-pattern: 5.9.0 typescript: 5.9.3 @@ -9987,11 +9970,11 @@ snapshots: dependencies: '@eslint/core': 0.17.0 - '@eslint/core@0.15.2': + '@eslint/core@0.17.0': dependencies: '@types/json-schema': 7.0.15 - '@eslint/core@0.17.0': + '@eslint/core@1.0.1': dependencies: '@types/json-schema': 7.0.15 @@ -10014,7 +9997,7 @@ snapshots: '@eslint/markdown@7.5.1': dependencies: '@eslint/core': 0.17.0 - '@eslint/plugin-kit': 0.3.5 + '@eslint/plugin-kit': 0.4.1 github-slugger: 2.0.0 mdast-util-from-markdown: 2.0.2 mdast-util-frontmatter: 2.0.1 @@ -10027,9 +10010,14 @@ snapshots: '@eslint/object-schema@2.1.7': {} - '@eslint/plugin-kit@0.3.5': + '@eslint/plugin-kit@0.4.1': dependencies: - '@eslint/core': 0.15.2 + '@eslint/core': 0.17.0 + levn: 0.4.1 + + '@eslint/plugin-kit@0.5.1': + dependencies: + '@eslint/core': 1.0.1 levn: 0.4.1 '@floating-ui/core@1.7.3': @@ -11610,13 +11598,13 @@ snapshots: optionalDependencies: typescript: 5.9.3 - '@stylistic/eslint-plugin@5.6.1(eslint@9.39.2(jiti@1.21.7))': + '@stylistic/eslint-plugin@5.7.0(eslint@9.39.2(jiti@1.21.7))': dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@1.21.7)) - '@typescript-eslint/types': 8.50.1 + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@1.21.7)) + '@typescript-eslint/types': 8.53.0 eslint: 9.39.2(jiti@1.21.7) - eslint-visitor-keys: 4.2.1 - espree: 10.4.0 + eslint-visitor-keys: 5.0.0 + espree: 11.0.0 estraverse: 5.3.0 picomatch: 4.0.3 @@ -12147,52 +12135,34 @@ snapshots: '@types/zen-observable@0.8.3': {} - '@typescript-eslint/eslint-plugin@8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + 
'@typescript-eslint/eslint-plugin@8.53.0(@typescript-eslint/parser@8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: '@eslint-community/regexpp': 4.12.2 - '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.50.0 - '@typescript-eslint/type-utils': 8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.50.0 + '@typescript-eslint/parser': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/type-utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.53.0 eslint: 9.39.2(jiti@1.21.7) ignore: 7.0.5 natural-compare: 1.4.0 - ts-api-utils: 2.1.0(typescript@5.9.3) + ts-api-utils: 2.4.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/parser@8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@typescript-eslint/scope-manager': 8.50.0 - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.50.0 + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/typescript-estree': 8.53.0(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.53.0 debug: 4.4.3 eslint: 9.39.2(jiti@1.21.7) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/project-service@8.49.0(typescript@5.9.3)': - dependencies: - '@typescript-eslint/tsconfig-utils': 8.49.0(typescript@5.9.3) - '@typescript-eslint/types': 8.50.0 - debug: 4.4.3 - typescript: 
5.9.3 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/project-service@8.50.0(typescript@5.9.3)': - dependencies: - '@typescript-eslint/tsconfig-utils': 8.50.1(typescript@5.9.3) - '@typescript-eslint/types': 8.50.1 - debug: 4.4.3 - typescript: 5.9.3 - transitivePeerDependencies: - - supports-color - '@typescript-eslint/project-service@8.50.1(typescript@5.9.3)': dependencies: '@typescript-eslint/tsconfig-utils': 8.50.1(typescript@5.9.3) @@ -12202,92 +12172,48 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/scope-manager@8.49.0': + '@typescript-eslint/project-service@8.53.0(typescript@5.9.3)': dependencies: - '@typescript-eslint/types': 8.49.0 - '@typescript-eslint/visitor-keys': 8.49.0 - - '@typescript-eslint/scope-manager@8.50.0': - dependencies: - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/visitor-keys': 8.50.0 + '@typescript-eslint/tsconfig-utils': 8.53.0(typescript@5.9.3) + '@typescript-eslint/types': 8.53.0 + debug: 4.4.3 + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color '@typescript-eslint/scope-manager@8.50.1': dependencies: '@typescript-eslint/types': 8.50.1 '@typescript-eslint/visitor-keys': 8.50.1 - '@typescript-eslint/tsconfig-utils@8.49.0(typescript@5.9.3)': + '@typescript-eslint/scope-manager@8.53.0': dependencies: - typescript: 5.9.3 - - '@typescript-eslint/tsconfig-utils@8.50.0(typescript@5.9.3)': - dependencies: - typescript: 5.9.3 + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/visitor-keys': 8.53.0 '@typescript-eslint/tsconfig-utils@8.50.1(typescript@5.9.3)': dependencies: typescript: 5.9.3 - '@typescript-eslint/type-utils@8.49.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/tsconfig-utils@8.53.0(typescript@5.9.3)': dependencies: - '@typescript-eslint/types': 8.49.0 - '@typescript-eslint/typescript-estree': 8.49.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.49.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + 
typescript: 5.9.3 + + '@typescript-eslint/type-utils@8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + dependencies: + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/typescript-estree': 8.53.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) debug: 4.4.3 eslint: 9.39.2(jiti@1.21.7) - ts-api-utils: 2.1.0(typescript@5.9.3) + ts-api-utils: 2.4.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/type-utils@8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': - dependencies: - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - debug: 4.4.3 - eslint: 9.39.2(jiti@1.21.7) - ts-api-utils: 2.1.0(typescript@5.9.3) - typescript: 5.9.3 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/types@8.49.0': {} - - '@typescript-eslint/types@8.50.0': {} - '@typescript-eslint/types@8.50.1': {} - '@typescript-eslint/typescript-estree@8.49.0(typescript@5.9.3)': - dependencies: - '@typescript-eslint/project-service': 8.49.0(typescript@5.9.3) - '@typescript-eslint/tsconfig-utils': 8.49.0(typescript@5.9.3) - '@typescript-eslint/types': 8.49.0 - '@typescript-eslint/visitor-keys': 8.49.0 - debug: 4.4.3 - minimatch: 9.0.5 - semver: 7.7.3 - tinyglobby: 0.2.15 - ts-api-utils: 2.1.0(typescript@5.9.3) - typescript: 5.9.3 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/typescript-estree@8.50.0(typescript@5.9.3)': - dependencies: - '@typescript-eslint/project-service': 8.50.0(typescript@5.9.3) - '@typescript-eslint/tsconfig-utils': 8.50.0(typescript@5.9.3) - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/visitor-keys': 8.50.0 - debug: 4.4.3 - minimatch: 9.0.5 - semver: 7.7.3 - tinyglobby: 0.2.15 - ts-api-utils: 2.1.0(typescript@5.9.3) - typescript: 5.9.3 - transitivePeerDependencies: - - 
supports-color + '@typescript-eslint/types@8.53.0': {} '@typescript-eslint/typescript-estree@8.50.1(typescript@5.9.3)': dependencies: @@ -12304,24 +12230,17 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.49.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/typescript-estree@8.53.0(typescript@5.9.3)': dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@1.21.7)) - '@typescript-eslint/scope-manager': 8.49.0 - '@typescript-eslint/types': 8.49.0 - '@typescript-eslint/typescript-estree': 8.49.0(typescript@5.9.3) - eslint: 9.39.2(jiti@1.21.7) - typescript: 5.9.3 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': - dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@1.21.7)) - '@typescript-eslint/scope-manager': 8.50.0 - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3) - eslint: 9.39.2(jiti@1.21.7) + '@typescript-eslint/project-service': 8.53.0(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.53.0(typescript@5.9.3) + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/visitor-keys': 8.53.0 + debug: 4.4.3 + minimatch: 9.0.5 + semver: 7.7.3 + tinyglobby: 0.2.15 + ts-api-utils: 2.4.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color @@ -12337,21 +12256,27 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/visitor-keys@8.49.0': + '@typescript-eslint/utils@8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@typescript-eslint/types': 8.49.0 - eslint-visitor-keys: 4.2.1 - - '@typescript-eslint/visitor-keys@8.50.0': - dependencies: - '@typescript-eslint/types': 8.50.0 - eslint-visitor-keys: 4.2.1 + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@1.21.7)) + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/types': 
8.53.0 + '@typescript-eslint/typescript-estree': 8.53.0(typescript@5.9.3) + eslint: 9.39.2(jiti@1.21.7) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color '@typescript-eslint/visitor-keys@8.50.1': dependencies: '@typescript-eslint/types': 8.50.1 eslint-visitor-keys: 4.2.1 + '@typescript-eslint/visitor-keys@8.53.0': + dependencies: + '@typescript-eslint/types': 8.53.0 + eslint-visitor-keys: 4.2.1 + '@typescript/native-preview-darwin-arm64@7.0.0-dev.20251209.1': optional: true @@ -12414,10 +12339,10 @@ snapshots: transitivePeerDependencies: - supports-color - '@vitest/eslint-plugin@1.6.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)(vitest@4.0.16(@types/node@18.15.0)(happy-dom@20.0.11)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.0))(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2))': + '@vitest/eslint-plugin@1.6.6(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)(vitest@4.0.16(@types/node@18.15.0)(happy-dom@20.0.11)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.0))(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2))': dependencies: - '@typescript-eslint/scope-manager': 8.50.1 - '@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) optionalDependencies: typescript: 5.9.3 @@ -13742,7 +13667,7 @@ snapshots: dependencies: eslint: 9.39.2(jiti@1.21.7) - eslint-plugin-antfu@3.1.1(eslint@9.39.2(jiti@1.21.7)): + eslint-plugin-antfu@3.1.3(eslint@9.39.2(jiti@1.21.7)): dependencies: eslint: 9.39.2(jiti@1.21.7) @@ -13758,23 +13683,21 @@ snapshots: eslint: 9.39.2(jiti@1.21.7) eslint-compat-utils: 0.5.1(eslint@9.39.2(jiti@1.21.7)) - eslint-plugin-import-lite@0.4.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): + eslint-plugin-import-lite@0.5.0(eslint@9.39.2(jiti@1.21.7)): dependencies: eslint: 9.39.2(jiti@1.21.7) - optionalDependencies: - typescript: 5.9.3 - 
eslint-plugin-jsdoc@61.5.0(eslint@9.39.2(jiti@1.21.7)): + eslint-plugin-jsdoc@62.0.0(eslint@9.39.2(jiti@1.21.7)): dependencies: - '@es-joy/jsdoccomment': 0.76.0 + '@es-joy/jsdoccomment': 0.79.0 '@es-joy/resolve.exports': 1.2.0 are-docs-informative: 0.0.2 comment-parser: 1.4.1 debug: 4.4.3 escape-string-regexp: 4.0.0 eslint: 9.39.2(jiti@1.21.7) - espree: 10.4.0 - esquery: 1.6.0 + espree: 11.0.0 + esquery: 1.7.0 html-entities: 2.6.0 object-deep-merge: 2.0.0 parse-imports-exports: 0.2.4 @@ -13799,7 +13722,7 @@ snapshots: transitivePeerDependencies: - '@eslint/json' - eslint-plugin-n@17.23.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): + eslint-plugin-n@17.23.2(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): dependencies: '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@1.21.7)) enhanced-resolve: 5.18.3 @@ -13816,10 +13739,9 @@ snapshots: eslint-plugin-no-only-tests@3.3.0: {} - eslint-plugin-perfectionist@4.15.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): + eslint-plugin-perfectionist@5.3.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): dependencies: - '@typescript-eslint/types': 8.50.1 - '@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) natural-orderby: 5.0.0 transitivePeerDependencies: @@ -13837,16 +13759,16 @@ snapshots: yaml: 2.8.2 yaml-eslint-parser: 1.3.2 - eslint-plugin-react-dom@2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): + eslint-plugin-react-dom@2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): dependencies: - '@eslint-react/ast': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/core': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/eff': 2.3.13 - '@eslint-react/shared': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/var': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.49.0 - 
'@typescript-eslint/types': 8.49.0 - '@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/ast': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/core': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/eff': 2.7.0 + '@eslint-react/shared': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/var': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) compare-versions: 6.1.1 eslint: 9.39.2(jiti@1.21.7) string-ts: 2.3.1 @@ -13855,17 +13777,17 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-react-hooks-extra@2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): + eslint-plugin-react-hooks-extra@2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): dependencies: - '@eslint-react/ast': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/core': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/eff': 2.3.13 - '@eslint-react/shared': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/var': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.49.0 - '@typescript-eslint/type-utils': 8.49.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/types': 8.49.0 - '@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/ast': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/core': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/eff': 2.7.0 + '@eslint-react/shared': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/var': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/type-utils': 
8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) string-ts: 2.3.1 ts-pattern: 5.9.0 @@ -13884,17 +13806,18 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-react-naming-convention@2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): + eslint-plugin-react-naming-convention@2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): dependencies: - '@eslint-react/ast': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/core': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/eff': 2.3.13 - '@eslint-react/shared': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/var': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.49.0 - '@typescript-eslint/type-utils': 8.49.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/types': 8.49.0 - '@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/ast': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/core': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/eff': 2.7.0 + '@eslint-react/shared': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/var': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/type-utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + compare-versions: 6.1.1 eslint: 9.39.2(jiti@1.21.7) string-ts: 2.3.1 ts-pattern: 5.9.0 @@ -13906,16 +13829,16 @@ snapshots: dependencies: eslint: 9.39.2(jiti@1.21.7) - eslint-plugin-react-web-api@2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): + 
eslint-plugin-react-web-api@2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): dependencies: - '@eslint-react/ast': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/core': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/eff': 2.3.13 - '@eslint-react/shared': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/var': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.49.0 - '@typescript-eslint/types': 8.49.0 - '@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/ast': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/core': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/eff': 2.7.0 + '@eslint-react/shared': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/var': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) string-ts: 2.3.1 ts-pattern: 5.9.0 @@ -13923,22 +13846,22 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-react-x@2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): + eslint-plugin-react-x@2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): dependencies: - '@eslint-react/ast': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/core': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/eff': 2.3.13 - '@eslint-react/shared': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@eslint-react/var': 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.49.0 - '@typescript-eslint/type-utils': 8.49.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/types': 8.49.0 - '@typescript-eslint/utils': 
8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/ast': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/core': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/eff': 2.7.0 + '@eslint-react/shared': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@eslint-react/var': 2.7.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/type-utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) compare-versions: 6.1.1 eslint: 9.39.2(jiti@1.21.7) is-immutable-type: 5.0.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) string-ts: 2.3.1 - ts-api-utils: 2.1.0(typescript@5.9.3) + ts-api-utils: 2.4.0(typescript@5.9.3) ts-pattern: 5.9.0 typescript: 5.9.3 transitivePeerDependencies: @@ -13969,7 +13892,7 @@ snapshots: semver: 7.7.2 typescript: 5.9.3 - eslint-plugin-storybook@10.1.10(eslint@9.39.2(jiti@1.21.7))(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3): + eslint-plugin-storybook@10.1.11(eslint@9.39.2(jiti@1.21.7))(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3): dependencies: '@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) @@ -13984,13 +13907,13 @@ snapshots: postcss: 8.5.6 tailwindcss: 3.4.18(tsx@4.21.0)(yaml@2.8.2) - eslint-plugin-toml@0.12.0(eslint@9.39.2(jiti@1.21.7)): + eslint-plugin-toml@1.0.0(eslint@9.39.2(jiti@1.21.7)): dependencies: + '@eslint/core': 1.0.1 + '@eslint/plugin-kit': 0.5.1 debug: 4.4.3 eslint: 9.39.2(jiti@1.21.7) - eslint-compat-utils: 0.6.5(eslint@9.39.2(jiti@1.21.7)) - lodash: 4.17.21 - toml-eslint-parser: 0.10.1 + 
toml-eslint-parser: 1.0.2 transitivePeerDependencies: - supports-color @@ -13998,7 +13921,7 @@ snapshots: dependencies: '@babel/helper-validator-identifier': 7.28.5 '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@1.21.7)) - '@eslint/plugin-kit': 0.3.5 + '@eslint/plugin-kit': 0.4.1 change-case: 5.4.4 ci-info: 4.3.1 clean-regexp: 1.0.0 @@ -14016,13 +13939,13 @@ snapshots: semver: 7.7.3 strip-indent: 4.1.1 - eslint-plugin-unused-imports@4.3.0(@typescript-eslint/eslint-plugin@8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7)): + eslint-plugin-unused-imports@4.3.0(@typescript-eslint/eslint-plugin@8.53.0(@typescript-eslint/parser@8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7)): dependencies: eslint: 9.39.2(jiti@1.21.7) optionalDependencies: - '@typescript-eslint/eslint-plugin': 8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/eslint-plugin': 8.53.0(@typescript-eslint/parser@8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - eslint-plugin-vue@10.6.2(@stylistic/eslint-plugin@5.6.1(eslint@9.39.2(jiti@1.21.7)))(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@1.21.7))): + eslint-plugin-vue@10.6.2(@stylistic/eslint-plugin@5.7.0(eslint@9.39.2(jiti@1.21.7)))(@typescript-eslint/parser@8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@1.21.7))): dependencies: '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@1.21.7)) eslint: 9.39.2(jiti@1.21.7) @@ -14033,8 +13956,8 @@ snapshots: vue-eslint-parser: 10.2.0(eslint@9.39.2(jiti@1.21.7)) 
xml-name-validator: 4.0.0 optionalDependencies: - '@stylistic/eslint-plugin': 5.6.1(eslint@9.39.2(jiti@1.21.7)) - '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@stylistic/eslint-plugin': 5.7.0(eslint@9.39.2(jiti@1.21.7)) + '@typescript-eslint/parser': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint-plugin-yml@1.19.1(eslint@9.39.2(jiti@1.21.7)): dependencies: @@ -14067,6 +13990,8 @@ snapshots: eslint-visitor-keys@4.2.1: {} + eslint-visitor-keys@5.0.0: {} + eslint@9.39.2(jiti@1.21.7): dependencies: '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@1.21.7)) @@ -14076,7 +14001,7 @@ snapshots: '@eslint/core': 0.17.0 '@eslint/eslintrc': 3.3.3 '@eslint/js': 9.39.2 - '@eslint/plugin-kit': 0.3.5 + '@eslint/plugin-kit': 0.4.1 '@humanfs/node': 0.16.7 '@humanwhocodes/module-importer': 1.0.1 '@humanwhocodes/retry': 0.4.3 @@ -14114,6 +14039,12 @@ snapshots: acorn-jsx: 5.3.2(acorn@8.15.0) eslint-visitor-keys: 4.2.1 + espree@11.0.0: + dependencies: + acorn: 8.15.0 + acorn-jsx: 5.3.2(acorn@8.15.0) + eslint-visitor-keys: 5.0.0 + espree@9.6.1: dependencies: acorn: 8.15.0 @@ -14126,6 +14057,10 @@ snapshots: dependencies: estraverse: 5.3.0 + esquery@1.7.0: + dependencies: + estraverse: 5.3.0 + esrecurse@4.3.0: dependencies: estraverse: 5.3.0 @@ -14414,6 +14349,8 @@ snapshots: globals@16.5.0: {} + globals@17.0.0: {} + globrex@0.1.2: {} goober@2.1.18(csstype@3.2.3): @@ -14801,9 +14738,9 @@ snapshots: is-immutable-type@5.0.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): dependencies: - '@typescript-eslint/type-utils': 8.49.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/type-utils': 8.53.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) - ts-api-utils: 2.1.0(typescript@5.9.3) + ts-api-utils: 2.4.0(typescript@5.9.3) ts-declaration-location: 1.0.7(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: @@ -14889,8 +14826,6 @@ snapshots: jsdoc-type-pratt-parser@4.8.0: {} - 
jsdoc-type-pratt-parser@6.10.0: {} - jsdoc-type-pratt-parser@7.0.0: {} jsdom-testing-mocks@1.16.0: @@ -17345,9 +17280,9 @@ snapshots: toggle-selection@1.0.6: {} - toml-eslint-parser@0.10.1: + toml-eslint-parser@1.0.2: dependencies: - eslint-visitor-keys: 3.4.3 + eslint-visitor-keys: 5.0.0 totalist@3.0.1: {} @@ -17371,6 +17306,10 @@ snapshots: dependencies: typescript: 5.9.3 + ts-api-utils@2.4.0(typescript@5.9.3): + dependencies: + typescript: 5.9.3 + ts-debounce@4.0.0: {} ts-declaration-location@1.0.7(typescript@5.9.3): From cd497a8c525d59f923bb4ed392c6a5325e306342 Mon Sep 17 00:00:00 2001 From: pavior <103894247+Pavior0@users.noreply.github.com> Date: Fri, 16 Jan 2026 13:31:57 +0800 Subject: [PATCH 25/25] fix(web): use portal for variable picker in code editor (Fixes #31063) (#31066) --- .../components/editor/code-editor/editor-support-vars.tsx | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/web/app/components/workflow/nodes/_base/components/editor/code-editor/editor-support-vars.tsx b/web/app/components/workflow/nodes/_base/components/editor/code-editor/editor-support-vars.tsx index ae14e7ccbc..346ee7efe6 100644 --- a/web/app/components/workflow/nodes/_base/components/editor/code-editor/editor-support-vars.tsx +++ b/web/app/components/workflow/nodes/_base/components/editor/code-editor/editor-support-vars.tsx @@ -5,6 +5,7 @@ import type { NodeOutPutVar, Variable } from '@/app/components/workflow/types' import { useBoolean } from 'ahooks' import * as React from 'react' import { useEffect, useRef, useState } from 'react' +import { createPortal } from 'react-dom' import { useTranslation } from 'react-i18next' import VarReferenceVars from '@/app/components/workflow/nodes/_base/components/variable/var-reference-vars' import { cn } from '@/utils/classnames' @@ -147,7 +148,7 @@ const CodeEditor: FC = ({ onMount={onEditorMounted} placeholder={t('common.jinjaEditorPlaceholder', { ns: 'workflow' })!} /> - {isShowVarPicker && ( + {isShowVarPicker && 
createPortal(
= ({ onChange={handleSelectVar} isSupportFileVar={false} /> -
+
, + document.body, )}
)