From 15270f09af9d4d5512ed6d2a97fc403b4d25d998 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Sep 2025 12:16:05 +0800 Subject: [PATCH 001/126] chore(deps): bump boto3-stubs from 1.40.29 to 1.40.35 in /api (#26014) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- api/uv.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/api/uv.lock b/api/uv.lock index 1c346b984a..ee49e79eff 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -578,16 +578,16 @@ wheels = [ [[package]] name = "boto3-stubs" -version = "1.40.29" +version = "1.40.35" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore-stubs" }, { name = "types-s3transfer" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dd/35/0cdc62641577e8a0a6d4191ecc803fee16adf18de1e81280eb3d87c7d9e8/boto3_stubs-1.40.29.tar.gz", hash = "sha256:9fc7d24dcbcc786093daf42487a9ed4a58a6be7f1ccf28f5be0b2bad4a3edb11", size = 100996, upload-time = "2025-09-11T19:48:28.487Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/18/6a64ff9603845d635f6167b6d9a3f9a6e658d8a28eef36f8423eb5a99ae1/boto3_stubs-1.40.35.tar.gz", hash = "sha256:2d6f2dbe6e9b42deb7b8fbeed051461e7906903f26e99634d00be45cc40db41a", size = 100819, upload-time = "2025-09-19T19:42:36.372Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/a2/e47bf7595fadc6154ff2941e9ab9bb68173fba95f5ccdb24e5c13d16e5e5/boto3_stubs-1.40.29-py3-none-any.whl", hash = "sha256:1ad373b68b1c9a5e8e5deb243ef3a4c5b1d2c25c3477559eba1089ed4a0ee94e", size = 69769, upload-time = "2025-09-11T19:48:20.453Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d4/d744260908ad55903baefa086a3c9cabc50bfafd63c3f2d0e05688378013/boto3_stubs-1.40.35-py3-none-any.whl", hash = "sha256:2bb44e6c17831650a28e3e00bf5be0a6ba771fce08724ba978ffcd06a7bca7e3", size = 69689, upload-time = "2025-09-19T19:42:30.08Z" }, ] [package.optional-dependencies] From a39b18562749db83873f2099b1fd80129b1e5fb6 Mon Sep 17 00:00:00 2001 From: Wu Tianwei <30284043+WTW0313@users.noreply.github.com> Date: Mon, 22 Sep 2025 14:17:02 +0800 Subject: [PATCH 002/126] fix: comment out unused segmentation rule properties in RuleDetail component (#26031) --- .../embedding-process/rule-detail.tsx | 56 ++----------------- 1 file changed, 6 insertions(+), 50 deletions(-) diff --git a/web/app/components/datasets/documents/create-from-pipeline/processing/embedding-process/rule-detail.tsx b/web/app/components/datasets/documents/create-from-pipeline/processing/embedding-process/rule-detail.tsx index cb036def62..c8b1375069 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/processing/embedding-process/rule-detail.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/processing/embedding-process/rule-detail.tsx @@ -20,35 +20,8 @@ const RuleDetail = ({ }: RuleDetailProps) => { const { t } = useTranslation() - const segmentationRuleMap = { - mode: t('datasetDocuments.embedding.mode'), - segmentLength: t('datasetDocuments.embedding.segmentLength'), - textCleaning: t('datasetDocuments.embedding.textCleaning'), - } - - const getRuleName = useCallback((key: string) => { - if (key === 'remove_extra_spaces') - return t('datasetCreation.stepTwo.removeExtraSpaces') - - if (key === 'remove_urls_emails') - return t('datasetCreation.stepTwo.removeUrlEmails') - - if (key === 
'remove_stopwords') - return t('datasetCreation.stepTwo.removeStopwords') - }, [t]) - - const isNumber = useCallback((value: unknown) => { - return typeof value === 'number' - }, []) - const getValue = useCallback((field: string) => { - let value: string | number | undefined = '-' - const maxTokens = isNumber(sourceData?.rules?.segmentation?.max_tokens) - ? sourceData.rules.segmentation.max_tokens - : value - const childMaxTokens = isNumber(sourceData?.rules?.subchunk_segmentation?.max_tokens) - ? sourceData.rules.subchunk_segmentation.max_tokens - : value + let value = '-' switch (field) { case 'mode': value = !sourceData?.mode @@ -61,33 +34,16 @@ const RuleDetail = ({ ? t('dataset.parentMode.paragraph') : t('dataset.parentMode.fullDoc')}` break - case 'segmentLength': - value = !sourceData?.mode - ? value - // eslint-disable-next-line sonarjs/no-nested-conditional - : sourceData.mode === ProcessMode.general - ? maxTokens - : `${t('datasetDocuments.embedding.parentMaxTokens')} ${maxTokens}; ${t('datasetDocuments.embedding.childMaxTokens')} ${childMaxTokens}` - break - default: - value = !sourceData?.mode - ? value - : sourceData?.rules?.pre_processing_rules?.filter(rule => - rule.enabled).map(rule => getRuleName(rule.id)).join(',') - break } return value - }, [getRuleName, isNumber, sourceData, t]) + }, [sourceData, t]) return (
- {Object.keys(segmentationRuleMap).map((field) => { - return - })} + Date: Mon, 22 Sep 2025 14:31:49 +0800 Subject: [PATCH 003/126] fix: Correctly map source_url to preview_url in file fields (#25957) --- api/fields/file_fields.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/fields/file_fields.py b/api/fields/file_fields.py index dd359e2f5f..a6e2d7038b 100644 --- a/api/fields/file_fields.py +++ b/api/fields/file_fields.py @@ -32,7 +32,7 @@ file_fields = { "mime_type": fields.String, "created_by": fields.String, "created_at": TimestampField, - "preview_url": fields.String, + "preview_url": fields.String(attribute="source_url"), } From 75a10c276c8b7615190c95a0bc47fd4940b4da81 Mon Sep 17 00:00:00 2001 From: Stream Date: Mon, 22 Sep 2025 16:07:02 +0800 Subject: [PATCH 004/126] chore: remove mistakenly added trash file (#26041) --- spec.http | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 spec.http diff --git a/spec.http b/spec.http deleted file mode 100644 index dc3a37d08a..0000000000 --- a/spec.http +++ /dev/null @@ -1,4 +0,0 @@ -GET /console/api/spec/schema-definitions -Host: cloud-rag.dify.dev -authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoiNzExMDZhYTQtZWJlMC00NGMzLWI4NWYtMWQ4Mjc5ZTExOGZmIiwiZXhwIjoxNzU2MTkyNDE4LCJpc3MiOiJDTE9VRCIsInN1YiI6IkNvbnNvbGUgQVBJIFBhc3Nwb3J0In0.Yx_TMdWVXCp5YEoQ8WR90lRhHHKggxAQvEl5RUnkZuc -### \ No newline at end of file From 1e3df09fc6462bb03be1c0b9e37ccc5b414d0e04 Mon Sep 17 00:00:00 2001 From: QuantumGhost Date: Mon, 22 Sep 2025 18:23:01 +0800 Subject: [PATCH 005/126] chore(api): adjust monkey patching in gunicorn.conf.py (#26056) --- api/celery_entrypoint.py | 13 ++----------- api/gunicorn.conf.py | 28 +++++++++++++++++++++++++--- 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/api/celery_entrypoint.py b/api/celery_entrypoint.py index 4d1f17430d..28fa0972e8 100644 --- a/api/celery_entrypoint.py +++ b/api/celery_entrypoint.py @@ -1,20 +1,11 @@ -import logging - import psycogreen.gevent as pscycogreen_gevent # type: ignore from grpc.experimental import gevent as grpc_gevent # type: ignore -_logger = logging.getLogger(__name__) - - -def _log(message: str): - _logger.debug(message) - - # grpc gevent grpc_gevent.init_gevent() -_log("gRPC patched with gevent.") +print("gRPC patched with gevent.", flush=True) # noqa: T201 pscycogreen_gevent.patch_psycopg() -_log("psycopg2 patched with gevent.") +print("psycopg2 patched with gevent.", flush=True) # noqa: T201 from app import app, celery diff --git a/api/gunicorn.conf.py b/api/gunicorn.conf.py index fc91a43670..943ee100ca 100644 --- a/api/gunicorn.conf.py +++ b/api/gunicorn.conf.py @@ -1,10 +1,32 @@ import psycogreen.gevent as pscycogreen_gevent # type: ignore +from gevent import events as gevent_events from grpc.experimental import gevent as grpc_gevent # type: ignore +# NOTE(QuantumGhost): here we cannot use post_fork to patch gRPC, as +# grpc_gevent.init_gevent must be called after patching stdlib. +# Gunicorn calls `post_init` before applying monkey patch. +# Use `post_init` to setup gRPC gevent support would cause deadlock and +# some other weird issues. 
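+# Roughly speaking: grpc_gevent.init_gevent() swaps gRPC's blocking
+# primitives for gevent-cooperative ones, so it has to observe the
+# already-patched stdlib; hence we subscribe to gevent's own post-patch
+# event below instead of using a gunicorn server hook.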
+# +# ref: +# - https://github.com/grpc/grpc/blob/62533ea13879d6ee95c6fda11ec0826ca822c9dd/src/python/grpcio/grpc/experimental/gevent.py +# - https://github.com/gevent/gevent/issues/2060#issuecomment-3016768668 +# - https://github.com/benoitc/gunicorn/blob/master/gunicorn/arbiter.py#L607-L613 -def post_fork(server, worker): + +def post_patch(event): + # this function is only called for gevent worker. + # from gevent docs (https://www.gevent.org/api/gevent.monkey.html): + # You can also subscribe to the events to provide additional patching beyond what gevent distributes, either for + # additional standard library modules, or for third-party packages. The suggested time to do this patching is in + # the subscriber for gevent.events.GeventDidPatchBuiltinModulesEvent. + if not isinstance(event, gevent_events.GeventDidPatchBuiltinModulesEvent): + return # grpc gevent grpc_gevent.init_gevent() - server.log.info("gRPC patched with gevent.") + print("gRPC patched with gevent.", flush=True) # noqa: T201 pscycogreen_gevent.patch_psycopg() - server.log.info("psycopg2 patched with gevent.") + print("psycopg2 patched with gevent.", flush=True) # noqa: T201 + + +gevent_events.subscribers.append(post_patch) From d823da18dbc132e0033383421b476a738ae8baed Mon Sep 17 00:00:00 2001 From: Novice Date: Mon, 22 Sep 2025 19:14:24 +0800 Subject: [PATCH 006/126] fix: iteration and loop node single step run (#26036) --- api/core/app/apps/advanced_chat/app_runner.py | 27 +- .../app/apps/pipeline/pipeline_generator.py | 5 + api/core/app/apps/pipeline/pipeline_runner.py | 27 +- api/core/app/apps/workflow/app_runner.py | 28 +-- api/core/app/apps/workflow_app_runner.py | 233 +++++++++--------- .../nodes/iteration/iteration_node.py | 49 +--- api/core/workflow/nodes/loop/loop_node.py | 79 +++--- 7 files changed, 179 insertions(+), 269 deletions(-) diff --git a/api/core/app/apps/advanced_chat/app_runner.py b/api/core/app/apps/advanced_chat/app_runner.py index af8b7e4e17..919b135ec9 100644 --- a/api/core/app/apps/advanced_chat/app_runner.py +++ b/api/core/app/apps/advanced_chat/app_runner.py @@ -79,29 +79,12 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): if not app_record: raise ValueError("App not found") - if self.application_generate_entity.single_iteration_run: - # if only single iteration run is requested - graph_runtime_state = GraphRuntimeState( - variable_pool=VariablePool.empty(), - start_at=time.time(), - ) - graph, variable_pool = self._get_graph_and_variable_pool_of_single_iteration( + if self.application_generate_entity.single_iteration_run or self.application_generate_entity.single_loop_run: + # Handle single iteration or single loop run + graph, variable_pool, graph_runtime_state = self._prepare_single_node_execution( workflow=self._workflow, - node_id=self.application_generate_entity.single_iteration_run.node_id, - user_inputs=dict(self.application_generate_entity.single_iteration_run.inputs), - graph_runtime_state=graph_runtime_state, - ) - elif self.application_generate_entity.single_loop_run: - # if only single loop run is requested - graph_runtime_state = GraphRuntimeState( - variable_pool=VariablePool.empty(), - start_at=time.time(), - ) - graph, variable_pool = self._get_graph_and_variable_pool_of_single_loop( - workflow=self._workflow, - node_id=self.application_generate_entity.single_loop_run.node_id, - user_inputs=dict(self.application_generate_entity.single_loop_run.inputs), - graph_runtime_state=graph_runtime_state, + 
single_iteration_run=self.application_generate_entity.single_iteration_run, + single_loop_run=self.application_generate_entity.single_loop_run, ) else: inputs = self.application_generate_entity.inputs diff --git a/api/core/app/apps/pipeline/pipeline_generator.py b/api/core/app/apps/pipeline/pipeline_generator.py index 76627b876b..bd077c4cb8 100644 --- a/api/core/app/apps/pipeline/pipeline_generator.py +++ b/api/core/app/apps/pipeline/pipeline_generator.py @@ -427,6 +427,9 @@ class PipelineGenerator(BaseAppGenerator): invoke_from=InvokeFrom.DEBUGGER, call_depth=0, workflow_execution_id=str(uuid.uuid4()), + single_iteration_run=RagPipelineGenerateEntity.SingleIterationRunEntity( + node_id=node_id, inputs=args["inputs"] + ), ) contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers_lock.set(threading.Lock()) @@ -465,6 +468,7 @@ class PipelineGenerator(BaseAppGenerator): workflow_node_execution_repository=workflow_node_execution_repository, streaming=streaming, variable_loader=var_loader, + context=contextvars.copy_context(), ) def single_loop_generate( @@ -559,6 +563,7 @@ class PipelineGenerator(BaseAppGenerator): workflow_node_execution_repository=workflow_node_execution_repository, streaming=streaming, variable_loader=var_loader, + context=contextvars.copy_context(), ) def _generate_worker( diff --git a/api/core/app/apps/pipeline/pipeline_runner.py b/api/core/app/apps/pipeline/pipeline_runner.py index ebb8b15163..145f629c4d 100644 --- a/api/core/app/apps/pipeline/pipeline_runner.py +++ b/api/core/app/apps/pipeline/pipeline_runner.py @@ -86,29 +86,12 @@ class PipelineRunner(WorkflowBasedAppRunner): db.session.close() # if only single iteration run is requested - if self.application_generate_entity.single_iteration_run: - graph_runtime_state = GraphRuntimeState( - variable_pool=VariablePool.empty(), - start_at=time.time(), - ) - # if only single iteration run is requested - graph, variable_pool = self._get_graph_and_variable_pool_of_single_iteration( + if self.application_generate_entity.single_iteration_run or self.application_generate_entity.single_loop_run: + # Handle single iteration or single loop run + graph, variable_pool, graph_runtime_state = self._prepare_single_node_execution( workflow=workflow, - node_id=self.application_generate_entity.single_iteration_run.node_id, - user_inputs=self.application_generate_entity.single_iteration_run.inputs, - graph_runtime_state=graph_runtime_state, - ) - elif self.application_generate_entity.single_loop_run: - graph_runtime_state = GraphRuntimeState( - variable_pool=VariablePool.empty(), - start_at=time.time(), - ) - # if only single loop run is requested - graph, variable_pool = self._get_graph_and_variable_pool_of_single_loop( - workflow=workflow, - node_id=self.application_generate_entity.single_loop_run.node_id, - user_inputs=self.application_generate_entity.single_loop_run.inputs, - graph_runtime_state=graph_runtime_state, + single_iteration_run=self.application_generate_entity.single_iteration_run, + single_loop_run=self.application_generate_entity.single_loop_run, ) else: inputs = self.application_generate_entity.inputs diff --git a/api/core/app/apps/workflow/app_runner.py b/api/core/app/apps/workflow/app_runner.py index b009dc7715..943ae8ab4e 100644 --- a/api/core/app/apps/workflow/app_runner.py +++ b/api/core/app/apps/workflow/app_runner.py @@ -51,30 +51,12 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): app_config = self.application_generate_entity.app_config app_config = cast(WorkflowAppConfig, app_config) - # if only 
single iteration run is requested - if self.application_generate_entity.single_iteration_run: - # if only single iteration run is requested - graph_runtime_state = GraphRuntimeState( - variable_pool=VariablePool.empty(), - start_at=time.time(), - ) - graph, variable_pool = self._get_graph_and_variable_pool_of_single_iteration( + # if only single iteration or single loop run is requested + if self.application_generate_entity.single_iteration_run or self.application_generate_entity.single_loop_run: + graph, variable_pool, graph_runtime_state = self._prepare_single_node_execution( workflow=self._workflow, - node_id=self.application_generate_entity.single_iteration_run.node_id, - user_inputs=self.application_generate_entity.single_iteration_run.inputs, - graph_runtime_state=graph_runtime_state, - ) - elif self.application_generate_entity.single_loop_run: - # if only single loop run is requested - graph_runtime_state = GraphRuntimeState( - variable_pool=VariablePool.empty(), - start_at=time.time(), - ) - graph, variable_pool = self._get_graph_and_variable_pool_of_single_loop( - workflow=self._workflow, - node_id=self.application_generate_entity.single_loop_run.node_id, - user_inputs=self.application_generate_entity.single_loop_run.inputs, - graph_runtime_state=graph_runtime_state, + single_iteration_run=self.application_generate_entity.single_iteration_run, + single_loop_run=self.application_generate_entity.single_loop_run, ) else: inputs = self.application_generate_entity.inputs diff --git a/api/core/app/apps/workflow_app_runner.py b/api/core/app/apps/workflow_app_runner.py index 056e03fa14..564daba86d 100644 --- a/api/core/app/apps/workflow_app_runner.py +++ b/api/core/app/apps/workflow_app_runner.py @@ -1,3 +1,4 @@ +import time from collections.abc import Mapping from typing import Any, cast @@ -119,15 +120,81 @@ class WorkflowBasedAppRunner: return graph - def _get_graph_and_variable_pool_of_single_iteration( + def _prepare_single_node_execution( + self, + workflow: Workflow, + single_iteration_run: Any | None = None, + single_loop_run: Any | None = None, + ) -> tuple[Graph, VariablePool, GraphRuntimeState]: + """ + Prepare graph, variable pool, and runtime state for single node execution + (either single iteration or single loop). 
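+        Exactly one of the two run entities is expected: if both are supplied,
+        the iteration entity takes precedence; if neither is supplied, a
+        ValueError is raised.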
+ + Args: + workflow: The workflow instance + single_iteration_run: SingleIterationRunEntity if running single iteration, None otherwise + single_loop_run: SingleLoopRunEntity if running single loop, None otherwise + + Returns: + A tuple containing (graph, variable_pool, graph_runtime_state) + + Raises: + ValueError: If neither single_iteration_run nor single_loop_run is specified + """ + # Create initial runtime state with variable pool containing environment variables + graph_runtime_state = GraphRuntimeState( + variable_pool=VariablePool( + system_variables=SystemVariable.empty(), + user_inputs={}, + environment_variables=workflow.environment_variables, + ), + start_at=time.time(), + ) + + # Determine which type of single node execution and get graph/variable_pool + if single_iteration_run: + graph, variable_pool = self._get_graph_and_variable_pool_of_single_iteration( + workflow=workflow, + node_id=single_iteration_run.node_id, + user_inputs=dict(single_iteration_run.inputs), + graph_runtime_state=graph_runtime_state, + ) + elif single_loop_run: + graph, variable_pool = self._get_graph_and_variable_pool_of_single_loop( + workflow=workflow, + node_id=single_loop_run.node_id, + user_inputs=dict(single_loop_run.inputs), + graph_runtime_state=graph_runtime_state, + ) + else: + raise ValueError("Neither single_iteration_run nor single_loop_run is specified") + + # Return the graph, variable_pool, and the same graph_runtime_state used during graph creation + # This ensures all nodes in the graph reference the same GraphRuntimeState instance + return graph, variable_pool, graph_runtime_state + + def _get_graph_and_variable_pool_for_single_node_run( self, workflow: Workflow, node_id: str, - user_inputs: dict, + user_inputs: dict[str, Any], graph_runtime_state: GraphRuntimeState, + node_type_filter_key: str, # 'iteration_id' or 'loop_id' + node_type_label: str = "node", # 'iteration' or 'loop' for error messages ) -> tuple[Graph, VariablePool]: """ - Get variable pool of single iteration + Get graph and variable pool for single node execution (iteration or loop). 
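+        The graph config is pruned to the target node, the nodes scoped to it
+        via node_type_filter_key (plus its start node), and the edges between
+        them, so only that subgraph is instantiated for the run.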
+ + Args: + workflow: The workflow instance + node_id: The node ID to execute + user_inputs: User inputs for the node + graph_runtime_state: The graph runtime state + node_type_filter_key: The key to filter nodes ('iteration_id' or 'loop_id') + node_type_label: Label for error messages ('iteration' or 'loop') + + Returns: + A tuple containing (graph, variable_pool) """ # fetch workflow graph graph_config = workflow.graph_dict @@ -145,18 +212,22 @@ class WorkflowBasedAppRunner: if not isinstance(graph_config.get("edges"), list): raise ValueError("edges in workflow graph must be a list") - # filter nodes only in iteration + # filter nodes only in the specified node type (iteration or loop) + main_node_config = next((n for n in graph_config.get("nodes", []) if n.get("id") == node_id), None) + start_node_id = main_node_config.get("data", {}).get("start_node_id") if main_node_config else None node_configs = [ node for node in graph_config.get("nodes", []) - if node.get("id") == node_id or node.get("data", {}).get("iteration_id", "") == node_id + if node.get("id") == node_id + or node.get("data", {}).get(node_type_filter_key, "") == node_id + or (start_node_id and node.get("id") == start_node_id) ] graph_config["nodes"] = node_configs node_ids = [node.get("id") for node in node_configs] - # filter edges only in iteration + # filter edges only in the specified node type edge_configs = [ edge for edge in graph_config.get("edges", []) @@ -190,30 +261,26 @@ class WorkflowBasedAppRunner: raise ValueError("graph not found in workflow") # fetch node config from node id - iteration_node_config = None + target_node_config = None for node in node_configs: if node.get("id") == node_id: - iteration_node_config = node + target_node_config = node break - if not iteration_node_config: - raise ValueError("iteration node id not found in workflow graph") + if not target_node_config: + raise ValueError(f"{node_type_label} node id not found in workflow graph") # Get node class - node_type = NodeType(iteration_node_config.get("data", {}).get("type")) - node_version = iteration_node_config.get("data", {}).get("version", "1") + node_type = NodeType(target_node_config.get("data", {}).get("type")) + node_version = target_node_config.get("data", {}).get("version", "1") node_cls = NODE_TYPE_CLASSES_MAPPING[node_type][node_version] - # init variable pool - variable_pool = VariablePool( - system_variables=SystemVariable.empty(), - user_inputs={}, - environment_variables=workflow.environment_variables, - ) + # Use the variable pool from graph_runtime_state instead of creating a new one + variable_pool = graph_runtime_state.variable_pool try: variable_mapping = node_cls.extract_variable_selector_to_variable_mapping( - graph_config=workflow.graph_dict, config=iteration_node_config + graph_config=workflow.graph_dict, config=target_node_config ) except NotImplementedError: variable_mapping = {} @@ -234,120 +301,44 @@ class WorkflowBasedAppRunner: return graph, variable_pool + def _get_graph_and_variable_pool_of_single_iteration( + self, + workflow: Workflow, + node_id: str, + user_inputs: dict[str, Any], + graph_runtime_state: GraphRuntimeState, + ) -> tuple[Graph, VariablePool]: + """ + Get variable pool of single iteration + """ + return self._get_graph_and_variable_pool_for_single_node_run( + workflow=workflow, + node_id=node_id, + user_inputs=user_inputs, + graph_runtime_state=graph_runtime_state, + node_type_filter_key="iteration_id", + node_type_label="iteration", + ) + def _get_graph_and_variable_pool_of_single_loop( 
self, workflow: Workflow, node_id: str, - user_inputs: dict, + user_inputs: dict[str, Any], graph_runtime_state: GraphRuntimeState, ) -> tuple[Graph, VariablePool]: """ Get variable pool of single loop """ - # fetch workflow graph - graph_config = workflow.graph_dict - if not graph_config: - raise ValueError("workflow graph not found") - - graph_config = cast(dict[str, Any], graph_config) - - if "nodes" not in graph_config or "edges" not in graph_config: - raise ValueError("nodes or edges not found in workflow graph") - - if not isinstance(graph_config.get("nodes"), list): - raise ValueError("nodes in workflow graph must be a list") - - if not isinstance(graph_config.get("edges"), list): - raise ValueError("edges in workflow graph must be a list") - - # filter nodes only in loop - node_configs = [ - node - for node in graph_config.get("nodes", []) - if node.get("id") == node_id or node.get("data", {}).get("loop_id", "") == node_id - ] - - graph_config["nodes"] = node_configs - - node_ids = [node.get("id") for node in node_configs] - - # filter edges only in loop - edge_configs = [ - edge - for edge in graph_config.get("edges", []) - if (edge.get("source") is None or edge.get("source") in node_ids) - and (edge.get("target") is None or edge.get("target") in node_ids) - ] - - graph_config["edges"] = edge_configs - - # Create required parameters for Graph.init - graph_init_params = GraphInitParams( - tenant_id=workflow.tenant_id, - app_id=self._app_id, - workflow_id=workflow.id, - graph_config=graph_config, - user_id="", - user_from=UserFrom.ACCOUNT.value, - invoke_from=InvokeFrom.SERVICE_API.value, - call_depth=0, - ) - - node_factory = DifyNodeFactory( - graph_init_params=graph_init_params, + return self._get_graph_and_variable_pool_for_single_node_run( + workflow=workflow, + node_id=node_id, + user_inputs=user_inputs, graph_runtime_state=graph_runtime_state, + node_type_filter_key="loop_id", + node_type_label="loop", ) - # init graph - graph = Graph.init(graph_config=graph_config, node_factory=node_factory, root_node_id=node_id) - - if not graph: - raise ValueError("graph not found in workflow") - - # fetch node config from node id - loop_node_config = None - for node in node_configs: - if node.get("id") == node_id: - loop_node_config = node - break - - if not loop_node_config: - raise ValueError("loop node id not found in workflow graph") - - # Get node class - node_type = NodeType(loop_node_config.get("data", {}).get("type")) - node_version = loop_node_config.get("data", {}).get("version", "1") - node_cls = NODE_TYPE_CLASSES_MAPPING[node_type][node_version] - - # init variable pool - variable_pool = VariablePool( - system_variables=SystemVariable.empty(), - user_inputs={}, - environment_variables=workflow.environment_variables, - ) - - try: - variable_mapping = node_cls.extract_variable_selector_to_variable_mapping( - graph_config=workflow.graph_dict, config=loop_node_config - ) - except NotImplementedError: - variable_mapping = {} - load_into_variable_pool( - self._variable_loader, - variable_pool=variable_pool, - variable_mapping=variable_mapping, - user_inputs=user_inputs, - ) - - WorkflowEntry.mapping_user_inputs_to_variable_pool( - variable_mapping=variable_mapping, - user_inputs=user_inputs, - variable_pool=variable_pool, - tenant_id=workflow.tenant_id, - ) - - return graph, variable_pool - def _handle_event(self, workflow_entry: WorkflowEntry, event: GraphEngineEvent): """ Handle event diff --git a/api/core/workflow/nodes/iteration/iteration_node.py 
b/api/core/workflow/nodes/iteration/iteration_node.py index 5340a5b6ce..6e57b17d5c 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -372,43 +372,16 @@ class IterationNode(Node): variable_mapping: dict[str, Sequence[str]] = { f"{node_id}.input_selector": typed_node_data.iterator_selector, } + iteration_node_ids = set() - # init graph - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.graph import Graph - from core.workflow.nodes.node_factory import DifyNodeFactory - - # Create minimal GraphInitParams for static analysis - graph_init_params = GraphInitParams( - tenant_id="", - app_id="", - workflow_id="", - graph_config=graph_config, - user_id="", - user_from="", - invoke_from="", - call_depth=0, - ) - - # Create minimal GraphRuntimeState for static analysis - from core.workflow.entities import VariablePool - - graph_runtime_state = GraphRuntimeState( - variable_pool=VariablePool(), - start_at=0, - ) - - # Create node factory for static analysis - node_factory = DifyNodeFactory(graph_init_params=graph_init_params, graph_runtime_state=graph_runtime_state) - - iteration_graph = Graph.init( - graph_config=graph_config, - node_factory=node_factory, - root_node_id=typed_node_data.start_node_id, - ) - - if not iteration_graph: - raise IterationGraphNotFoundError("iteration graph not found") + # Find all nodes that belong to this loop + nodes = graph_config.get("nodes", []) + for node in nodes: + node_data = node.get("data", {}) + if node_data.get("iteration_id") == node_id: + in_iteration_node_id = node.get("id") + if in_iteration_node_id: + iteration_node_ids.add(in_iteration_node_id) # Get node configs from graph_config instead of non-existent node_id_config_mapping node_configs = {node["id"]: node for node in graph_config.get("nodes", []) if "id" in node} @@ -444,9 +417,7 @@ class IterationNode(Node): variable_mapping.update(sub_node_variable_mapping) # remove variable out from iteration - variable_mapping = { - key: value for key, value in variable_mapping.items() if value[0] not in iteration_graph.node_ids - } + variable_mapping = {key: value for key, value in variable_mapping.items() if value[0] not in iteration_node_ids} return variable_mapping diff --git a/api/core/workflow/nodes/loop/loop_node.py b/api/core/workflow/nodes/loop/loop_node.py index 2b988ad944..790975d556 100644 --- a/api/core/workflow/nodes/loop/loop_node.py +++ b/api/core/workflow/nodes/loop/loop_node.py @@ -1,3 +1,4 @@ +import contextlib import json import logging from collections.abc import Callable, Generator, Mapping, Sequence @@ -127,11 +128,13 @@ class LoopNode(Node): try: reach_break_condition = False if break_conditions: - _, _, reach_break_condition = condition_processor.process_conditions( - variable_pool=self.graph_runtime_state.variable_pool, - conditions=break_conditions, - operator=logical_operator, - ) + with contextlib.suppress(ValueError): + _, _, reach_break_condition = condition_processor.process_conditions( + variable_pool=self.graph_runtime_state.variable_pool, + conditions=break_conditions, + operator=logical_operator, + ) + if reach_break_condition: loop_count = 0 cost_tokens = 0 @@ -295,42 +298,11 @@ class LoopNode(Node): variable_mapping = {} - # init graph - from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool - from core.workflow.graph import Graph - from core.workflow.nodes.node_factory import DifyNodeFactory + # Extract loop node IDs statically 
from graph_config - # Create minimal GraphInitParams for static analysis - graph_init_params = GraphInitParams( - tenant_id="", - app_id="", - workflow_id="", - graph_config=graph_config, - user_id="", - user_from="", - invoke_from="", - call_depth=0, - ) + loop_node_ids = cls._extract_loop_node_ids_from_config(graph_config, node_id) - # Create minimal GraphRuntimeState for static analysis - graph_runtime_state = GraphRuntimeState( - variable_pool=VariablePool(), - start_at=0, - ) - - # Create node factory for static analysis - node_factory = DifyNodeFactory(graph_init_params=graph_init_params, graph_runtime_state=graph_runtime_state) - - loop_graph = Graph.init( - graph_config=graph_config, - node_factory=node_factory, - root_node_id=typed_node_data.start_node_id, - ) - - if not loop_graph: - raise ValueError("loop graph not found") - - # Get node configs from graph_config instead of non-existent node_id_config_mapping + # Get node configs from graph_config node_configs = {node["id"]: node for node in graph_config.get("nodes", []) if "id" in node} for sub_node_id, sub_node_config in node_configs.items(): if sub_node_config.get("data", {}).get("loop_id") != node_id: @@ -371,12 +343,35 @@ class LoopNode(Node): variable_mapping[f"{node_id}.{loop_variable.label}"] = selector # remove variable out from loop - variable_mapping = { - key: value for key, value in variable_mapping.items() if value[0] not in loop_graph.node_ids - } + variable_mapping = {key: value for key, value in variable_mapping.items() if value[0] not in loop_node_ids} return variable_mapping + @classmethod + def _extract_loop_node_ids_from_config(cls, graph_config: Mapping[str, Any], loop_node_id: str) -> set[str]: + """ + Extract node IDs that belong to a specific loop from graph configuration. + + This method statically analyzes the graph configuration to find all nodes + that are part of the specified loop, without creating actual node instances. 
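+        Membership is determined solely by each node's data.loop_id field.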
+ + :param graph_config: the complete graph configuration + :param loop_node_id: the ID of the loop node + :return: set of node IDs that belong to the loop + """ + loop_node_ids = set() + + # Find all nodes that belong to this loop + nodes = graph_config.get("nodes", []) + for node in nodes: + node_data = node.get("data", {}) + if node_data.get("loop_id") == loop_node_id: + node_id = node.get("id") + if node_id: + loop_node_ids.add(node_id) + + return loop_node_ids + @staticmethod def _get_segment_for_constant(var_type: SegmentType, original_value: Any) -> Segment: """Get the appropriate segment type for a constant value.""" From 24e8d21b3f04ff46cc9c10323002b3456aeef280 Mon Sep 17 00:00:00 2001 From: QuantumGhost Date: Mon, 22 Sep 2025 19:14:43 +0800 Subject: [PATCH 007/126] chore(api): bump version (#25917) --- api/pyproject.toml | 2 +- .../conftest.py | 2 +- api/uv.lock | 4 +-- docker/docker-compose-template.yaml | 10 +++---- docker/docker-compose.middleware.yaml | 29 +++++++++++++++---- docker/docker-compose.yaml | 10 +++---- web/package.json | 2 +- 7 files changed, 39 insertions(+), 20 deletions(-) diff --git a/api/pyproject.toml b/api/pyproject.toml index 0b2b41d6db..012702edd2 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "dify-api" -version = "2.0.0-beta2" +version = "1.9.0" requires-python = ">=3.11,<3.13" dependencies = [ diff --git a/api/tests/test_containers_integration_tests/conftest.py b/api/tests/test_containers_integration_tests/conftest.py index 145e31bca0..243c8d1d62 100644 --- a/api/tests/test_containers_integration_tests/conftest.py +++ b/api/tests/test_containers_integration_tests/conftest.py @@ -173,7 +173,7 @@ class DifyTestContainers: # Start Dify Plugin Daemon container for plugin management # Dify Plugin Daemon provides plugin lifecycle management and execution logger.info("Initializing Dify Plugin Daemon container...") - self.dify_plugin_daemon = DockerContainer(image="langgenius/dify-plugin-daemon:0.2.0-local") + self.dify_plugin_daemon = DockerContainer(image="langgenius/dify-plugin-daemon:0.3.0-local") self.dify_plugin_daemon.with_exposed_ports(5002) self.dify_plugin_daemon.env = { "DB_HOST": db_host, diff --git a/api/uv.lock b/api/uv.lock index ee49e79eff..7ce71cd215 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.11, <3.13" resolution-markers = [ "python_full_version >= '3.12.4' and sys_platform == 'linux'", @@ -1273,7 +1273,7 @@ wheels = [ [[package]] name = "dify-api" -version = "2.0.0b2" +version = "1.9.0" source = { virtual = "." } dependencies = [ { name = "arize-phoenix-otel" }, diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 93159b056f..685fc325d0 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:2.0.0-beta.2 + image: langgenius/dify-api:1.9.0 restart: always environment: # Use the shared environment variables. @@ -31,7 +31,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:2.0.0-beta.2 + image: langgenius/dify-api:1.9.0 restart: always environment: # Use the shared environment variables. @@ -58,7 +58,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. 
worker_beat: - image: langgenius/dify-api:2.0.0-beta.2 + image: langgenius/dify-api:1.9.0 restart: always environment: # Use the shared environment variables. @@ -76,7 +76,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:2.0.0-beta.2 + image: langgenius/dify-web:1.9.0 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -177,7 +177,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.3.0b1-local + image: langgenius/dify-plugin-daemon:0.3.0-local restart: always environment: # Use the shared environment variables. diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index 9e7060aad2..d350503f27 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -20,7 +20,17 @@ services: ports: - "${EXPOSE_POSTGRES_PORT:-5432}:5432" healthcheck: - test: [ 'CMD', 'pg_isready', '-h', 'db', '-U', '${PGUSER:-postgres}', '-d', '${POSTGRES_DB:-dify}' ] + test: + [ + "CMD", + "pg_isready", + "-h", + "db", + "-U", + "${PGUSER:-postgres}", + "-d", + "${POSTGRES_DB:-dify}", + ] interval: 1s timeout: 3s retries: 30 @@ -41,7 +51,11 @@ services: ports: - "${EXPOSE_REDIS_PORT:-6379}:6379" healthcheck: - test: [ 'CMD-SHELL', 'redis-cli -a ${REDIS_PASSWORD:-difyai123456} ping | grep -q PONG' ] + test: + [ + "CMD-SHELL", + "redis-cli -a ${REDIS_PASSWORD:-difyai123456} ping | grep -q PONG", + ] # The DifySandbox sandbox: @@ -65,13 +79,13 @@ services: - ./volumes/sandbox/dependencies:/dependencies - ./volumes/sandbox/conf:/conf healthcheck: - test: [ "CMD", "curl", "-f", "http://localhost:8194/health" ] + test: ["CMD", "curl", "-f", "http://localhost:8194/health"] networks: - ssrf_proxy_network # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.3.0b1-local + image: langgenius/dify-plugin-daemon:0.3.0-local restart: always env_file: - ./middleware.env @@ -143,7 +157,12 @@ services: volumes: - ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template - ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh - entrypoint: [ "sh", "-c", "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ] + entrypoint: + [ + "sh", + "-c", + "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh", + ] env_file: - ./middleware.env environment: diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 2d6ba572e6..dc94883b75 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -593,7 +593,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:2.0.0-beta.2 + image: langgenius/dify-api:1.9.0 restart: always environment: # Use the shared environment variables. @@ -622,7 +622,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:2.0.0-beta.2 + image: langgenius/dify-api:1.9.0 restart: always environment: # Use the shared environment variables. @@ -649,7 +649,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:2.0.0-beta.2 + image: langgenius/dify-api:1.9.0 restart: always environment: # Use the shared environment variables. @@ -667,7 +667,7 @@ services: # Frontend web application. 
web: - image: langgenius/dify-web:2.0.0-beta.2 + image: langgenius/dify-web:1.9.0 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -768,7 +768,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.3.0b1-local + image: langgenius/dify-plugin-daemon:0.3.0-local restart: always environment: # Use the shared environment variables. diff --git a/web/package.json b/web/package.json index d9bd413b0e..57ab734eb2 100644 --- a/web/package.json +++ b/web/package.json @@ -1,6 +1,6 @@ { "name": "dify-web", - "version": "2.0.0-beta2", + "version": "1.9.0", "private": true, "packageManager": "pnpm@10.16.0", "engines": { From ef80d3b7078c25d5a205f51e1802d3c8fec9229e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=9D=9E=E6=B3=95=E6=93=8D=E4=BD=9C?= Date: Mon, 22 Sep 2025 19:39:17 +0800 Subject: [PATCH 008/126] fix: Ensure compatibility with old provider name when updating model credentials (#26017) --- api/core/entities/provider_configuration.py | 72 +++++++++------------ api/core/provider_manager.py | 22 ++++++- 2 files changed, 52 insertions(+), 42 deletions(-) diff --git a/api/core/entities/provider_configuration.py b/api/core/entities/provider_configuration.py index de3b0964ff..111de89178 100644 --- a/api/core/entities/provider_configuration.py +++ b/api/core/entities/provider_configuration.py @@ -205,16 +205,10 @@ class ProviderConfiguration(BaseModel): """ Get custom provider record. """ - # get provider - model_provider_id = ModelProviderID(self.provider.provider) - provider_names = [self.provider.provider] - if model_provider_id.is_langgenius(): - provider_names.append(model_provider_id.provider_name) - stmt = select(Provider).where( Provider.tenant_id == self.tenant_id, Provider.provider_type == ProviderType.CUSTOM.value, - Provider.provider_name.in_(provider_names), + Provider.provider_name.in_(self._get_provider_names()), ) return session.execute(stmt).scalar_one_or_none() @@ -276,7 +270,7 @@ class ProviderConfiguration(BaseModel): """ stmt = select(ProviderCredential.id).where( ProviderCredential.tenant_id == self.tenant_id, - ProviderCredential.provider_name == self.provider.provider, + ProviderCredential.provider_name.in_(self._get_provider_names()), ProviderCredential.credential_name == credential_name, ) if exclude_id: @@ -324,7 +318,7 @@ class ProviderConfiguration(BaseModel): try: stmt = select(ProviderCredential).where( ProviderCredential.tenant_id == self.tenant_id, - ProviderCredential.provider_name == self.provider.provider, + ProviderCredential.provider_name.in_(self._get_provider_names()), ProviderCredential.id == credential_id, ) credential_record = s.execute(stmt).scalar_one_or_none() @@ -374,7 +368,7 @@ class ProviderConfiguration(BaseModel): session=session, query_factory=lambda: select(ProviderCredential).where( ProviderCredential.tenant_id == self.tenant_id, - ProviderCredential.provider_name == self.provider.provider, + ProviderCredential.provider_name.in_(self._get_provider_names()), ), ) @@ -387,7 +381,7 @@ class ProviderConfiguration(BaseModel): session=session, query_factory=lambda: select(ProviderModelCredential).where( ProviderModelCredential.tenant_id == self.tenant_id, - ProviderModelCredential.provider_name == self.provider.provider, + ProviderModelCredential.provider_name.in_(self._get_provider_names()), ProviderModelCredential.model_name == model, ProviderModelCredential.model_type == model_type.to_origin_model_type(), ), @@ -423,6 +417,16 @@ class ProviderConfiguration(BaseModel): logger.warning("Error generating 
next credential name: %s", str(e)) return "API KEY 1" + def _get_provider_names(self): + """ + The provider name might be stored in the database as either `openai` or `langgenius/openai/openai`. + """ + model_provider_id = ModelProviderID(self.provider.provider) + provider_names = [self.provider.provider] + if model_provider_id.is_langgenius(): + provider_names.append(model_provider_id.provider_name) + return provider_names + def create_provider_credential(self, credentials: dict, credential_name: str | None): """ Add custom provider credentials. @@ -501,7 +505,7 @@ class ProviderConfiguration(BaseModel): stmt = select(ProviderCredential).where( ProviderCredential.id == credential_id, ProviderCredential.tenant_id == self.tenant_id, - ProviderCredential.provider_name == self.provider.provider, + ProviderCredential.provider_name.in_(self._get_provider_names()), ) # Get the credential record to update @@ -554,7 +558,7 @@ class ProviderConfiguration(BaseModel): # Find all load balancing configs that use this credential_id stmt = select(LoadBalancingModelConfig).where( LoadBalancingModelConfig.tenant_id == self.tenant_id, - LoadBalancingModelConfig.provider_name == self.provider.provider, + LoadBalancingModelConfig.provider_name.in_(self._get_provider_names()), LoadBalancingModelConfig.credential_id == credential_id, LoadBalancingModelConfig.credential_source_type == credential_source, ) @@ -591,7 +595,7 @@ class ProviderConfiguration(BaseModel): stmt = select(ProviderCredential).where( ProviderCredential.id == credential_id, ProviderCredential.tenant_id == self.tenant_id, - ProviderCredential.provider_name == self.provider.provider, + ProviderCredential.provider_name.in_(self._get_provider_names()), ) # Get the credential record to update @@ -602,7 +606,7 @@ class ProviderConfiguration(BaseModel): # Check if this credential is used in load balancing configs lb_stmt = select(LoadBalancingModelConfig).where( LoadBalancingModelConfig.tenant_id == self.tenant_id, - LoadBalancingModelConfig.provider_name == self.provider.provider, + LoadBalancingModelConfig.provider_name.in_(self._get_provider_names()), LoadBalancingModelConfig.credential_id == credential_id, LoadBalancingModelConfig.credential_source_type == "provider", ) @@ -624,7 +628,7 @@ class ProviderConfiguration(BaseModel): # if this is the last credential, we need to delete the provider record count_stmt = select(func.count(ProviderCredential.id)).where( ProviderCredential.tenant_id == self.tenant_id, - ProviderCredential.provider_name == self.provider.provider, + ProviderCredential.provider_name.in_(self._get_provider_names()), ) available_credentials_count = session.execute(count_stmt).scalar() or 0 session.delete(credential_record) @@ -668,7 +672,7 @@ class ProviderConfiguration(BaseModel): stmt = select(ProviderCredential).where( ProviderCredential.id == credential_id, ProviderCredential.tenant_id == self.tenant_id, - ProviderCredential.provider_name == self.provider.provider, + ProviderCredential.provider_name.in_(self._get_provider_names()), ) credential_record = session.execute(stmt).scalar_one_or_none() if not credential_record: @@ -737,7 +741,7 @@ class ProviderConfiguration(BaseModel): stmt = select(ProviderModelCredential).where( ProviderModelCredential.id == credential_id, ProviderModelCredential.tenant_id == self.tenant_id, - ProviderModelCredential.provider_name == self.provider.provider, + ProviderModelCredential.provider_name.in_(self._get_provider_names()), ProviderModelCredential.model_name == model, 
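+            # NOTE: provider_name above is matched via _get_provider_names() so
+            # that both the `openai` and `langgenius/openai/openai` spellings
+            # are covered.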
ProviderModelCredential.model_type == model_type.to_origin_model_type(), ) @@ -784,7 +788,7 @@ class ProviderConfiguration(BaseModel): """ stmt = select(ProviderModelCredential).where( ProviderModelCredential.tenant_id == self.tenant_id, - ProviderModelCredential.provider_name == self.provider.provider, + ProviderModelCredential.provider_name.in_(self._get_provider_names()), ProviderModelCredential.model_name == model, ProviderModelCredential.model_type == model_type.to_origin_model_type(), ProviderModelCredential.credential_name == credential_name, @@ -860,7 +864,7 @@ class ProviderConfiguration(BaseModel): stmt = select(ProviderModelCredential).where( ProviderModelCredential.id == credential_id, ProviderModelCredential.tenant_id == self.tenant_id, - ProviderModelCredential.provider_name == self.provider.provider, + ProviderModelCredential.provider_name.in_(self._get_provider_names()), ProviderModelCredential.model_name == model, ProviderModelCredential.model_type == model_type.to_origin_model_type(), ) @@ -997,7 +1001,7 @@ class ProviderConfiguration(BaseModel): stmt = select(ProviderModelCredential).where( ProviderModelCredential.id == credential_id, ProviderModelCredential.tenant_id == self.tenant_id, - ProviderModelCredential.provider_name == self.provider.provider, + ProviderModelCredential.provider_name.in_(self._get_provider_names()), ProviderModelCredential.model_name == model, ProviderModelCredential.model_type == model_type.to_origin_model_type(), ) @@ -1042,7 +1046,7 @@ class ProviderConfiguration(BaseModel): stmt = select(ProviderModelCredential).where( ProviderModelCredential.id == credential_id, ProviderModelCredential.tenant_id == self.tenant_id, - ProviderModelCredential.provider_name == self.provider.provider, + ProviderModelCredential.provider_name.in_(self._get_provider_names()), ProviderModelCredential.model_name == model, ProviderModelCredential.model_type == model_type.to_origin_model_type(), ) @@ -1052,7 +1056,7 @@ class ProviderConfiguration(BaseModel): lb_stmt = select(LoadBalancingModelConfig).where( LoadBalancingModelConfig.tenant_id == self.tenant_id, - LoadBalancingModelConfig.provider_name == self.provider.provider, + LoadBalancingModelConfig.provider_name.in_(self._get_provider_names()), LoadBalancingModelConfig.credential_id == credential_id, LoadBalancingModelConfig.credential_source_type == "custom_model", ) @@ -1075,7 +1079,7 @@ class ProviderConfiguration(BaseModel): # if this is the last credential, we need to delete the custom model record count_stmt = select(func.count(ProviderModelCredential.id)).where( ProviderModelCredential.tenant_id == self.tenant_id, - ProviderModelCredential.provider_name == self.provider.provider, + ProviderModelCredential.provider_name.in_(self._get_provider_names()), ProviderModelCredential.model_name == model, ProviderModelCredential.model_type == model_type.to_origin_model_type(), ) @@ -1115,7 +1119,7 @@ class ProviderConfiguration(BaseModel): stmt = select(ProviderModelCredential).where( ProviderModelCredential.id == credential_id, ProviderModelCredential.tenant_id == self.tenant_id, - ProviderModelCredential.provider_name == self.provider.provider, + ProviderModelCredential.provider_name.in_(self._get_provider_names()), ProviderModelCredential.model_name == model, ProviderModelCredential.model_type == model_type.to_origin_model_type(), ) @@ -1157,7 +1161,7 @@ class ProviderConfiguration(BaseModel): stmt = select(ProviderModelCredential).where( ProviderModelCredential.id == credential_id, 
ProviderModelCredential.tenant_id == self.tenant_id, - ProviderModelCredential.provider_name == self.provider.provider, + ProviderModelCredential.provider_name.in_(self._get_provider_names()), ProviderModelCredential.model_name == model, ProviderModelCredential.model_type == model_type.to_origin_model_type(), ) @@ -1204,15 +1208,9 @@ class ProviderConfiguration(BaseModel): """ Get provider model setting. """ - - model_provider_id = ModelProviderID(self.provider.provider) - provider_names = [self.provider.provider] - if model_provider_id.is_langgenius(): - provider_names.append(model_provider_id.provider_name) - stmt = select(ProviderModelSetting).where( ProviderModelSetting.tenant_id == self.tenant_id, - ProviderModelSetting.provider_name.in_(provider_names), + ProviderModelSetting.provider_name.in_(self._get_provider_names()), ProviderModelSetting.model_type == model_type.to_origin_model_type(), ProviderModelSetting.model_name == model, ) @@ -1384,15 +1382,9 @@ class ProviderConfiguration(BaseModel): return def _switch(s: Session): - # get preferred provider - model_provider_id = ModelProviderID(self.provider.provider) - provider_names = [self.provider.provider] - if model_provider_id.is_langgenius(): - provider_names.append(model_provider_id.provider_name) - stmt = select(TenantPreferredModelProvider).where( TenantPreferredModelProvider.tenant_id == self.tenant_id, - TenantPreferredModelProvider.provider_name.in_(provider_names), + TenantPreferredModelProvider.provider_name.in_(self._get_provider_names()), ) preferred_model_provider = s.execute(stmt).scalars().first() diff --git a/api/core/provider_manager.py b/api/core/provider_manager.py index 6f642ab5db..499d39bd5d 100644 --- a/api/core/provider_manager.py +++ b/api/core/provider_manager.py @@ -513,6 +513,21 @@ class ProviderManager: return provider_name_to_provider_load_balancing_model_configs_dict + @staticmethod + def _get_provider_names(provider_name: str) -> list[str]: + """ + provider_name: `openai` or `langgenius/openai/openai` + return: [`openai`, `langgenius/openai/openai`] + """ + provider_names = [provider_name] + model_provider_id = ModelProviderID(provider_name) + if model_provider_id.is_langgenius(): + if "/" in provider_name: + provider_names.append(model_provider_id.provider_name) + else: + provider_names.append(str(model_provider_id)) + return provider_names + @staticmethod def get_provider_available_credentials(tenant_id: str, provider_name: str) -> list[CredentialConfiguration]: """ @@ -525,7 +540,10 @@ class ProviderManager: with Session(db.engine, expire_on_commit=False) as session: stmt = ( select(ProviderCredential) - .where(ProviderCredential.tenant_id == tenant_id, ProviderCredential.provider_name == provider_name) + .where( + ProviderCredential.tenant_id == tenant_id, + ProviderCredential.provider_name.in_(ProviderManager._get_provider_names(provider_name)), + ) .order_by(ProviderCredential.created_at.desc()) ) @@ -554,7 +572,7 @@ class ProviderManager: select(ProviderModelCredential) .where( ProviderModelCredential.tenant_id == tenant_id, - ProviderModelCredential.provider_name == provider_name, + ProviderModelCredential.provider_name.in_(ProviderManager._get_provider_names(provider_name)), ProviderModelCredential.model_name == model_name, ProviderModelCredential.model_type == model_type, ) From c60c754ac9fb1cde43862dd83a76a59ee61c20f5 Mon Sep 17 00:00:00 2001 From: Jyong <76649700+JohnJyong@users.noreply.github.com> Date: Mon, 22 Sep 2025 19:47:39 +0800 Subject: [PATCH 009/126] fix preview url (#26059) --- 
api/fields/file_fields.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/api/fields/file_fields.py b/api/fields/file_fields.py index a6e2d7038b..c12ebc09c8 100644 --- a/api/fields/file_fields.py +++ b/api/fields/file_fields.py @@ -32,7 +32,8 @@ file_fields = { "mime_type": fields.String, "created_by": fields.String, "created_at": TimestampField, - "preview_url": fields.String(attribute="source_url"), + "preview_url": fields.String, + "source_url": fields.String, } From cd40cde790618c6f6ddc9ca68aa5f36a30b6c4ba Mon Sep 17 00:00:00 2001 From: Jyong <76649700+JohnJyong@users.noreply.github.com> Date: Mon, 22 Sep 2025 20:50:30 +0800 Subject: [PATCH 010/126] fix tenant not exist (#26066) --- api/commands.py | 213 +++++++++++++++++++++++++++--------------------- 1 file changed, 122 insertions(+), 91 deletions(-) diff --git a/api/commands.py b/api/commands.py index 259d823dea..cb8aa8430a 100644 --- a/api/commands.py +++ b/api/commands.py @@ -1448,41 +1448,52 @@ def transform_datasource_credentials(): notion_credentials_tenant_mapping[tenant_id] = [] notion_credentials_tenant_mapping[tenant_id].append(notion_credential) for tenant_id, notion_tenant_credentials in notion_credentials_tenant_mapping.items(): - # check notion plugin is installed - installed_plugins = installer_manager.list_plugins(tenant_id) - installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins] - if notion_plugin_id not in installed_plugins_ids: - if notion_plugin_unique_identifier: - # install notion plugin - PluginService.install_from_marketplace_pkg(tenant_id, [notion_plugin_unique_identifier]) - auth_count = 0 - for notion_tenant_credential in notion_tenant_credentials: - auth_count += 1 - # get credential oauth params - access_token = notion_tenant_credential.access_token - # notion info - notion_info = notion_tenant_credential.source_info - workspace_id = notion_info.get("workspace_id") - workspace_name = notion_info.get("workspace_name") - workspace_icon = notion_info.get("workspace_icon") - new_credentials = { - "integration_secret": encrypter.encrypt_token(tenant_id, access_token), - "workspace_id": workspace_id, - "workspace_name": workspace_name, - "workspace_icon": workspace_icon, - } - datasource_provider = DatasourceProvider( - provider="notion_datasource", - tenant_id=tenant_id, - plugin_id=notion_plugin_id, - auth_type=oauth_credential_type.value, - encrypted_credentials=new_credentials, - name=f"Auth {auth_count}", - avatar_url=workspace_icon or "default", - is_default=False, + tenant = db.session.query(Tenant).filter_by(id=tenant_id).first() + if not tenant: + continue + try: + # check notion plugin is installed + installed_plugins = installer_manager.list_plugins(tenant_id) + installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins] + if notion_plugin_id not in installed_plugins_ids: + if notion_plugin_unique_identifier: + # install notion plugin + PluginService.install_from_marketplace_pkg(tenant_id, [notion_plugin_unique_identifier]) + auth_count = 0 + for notion_tenant_credential in notion_tenant_credentials: + auth_count += 1 + # get credential oauth params + access_token = notion_tenant_credential.access_token + # notion info + notion_info = notion_tenant_credential.source_info + workspace_id = notion_info.get("workspace_id") + workspace_name = notion_info.get("workspace_name") + workspace_icon = notion_info.get("workspace_icon") + new_credentials = { + "integration_secret": encrypter.encrypt_token(tenant_id, access_token), + "workspace_id": 
workspace_id, + "workspace_name": workspace_name, + "workspace_icon": workspace_icon, + } + datasource_provider = DatasourceProvider( + provider="notion_datasource", + tenant_id=tenant_id, + plugin_id=notion_plugin_id, + auth_type=oauth_credential_type.value, + encrypted_credentials=new_credentials, + name=f"Auth {auth_count}", + avatar_url=workspace_icon or "default", + is_default=False, + ) + db.session.add(datasource_provider) + deal_notion_count += 1 + except Exception as e: + click.echo( + click.style( + f"Error transforming notion credentials: {str(e)}, tenant_id: {tenant_id}", fg="red" + ) ) - db.session.add(datasource_provider) - deal_notion_count += 1 + continue db.session.commit() # deal firecrawl credentials deal_firecrawl_count = 0 @@ -1495,37 +1506,48 @@ def transform_datasource_credentials(): firecrawl_credentials_tenant_mapping[tenant_id] = [] firecrawl_credentials_tenant_mapping[tenant_id].append(firecrawl_credential) for tenant_id, firecrawl_tenant_credentials in firecrawl_credentials_tenant_mapping.items(): - # check firecrawl plugin is installed - installed_plugins = installer_manager.list_plugins(tenant_id) - installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins] - if firecrawl_plugin_id not in installed_plugins_ids: - if firecrawl_plugin_unique_identifier: - # install firecrawl plugin - PluginService.install_from_marketplace_pkg(tenant_id, [firecrawl_plugin_unique_identifier]) + tenant = db.session.query(Tenant).filter_by(id=tenant_id).first() + if not tenant: + continue + try: + # check firecrawl plugin is installed + installed_plugins = installer_manager.list_plugins(tenant_id) + installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins] + if firecrawl_plugin_id not in installed_plugins_ids: + if firecrawl_plugin_unique_identifier: + # install firecrawl plugin + PluginService.install_from_marketplace_pkg(tenant_id, [firecrawl_plugin_unique_identifier]) - auth_count = 0 - for firecrawl_tenant_credential in firecrawl_tenant_credentials: - auth_count += 1 - # get credential api key - credentials_json = json.loads(firecrawl_tenant_credential.credentials) - api_key = credentials_json.get("config", {}).get("api_key") - base_url = credentials_json.get("config", {}).get("base_url") - new_credentials = { - "firecrawl_api_key": api_key, - "base_url": base_url, - } - datasource_provider = DatasourceProvider( - provider="firecrawl", - tenant_id=tenant_id, - plugin_id=firecrawl_plugin_id, - auth_type=api_key_credential_type.value, - encrypted_credentials=new_credentials, - name=f"Auth {auth_count}", - avatar_url="default", - is_default=False, + auth_count = 0 + for firecrawl_tenant_credential in firecrawl_tenant_credentials: + auth_count += 1 + # get credential api key + credentials_json = json.loads(firecrawl_tenant_credential.credentials) + api_key = credentials_json.get("config", {}).get("api_key") + base_url = credentials_json.get("config", {}).get("base_url") + new_credentials = { + "firecrawl_api_key": api_key, + "base_url": base_url, + } + datasource_provider = DatasourceProvider( + provider="firecrawl", + tenant_id=tenant_id, + plugin_id=firecrawl_plugin_id, + auth_type=api_key_credential_type.value, + encrypted_credentials=new_credentials, + name=f"Auth {auth_count}", + avatar_url="default", + is_default=False, + ) + db.session.add(datasource_provider) + deal_firecrawl_count += 1 + except Exception as e: + click.echo( + click.style( + f"Error transforming firecrawl credentials: {str(e)}, tenant_id: {tenant_id}", fg="red" + ) ) - 
db.session.add(datasource_provider) - deal_firecrawl_count += 1 + continue db.session.commit() # deal jina credentials deal_jina_count = 0 @@ -1538,36 +1560,45 @@ def transform_datasource_credentials(): jina_credentials_tenant_mapping[tenant_id] = [] jina_credentials_tenant_mapping[tenant_id].append(jina_credential) for tenant_id, jina_tenant_credentials in jina_credentials_tenant_mapping.items(): - # check jina plugin is installed - installed_plugins = installer_manager.list_plugins(tenant_id) - installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins] - if jina_plugin_id not in installed_plugins_ids: - if jina_plugin_unique_identifier: - # install jina plugin - logger.debug("Installing Jina plugin %s", jina_plugin_unique_identifier) - PluginService.install_from_marketplace_pkg(tenant_id, [jina_plugin_unique_identifier]) + tenant = db.session.query(Tenant).filter_by(id=tenant_id).first() + if not tenant: + continue + try: + # check jina plugin is installed + installed_plugins = installer_manager.list_plugins(tenant_id) + installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins] + if jina_plugin_id not in installed_plugins_ids: + if jina_plugin_unique_identifier: + # install jina plugin + logger.debug("Installing Jina plugin %s", jina_plugin_unique_identifier) + PluginService.install_from_marketplace_pkg(tenant_id, [jina_plugin_unique_identifier]) - auth_count = 0 - for jina_tenant_credential in jina_tenant_credentials: - auth_count += 1 - # get credential api key - credentials_json = json.loads(jina_tenant_credential.credentials) - api_key = credentials_json.get("config", {}).get("api_key") - new_credentials = { - "integration_secret": api_key, - } - datasource_provider = DatasourceProvider( - provider="jina", - tenant_id=tenant_id, - plugin_id=jina_plugin_id, - auth_type=api_key_credential_type.value, - encrypted_credentials=new_credentials, - name=f"Auth {auth_count}", - avatar_url="default", - is_default=False, + auth_count = 0 + for jina_tenant_credential in jina_tenant_credentials: + auth_count += 1 + # get credential api key + credentials_json = json.loads(jina_tenant_credential.credentials) + api_key = credentials_json.get("config", {}).get("api_key") + new_credentials = { + "integration_secret": api_key, + } + datasource_provider = DatasourceProvider( + provider="jina", + tenant_id=tenant_id, + plugin_id=jina_plugin_id, + auth_type=api_key_credential_type.value, + encrypted_credentials=new_credentials, + name=f"Auth {auth_count}", + avatar_url="default", + is_default=False, + ) + db.session.add(datasource_provider) + deal_jina_count += 1 + except Exception as e: + click.echo( + click.style(f"Error transforming jina credentials: {str(e)}, tenant_id: {tenant_id}", fg="red") ) - db.session.add(datasource_provider) - deal_jina_count += 1 + continue db.session.commit() except Exception as e: click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red")) From 0c4193bd915065319c5cf3641784a05cf6472bcc Mon Sep 17 00:00:00 2001 From: Jyong <76649700+JohnJyong@users.noreply.github.com> Date: Mon, 22 Sep 2025 21:28:42 +0800 Subject: [PATCH 011/126] fix avatar-url to text (#26068) --- .../2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py | 2 +- api/models/oauth.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py b/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py index 742cfc345a..53a95141ec 100644 --- 
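The change below widens the column by editing migration 68519ad5cd18 in place, which only affects fresh installs; a deployment that already ran the original revision keeps the VARCHAR(255) column. A follow-up Alembic revision along these lines would widen it there too (a sketch only — the table name "datasource_providers" and the new revision id are assumptions, not part of this patch):

    import sqlalchemy as sa
    from alembic import op

    revision = "xxxxxxxxxxxx"  # placeholder
    down_revision = "68519ad5cd18"

    def upgrade():
        op.alter_column(
            "datasource_providers",
            "avatar_url",
            existing_type=sa.String(length=255),
            type_=sa.Text(),
            existing_nullable=True,
        )

    def downgrade():
        op.alter_column(
            "datasource_providers",
            "avatar_url",
            existing_type=sa.Text(),
            type_=sa.String(length=255),
            existing_nullable=True,
        )
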
a/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py +++ b/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py @@ -47,7 +47,7 @@ def upgrade(): sa.Column('plugin_id', sa.String(length=255), nullable=False), sa.Column('auth_type', sa.String(length=255), nullable=False), sa.Column('encrypted_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.Column('avatar_url', sa.String(length=255), nullable=True), + sa.Column('avatar_url', sa.Text(), nullable=True), sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False), sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False), sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), diff --git a/api/models/oauth.py b/api/models/oauth.py index b6a76793fc..1d5d37e3e1 100644 --- a/api/models/oauth.py +++ b/api/models/oauth.py @@ -35,7 +35,7 @@ class DatasourceProvider(Base): plugin_id: Mapped[str] = db.Column(db.String(255), nullable=False) auth_type: Mapped[str] = db.Column(db.String(255), nullable=False) encrypted_credentials: Mapped[dict] = db.Column(JSONB, nullable=False) - avatar_url: Mapped[str] = db.Column(db.String(255), nullable=True, default="default") + avatar_url: Mapped[str] = db.Column(db.Text, nullable=True, default="default") is_default: Mapped[bool] = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) expires_at: Mapped[int] = db.Column(db.Integer, nullable=False, server_default="-1") From 8940decd1b84e6bcc33e32379a669e33835f0955 Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Tue, 23 Sep 2025 00:07:09 +0900 Subject: [PATCH 012/126] more httpx (#25651) Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- .../nacos/http_request.py | 8 +++--- .../console/auth/data_source_oauth.py | 6 ++-- api/controllers/console/auth/oauth.py | 8 ++++-- api/controllers/console/version.py | 8 ++++-- .../aliyun_trace/data_exporter/traceclient.py | 6 ++-- api/libs/oauth.py | 12 ++++---- api/libs/oauth_data_source.py | 12 ++++---- api/services/auth/firecrawl/firecrawl.py | 4 +-- api/services/auth/jina.py | 4 +-- api/services/auth/jina/jina.py | 4 +-- api/services/auth/watercrawl/watercrawl.py | 4 +-- api/services/operation_service.py | 4 +-- api/services/website_service.py | 16 +++++------ .../integration_tests/plugin/__mock/http.py | 15 ++++------ .../vdb/clickzetta/test_docker_integration.py | 4 +-- .../controllers/console/auth/test_oauth.py | 4 +-- .../unit_tests/libs/test_oauth_clients.py | 24 ++++++++-------- .../services/auth/test_auth_integration.py | 20 ++++++------- .../services/auth/test_firecrawl_auth.py | 26 ++++++++--------- .../services/auth/test_jina_auth.py | 20 ++++++------- .../services/auth/test_watercrawl_auth.py | 28 +++++++++---------- 21 files changed, 120 insertions(+), 117 deletions(-) diff --git a/api/configs/remote_settings_sources/nacos/http_request.py b/api/configs/remote_settings_sources/nacos/http_request.py index 6401c5830d..1a0744a21b 100644 --- a/api/configs/remote_settings_sources/nacos/http_request.py +++ b/api/configs/remote_settings_sources/nacos/http_request.py @@ -5,7 +5,7 @@ import logging import os import time -import requests +import httpx logger = logging.getLogger(__name__) @@ -30,10 +30,10 @@ class NacosHttpClient: params = {} try: self._inject_auth_info(headers, params) - response = requests.request(method, url="http://" + self.server + url, headers=headers, 
params=params) + response = httpx.request(method, url="http://" + self.server + url, headers=headers, params=params) response.raise_for_status() return response.text - except requests.RequestException as e: + except httpx.RequestError as e: return f"Request to Nacos failed: {e}" def _inject_auth_info(self, headers: dict[str, str], params: dict[str, str], module: str = "config") -> None: @@ -78,7 +78,7 @@ class NacosHttpClient: params = {"username": self.username, "password": self.password} url = "http://" + self.server + "/nacos/v1/auth/login" try: - resp = requests.request("POST", url, headers=None, params=params) + resp = httpx.request("POST", url, headers=None, params=params) resp.raise_for_status() response_data = resp.json() self.token = response_data.get("accessToken") diff --git a/api/controllers/console/auth/data_source_oauth.py b/api/controllers/console/auth/data_source_oauth.py index fc4ba3a2c7..6f1fd2f11a 100644 --- a/api/controllers/console/auth/data_source_oauth.py +++ b/api/controllers/console/auth/data_source_oauth.py @@ -1,6 +1,6 @@ import logging -import requests +import httpx from flask import current_app, redirect, request from flask_login import current_user from flask_restx import Resource, fields @@ -119,7 +119,7 @@ class OAuthDataSourceBinding(Resource): return {"error": "Invalid code"}, 400 try: oauth_provider.get_access_token(code) - except requests.HTTPError as e: + except httpx.HTTPStatusError as e: logger.exception( "An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text ) @@ -152,7 +152,7 @@ class OAuthDataSourceSync(Resource): return {"error": "Invalid provider"}, 400 try: oauth_provider.sync_data_source(binding_id) - except requests.HTTPError as e: + except httpx.HTTPStatusError as e: logger.exception( "An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text ) diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index 1602ee6eea..5528dc0569 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -1,6 +1,6 @@ import logging -import requests +import httpx from flask import current_app, redirect, request from flask_restx import Resource from sqlalchemy import select @@ -101,8 +101,10 @@ class OAuthCallback(Resource): try: token = oauth_provider.get_access_token(code) user_info = oauth_provider.get_user_info(token) - except requests.RequestException as e: - error_text = e.response.text if e.response else str(e) + except httpx.RequestError as e: + error_text = str(e) + if isinstance(e, httpx.HTTPStatusError): + error_text = e.response.text logger.exception("An error occurred during the OAuth process with %s: %s", provider, error_text) return {"error": "OAuth process failed"}, 400 diff --git a/api/controllers/console/version.py b/api/controllers/console/version.py index 8d081ad995..965a520f70 100644 --- a/api/controllers/console/version.py +++ b/api/controllers/console/version.py @@ -1,7 +1,7 @@ import json import logging -import requests +import httpx from flask_restx import Resource, fields, reqparse from packaging import version @@ -57,7 +57,11 @@ class VersionApi(Resource): return result try: - response = requests.get(check_update_url, {"current_version": args["current_version"]}, timeout=(3, 10)) + response = httpx.get( + check_update_url, + params={"current_version": args["current_version"]}, + timeout=httpx.Timeout(connect=3, read=10), + ) except Exception as error: logger.warning("Check update version error: 
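One detail to watch in the version-check change above: requests accepted a plain (connect, read) tuple, but httpx.Timeout must be given either a positional default or all four of connect/read/write/pool — httpx.Timeout(connect=3, read=10) by itself raises ValueError at call time. Valid equivalents of the old tuple look like this:

    import httpx

    # a 10s default with the connect phase tightened to 3s
    t1 = httpx.Timeout(10.0, connect=3.0)

    # or all four phases spelled out explicitly
    t2 = httpx.Timeout(connect=3.0, read=10.0, write=10.0, pool=3.0)

    # httpx.get(check_update_url, params={...}, timeout=t1)
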
%s.", str(error)) result["version"] = args["current_version"] diff --git a/api/core/ops/aliyun_trace/data_exporter/traceclient.py b/api/core/ops/aliyun_trace/data_exporter/traceclient.py index 09cb6e3fc1..baaf9fd9f6 100644 --- a/api/core/ops/aliyun_trace/data_exporter/traceclient.py +++ b/api/core/ops/aliyun_trace/data_exporter/traceclient.py @@ -8,7 +8,7 @@ from collections import deque from collections.abc import Sequence from datetime import datetime -import requests +import httpx from opentelemetry import trace as trace_api from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter from opentelemetry.sdk.resources import Resource @@ -65,13 +65,13 @@ class TraceClient: def api_check(self): try: - response = requests.head(self.endpoint, timeout=5) + response = httpx.head(self.endpoint, timeout=5) if response.status_code == 405: return True else: logger.debug("AliyunTrace API check failed: Unexpected status code: %s", response.status_code) return False - except requests.RequestException as e: + except httpx.RequestError as e: logger.debug("AliyunTrace API check failed: %s", str(e)) raise ValueError(f"AliyunTrace API check failed: {str(e)}") diff --git a/api/libs/oauth.py b/api/libs/oauth.py index 35bd6c2c7c..889a5a3248 100644 --- a/api/libs/oauth.py +++ b/api/libs/oauth.py @@ -1,7 +1,7 @@ import urllib.parse from dataclasses import dataclass -import requests +import httpx @dataclass @@ -58,7 +58,7 @@ class GitHubOAuth(OAuth): "redirect_uri": self.redirect_uri, } headers = {"Accept": "application/json"} - response = requests.post(self._TOKEN_URL, data=data, headers=headers) + response = httpx.post(self._TOKEN_URL, data=data, headers=headers) response_json = response.json() access_token = response_json.get("access_token") @@ -70,11 +70,11 @@ class GitHubOAuth(OAuth): def get_raw_user_info(self, token: str): headers = {"Authorization": f"token {token}"} - response = requests.get(self._USER_INFO_URL, headers=headers) + response = httpx.get(self._USER_INFO_URL, headers=headers) response.raise_for_status() user_info = response.json() - email_response = requests.get(self._EMAIL_INFO_URL, headers=headers) + email_response = httpx.get(self._EMAIL_INFO_URL, headers=headers) email_info = email_response.json() primary_email: dict = next((email for email in email_info if email["primary"] == True), {}) @@ -112,7 +112,7 @@ class GoogleOAuth(OAuth): "redirect_uri": self.redirect_uri, } headers = {"Accept": "application/json"} - response = requests.post(self._TOKEN_URL, data=data, headers=headers) + response = httpx.post(self._TOKEN_URL, data=data, headers=headers) response_json = response.json() access_token = response_json.get("access_token") @@ -124,7 +124,7 @@ class GoogleOAuth(OAuth): def get_raw_user_info(self, token: str): headers = {"Authorization": f"Bearer {token}"} - response = requests.get(self._USER_INFO_URL, headers=headers) + response = httpx.get(self._USER_INFO_URL, headers=headers) response.raise_for_status() return response.json() diff --git a/api/libs/oauth_data_source.py b/api/libs/oauth_data_source.py index 987c5d7135..ae0ae3bcb6 100644 --- a/api/libs/oauth_data_source.py +++ b/api/libs/oauth_data_source.py @@ -1,7 +1,7 @@ import urllib.parse from typing import Any -import requests +import httpx from flask_login import current_user from sqlalchemy import select @@ -43,7 +43,7 @@ class NotionOAuth(OAuthDataSource): data = {"code": code, "grant_type": "authorization_code", "redirect_uri": self.redirect_uri} headers = {"Accept": "application/json"} auth = 
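The exception mapping used throughout this patch is requests.RequestException → httpx.RequestError and requests.HTTPError → httpx.HTTPStatusError, with one behavioural difference worth keeping in mind: in requests, HTTPError subclasses RequestException, whereas in httpx, RequestError and HTTPStatusError are siblings under the common base httpx.HTTPError. An "except httpx.RequestError" clause therefore no longer catches errors raised by raise_for_status(); catching the base class covers both, for example:

    import httpx

    try:
        response = httpx.get("https://example.com/userinfo")  # illustrative URL
        response.raise_for_status()
    except httpx.HTTPError as e:  # base of RequestError *and* HTTPStatusError
        if isinstance(e, httpx.HTTPStatusError):
            detail = e.response.text  # the server answered with a 4xx/5xx
        else:
            detail = str(e)  # transport-level failure, no response available
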
(self.client_id, self.client_secret) - response = requests.post(self._TOKEN_URL, data=data, auth=auth, headers=headers) + response = httpx.post(self._TOKEN_URL, data=data, auth=auth, headers=headers) response_json = response.json() access_token = response_json.get("access_token") @@ -239,7 +239,7 @@ class NotionOAuth(OAuthDataSource): "Notion-Version": "2022-06-28", } - response = requests.post(url=self._NOTION_PAGE_SEARCH, json=data, headers=headers) + response = httpx.post(url=self._NOTION_PAGE_SEARCH, json=data, headers=headers) response_json = response.json() results.extend(response_json.get("results", [])) @@ -254,7 +254,7 @@ class NotionOAuth(OAuthDataSource): "Authorization": f"Bearer {access_token}", "Notion-Version": "2022-06-28", } - response = requests.get(url=f"{self._NOTION_BLOCK_SEARCH}/{block_id}", headers=headers) + response = httpx.get(url=f"{self._NOTION_BLOCK_SEARCH}/{block_id}", headers=headers) response_json = response.json() if response.status_code != 200: message = response_json.get("message", "unknown error") @@ -270,7 +270,7 @@ class NotionOAuth(OAuthDataSource): "Authorization": f"Bearer {access_token}", "Notion-Version": "2022-06-28", } - response = requests.get(url=self._NOTION_BOT_USER, headers=headers) + response = httpx.get(url=self._NOTION_BOT_USER, headers=headers) response_json = response.json() if "object" in response_json and response_json["object"] == "user": user_type = response_json["type"] @@ -294,7 +294,7 @@ class NotionOAuth(OAuthDataSource): "Authorization": f"Bearer {access_token}", "Notion-Version": "2022-06-28", } - response = requests.post(url=self._NOTION_PAGE_SEARCH, json=data, headers=headers) + response = httpx.post(url=self._NOTION_PAGE_SEARCH, json=data, headers=headers) response_json = response.json() results.extend(response_json.get("results", [])) diff --git a/api/services/auth/firecrawl/firecrawl.py b/api/services/auth/firecrawl/firecrawl.py index 6ef034f292..d455475bfc 100644 --- a/api/services/auth/firecrawl/firecrawl.py +++ b/api/services/auth/firecrawl/firecrawl.py @@ -1,6 +1,6 @@ import json -import requests +import httpx from services.auth.api_key_auth_base import ApiKeyAuthBase @@ -36,7 +36,7 @@ class FirecrawlAuth(ApiKeyAuthBase): return {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"} def _post_request(self, url, data, headers): - return requests.post(url, headers=headers, json=data) + return httpx.post(url, headers=headers, json=data) def _handle_error(self, response): if response.status_code in {402, 409, 500}: diff --git a/api/services/auth/jina.py b/api/services/auth/jina.py index 6100e9afc8..afaed28ac9 100644 --- a/api/services/auth/jina.py +++ b/api/services/auth/jina.py @@ -1,6 +1,6 @@ import json -import requests +import httpx from services.auth.api_key_auth_base import ApiKeyAuthBase @@ -31,7 +31,7 @@ class JinaAuth(ApiKeyAuthBase): return {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"} def _post_request(self, url, data, headers): - return requests.post(url, headers=headers, json=data) + return httpx.post(url, headers=headers, json=data) def _handle_error(self, response): if response.status_code in {402, 409, 500}: diff --git a/api/services/auth/jina/jina.py b/api/services/auth/jina/jina.py index 6100e9afc8..afaed28ac9 100644 --- a/api/services/auth/jina/jina.py +++ b/api/services/auth/jina/jina.py @@ -1,6 +1,6 @@ import json -import requests +import httpx from services.auth.api_key_auth_base import ApiKeyAuthBase @@ -31,7 +31,7 @@ class 
JinaAuth(ApiKeyAuthBase): return {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"} def _post_request(self, url, data, headers): - return requests.post(url, headers=headers, json=data) + return httpx.post(url, headers=headers, json=data) def _handle_error(self, response): if response.status_code in {402, 409, 500}: diff --git a/api/services/auth/watercrawl/watercrawl.py b/api/services/auth/watercrawl/watercrawl.py index 153ab5ba75..b2d28a83d1 100644 --- a/api/services/auth/watercrawl/watercrawl.py +++ b/api/services/auth/watercrawl/watercrawl.py @@ -1,7 +1,7 @@ import json from urllib.parse import urljoin -import requests +import httpx from services.auth.api_key_auth_base import ApiKeyAuthBase @@ -31,7 +31,7 @@ class WatercrawlAuth(ApiKeyAuthBase): return {"Content-Type": "application/json", "X-API-KEY": self.api_key} def _get_request(self, url, headers): - return requests.get(url, headers=headers) + return httpx.get(url, headers=headers) def _handle_error(self, response): if response.status_code in {402, 409, 500}: diff --git a/api/services/operation_service.py b/api/services/operation_service.py index 8c8b64bcd5..c05e9d555c 100644 --- a/api/services/operation_service.py +++ b/api/services/operation_service.py @@ -1,6 +1,6 @@ import os -import requests +import httpx class OperationService: @@ -12,7 +12,7 @@ class OperationService: headers = {"Content-Type": "application/json", "Billing-Api-Secret-Key": cls.secret_key} url = f"{cls.base_url}{endpoint}" - response = requests.request(method, url, json=json, params=params, headers=headers) + response = httpx.request(method, url, json=json, params=params, headers=headers) return response.json() diff --git a/api/services/website_service.py b/api/services/website_service.py index 7634fdd8f3..37588d6ba5 100644 --- a/api/services/website_service.py +++ b/api/services/website_service.py @@ -3,7 +3,7 @@ import json from dataclasses import dataclass from typing import Any -import requests +import httpx from flask_login import current_user from core.helper import encrypter @@ -216,7 +216,7 @@ class WebsiteService: @classmethod def _crawl_with_jinareader(cls, request: CrawlRequest, api_key: str) -> dict[str, Any]: if not request.options.crawl_sub_pages: - response = requests.get( + response = httpx.get( f"https://r.jina.ai/{request.url}", headers={"Accept": "application/json", "Authorization": f"Bearer {api_key}"}, ) @@ -224,7 +224,7 @@ class WebsiteService: raise ValueError("Failed to crawl:") return {"status": "active", "data": response.json().get("data")} else: - response = requests.post( + response = httpx.post( "https://adaptivecrawl-kir3wx7b3a-uc.a.run.app", json={ "url": request.url, @@ -287,7 +287,7 @@ class WebsiteService: @classmethod def _get_jinareader_status(cls, job_id: str, api_key: str) -> dict[str, Any]: - response = requests.post( + response = httpx.post( "https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app", headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}, json={"taskId": job_id}, @@ -303,7 +303,7 @@ class WebsiteService: } if crawl_status_data["status"] == "completed": - response = requests.post( + response = httpx.post( "https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app", headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}, json={"taskId": job_id, "urls": list(data.get("processed", {}).keys())}, @@ -362,7 +362,7 @@ class WebsiteService: @classmethod def _get_jinareader_url_data(cls, job_id: str, url: str, api_key: str) -> dict[str, Any] 
| None: if not job_id: - response = requests.get( + response = httpx.get( f"https://r.jina.ai/{url}", headers={"Accept": "application/json", "Authorization": f"Bearer {api_key}"}, ) @@ -371,7 +371,7 @@ class WebsiteService: return dict(response.json().get("data", {})) else: # Get crawl status first - status_response = requests.post( + status_response = httpx.post( "https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app", headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}, json={"taskId": job_id}, @@ -381,7 +381,7 @@ class WebsiteService: raise ValueError("Crawl job is not completed") # Get processed data - data_response = requests.post( + data_response = httpx.post( "https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app", headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}, json={"taskId": job_id, "urls": list(status_data.get("processed", {}).keys())}, diff --git a/api/tests/integration_tests/plugin/__mock/http.py b/api/tests/integration_tests/plugin/__mock/http.py index 8f8988899b..d5cf47e2c2 100644 --- a/api/tests/integration_tests/plugin/__mock/http.py +++ b/api/tests/integration_tests/plugin/__mock/http.py @@ -1,8 +1,8 @@ import os from typing import Literal +import httpx import pytest -import requests from core.plugin.entities.plugin_daemon import PluginDaemonBasicResponse from core.tools.entities.common_entities import I18nObject @@ -27,13 +27,11 @@ class MockedHttp: @classmethod def requests_request( cls, method: Literal["GET", "POST", "PUT", "DELETE", "PATCH", "HEAD"], url: str, **kwargs - ) -> requests.Response: + ) -> httpx.Response: """ - Mocked requests.request + Mocked httpx.request """ - request = requests.PreparedRequest() - request.method = method - request.url = url + request = httpx.Request(method, url) if url.endswith("/tools"): content = PluginDaemonBasicResponse[list[ToolProviderEntity]]( code=0, message="success", data=cls.list_tools() @@ -41,8 +39,7 @@ class MockedHttp: else: raise ValueError("") - response = requests.Response() - response.status_code = 200 + response = httpx.Response(status_code=200) response.request = request response._content = content.encode("utf-8") return response @@ -54,7 +51,7 @@ MOCK_SWITCH = os.getenv("MOCK_SWITCH", "false").lower() == "true" @pytest.fixture def setup_http_mock(request, monkeypatch: pytest.MonkeyPatch): if MOCK_SWITCH: - monkeypatch.setattr(requests, "request", MockedHttp.requests_request) + monkeypatch.setattr(httpx, "request", MockedHttp.requests_request) def unpatch(): monkeypatch.undo() diff --git a/api/tests/integration_tests/vdb/clickzetta/test_docker_integration.py b/api/tests/integration_tests/vdb/clickzetta/test_docker_integration.py index ef54eaa174..60e3f30f26 100644 --- a/api/tests/integration_tests/vdb/clickzetta/test_docker_integration.py +++ b/api/tests/integration_tests/vdb/clickzetta/test_docker_integration.py @@ -6,7 +6,7 @@ Test Clickzetta integration in Docker environment import os import time -import requests +import httpx from clickzetta import connect @@ -66,7 +66,7 @@ def test_dify_api(): max_retries = 30 for i in range(max_retries): try: - response = requests.get(f"{base_url}/console/api/health") + response = httpx.get(f"{base_url}/console/api/health") if response.status_code == 200: print("✓ Dify API is ready") break diff --git a/api/tests/unit_tests/controllers/console/auth/test_oauth.py b/api/tests/unit_tests/controllers/console/auth/test_oauth.py index a7bdf5de33..1a2e27e8fe 100644 --- 
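A small aside on the mocked response above: assigning to response._content reaches into a private attribute, while httpx.Response accepts both the body and the originating request at construction, so an equivalent public-API form of the same mock would be:

    import httpx

    request = httpx.Request("GET", "http://daemon/tools")  # hypothetical URL
    response = httpx.Response(200, request=request, content=b'{"code": 0}')
    assert response.text == '{"code": 0}'
    assert response.request.url == request.url
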
a/api/tests/unit_tests/controllers/console/auth/test_oauth.py +++ b/api/tests/unit_tests/controllers/console/auth/test_oauth.py @@ -201,9 +201,9 @@ class TestOAuthCallback: mock_db.session.rollback = MagicMock() # Import the real requests module to create a proper exception - import requests + import httpx - request_exception = requests.exceptions.RequestException("OAuth error") + request_exception = httpx.RequestError("OAuth error") request_exception.response = MagicMock() request_exception.response.text = str(exception) diff --git a/api/tests/unit_tests/libs/test_oauth_clients.py b/api/tests/unit_tests/libs/test_oauth_clients.py index 629d15b81a..b6595a8c57 100644 --- a/api/tests/unit_tests/libs/test_oauth_clients.py +++ b/api/tests/unit_tests/libs/test_oauth_clients.py @@ -1,8 +1,8 @@ import urllib.parse from unittest.mock import MagicMock, patch +import httpx import pytest -import requests from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo @@ -68,7 +68,7 @@ class TestGitHubOAuth(BaseOAuthTest): ({}, None, True), ], ) - @patch("requests.post") + @patch("httpx.post") def test_should_retrieve_access_token( self, mock_post, oauth, mock_response, response_data, expected_token, should_raise ): @@ -105,7 +105,7 @@ class TestGitHubOAuth(BaseOAuthTest): ), ], ) - @patch("requests.get") + @patch("httpx.get") def test_should_retrieve_user_info_correctly(self, mock_get, oauth, user_data, email_data, expected_email): user_response = MagicMock() user_response.json.return_value = user_data @@ -121,11 +121,11 @@ class TestGitHubOAuth(BaseOAuthTest): assert user_info.name == user_data["name"] assert user_info.email == expected_email - @patch("requests.get") + @patch("httpx.get") def test_should_handle_network_errors(self, mock_get, oauth): - mock_get.side_effect = requests.exceptions.RequestException("Network error") + mock_get.side_effect = httpx.RequestError("Network error") - with pytest.raises(requests.exceptions.RequestException): + with pytest.raises(httpx.RequestError): oauth.get_raw_user_info("test_token") @@ -167,7 +167,7 @@ class TestGoogleOAuth(BaseOAuthTest): ({}, None, True), ], ) - @patch("requests.post") + @patch("httpx.post") def test_should_retrieve_access_token( self, mock_post, oauth, oauth_config, mock_response, response_data, expected_token, should_raise ): @@ -201,7 +201,7 @@ class TestGoogleOAuth(BaseOAuthTest): ({"sub": "123", "email": "test@example.com", "name": "Test User"}, ""), # Always returns empty string ], ) - @patch("requests.get") + @patch("httpx.get") def test_should_retrieve_user_info_correctly(self, mock_get, oauth, mock_response, user_data, expected_name): mock_response.json.return_value = user_data mock_get.return_value = mock_response @@ -217,12 +217,12 @@ class TestGoogleOAuth(BaseOAuthTest): @pytest.mark.parametrize( "exception_type", [ - requests.exceptions.HTTPError, - requests.exceptions.ConnectionError, - requests.exceptions.Timeout, + httpx.HTTPError, + httpx.ConnectError, + httpx.TimeoutException, ], ) - @patch("requests.get") + @patch("httpx.get") def test_should_handle_http_errors(self, mock_get, oauth, exception_type): mock_response = MagicMock() mock_response.raise_for_status.side_effect = exception_type("Error") diff --git a/api/tests/unit_tests/services/auth/test_auth_integration.py b/api/tests/unit_tests/services/auth/test_auth_integration.py index bb39b92c09..acfc5cc526 100644 --- a/api/tests/unit_tests/services/auth/test_auth_integration.py +++ b/api/tests/unit_tests/services/auth/test_auth_integration.py @@ -6,8 +6,8 @@ import json 
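The rewritten service tests below also show the mock side of the migration: unittest.mock patch targets name the attribute through which httpx is looked up, so every @patch("...requests.post") decorator moves to "...httpx.post" along with the import. A minimal standalone illustration of the same idea:

    from unittest.mock import MagicMock, patch

    import httpx

    def fetch_status(url: str) -> int:
        return httpx.get(url).status_code

    with patch("httpx.get") as mock_get:
        mock_get.return_value = MagicMock(status_code=200)
        assert fetch_status("https://example.com") == 200
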
from concurrent.futures import ThreadPoolExecutor from unittest.mock import Mock, patch +import httpx import pytest -import requests from services.auth.api_key_auth_factory import ApiKeyAuthFactory from services.auth.api_key_auth_service import ApiKeyAuthService @@ -26,7 +26,7 @@ class TestAuthIntegration: self.watercrawl_credentials = {"auth_type": "x-api-key", "config": {"api_key": "wc_test_key_789"}} @patch("services.auth.api_key_auth_service.db.session") - @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.firecrawl.firecrawl.httpx.post") @patch("services.auth.api_key_auth_service.encrypter.encrypt_token") def test_end_to_end_auth_flow(self, mock_encrypt, mock_http, mock_session): """Test complete authentication flow: request → validation → encryption → storage""" @@ -47,7 +47,7 @@ class TestAuthIntegration: mock_session.add.assert_called_once() mock_session.commit.assert_called_once() - @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.firecrawl.firecrawl.httpx.post") def test_cross_component_integration(self, mock_http): """Test factory → provider → HTTP call integration""" mock_http.return_value = self._create_success_response() @@ -97,7 +97,7 @@ class TestAuthIntegration: assert "another_secret" not in factory_str @patch("services.auth.api_key_auth_service.db.session") - @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.firecrawl.firecrawl.httpx.post") @patch("services.auth.api_key_auth_service.encrypter.encrypt_token") def test_concurrent_creation_safety(self, mock_encrypt, mock_http, mock_session): """Test concurrent authentication creation safety""" @@ -142,31 +142,31 @@ class TestAuthIntegration: with pytest.raises((ValueError, KeyError, TypeError, AttributeError)): ApiKeyAuthFactory(AuthType.FIRECRAWL, invalid_input) - @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.firecrawl.firecrawl.httpx.post") def test_http_error_handling(self, mock_http): """Test proper HTTP error handling""" mock_response = Mock() mock_response.status_code = 401 mock_response.text = '{"error": "Unauthorized"}' - mock_response.raise_for_status.side_effect = requests.exceptions.HTTPError("Unauthorized") + mock_response.raise_for_status.side_effect = httpx.HTTPError("Unauthorized") mock_http.return_value = mock_response # PT012: Split into single statement for pytest.raises factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, self.firecrawl_credentials) - with pytest.raises((requests.exceptions.HTTPError, Exception)): + with pytest.raises((httpx.HTTPError, Exception)): factory.validate_credentials() @patch("services.auth.api_key_auth_service.db.session") - @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.firecrawl.firecrawl.httpx.post") def test_network_failure_recovery(self, mock_http, mock_session): """Test system recovery from network failures""" - mock_http.side_effect = requests.exceptions.RequestException("Network timeout") + mock_http.side_effect = httpx.RequestError("Network timeout") mock_session.add = Mock() mock_session.commit = Mock() args = {"category": self.category, "provider": AuthType.FIRECRAWL, "credentials": self.firecrawl_credentials} - with pytest.raises(requests.exceptions.RequestException): + with pytest.raises(httpx.RequestError): ApiKeyAuthService.create_provider_auth(self.tenant_id_1, args) mock_session.commit.assert_not_called() diff --git a/api/tests/unit_tests/services/auth/test_firecrawl_auth.py 
b/api/tests/unit_tests/services/auth/test_firecrawl_auth.py index ffdf5897ed..b5ee55706d 100644 --- a/api/tests/unit_tests/services/auth/test_firecrawl_auth.py +++ b/api/tests/unit_tests/services/auth/test_firecrawl_auth.py @@ -1,7 +1,7 @@ from unittest.mock import MagicMock, patch +import httpx import pytest -import requests from services.auth.firecrawl.firecrawl import FirecrawlAuth @@ -64,7 +64,7 @@ class TestFirecrawlAuth: FirecrawlAuth(credentials) assert str(exc_info.value) == expected_error - @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.firecrawl.firecrawl.httpx.post") def test_should_validate_valid_credentials_successfully(self, mock_post, auth_instance): """Test successful credential validation""" mock_response = MagicMock() @@ -95,7 +95,7 @@ class TestFirecrawlAuth: (500, "Internal server error"), ], ) - @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.firecrawl.firecrawl.httpx.post") def test_should_handle_http_errors(self, mock_post, status_code, error_message, auth_instance): """Test handling of various HTTP error codes""" mock_response = MagicMock() @@ -115,7 +115,7 @@ class TestFirecrawlAuth: (401, "Not JSON", True, "Expecting value"), # JSON decode error ], ) - @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.firecrawl.firecrawl.httpx.post") def test_should_handle_unexpected_errors( self, mock_post, status_code, response_text, has_json_error, expected_error_contains, auth_instance ): @@ -134,13 +134,13 @@ class TestFirecrawlAuth: @pytest.mark.parametrize( ("exception_type", "exception_message"), [ - (requests.ConnectionError, "Network error"), - (requests.Timeout, "Request timeout"), - (requests.ReadTimeout, "Read timeout"), - (requests.ConnectTimeout, "Connection timeout"), + (httpx.ConnectError, "Network error"), + (httpx.TimeoutException, "Request timeout"), + (httpx.ReadTimeout, "Read timeout"), + (httpx.ConnectTimeout, "Connection timeout"), ], ) - @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.firecrawl.firecrawl.httpx.post") def test_should_handle_network_errors(self, mock_post, exception_type, exception_message, auth_instance): """Test handling of various network-related errors including timeouts""" mock_post.side_effect = exception_type(exception_message) @@ -162,7 +162,7 @@ class TestFirecrawlAuth: FirecrawlAuth({"auth_type": "basic", "config": {"api_key": "super_secret_key_12345"}}) assert "super_secret_key_12345" not in str(exc_info.value) - @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.firecrawl.firecrawl.httpx.post") def test_should_use_custom_base_url_in_validation(self, mock_post): """Test that custom base URL is used in validation""" mock_response = MagicMock() @@ -179,12 +179,12 @@ class TestFirecrawlAuth: assert result is True assert mock_post.call_args[0][0] == "https://custom.firecrawl.dev/v1/crawl" - @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.firecrawl.firecrawl.httpx.post") def test_should_handle_timeout_with_retry_suggestion(self, mock_post, auth_instance): """Test that timeout errors are handled gracefully with appropriate error message""" - mock_post.side_effect = requests.Timeout("The request timed out after 30 seconds") + mock_post.side_effect = httpx.TimeoutException("The request timed out after 30 seconds") - with pytest.raises(requests.Timeout) as exc_info: + with pytest.raises(httpx.TimeoutException) as exc_info: 
auth_instance.validate_credentials() # Verify the timeout exception is raised with original message diff --git a/api/tests/unit_tests/services/auth/test_jina_auth.py b/api/tests/unit_tests/services/auth/test_jina_auth.py index ccbca5a36f..4d2f300d25 100644 --- a/api/tests/unit_tests/services/auth/test_jina_auth.py +++ b/api/tests/unit_tests/services/auth/test_jina_auth.py @@ -1,7 +1,7 @@ from unittest.mock import MagicMock, patch +import httpx import pytest -import requests from services.auth.jina.jina import JinaAuth @@ -35,7 +35,7 @@ class TestJinaAuth: JinaAuth(credentials) assert str(exc_info.value) == "No API key provided" - @patch("services.auth.jina.jina.requests.post") + @patch("services.auth.jina.jina.httpx.post") def test_should_validate_valid_credentials_successfully(self, mock_post): """Test successful credential validation""" mock_response = MagicMock() @@ -53,7 +53,7 @@ class TestJinaAuth: json={"url": "https://example.com"}, ) - @patch("services.auth.jina.jina.requests.post") + @patch("services.auth.jina.jina.httpx.post") def test_should_handle_http_402_error(self, mock_post): """Test handling of 402 Payment Required error""" mock_response = MagicMock() @@ -68,7 +68,7 @@ class TestJinaAuth: auth.validate_credentials() assert str(exc_info.value) == "Failed to authorize. Status code: 402. Error: Payment required" - @patch("services.auth.jina.jina.requests.post") + @patch("services.auth.jina.jina.httpx.post") def test_should_handle_http_409_error(self, mock_post): """Test handling of 409 Conflict error""" mock_response = MagicMock() @@ -83,7 +83,7 @@ class TestJinaAuth: auth.validate_credentials() assert str(exc_info.value) == "Failed to authorize. Status code: 409. Error: Conflict error" - @patch("services.auth.jina.jina.requests.post") + @patch("services.auth.jina.jina.httpx.post") def test_should_handle_http_500_error(self, mock_post): """Test handling of 500 Internal Server Error""" mock_response = MagicMock() @@ -98,7 +98,7 @@ class TestJinaAuth: auth.validate_credentials() assert str(exc_info.value) == "Failed to authorize. Status code: 500. Error: Internal server error" - @patch("services.auth.jina.jina.requests.post") + @patch("services.auth.jina.jina.httpx.post") def test_should_handle_unexpected_error_with_text_response(self, mock_post): """Test handling of unexpected errors with text response""" mock_response = MagicMock() @@ -114,7 +114,7 @@ class TestJinaAuth: auth.validate_credentials() assert str(exc_info.value) == "Failed to authorize. Status code: 403. Error: Forbidden" - @patch("services.auth.jina.jina.requests.post") + @patch("services.auth.jina.jina.httpx.post") def test_should_handle_unexpected_error_without_text(self, mock_post): """Test handling of unexpected errors without text response""" mock_response = MagicMock() @@ -130,15 +130,15 @@ class TestJinaAuth: auth.validate_credentials() assert str(exc_info.value) == "Unexpected error occurred while trying to authorize. 
Status code: 404" - @patch("services.auth.jina.jina.requests.post") + @patch("services.auth.jina.jina.httpx.post") def test_should_handle_network_errors(self, mock_post): """Test handling of network connection errors""" - mock_post.side_effect = requests.ConnectionError("Network error") + mock_post.side_effect = httpx.ConnectError("Network error") credentials = {"auth_type": "bearer", "config": {"api_key": "test_api_key_123"}} auth = JinaAuth(credentials) - with pytest.raises(requests.ConnectionError): + with pytest.raises(httpx.ConnectError): auth.validate_credentials() def test_should_not_expose_api_key_in_error_messages(self): diff --git a/api/tests/unit_tests/services/auth/test_watercrawl_auth.py b/api/tests/unit_tests/services/auth/test_watercrawl_auth.py index bacf0b24ea..ec99cb10b0 100644 --- a/api/tests/unit_tests/services/auth/test_watercrawl_auth.py +++ b/api/tests/unit_tests/services/auth/test_watercrawl_auth.py @@ -1,7 +1,7 @@ from unittest.mock import MagicMock, patch +import httpx import pytest -import requests from services.auth.watercrawl.watercrawl import WatercrawlAuth @@ -64,7 +64,7 @@ class TestWatercrawlAuth: WatercrawlAuth(credentials) assert str(exc_info.value) == expected_error - @patch("services.auth.watercrawl.watercrawl.requests.get") + @patch("services.auth.watercrawl.watercrawl.httpx.get") def test_should_validate_valid_credentials_successfully(self, mock_get, auth_instance): """Test successful credential validation""" mock_response = MagicMock() @@ -87,7 +87,7 @@ class TestWatercrawlAuth: (500, "Internal server error"), ], ) - @patch("services.auth.watercrawl.watercrawl.requests.get") + @patch("services.auth.watercrawl.watercrawl.httpx.get") def test_should_handle_http_errors(self, mock_get, status_code, error_message, auth_instance): """Test handling of various HTTP error codes""" mock_response = MagicMock() @@ -107,7 +107,7 @@ class TestWatercrawlAuth: (401, "Not JSON", True, "Expecting value"), # JSON decode error ], ) - @patch("services.auth.watercrawl.watercrawl.requests.get") + @patch("services.auth.watercrawl.watercrawl.httpx.get") def test_should_handle_unexpected_errors( self, mock_get, status_code, response_text, has_json_error, expected_error_contains, auth_instance ): @@ -126,13 +126,13 @@ class TestWatercrawlAuth: @pytest.mark.parametrize( ("exception_type", "exception_message"), [ - (requests.ConnectionError, "Network error"), - (requests.Timeout, "Request timeout"), - (requests.ReadTimeout, "Read timeout"), - (requests.ConnectTimeout, "Connection timeout"), + (httpx.ConnectError, "Network error"), + (httpx.TimeoutException, "Request timeout"), + (httpx.ReadTimeout, "Read timeout"), + (httpx.ConnectTimeout, "Connection timeout"), ], ) - @patch("services.auth.watercrawl.watercrawl.requests.get") + @patch("services.auth.watercrawl.watercrawl.httpx.get") def test_should_handle_network_errors(self, mock_get, exception_type, exception_message, auth_instance): """Test handling of various network-related errors including timeouts""" mock_get.side_effect = exception_type(exception_message) @@ -154,7 +154,7 @@ class TestWatercrawlAuth: WatercrawlAuth({"auth_type": "bearer", "config": {"api_key": "super_secret_key_12345"}}) assert "super_secret_key_12345" not in str(exc_info.value) - @patch("services.auth.watercrawl.watercrawl.requests.get") + @patch("services.auth.watercrawl.watercrawl.httpx.get") def test_should_use_custom_base_url_in_validation(self, mock_get): """Test that custom base URL is used in validation""" mock_response = MagicMock() @@ -179,7 
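The parametrised urljoin cases below rest on urllib.parse semantics: a target path with a leading slash replaces the base path entirely, which is why trailing or doubled slashes on the base all normalise to the same endpoint:

    from urllib.parse import urljoin

    for base in (
        "https://app.watercrawl.dev",
        "https://app.watercrawl.dev/",
        "https://app.watercrawl.dev//",
    ):
        assert (
            urljoin(base, "/api/v1/core/crawl-requests/")
            == "https://app.watercrawl.dev/api/v1/core/crawl-requests/"
        )
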
+179,7 @@ class TestWatercrawlAuth: ("https://app.watercrawl.dev//", "https://app.watercrawl.dev/api/v1/core/crawl-requests/"), ], ) - @patch("services.auth.watercrawl.watercrawl.requests.get") + @patch("services.auth.watercrawl.watercrawl.httpx.get") def test_should_use_urljoin_for_url_construction(self, mock_get, base_url, expected_url): """Test that urljoin is used correctly for URL construction with various base URLs""" mock_response = MagicMock() @@ -193,12 +193,12 @@ class TestWatercrawlAuth: # Verify the correct URL was called assert mock_get.call_args[0][0] == expected_url - @patch("services.auth.watercrawl.watercrawl.requests.get") + @patch("services.auth.watercrawl.watercrawl.httpx.get") def test_should_handle_timeout_with_retry_suggestion(self, mock_get, auth_instance): """Test that timeout errors are handled gracefully with appropriate error message""" - mock_get.side_effect = requests.Timeout("The request timed out after 30 seconds") + mock_get.side_effect = httpx.TimeoutException("The request timed out after 30 seconds") - with pytest.raises(requests.Timeout) as exc_info: + with pytest.raises(httpx.TimeoutException) as exc_info: auth_instance.validate_credentials() # Verify the timeout exception is raised with original message From 760a2c656cfe86ee2c6de5c8ad62c0d41cd1eca4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=A4=8F=E7=9B=AE=E7=8C=AB=E7=8C=AB?= <87644354+Natsume-Neko@users.noreply.github.com> Date: Mon, 22 Sep 2025 23:47:13 +0800 Subject: [PATCH 013/126] amend regexp exec (#25986) --- .../components/workflow/nodes/variable-assigner/use-config.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/app/components/workflow/nodes/variable-assigner/use-config.ts b/web/app/components/workflow/nodes/variable-assigner/use-config.ts index c65941e32d..31bd53dca7 100644 --- a/web/app/components/workflow/nodes/variable-assigner/use-config.ts +++ b/web/app/components/workflow/nodes/variable-assigner/use-config.ts @@ -124,7 +124,7 @@ const useConfig = (id: string, payload: VariableAssignerNodeType) => { const handleAddGroup = useCallback(() => { let maxInGroupName = 1 inputs.advanced_settings.groups.forEach((item) => { - const match = item.group_name.match(/(\d+)$/) + const match = /(\d+)$/.exec(item.group_name) if (match) { const num = Number.parseInt(match[1], 10) if (num > maxInGroupName) From f4522fd6952aa714ced013ed7832077cb5ed8b1c Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Tue, 23 Sep 2025 01:35:54 +0900 Subject: [PATCH 014/126] try contextmanager (#26074) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../event_management/event_manager.py | 49 ++++++------------- 1 file changed, 15 insertions(+), 34 deletions(-) diff --git a/api/core/workflow/graph_engine/event_management/event_manager.py b/api/core/workflow/graph_engine/event_management/event_manager.py index 6f37193070..751a2a4352 100644 --- a/api/core/workflow/graph_engine/event_management/event_manager.py +++ b/api/core/workflow/graph_engine/event_management/event_manager.py @@ -5,6 +5,7 @@ Unified event manager for collecting and emitting events. 
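The contextlib-based replacement below keeps exactly the discipline of the deleted ReadLockContext/WriteLockContext classes: everything before yield runs on __enter__, and the finally block runs on __exit__, so the lock is released on normal exit and on exceptions alike. The pattern in isolation:

    import threading
    from contextlib import contextmanager

    _lock = threading.Lock()

    @contextmanager
    def guarded():
        _lock.acquire()
        try:
            yield  # the body of the with-block executes here
        finally:
            _lock.release()  # runs even if the body raises

    with guarded():
        pass  # _lock is held here and released afterwards
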
import threading import time from collections.abc import Generator +from contextlib import contextmanager from typing import final from core.workflow.graph_events import GraphEngineEvent @@ -51,43 +52,23 @@ class ReadWriteLock: """Release a write lock.""" self._read_ready.release() - def read_lock(self) -> "ReadLockContext": + @contextmanager + def read_lock(self): """Return a context manager for read locking.""" - return ReadLockContext(self) + self.acquire_read() + try: + yield + finally: + self.release_read() - def write_lock(self) -> "WriteLockContext": + @contextmanager + def write_lock(self): """Return a context manager for write locking.""" - return WriteLockContext(self) - - -@final -class ReadLockContext: - """Context manager for read locks.""" - - def __init__(self, lock: ReadWriteLock) -> None: - self._lock = lock - - def __enter__(self) -> "ReadLockContext": - self._lock.acquire_read() - return self - - def __exit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: object) -> None: - self._lock.release_read() - - -@final -class WriteLockContext: - """Context manager for write locks.""" - - def __init__(self, lock: ReadWriteLock) -> None: - self._lock = lock - - def __enter__(self) -> "WriteLockContext": - self._lock.acquire_write() - return self - - def __exit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: object) -> None: - self._lock.release_write() + self.acquire_write() + try: + yield + finally: + self.release_write() @final From 2e2c87c5a111c641255da029dcda17b0d4f1acbb Mon Sep 17 00:00:00 2001 From: -LAN- Date: Tue, 23 Sep 2025 01:51:43 +0800 Subject: [PATCH 015/126] fix(graph_engine): error strategy fall. (#26078) Signed-off-by: -LAN- --- .../graph_engine/domain/graph_execution.py | 12 +- .../event_management/event_handlers.py | 66 ++++++++-- .../workflow/graph_engine/graph_engine.py | 20 ++- .../graph_engine/layers/debug_logging.py | 8 ++ .../nodes/iteration/iteration_node.py | 3 +- .../event_management/test_event_handlers.py | 120 ++++++++++++++++++ .../graph_engine/test_graph_engine.py | 45 ++++++- .../core/workflow/nodes/test_retry.py | 65 ---------- 8 files changed, 255 insertions(+), 84 deletions(-) create mode 100644 api/tests/unit_tests/core/workflow/graph_engine/event_management/test_event_handlers.py delete mode 100644 api/tests/unit_tests/core/workflow/nodes/test_retry.py diff --git a/api/core/workflow/graph_engine/domain/graph_execution.py b/api/core/workflow/graph_engine/domain/graph_execution.py index 5951af1087..b273ee9969 100644 --- a/api/core/workflow/graph_engine/domain/graph_execution.py +++ b/api/core/workflow/graph_engine/domain/graph_execution.py @@ -41,7 +41,8 @@ class GraphExecutionState(BaseModel): completed: bool = Field(default=False) aborted: bool = Field(default=False) error: GraphExecutionErrorState | None = Field(default=None) - node_executions: list[NodeExecutionState] = Field(default_factory=list) + exceptions_count: int = Field(default=0) + node_executions: list[NodeExecutionState] = Field(default_factory=list[NodeExecutionState]) def _serialize_error(error: Exception | None) -> GraphExecutionErrorState | None: @@ -103,7 +104,8 @@ class GraphExecution: completed: bool = False aborted: bool = False error: Exception | None = None - node_executions: dict[str, NodeExecution] = field(default_factory=dict) + node_executions: dict[str, NodeExecution] = field(default_factory=dict[str, NodeExecution]) + exceptions_count: int = 0 def start(self) -> None: """Mark the graph 
execution as started.""" @@ -172,6 +174,7 @@ class GraphExecution: completed=self.completed, aborted=self.aborted, error=_serialize_error(self.error), + exceptions_count=self.exceptions_count, node_executions=node_states, ) @@ -195,6 +198,7 @@ class GraphExecution: self.completed = state.completed self.aborted = state.aborted self.error = _deserialize_error(state.error) + self.exceptions_count = state.exceptions_count self.node_executions = { item.node_id: NodeExecution( node_id=item.node_id, @@ -205,3 +209,7 @@ class GraphExecution: ) for item in state.node_executions } + + def record_node_failure(self) -> None: + """Increment the count of node failures encountered during execution.""" + self.exceptions_count += 1 diff --git a/api/core/workflow/graph_engine/event_management/event_handlers.py b/api/core/workflow/graph_engine/event_management/event_handlers.py index 244f4a4d86..7247b17967 100644 --- a/api/core/workflow/graph_engine/event_management/event_handlers.py +++ b/api/core/workflow/graph_engine/event_management/event_handlers.py @@ -3,11 +3,12 @@ Event handler implementations for different event types. """ import logging +from collections.abc import Mapping from functools import singledispatchmethod from typing import TYPE_CHECKING, final from core.workflow.entities import GraphRuntimeState -from core.workflow.enums import NodeExecutionType +from core.workflow.enums import ErrorStrategy, NodeExecutionType from core.workflow.graph import Graph from core.workflow.graph_events import ( GraphNodeEventBase, @@ -122,13 +123,15 @@ class EventHandler: """ # Track execution in domain model node_execution = self._graph_execution.get_or_create_node_execution(event.node_id) + is_initial_attempt = node_execution.retry_count == 0 node_execution.mark_started(event.id) # Track in response coordinator for stream ordering self._response_coordinator.track_node_execution(event.node_id, event.id) - # Collect the event - self._event_collector.collect(event) + # Collect the event only for the first attempt; retries remain silent + if is_initial_attempt: + self._event_collector.collect(event) @_dispatch.register def _(self, event: NodeRunStreamChunkEvent) -> None: @@ -161,7 +164,7 @@ class EventHandler: node_execution.mark_taken() # Store outputs in variable pool - self._store_node_outputs(event) + self._store_node_outputs(event.node_id, event.node_run_result.outputs) # Forward to response coordinator and emit streaming events streaming_events = self._response_coordinator.intercept_event(event) @@ -191,7 +194,7 @@ class EventHandler: # Handle response node outputs if node.execution_type == NodeExecutionType.RESPONSE: - self._update_response_outputs(event) + self._update_response_outputs(event.node_run_result.outputs) # Collect the event self._event_collector.collect(event) @@ -207,6 +210,7 @@ class EventHandler: # Update domain model node_execution = self._graph_execution.get_or_create_node_execution(event.node_id) node_execution.mark_failed(event.error) + self._graph_execution.record_node_failure() result = self._error_handler.handle_node_failure(event) @@ -227,10 +231,40 @@ class EventHandler: Args: event: The node exception event """ - # Node continues via fail-branch, so it's technically "succeeded" + # Node continues via fail-branch/default-value, treat as completion node_execution = self._graph_execution.get_or_create_node_execution(event.node_id) node_execution.mark_taken() + # Persist outputs produced by the exception strategy (e.g. 
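The _dispatch.register handlers above rely on functools.singledispatchmethod, which selects the overload from the runtime type of the first argument after self; a minimal standalone illustration:

    from functools import singledispatchmethod

    class Handler:
        @singledispatchmethod
        def dispatch(self, event) -> str:
            raise NotImplementedError(type(event).__name__)

        @dispatch.register
        def _(self, event: int) -> str:
            return f"int: {event}"

        @dispatch.register
        def _(self, event: str) -> str:
            return f"str: {event}"

    h = Handler()
    assert h.dispatch(3) == "int: 3"
    assert h.dispatch("x") == "str: x"
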
default values) + self._store_node_outputs(event.node_id, event.node_run_result.outputs) + + node = self._graph.nodes[event.node_id] + + if node.error_strategy == ErrorStrategy.DEFAULT_VALUE: + ready_nodes, edge_streaming_events = self._edge_processor.process_node_success(event.node_id) + elif node.error_strategy == ErrorStrategy.FAIL_BRANCH: + ready_nodes, edge_streaming_events = self._edge_processor.handle_branch_completion( + event.node_id, event.node_run_result.edge_source_handle + ) + else: + raise NotImplementedError(f"Unsupported error strategy: {node.error_strategy}") + + for edge_event in edge_streaming_events: + self._event_collector.collect(edge_event) + + for node_id in ready_nodes: + self._state_manager.enqueue_node(node_id) + self._state_manager.start_execution(node_id) + + # Update response outputs if applicable + if node.execution_type == NodeExecutionType.RESPONSE: + self._update_response_outputs(event.node_run_result.outputs) + + self._state_manager.finish_execution(event.node_id) + + # Collect the exception event for observers + self._event_collector.collect(event) + @_dispatch.register def _(self, event: NodeRunRetryEvent) -> None: """ @@ -242,21 +276,31 @@ class EventHandler: node_execution = self._graph_execution.get_or_create_node_execution(event.node_id) node_execution.increment_retry() - def _store_node_outputs(self, event: NodeRunSucceededEvent) -> None: + # Finish the previous attempt before re-queuing the node + self._state_manager.finish_execution(event.node_id) + + # Emit retry event for observers + self._event_collector.collect(event) + + # Re-queue node for execution + self._state_manager.enqueue_node(event.node_id) + self._state_manager.start_execution(event.node_id) + + def _store_node_outputs(self, node_id: str, outputs: Mapping[str, object]) -> None: """ Store node outputs in the variable pool. Args: event: The node succeeded event containing outputs """ - for variable_name, variable_value in event.node_run_result.outputs.items(): - self._graph_runtime_state.variable_pool.add((event.node_id, variable_name), variable_value) + for variable_name, variable_value in outputs.items(): + self._graph_runtime_state.variable_pool.add((node_id, variable_name), variable_value) - def _update_response_outputs(self, event: NodeRunSucceededEvent) -> None: + def _update_response_outputs(self, outputs: Mapping[str, object]) -> None: """Update response outputs for response nodes.""" # TODO: Design a mechanism for nodes to notify the engine about how to update outputs # in runtime state, rather than allowing nodes to directly access runtime state. 
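With the handling above, a node that fails but declares ErrorStrategy.DEFAULT_VALUE now completes with its configured defaults, the failure is counted via record_node_failure, and the run ends as a partial success rather than a hard failure. Configuring the fallback mirrors the integration test further below (a sketch; the node argument stands in for any node instance in a built graph):

    from core.workflow.enums import ErrorStrategy
    from core.workflow.nodes.base.entities import DefaultValue, DefaultValueType

    def apply_fallback(node):
        base = node.get_base_node_data()
        base.error_strategy = ErrorStrategy.DEFAULT_VALUE
        base.default_value = [
            DefaultValue(key="text", value="fallback response", type=DefaultValueType.STRING),
        ]
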
- for key, value in event.node_run_result.outputs.items(): + for key, value in outputs.items(): if key == "answer": existing = self._graph_runtime_state.get_output("answer", "") if existing: diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index 164ae41cca..a21fb7c022 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -23,6 +23,7 @@ from core.workflow.graph_events import ( GraphNodeEventBase, GraphRunAbortedEvent, GraphRunFailedEvent, + GraphRunPartialSucceededEvent, GraphRunStartedEvent, GraphRunSucceededEvent, ) @@ -260,12 +261,23 @@ class GraphEngine: if self._graph_execution.error: raise self._graph_execution.error else: - yield GraphRunSucceededEvent( - outputs=self._graph_runtime_state.outputs, - ) + outputs = self._graph_runtime_state.outputs + exceptions_count = self._graph_execution.exceptions_count + if exceptions_count > 0: + yield GraphRunPartialSucceededEvent( + exceptions_count=exceptions_count, + outputs=outputs, + ) + else: + yield GraphRunSucceededEvent( + outputs=outputs, + ) except Exception as e: - yield GraphRunFailedEvent(error=str(e)) + yield GraphRunFailedEvent( + error=str(e), + exceptions_count=self._graph_execution.exceptions_count, + ) raise finally: diff --git a/api/core/workflow/graph_engine/layers/debug_logging.py b/api/core/workflow/graph_engine/layers/debug_logging.py index f24c3fe33c..5b44c23899 100644 --- a/api/core/workflow/graph_engine/layers/debug_logging.py +++ b/api/core/workflow/graph_engine/layers/debug_logging.py @@ -15,6 +15,7 @@ from core.workflow.graph_events import ( GraphEngineEvent, GraphRunAbortedEvent, GraphRunFailedEvent, + GraphRunPartialSucceededEvent, GraphRunStartedEvent, GraphRunSucceededEvent, NodeRunExceptionEvent, @@ -127,6 +128,13 @@ class DebugLoggingLayer(GraphEngineLayer): if self.include_outputs and event.outputs: self.logger.info(" Final outputs: %s", self._format_dict(event.outputs)) + elif isinstance(event, GraphRunPartialSucceededEvent): + self.logger.warning("⚠️ Graph run partially succeeded") + if event.exceptions_count > 0: + self.logger.warning(" Total exceptions: %s", event.exceptions_count) + if self.include_outputs and event.outputs: + self.logger.info(" Final outputs: %s", self._format_dict(event.outputs)) + elif isinstance(event, GraphRunFailedEvent): self.logger.error("❌ Graph run failed: %s", event.error) if event.exceptions_count > 0: diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index 6e57b17d5c..593281c9b5 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -19,6 +19,7 @@ from core.workflow.enums import ( from core.workflow.graph_events import ( GraphNodeEventBase, GraphRunFailedEvent, + GraphRunPartialSucceededEvent, GraphRunSucceededEvent, ) from core.workflow.node_events import ( @@ -456,7 +457,7 @@ class IterationNode(Node): if isinstance(event, GraphNodeEventBase): self._append_iteration_info_to_event(event=event, iter_run_index=current_index) yield event - elif isinstance(event, GraphRunSucceededEvent): + elif isinstance(event, (GraphRunSucceededEvent, GraphRunPartialSucceededEvent)): result = variable_pool.get(self._node_data.output_selector) if result is None: outputs.append(None) diff --git a/api/tests/unit_tests/core/workflow/graph_engine/event_management/test_event_handlers.py 
b/api/tests/unit_tests/core/workflow/graph_engine/event_management/test_event_handlers.py new file mode 100644 index 0000000000..d556bb138e --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/event_management/test_event_handlers.py @@ -0,0 +1,120 @@ +"""Tests for graph engine event handlers.""" + +from __future__ import annotations + +from datetime import datetime + +from core.workflow.entities import GraphRuntimeState, VariablePool +from core.workflow.enums import NodeExecutionType, NodeState, NodeType, WorkflowNodeExecutionStatus +from core.workflow.graph import Graph +from core.workflow.graph_engine.domain.graph_execution import GraphExecution +from core.workflow.graph_engine.event_management.event_handlers import EventHandler +from core.workflow.graph_engine.event_management.event_manager import EventManager +from core.workflow.graph_engine.graph_state_manager import GraphStateManager +from core.workflow.graph_engine.ready_queue.in_memory import InMemoryReadyQueue +from core.workflow.graph_engine.response_coordinator.coordinator import ResponseStreamCoordinator +from core.workflow.graph_events import NodeRunRetryEvent, NodeRunStartedEvent +from core.workflow.node_events import NodeRunResult +from core.workflow.nodes.base.entities import RetryConfig + + +class _StubEdgeProcessor: + """Minimal edge processor stub for tests.""" + + +class _StubErrorHandler: + """Minimal error handler stub for tests.""" + + +class _StubNode: + """Simple node stub exposing the attributes needed by the state manager.""" + + def __init__(self, node_id: str) -> None: + self.id = node_id + self.state = NodeState.UNKNOWN + self.title = "Stub Node" + self.execution_type = NodeExecutionType.EXECUTABLE + self.error_strategy = None + self.retry_config = RetryConfig() + self.retry = False + + +def _build_event_handler(node_id: str) -> tuple[EventHandler, EventManager, GraphExecution]: + """Construct an EventHandler with in-memory dependencies for testing.""" + + node = _StubNode(node_id) + graph = Graph(nodes={node_id: node}, edges={}, in_edges={}, out_edges={}, root_node=node) + + variable_pool = VariablePool() + runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=0.0) + graph_execution = GraphExecution(workflow_id="test-workflow") + + event_manager = EventManager() + state_manager = GraphStateManager(graph=graph, ready_queue=InMemoryReadyQueue()) + response_coordinator = ResponseStreamCoordinator(variable_pool=variable_pool, graph=graph) + + handler = EventHandler( + graph=graph, + graph_runtime_state=runtime_state, + graph_execution=graph_execution, + response_coordinator=response_coordinator, + event_collector=event_manager, + edge_processor=_StubEdgeProcessor(), + state_manager=state_manager, + error_handler=_StubErrorHandler(), + ) + + return handler, event_manager, graph_execution + + +def test_retry_does_not_emit_additional_start_event() -> None: + """Ensure retry attempts do not produce duplicate start events.""" + + node_id = "test-node" + handler, event_manager, graph_execution = _build_event_handler(node_id) + + execution_id = "exec-1" + node_type = NodeType.CODE + start_time = datetime.utcnow() + + start_event = NodeRunStartedEvent( + id=execution_id, + node_id=node_id, + node_type=node_type, + node_title="Stub Node", + start_at=start_time, + ) + handler.dispatch(start_event) + + retry_event = NodeRunRetryEvent( + id=execution_id, + node_id=node_id, + node_type=node_type, + node_title="Stub Node", + start_at=start_time, + error="boom", + retry_index=1, + 
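+        # The embedded NodeRunResult below mirrors the failed attempt that
+        # triggered this retry; its status and error values are test fixtures.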
node_run_result=NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + error="boom", + error_type="TestError", + ), + ) + handler.dispatch(retry_event) + + # Simulate the node starting execution again after retry + second_start_event = NodeRunStartedEvent( + id=execution_id, + node_id=node_id, + node_type=node_type, + node_title="Stub Node", + start_at=start_time, + ) + handler.dispatch(second_start_event) + + collected_types = [type(event) for event in event_manager._events] # type: ignore[attr-defined] + + assert collected_types == [NodeRunStartedEvent, NodeRunRetryEvent] + + node_execution = graph_execution.get_or_create_node_execution(node_id) + assert node_execution.retry_count == 1 diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_graph_engine.py b/api/tests/unit_tests/core/workflow/graph_engine/test_graph_engine.py index 6a723999de..4a117f8c96 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_graph_engine.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_graph_engine.py @@ -10,11 +10,18 @@ import time from hypothesis import HealthCheck, given, settings from hypothesis import strategies as st +from core.workflow.enums import ErrorStrategy from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel -from core.workflow.graph_events import GraphRunStartedEvent, GraphRunSucceededEvent +from core.workflow.graph_events import ( + GraphRunPartialSucceededEvent, + GraphRunStartedEvent, + GraphRunSucceededEvent, +) +from core.workflow.nodes.base.entities import DefaultValue, DefaultValueType # Import the test framework from the new module +from .test_mock_config import MockConfigBuilder from .test_table_runner import TableTestRunner, WorkflowRunner, WorkflowTestCase @@ -721,3 +728,39 @@ def test_event_sequence_validation_with_table_tests(): else: assert result.event_sequence_match is True assert result.success, f"Test {i + 1} failed: {result.event_mismatch_details or result.error}" + + +def test_graph_run_emits_partial_success_when_node_failure_recovered(): + runner = TableTestRunner() + + fixture_data = runner.workflow_runner.load_fixture("basic_chatflow") + mock_config = MockConfigBuilder().with_node_error("llm", "mock llm failure").build() + + graph, graph_runtime_state = runner.workflow_runner.create_graph_from_fixture( + fixture_data=fixture_data, + query="hello", + use_mock_factory=True, + mock_config=mock_config, + ) + + llm_node = graph.nodes["llm"] + base_node_data = llm_node.get_base_node_data() + base_node_data.error_strategy = ErrorStrategy.DEFAULT_VALUE + base_node_data.default_value = [DefaultValue(key="text", value="fallback response", type=DefaultValueType.STRING)] + + engine = GraphEngine( + workflow_id="test_workflow", + graph=graph, + graph_runtime_state=graph_runtime_state, + command_channel=InMemoryChannel(), + ) + + events = list(engine.run()) + + assert isinstance(events[-1], GraphRunPartialSucceededEvent) + + partial_event = next(event for event in events if isinstance(event, GraphRunPartialSucceededEvent)) + assert partial_event.exceptions_count == 1 + assert partial_event.outputs.get("answer") == "fallback response" + + assert not any(isinstance(event, GraphRunSucceededEvent) for event in events) diff --git a/api/tests/unit_tests/core/workflow/nodes/test_retry.py b/api/tests/unit_tests/core/workflow/nodes/test_retry.py deleted file mode 100644 index 23cef58d2e..0000000000 --- a/api/tests/unit_tests/core/workflow/nodes/test_retry.py +++ /dev/null @@ -1,65 
+0,0 @@ -import pytest - -pytest.skip( - "Retry functionality is part of Phase 2 enhanced error handling - not implemented in MVP of queue-based engine", - allow_module_level=True, -) - -DEFAULT_VALUE_EDGE = [ - { - "id": "start-source-node-target", - "source": "start", - "target": "node", - "sourceHandle": "source", - }, - { - "id": "node-source-answer-target", - "source": "node", - "target": "answer", - "sourceHandle": "source", - }, -] - - -def test_retry_default_value_partial_success(): - """retry default value node with partial success status""" - graph_config = { - "edges": DEFAULT_VALUE_EDGE, - "nodes": [ - {"data": {"title": "start", "type": "start", "variables": []}, "id": "start"}, - {"data": {"title": "answer", "type": "answer", "answer": "{{#node.result#}}"}, "id": "answer"}, - ContinueOnErrorTestHelper.get_http_node( - "default-value", - [{"key": "result", "type": "string", "value": "http node got error response"}], - retry_config={"retry_config": {"max_retries": 2, "retry_interval": 1000, "retry_enabled": True}}, - ), - ], - } - - graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) - events = list(graph_engine.run()) - assert sum(1 for e in events if isinstance(e, NodeRunRetryEvent)) == 2 - assert events[-1].outputs == {"answer": "http node got error response"} - assert any(isinstance(e, GraphRunPartialSucceededEvent) for e in events) - assert len(events) == 11 - - -def test_retry_failed(): - """retry failed with success status""" - graph_config = { - "edges": DEFAULT_VALUE_EDGE, - "nodes": [ - {"data": {"title": "start", "type": "start", "variables": []}, "id": "start"}, - {"data": {"title": "answer", "type": "answer", "answer": "{{#node.result#}}"}, "id": "answer"}, - ContinueOnErrorTestHelper.get_http_node( - None, - None, - retry_config={"retry_config": {"max_retries": 2, "retry_interval": 1000, "retry_enabled": True}}, - ), - ], - } - graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) - events = list(graph_engine.run()) - assert sum(1 for e in events if isinstance(e, NodeRunRetryEvent)) == 2 - assert any(isinstance(e, GraphRunFailedEvent) for e in events) - assert len(events) == 8 From 407323f8172bcdb113fcb5960a5cd3057e743689 Mon Sep 17 00:00:00 2001 From: quicksand Date: Tue, 23 Sep 2025 13:46:45 +0800 Subject: [PATCH 016/126] fix(api): graph engine debug logging NodeRunRetryEvent not effective (#26085) --- .../workflow/graph_engine/layers/debug_logging.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/api/core/workflow/graph_engine/layers/debug_logging.py b/api/core/workflow/graph_engine/layers/debug_logging.py index 5b44c23899..034ebcf54f 100644 --- a/api/core/workflow/graph_engine/layers/debug_logging.py +++ b/api/core/workflow/graph_engine/layers/debug_logging.py @@ -146,6 +146,12 @@ class DebugLoggingLayer(GraphEngineLayer): self.logger.info(" Partial outputs: %s", self._format_dict(event.outputs)) # Node-level events + # Retry before Started because Retry subclasses Started; + elif isinstance(event, NodeRunRetryEvent): + self.retry_count += 1 + self.logger.warning("🔄 Node retry: %s (attempt %s)", event.node_id, event.retry_index) + self.logger.warning(" Previous error: %s", event.error) + elif isinstance(event, NodeRunStartedEvent): self.node_count += 1 self.logger.info('▶️ Node started: %s - "%s" (type: %s)', event.node_id, event.node_title, event.node_type) @@ -175,11 +181,6 @@ class DebugLoggingLayer(GraphEngineLayer): self.logger.warning("⚠️ Node exception handled: %s", 
event.node_id) self.logger.warning(" Error: %s", event.error) - elif isinstance(event, NodeRunRetryEvent): - self.retry_count += 1 - self.logger.warning("🔄 Node retry: %s (attempt %s)", event.node_id, event.retry_index) - self.logger.warning(" Previous error: %s", event.error) - elif isinstance(event, NodeRunStreamChunkEvent): # Log stream chunks at debug level to avoid spam final_indicator = " (FINAL)" if event.is_final else "" From dd089573817205ff039debbbab05d20f4df491c2 Mon Sep 17 00:00:00 2001 From: Jyong <76649700+JohnJyong@users.noreply.github.com> Date: Tue, 23 Sep 2025 16:40:26 +0800 Subject: [PATCH 017/126] fix full_text_search name (#26104) --- api/core/workflow/nodes/knowledge_index/entities.py | 2 +- .../entities/knowledge_entities/rag_pipeline_entities.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/core/workflow/nodes/knowledge_index/entities.py b/api/core/workflow/nodes/knowledge_index/entities.py index 2a2e983a0c..c79373afd5 100644 --- a/api/core/workflow/nodes/knowledge_index/entities.py +++ b/api/core/workflow/nodes/knowledge_index/entities.py @@ -63,7 +63,7 @@ class RetrievalSetting(BaseModel): Retrieval Setting. """ - search_method: Literal["semantic_search", "keyword_search", "fulltext_search", "hybrid_search"] + search_method: Literal["semantic_search", "keyword_search", "full_text_search", "hybrid_search"] top_k: int score_threshold: float | None = 0.5 score_threshold_enabled: bool = False diff --git a/api/services/entities/knowledge_entities/rag_pipeline_entities.py b/api/services/entities/knowledge_entities/rag_pipeline_entities.py index ac96b5c8ad..860bfde401 100644 --- a/api/services/entities/knowledge_entities/rag_pipeline_entities.py +++ b/api/services/entities/knowledge_entities/rag_pipeline_entities.py @@ -83,7 +83,7 @@ class RetrievalSetting(BaseModel): Retrieval Setting. 
""" - search_method: Literal["semantic_search", "fulltext_search", "keyword_search", "hybrid_search"] + search_method: Literal["semantic_search", "full_text_search", "keyword_search", "hybrid_search"] top_k: int score_threshold: float | None = 0.5 score_threshold_enabled: bool = False From 8b74ae683a0a242e70c58d0dee247e00127ed7ae Mon Sep 17 00:00:00 2001 From: 17hz <0x149527@gmail.com> Date: Tue, 23 Sep 2025 16:59:26 +0800 Subject: [PATCH 018/126] bump nextjs to 15.5 and turbopack for development mode (#24346) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: crazywoola <427733928@qq.com> Co-authored-by: 非法操作 --- web/app/styles/globals.css | 14 ++-- web/next.config.js | 14 ++-- web/package.json | 4 +- web/pnpm-lock.yaml | 129 +++++++++++++++++++++---------------- web/tsconfig.json | 3 + 5 files changed, 96 insertions(+), 68 deletions(-) diff --git a/web/app/styles/globals.css b/web/app/styles/globals.css index 353cfa2fff..c1078b6eb6 100644 --- a/web/app/styles/globals.css +++ b/web/app/styles/globals.css @@ -1,12 +1,18 @@ @import "preflight.css"; -@tailwind base; -@tailwind components; + @import '../../themes/light.css'; @import '../../themes/dark.css'; @import "../../themes/manual-light.css"; @import "../../themes/manual-dark.css"; +@import "../components/base/button/index.css"; +@import "../components/base/action-button/index.css"; +@import "../components/base/modal/index.css"; + +@tailwind base; +@tailwind components; + html { color-scheme: light; } @@ -680,10 +686,6 @@ button:focus-within { display: none; } -@import "../components/base/button/index.css"; -@import "../components/base/action-button/index.css"; -@import "../components/base/modal/index.css"; - @tailwind utilities; @layer utilities { diff --git a/web/next.config.js b/web/next.config.js index 7e89c33e62..9c5e331f34 100644 --- a/web/next.config.js +++ b/web/next.config.js @@ -91,12 +91,10 @@ const remoteImageURLs = [hasSetWebPrefix ? new URL(`${process.env.NEXT_PUBLIC_WE /** @type {import('next').NextConfig} */ const nextConfig = { basePath: process.env.NEXT_PUBLIC_BASE_PATH || '', - webpack: (config, { dev, isServer }) => { - if (dev) { - config.plugins.push(codeInspectorPlugin({ bundler: 'webpack' })) - } - - return config + turbopack: { + rules: codeInspectorPlugin({ + bundler: 'turbopack' + }) }, productionBrowserSourceMaps: false, // enable browser source map generation during the production build // Configure pageExtensions to include md and mdx @@ -112,6 +110,10 @@ const nextConfig = { })), }, experimental: { + optimizePackageImports: [ + '@remixicon/react', + '@heroicons/react' + ], }, // fix all before production. Now it slow the develop speed. 
eslint: { diff --git a/web/package.json b/web/package.json index 57ab734eb2..78e62c9aa7 100644 --- a/web/package.json +++ b/web/package.json @@ -19,7 +19,7 @@ "and_qq >= 14.9" ], "scripts": { - "dev": "cross-env NODE_OPTIONS='--inspect' next dev", + "dev": "cross-env NODE_OPTIONS='--inspect' next dev --turbopack", "build": "next build", "build:docker": "next build && node scripts/optimize-standalone.js", "start": "cp -r .next/static .next/standalone/.next/static && cp -r public .next/standalone/public && cross-env PORT=$npm_config_port HOSTNAME=$npm_config_host node .next/standalone/server.js", @@ -203,7 +203,7 @@ "autoprefixer": "^10.4.20", "babel-loader": "^10.0.0", "bing-translate-api": "^4.0.2", - "code-inspector-plugin": "^0.18.1", + "code-inspector-plugin": "1.2.9", "cross-env": "^7.0.3", "eslint": "^9.35.0", "eslint-config-next": "15.5.0", diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index 5d4308288c..e47985fd71 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -519,8 +519,8 @@ importers: specifier: ^4.0.2 version: 4.1.0 code-inspector-plugin: - specifier: ^0.18.1 - version: 0.18.3 + specifier: 1.2.9 + version: 1.2.9 cross-env: specifier: ^7.0.3 version: 7.0.3 @@ -1372,6 +1372,24 @@ packages: '@clack/prompts@0.11.0': resolution: {integrity: sha512-pMN5FcrEw9hUkZA4f+zLlzivQSeQf5dRGJjSUbvVYDLvpKCdQx5OaknvKzgbtXOizhP+SJJJjqEbOe55uKKfAw==} + '@code-inspector/core@1.2.9': + resolution: {integrity: sha512-A1w+G73HlTB6S8X6sA6tT+ziWHTAcTyH+7FZ1Sgd3ZLXF/E/jT+hgRbKposjXMwxcbodRc6hBG6UyiV+VxwE6Q==} + + '@code-inspector/esbuild@1.2.9': + resolution: {integrity: sha512-DuyfxGupV43CN8YElIqynAniBtE86i037+3OVJYrm3jlJscXzbV98/kOzvu+VJQQvElcDgpgD6C/aGmPvFEiUg==} + + '@code-inspector/mako@1.2.9': + resolution: {integrity: sha512-8N+MHdr64AnthLB4v+YGe8/9bgog3BnkxIW/fqX5iVS0X06mF7X1pxfZOD2bABVtv1tW25lRtNs5AgvYJs0vpg==} + + '@code-inspector/turbopack@1.2.9': + resolution: {integrity: sha512-UVOUbqU6rpi5eOkrFamKrdeSWb0/OFFJQBaxbgs1RK5V5f4/iVwC5KjO2wkjv8cOGU4EppLfBVSBI1ysOo8S5A==} + + '@code-inspector/vite@1.2.9': + resolution: {integrity: sha512-saIokJ3o3SdrHEgTEg1fbbowbKfh7J4mYtu0i1mVfah1b1UfdCF/iFHTEJ6SADMiY47TeNZTg0TQWTlU1AWPww==} + + '@code-inspector/webpack@1.2.9': + resolution: {integrity: sha512-9YEykVrOIc0zMV7pyTyZhCprjScjn6gPPmxb4/OQXKCrP2fAm+NB188rg0s95e4sM7U3qRUpPA4NUH5F7Ogo+g==} + '@cspotcode/source-map-support@0.8.1': resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} engines: {node: '>=12'} @@ -4425,11 +4443,8 @@ packages: resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} - code-inspector-core@0.18.3: - resolution: {integrity: sha512-60pT2cPoguMTUYdN1MMpjoPUnuF0ud/u7M2y+Vqit/bniLEit9dySEWAVxLU/Ukc5ILrDeLKEttc6fCMl9RUrA==} - - code-inspector-plugin@0.18.3: - resolution: {integrity: sha512-d9oJXZUsnvfTaQDwFmDNA2F+AR/TXIxWg1rr8KGcEskltR2prbZsfuu1z70EAn4khpx0smfi/PvIIwNJQ7FAMw==} + code-inspector-plugin@1.2.9: + resolution: {integrity: sha512-PGp/AQ03vaajimG9rn5+eQHGifrym5CSNLCViPtwzot7FM3MqEkGNqcvimH0FVuv3wDOcP5KvETAUSLf1BE3HA==} collapse-white-space@2.1.0: resolution: {integrity: sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw==} @@ -5055,9 +5070,6 @@ packages: esast-util-from-js@2.0.1: resolution: {integrity: sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw==} - esbuild-code-inspector-plugin@0.18.3: - 
resolution: {integrity: sha512-FaPt5eFMtW1oXMWqAcqfAJByNagP1V/R9dwDDLQO29JmryMF35+frskTqy+G53whmTaVi19+TCrFqhNbMZH5ZQ==} - esbuild-register@3.6.0: resolution: {integrity: sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg==} peerDependencies: @@ -6413,8 +6425,8 @@ packages: resolution: {integrity: sha512-MbjN408fEndfiQXbFQ1vnd+1NoLDsnQW41410oQBXiyXDMYH5z505juWa4KUE1LqxRC7DgOgZDbKLxHIwm27hA==} engines: {node: '>=0.10'} - launch-ide@1.0.1: - resolution: {integrity: sha512-U7qBxSNk774PxWq4XbmRe0ThiIstPoa4sMH/OGSYxrFVvg8x3biXcF1fsH6wasDpEmEXMdINUrQhBdwsSgKyMg==} + launch-ide@1.2.0: + resolution: {integrity: sha512-7nXSPQOt3b2JT52Ge8jp4miFcY+nrUEZxNLWBzrEfjmByDTb9b5ytqMSwGhsNwY6Cntwop+6n7rWIFN0+S8PTw==} layout-base@1.0.2: resolution: {integrity: sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==} @@ -8693,9 +8705,6 @@ packages: vfile@6.0.3: resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==} - vite-code-inspector-plugin@0.18.3: - resolution: {integrity: sha512-178H73vbDUHE+JpvfAfioUHlUr7qXCYIEa2YNXtzenFQGOjtae59P1jjcxGfa6pPHEnOoaitb13K+0qxwhi/WA==} - vm-browserify@1.1.2: resolution: {integrity: sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==} @@ -8754,9 +8763,6 @@ packages: engines: {node: '>= 10.13.0'} hasBin: true - webpack-code-inspector-plugin@0.18.3: - resolution: {integrity: sha512-3782rsJhBnRiw0IpR6EqnyGDQoiSq0CcGeLJ52rZXlszYCe8igXtcujq7OhI0byaivWQ1LW7sXKyMEoVpBhq0w==} - webpack-dev-middleware@6.1.3: resolution: {integrity: sha512-A4ChP0Qj8oGociTs6UdlRUGANIGrCDL3y+pmQMc+dSsraXHCatFpmMey4mYELA+juqwUqwQsUgJJISXl1KWmiw==} engines: {node: '>= 14.15.0'} @@ -9993,6 +9999,48 @@ snapshots: picocolors: 1.1.1 sisteransi: 1.0.5 + '@code-inspector/core@1.2.9': + dependencies: + '@vue/compiler-dom': 3.5.17 + chalk: 4.1.2 + dotenv: 16.6.1 + launch-ide: 1.2.0 + portfinder: 1.0.37 + transitivePeerDependencies: + - supports-color + + '@code-inspector/esbuild@1.2.9': + dependencies: + '@code-inspector/core': 1.2.9 + transitivePeerDependencies: + - supports-color + + '@code-inspector/mako@1.2.9': + dependencies: + '@code-inspector/core': 1.2.9 + transitivePeerDependencies: + - supports-color + + '@code-inspector/turbopack@1.2.9': + dependencies: + '@code-inspector/core': 1.2.9 + '@code-inspector/webpack': 1.2.9 + transitivePeerDependencies: + - supports-color + + '@code-inspector/vite@1.2.9': + dependencies: + '@code-inspector/core': 1.2.9 + chalk: 4.1.1 + transitivePeerDependencies: + - supports-color + + '@code-inspector/webpack@1.2.9': + dependencies: + '@code-inspector/core': 1.2.9 + transitivePeerDependencies: + - supports-color + '@cspotcode/source-map-support@0.8.1': dependencies: '@jridgewell/trace-mapping': 0.3.9 @@ -12799,7 +12847,7 @@ snapshots: '@vue/compiler-core@3.5.17': dependencies: - '@babel/parser': 7.28.0 + '@babel/parser': 7.28.4 '@vue/shared': 3.5.17 entities: 4.5.0 estree-walker: 2.0.2 @@ -13503,24 +13551,15 @@ snapshots: co@4.6.0: {} - code-inspector-core@0.18.3: + code-inspector-plugin@1.2.9: dependencies: - '@vue/compiler-dom': 3.5.17 + '@code-inspector/core': 1.2.9 + '@code-inspector/esbuild': 1.2.9 + '@code-inspector/mako': 1.2.9 + '@code-inspector/turbopack': 1.2.9 + '@code-inspector/vite': 1.2.9 + '@code-inspector/webpack': 1.2.9 chalk: 4.1.1 - dotenv: 16.6.1 - launch-ide: 1.0.1 - portfinder: 1.0.37 - transitivePeerDependencies: - - supports-color - - 
code-inspector-plugin@0.18.3: - dependencies: - chalk: 4.1.1 - code-inspector-core: 0.18.3 - dotenv: 16.6.1 - esbuild-code-inspector-plugin: 0.18.3 - vite-code-inspector-plugin: 0.18.3 - webpack-code-inspector-plugin: 0.18.3 transitivePeerDependencies: - supports-color @@ -14160,12 +14199,6 @@ snapshots: esast-util-from-estree: 2.0.0 vfile-message: 4.0.2 - esbuild-code-inspector-plugin@0.18.3: - dependencies: - code-inspector-core: 0.18.3 - transitivePeerDependencies: - - supports-color - esbuild-register@3.6.0(esbuild@0.25.0): dependencies: debug: 4.4.1 @@ -16020,7 +16053,7 @@ snapshots: dependencies: language-subtag-registry: 0.3.23 - launch-ide@1.0.1: + launch-ide@1.2.0: dependencies: chalk: 4.1.2 dotenv: 16.6.1 @@ -18779,12 +18812,6 @@ snapshots: '@types/unist': 3.0.3 vfile-message: 4.0.2 - vite-code-inspector-plugin@0.18.3: - dependencies: - code-inspector-core: 0.18.3 - transitivePeerDependencies: - - supports-color - vm-browserify@1.1.2: {} void-elements@3.1.0: {} @@ -18855,12 +18882,6 @@ snapshots: - bufferutil - utf-8-validate - webpack-code-inspector-plugin@0.18.3: - dependencies: - code-inspector-core: 0.18.3 - transitivePeerDependencies: - - supports-color - webpack-dev-middleware@6.1.3(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)): dependencies: colorette: 2.0.20 diff --git a/web/tsconfig.json b/web/tsconfig.json index c3e0bca665..3b022e4708 100644 --- a/web/tsconfig.json +++ b/web/tsconfig.json @@ -26,6 +26,9 @@ "paths": { "@/*": [ "./*" + ], + "~@/*": [ + "./*" ] } }, From fb6ccccc3da48739c20ce0d8a4bee9fa8ad89908 Mon Sep 17 00:00:00 2001 From: GuanMu Date: Tue, 23 Sep 2025 17:04:56 +0800 Subject: [PATCH 019/126] chore: refactor component exports for consistency (#26033) Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- web/app/components/app/log/list.tsx | 2 +- web/app/components/base/copy-icon/index.tsx | 2 +- web/app/components/base/markdown-blocks/think-block.tsx | 2 +- web/app/components/base/svg-gallery/index.tsx | 2 +- .../json-schema-config-modal/json-schema-generator/index.tsx | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/web/app/components/app/log/list.tsx b/web/app/components/app/log/list.tsx index b73d1f19de..8b3370b678 100644 --- a/web/app/components/app/log/list.tsx +++ b/web/app/components/app/log/list.tsx @@ -35,7 +35,7 @@ import { useStore as useAppStore } from '@/app/components/app/store' import { useAppContext } from '@/context/app-context' import useTimestamp from '@/hooks/use-timestamp' import Tooltip from '@/app/components/base/tooltip' -import { CopyIcon } from '@/app/components/base/copy-icon' +import CopyIcon from '@/app/components/base/copy-icon' import { buildChatItemTree, getThreadMessages } from '@/app/components/base/chat/utils' import { getProcessedFilesFromResponse } from '@/app/components/base/file-uploader/utils' import cn from '@/utils/classnames' diff --git a/web/app/components/base/copy-icon/index.tsx b/web/app/components/base/copy-icon/index.tsx index 196e256978..158eaa5fbe 100644 --- a/web/app/components/base/copy-icon/index.tsx +++ b/web/app/components/base/copy-icon/index.tsx @@ -15,7 +15,7 @@ type Props = { const prefixEmbedded = 'appOverview.overview.appInfo.embedded' -export const CopyIcon = ({ content }: Props) => { +const CopyIcon = ({ content }: Props) => { const { t } = useTranslation() const [isCopied, setIsCopied] = useState(false) diff --git a/web/app/components/base/markdown-blocks/think-block.tsx 
b/web/app/components/base/markdown-blocks/think-block.tsx index a5813266f1..acceecd433 100644 --- a/web/app/components/base/markdown-blocks/think-block.tsx +++ b/web/app/components/base/markdown-blocks/think-block.tsx @@ -63,7 +63,7 @@ const useThinkTimer = (children: any) => { return { elapsedTime, isComplete } } -export const ThinkBlock = ({ children, ...props }: any) => { +const ThinkBlock = ({ children, ...props }: React.ComponentProps<'details'>) => { const { elapsedTime, isComplete } = useThinkTimer(children) const displayContent = removeEndThink(children) const { t } = useTranslation() diff --git a/web/app/components/base/svg-gallery/index.tsx b/web/app/components/base/svg-gallery/index.tsx index 710a0107fb..b6b2fe9db4 100644 --- a/web/app/components/base/svg-gallery/index.tsx +++ b/web/app/components/base/svg-gallery/index.tsx @@ -3,7 +3,7 @@ import { SVG } from '@svgdotjs/svg.js' import DOMPurify from 'dompurify' import ImagePreview from '@/app/components/base/image-uploader/image-preview' -export const SVGRenderer = ({ content }: { content: string }) => { +const SVGRenderer = ({ content }: { content: string }) => { const svgRef = useRef(null) const [imagePreview, setImagePreview] = useState('') const [windowSize, setWindowSize] = useState({ diff --git a/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/json-schema-generator/index.tsx b/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/json-schema-generator/index.tsx index 64138b3cbd..1a4eb3cfdb 100644 --- a/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/json-schema-generator/index.tsx +++ b/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/json-schema-generator/index.tsx @@ -30,7 +30,7 @@ enum GeneratorView { result = 'result', } -export const JsonSchemaGenerator: FC = ({ +const JsonSchemaGenerator: FC = ({ onApply, crossAxisOffset, }) => { From 24b4289d6cfb641e0619099a64d176b57b8d22da Mon Sep 17 00:00:00 2001 From: longbingljw Date: Tue, 23 Sep 2025 17:06:06 +0800 Subject: [PATCH 020/126] fix:add some explanation for oceanbase parser selection (#26071) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/configs/middleware/vdb/oceanbase_config.py | 8 ++++++-- api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py | 2 +- docker/.env.example | 2 ++ 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/api/configs/middleware/vdb/oceanbase_config.py b/api/configs/middleware/vdb/oceanbase_config.py index 99f4c49407..7c9376f86b 100644 --- a/api/configs/middleware/vdb/oceanbase_config.py +++ b/api/configs/middleware/vdb/oceanbase_config.py @@ -40,8 +40,12 @@ class OceanBaseVectorConfig(BaseSettings): OCEANBASE_FULLTEXT_PARSER: str | None = Field( description=( - "Fulltext parser to use for text indexing. Options: 'japanese_ftparser' (Japanese), " - "'thai_ftparser' (Thai), 'ik' (Chinese). Default is 'ik'" + "Fulltext parser to use for text indexing. " + "Built-in options: 'ngram' (N-gram tokenizer for English/numbers), " + "'beng' (Basic English tokenizer), 'space' (Space-based tokenizer), " + "'ngram2' (Improved N-gram tokenizer), 'ik' (Chinese tokenizer). " + "External plugins (require installation): 'japanese_ftparser' (Japanese tokenizer), " + "'thai_ftparser' (Thai tokenizer). 
Default is 'ik'" ), default="ik", ) diff --git a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py index 49cf900126..b3db7332e8 100644 --- a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py +++ b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py @@ -123,7 +123,7 @@ class OceanBaseVector(BaseVector): # Get parser from config or use default ik parser parser_name = dify_config.OCEANBASE_FULLTEXT_PARSER or "ik" - allowed_parsers = ["ik", "japanese_ftparser", "thai_ftparser"] + allowed_parsers = ["ngram", "beng", "space", "ngram2", "ik", "japanese_ftparser", "thai_ftparser"] if parser_name not in allowed_parsers: raise ValueError( f"Invalid OceanBase full-text parser: {parser_name}. " diff --git a/docker/.env.example b/docker/.env.example index d4e8ab3beb..07b4088470 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -655,6 +655,8 @@ LINDORM_USING_UGC=True LINDORM_QUERY_TIMEOUT=1 # OceanBase Vector configuration, only available when VECTOR_STORE is `oceanbase` +# Built-in fulltext parsers are `ngram`, `beng`, `space`, `ngram2`, `ik` +# External fulltext parsers (require plugin installation) are `japanese_ftparser`, `thai_ftparser` OCEANBASE_VECTOR_HOST=oceanbase OCEANBASE_VECTOR_PORT=2881 OCEANBASE_VECTOR_USER=root@test From d9e45a1abe01ca778b3e9440c0ffa7e045bbbeb0 Mon Sep 17 00:00:00 2001 From: Wu Tianwei <30284043+WTW0313@users.noreply.github.com> Date: Tue, 23 Sep 2025 18:18:22 +0800 Subject: [PATCH 021/126] feat(pipeline): add language support to built-in pipeline templates and update related components (#26124) --- .../list/built-in-pipeline-list.tsx | 11 ++++++++++- web/app/components/plugins/plugin-item/index.tsx | 7 +++---- web/app/components/tools/provider/detail.tsx | 3 +-- web/models/pipeline.ts | 1 + web/service/use-pipeline.ts | 6 ++++-- 5 files changed, 19 insertions(+), 9 deletions(-) diff --git a/web/app/components/datasets/create-from-pipeline/list/built-in-pipeline-list.tsx b/web/app/components/datasets/create-from-pipeline/list/built-in-pipeline-list.tsx index 9d3d0e7717..6d22f2115a 100644 --- a/web/app/components/datasets/create-from-pipeline/list/built-in-pipeline-list.tsx +++ b/web/app/components/datasets/create-from-pipeline/list/built-in-pipeline-list.tsx @@ -1,9 +1,18 @@ import { usePipelineTemplateList } from '@/service/use-pipeline' import TemplateCard from './template-card' import CreateCard from './create-card' +import { useI18N } from '@/context/i18n' +import { useMemo } from 'react' +import { LanguagesSupported } from '@/i18n-config/language' const BuiltInPipelineList = () => { - const { data: pipelineList, isLoading } = usePipelineTemplateList({ type: 'built-in' }) + const { locale } = useI18N() + const language = useMemo(() => { + if (['zh-Hans', 'ja-JP'].includes(locale)) + return locale + return LanguagesSupported[0] + }, [locale]) + const { data: pipelineList, isLoading } = usePipelineTemplateList({ type: 'built-in', language }) const list = pipelineList?.pipeline_templates || [] return ( diff --git a/web/app/components/plugins/plugin-item/index.tsx b/web/app/components/plugins/plugin-item/index.tsx index c228ca4db4..ed7cf47bb7 100644 --- a/web/app/components/plugins/plugin-item/index.tsx +++ b/web/app/components/plugins/plugin-item/index.tsx @@ -146,7 +146,6 @@ const PluginItem: FC = ({ {/* Organization & Name */}
= ({ {category === PluginType.extension && ( <>
·
-
- +
+ = ({ && <>
{t('plugin.from')} marketplace
- +
} diff --git a/web/app/components/tools/provider/detail.tsx b/web/app/components/tools/provider/detail.tsx index 87d09bd527..dd2972a9d6 100644 --- a/web/app/components/tools/provider/detail.tsx +++ b/web/app/components/tools/provider/detail.tsx @@ -244,9 +244,8 @@ const ProviderDetail = ({
</div> - <div className='mb-1 flex h-4 items-center justify-between'> + <div className='mb-1 mt-0.5 flex h-4 items-center justify-between'> <OrgInfo - className="mt-0.5" packageNameClassName='w-auto' orgName={collection.author} packageName={collection.name} diff --git a/web/models/pipeline.ts b/web/models/pipeline.ts index d644d43bf3..1c2211b6d9 100644 --- a/web/models/pipeline.ts +++ b/web/models/pipeline.ts @@ -17,6 +17,7 @@ export enum DatasourceType { export type PipelineTemplateListParams = { type: 'built-in' | 'customized' + language?: string } export type PipelineTemplate = { diff --git a/web/service/use-pipeline.ts b/web/service/use-pipeline.ts index 870fb5bc84..a7b9c89410 100644 --- a/web/service/use-pipeline.ts +++ b/web/service/use-pipeline.ts @@ -40,8 +40,9 @@ const NAME_SPACE = 'pipeline' export const PipelineTemplateListQueryKeyPrefix = [NAME_SPACE, 'template-list'] export const usePipelineTemplateList = (params: PipelineTemplateListParams) => { + const { type, language } = params return useQuery<PipelineTemplateListResponse>({ - queryKey: [...PipelineTemplateListQueryKeyPrefix, params.type], + queryKey: [...PipelineTemplateListQueryKeyPrefix, type, language], queryFn: () => { return get<PipelineTemplateListResponse>('/rag/pipeline/templates', { params }) }, @@ -55,7 +56,7 @@ export const useInvalidCustomizedTemplateList = () => { export const usePipelineTemplateById = (params: PipelineTemplateByIdRequest, enabled: boolean) => { const { template_id, type } = params return useQuery<PipelineTemplateByIdResponse>({ - queryKey: [NAME_SPACE, 'template', template_id], + queryKey: [NAME_SPACE, 'template', type, template_id], queryFn: () => { return get<PipelineTemplateByIdResponse>(`/rag/pipeline/templates/${template_id}`, { params: { @@ -64,6 +65,7 @@ export const usePipelineTemplateById = (params: PipelineTemplateByIdRequest, ena }) }, enabled, + staleTime: 0, }) } From 2913d17fe23bda01c5d00d3c8e139845bf9203a2 Mon Sep 17 00:00:00 2001 From: QuantumGhost <obelisk.reg+git@gmail.com> Date: Tue, 23 Sep 2025 18:48:02 +0800 Subject: [PATCH 022/126] ci: Add hotfix/** branches to build-push workflow triggers (#26129) --- .github/workflows/build-push.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build-push.yml b/.github/workflows/build-push.yml index 17af047267..24a9da4400 100644 --- a/.github/workflows/build-push.yml +++ b/.github/workflows/build-push.yml @@ -8,6 +8,7 @@ on: - "deploy/enterprise" - "build/**" - "release/e-*" + - "hotfix/**" tags: - "*" From 96a0b9991e274c8e61fa2f2aaf46b7df3eb52906 Mon Sep 17 00:00:00 2001 From: QuantumGhost <obelisk.reg+git@gmail.com> Date: Tue, 23 Sep 2025 21:30:46 +0800 Subject: [PATCH 023/126] fix(api): Fix variable truncation for `list[File]` value in output mapping (#26133) --- api/services/variable_truncator.py | 8 ++++++++ api/tests/unit_tests/services/test_variable_truncator.py | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git a/api/services/variable_truncator.py b/api/services/variable_truncator.py index 4362bb0291..d02508e4f3 100644 --- a/api/services/variable_truncator.py +++ b/api/services/variable_truncator.py @@ -262,6 +262,14 @@ class VariableTruncator: target_length = self._array_element_limit for i, item in enumerate(value): + # Dirty fix: + # The output of `Start` node may contain list of `File` elements, + # causing `AssertionError` while invoking `_truncate_json_primitives`. 
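+            # (A `File` is not a JSON primitive, so running it through the
+            # generic truncation helpers is what raises that assertion.)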
+ # + # This check ensures that `list[File]` are handled separately + if isinstance(item, File): + truncated_value.append(item) + continue if i >= target_length: return _PartResult(truncated_value, used_size, True) if i > 0: diff --git a/api/tests/unit_tests/services/test_variable_truncator.py b/api/tests/unit_tests/services/test_variable_truncator.py index 0ad056c985..6761f939e3 100644 --- a/api/tests/unit_tests/services/test_variable_truncator.py +++ b/api/tests/unit_tests/services/test_variable_truncator.py @@ -588,3 +588,11 @@ class TestIntegrationScenarios: if isinstance(result.result, ObjectSegment): result_size = truncator.calculate_json_size(result.result.value) assert result_size <= original_size + + def test_file_and_array_file_variable_mapping(self, file): + truncator = VariableTruncator(string_length_limit=30, array_element_limit=3, max_size_bytes=300) + + mapping = {"array_file": [file]} + truncated_mapping, truncated = truncator.truncate_variable_mapping(mapping) + assert truncated is False + assert truncated_mapping == mapping From 25c69ac54089e9676e8e926603e28ac252f6f231 Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Wed, 24 Sep 2025 04:32:48 +0900 Subject: [PATCH 024/126] one example of Session (#24135) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: -LAN- <laipz8200@outlook.com> --- api/commands.py | 152 +++++++++--------- api/controllers/console/app/conversation.py | 5 +- .../console/datasets/datasets_document.py | 5 +- api/models/dataset.py | 4 +- api/models/model.py | 6 +- api/services/app_service.py | 3 +- api/services/dataset_service.py | 12 +- api/services/plugin/plugin_migration.py | 2 +- api/services/tag_service.py | 3 +- api/tasks/document_indexing_sync_task.py | 3 +- 10 files changed, 100 insertions(+), 95 deletions(-) diff --git a/api/commands.py b/api/commands.py index cb8aa8430a..82efe34611 100644 --- a/api/commands.py +++ b/api/commands.py @@ -10,6 +10,7 @@ from flask import current_app from pydantic import TypeAdapter from sqlalchemy import select from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy.orm import sessionmaker from configs import dify_config from constants.languages import languages @@ -61,31 +62,30 @@ def reset_password(email, new_password, password_confirm): if str(new_password).strip() != str(password_confirm).strip(): click.echo(click.style("Passwords do not match.", fg="red")) return + with sessionmaker(db.engine, expire_on_commit=False).begin() as session: + account = session.query(Account).where(Account.email == email).one_or_none() - account = db.session.query(Account).where(Account.email == email).one_or_none() + if not account: + click.echo(click.style(f"Account not found for email: {email}", fg="red")) + return - if not account: - click.echo(click.style(f"Account not found for email: {email}", fg="red")) - return + try: + valid_password(new_password) + except: + click.echo(click.style(f"Invalid password. Must match {password_pattern}", fg="red")) + return - try: - valid_password(new_password) - except: - click.echo(click.style(f"Invalid password. 
Must match {password_pattern}", fg="red")) - return + # generate password salt + salt = secrets.token_bytes(16) + base64_salt = base64.b64encode(salt).decode() - # generate password salt - salt = secrets.token_bytes(16) - base64_salt = base64.b64encode(salt).decode() - - # encrypt password with salt - password_hashed = hash_password(new_password, salt) - base64_password_hashed = base64.b64encode(password_hashed).decode() - account.password = base64_password_hashed - account.password_salt = base64_salt - db.session.commit() - AccountService.reset_login_error_rate_limit(email) - click.echo(click.style("Password reset successfully.", fg="green")) + # encrypt password with salt + password_hashed = hash_password(new_password, salt) + base64_password_hashed = base64.b64encode(password_hashed).decode() + account.password = base64_password_hashed + account.password_salt = base64_salt + AccountService.reset_login_error_rate_limit(email) + click.echo(click.style("Password reset successfully.", fg="green")) @click.command("reset-email", help="Reset the account email.") @@ -100,22 +100,21 @@ def reset_email(email, new_email, email_confirm): if str(new_email).strip() != str(email_confirm).strip(): click.echo(click.style("New emails do not match.", fg="red")) return + with sessionmaker(db.engine, expire_on_commit=False).begin() as session: + account = session.query(Account).where(Account.email == email).one_or_none() - account = db.session.query(Account).where(Account.email == email).one_or_none() + if not account: + click.echo(click.style(f"Account not found for email: {email}", fg="red")) + return - if not account: - click.echo(click.style(f"Account not found for email: {email}", fg="red")) - return + try: + email_validate(new_email) + except: + click.echo(click.style(f"Invalid email: {new_email}", fg="red")) + return - try: - email_validate(new_email) - except: - click.echo(click.style(f"Invalid email: {new_email}", fg="red")) - return - - account.email = new_email - db.session.commit() - click.echo(click.style("Email updated successfully.", fg="green")) + account.email = new_email + click.echo(click.style("Email updated successfully.", fg="green")) @click.command( @@ -139,25 +138,24 @@ def reset_encrypt_key_pair(): if dify_config.EDITION != "SELF_HOSTED": click.echo(click.style("This command is only for SELF_HOSTED installations.", fg="red")) return + with sessionmaker(db.engine, expire_on_commit=False).begin() as session: + tenants = session.query(Tenant).all() + for tenant in tenants: + if not tenant: + click.echo(click.style("No workspaces found. Run /install first.", fg="red")) + return - tenants = db.session.query(Tenant).all() - for tenant in tenants: - if not tenant: - click.echo(click.style("No workspaces found. Run /install first.", fg="red")) - return + tenant.encrypt_public_key = generate_key_pair(tenant.id) - tenant.encrypt_public_key = generate_key_pair(tenant.id) + session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete() + session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete() - db.session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete() - db.session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete() - db.session.commit() - - click.echo( - click.style( - f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.", - fg="green", + click.echo( + click.style( + f"Congratulations! 
The asymmetric key pair of workspace {tenant.id} has been reset.", + fg="green", + ) ) - ) @click.command("vdb-migrate", help="Migrate vector db.") @@ -182,14 +180,15 @@ def migrate_annotation_vector_database(): try: # get apps info per_page = 50 - apps = ( - db.session.query(App) - .where(App.status == "normal") - .order_by(App.created_at.desc()) - .limit(per_page) - .offset((page - 1) * per_page) - .all() - ) + with sessionmaker(db.engine, expire_on_commit=False).begin() as session: + apps = ( + session.query(App) + .where(App.status == "normal") + .order_by(App.created_at.desc()) + .limit(per_page) + .offset((page - 1) * per_page) + .all() + ) if not apps: break except SQLAlchemyError: @@ -203,26 +202,27 @@ def migrate_annotation_vector_database(): ) try: click.echo(f"Creating app annotation index: {app.id}") - app_annotation_setting = ( - db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first() - ) + with sessionmaker(db.engine, expire_on_commit=False).begin() as session: + app_annotation_setting = ( + session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first() + ) - if not app_annotation_setting: - skipped_count = skipped_count + 1 - click.echo(f"App annotation setting disabled: {app.id}") - continue - # get dataset_collection_binding info - dataset_collection_binding = ( - db.session.query(DatasetCollectionBinding) - .where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id) - .first() - ) - if not dataset_collection_binding: - click.echo(f"App annotation collection binding not found: {app.id}") - continue - annotations = db.session.scalars( - select(MessageAnnotation).where(MessageAnnotation.app_id == app.id) - ).all() + if not app_annotation_setting: + skipped_count = skipped_count + 1 + click.echo(f"App annotation setting disabled: {app.id}") + continue + # get dataset_collection_binding info + dataset_collection_binding = ( + session.query(DatasetCollectionBinding) + .where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id) + .first() + ) + if not dataset_collection_binding: + click.echo(f"App annotation collection binding not found: {app.id}") + continue + annotations = session.scalars( + select(MessageAnnotation).where(MessageAnnotation.app_id == app.id) + ).all() dataset = Dataset( id=app.id, tenant_id=app.tenant_id, diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index c0cbf6613e..f104ab5dee 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -1,6 +1,7 @@ from datetime import datetime import pytz # pip install pytz +import sqlalchemy as sa from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from flask_restx.inputs import int_range @@ -70,7 +71,7 @@ class CompletionConversationApi(Resource): parser.add_argument("limit", type=int_range(1, 100), default=20, location="args") args = parser.parse_args() - query = db.select(Conversation).where( + query = sa.select(Conversation).where( Conversation.app_id == app_model.id, Conversation.mode == "completion", Conversation.is_deleted.is_(False) ) @@ -236,7 +237,7 @@ class ChatConversationApi(Resource): .subquery() ) - query = db.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.is_deleted.is_(False)) + query = sa.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.is_deleted.is_(False)) if args["keyword"]: keyword_filter 
= f"%{args['keyword']}%" diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 5de1f6c6ee..e6f5daa87b 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -4,6 +4,7 @@ from argparse import ArgumentTypeError from collections.abc import Sequence from typing import Literal, cast +import sqlalchemy as sa from flask import request from flask_login import current_user from flask_restx import Resource, fields, marshal, marshal_with, reqparse @@ -211,13 +212,13 @@ class DatasetDocumentListApi(Resource): if sort == "hit_count": sub_query = ( - db.select(DocumentSegment.document_id, db.func.sum(DocumentSegment.hit_count).label("total_hit_count")) + sa.select(DocumentSegment.document_id, sa.func.sum(DocumentSegment.hit_count).label("total_hit_count")) .group_by(DocumentSegment.document_id) .subquery() ) query = query.outerjoin(sub_query, sub_query.c.document_id == Document.id).order_by( - sort_logic(db.func.coalesce(sub_query.c.total_hit_count, 0)), + sort_logic(sa.func.coalesce(sub_query.c.total_hit_count, 0)), sort_logic(Document.position), ) elif sort == "created_at": diff --git a/api/models/dataset.py b/api/models/dataset.py index 2c4059f800..25ebe14738 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -910,7 +910,7 @@ class AppDatasetJoin(Base): id = mapped_column(StringUUID, primary_key=True, nullable=False, server_default=sa.text("uuid_generate_v4()")) app_id = mapped_column(StringUUID, nullable=False) dataset_id = mapped_column(StringUUID, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=db.func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp()) @property def app(self): @@ -931,7 +931,7 @@ class DatasetQuery(Base): source_app_id = mapped_column(StringUUID, nullable=True) created_by_role = mapped_column(String, nullable=False) created_by = mapped_column(StringUUID, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=db.func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp()) class DatasetKeywordTable(Base): diff --git a/api/models/model.py b/api/models/model.py index 9bcb81b41b..a8218c3a4e 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -1731,7 +1731,7 @@ class MessageChain(Base): type: Mapped[str] = mapped_column(String(255), nullable=False) input = mapped_column(sa.Text, nullable=True) output = mapped_column(sa.Text, nullable=True) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=db.func.current_timestamp()) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp()) class MessageAgentThought(Base): @@ -1769,7 +1769,7 @@ class MessageAgentThought(Base): latency: Mapped[float | None] = mapped_column(sa.Float, nullable=True) created_by_role = mapped_column(String, nullable=False) created_by = mapped_column(StringUUID, nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=db.func.current_timestamp()) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp()) @property def files(self) -> list[Any]: @@ -1872,7 +1872,7 @@ class DatasetRetrieverResource(Base): index_node_hash = mapped_column(sa.Text, 
nullable=True) retriever_from = mapped_column(sa.Text, nullable=False) created_by = mapped_column(StringUUID, nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=db.func.current_timestamp()) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp()) class Tag(Base): diff --git a/api/services/app_service.py b/api/services/app_service.py index d524adbf3e..4fc6cf2494 100644 --- a/api/services/app_service.py +++ b/api/services/app_service.py @@ -2,6 +2,7 @@ import json import logging from typing import TypedDict, cast +import sqlalchemy as sa from flask_sqlalchemy.pagination import Pagination from configs import dify_config @@ -65,7 +66,7 @@ class AppService: return None app_models = db.paginate( - db.select(App).where(*filters).order_by(App.created_at.desc()), + sa.select(App).where(*filters).order_by(App.created_at.desc()), page=args["page"], per_page=args["limit"], error_out=False, diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index 8b3720026d..c9dd78ddd1 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -115,12 +115,12 @@ class DatasetService: # Check if permitted_dataset_ids is not empty to avoid WHERE false condition if permitted_dataset_ids and len(permitted_dataset_ids) > 0: query = query.where( - db.or_( + sa.or_( Dataset.permission == DatasetPermissionEnum.ALL_TEAM, - db.and_( + sa.and_( Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id ), - db.and_( + sa.and_( Dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM, Dataset.id.in_(permitted_dataset_ids), ), @@ -128,9 +128,9 @@ class DatasetService: ) else: query = query.where( - db.or_( + sa.or_( Dataset.permission == DatasetPermissionEnum.ALL_TEAM, - db.and_( + sa.and_( Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id ), ) @@ -1879,7 +1879,7 @@ class DocumentService: # for notion_info in notion_info_list: # workspace_id = notion_info.workspace_id # data_source_binding = DataSourceOauthBinding.query.filter( - # db.and_( + # sa.and_( # DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, # DataSourceOauthBinding.provider == "notion", # DataSourceOauthBinding.disabled == False, diff --git a/api/services/plugin/plugin_migration.py b/api/services/plugin/plugin_migration.py index 5db19711e6..99946d8fa9 100644 --- a/api/services/plugin/plugin_migration.py +++ b/api/services/plugin/plugin_migration.py @@ -471,7 +471,7 @@ class PluginMigration: total_failed_tenant = 0 while True: # paginate - tenants = db.paginate(db.select(Tenant).order_by(Tenant.created_at.desc()), page=page, per_page=100) + tenants = db.paginate(sa.select(Tenant).order_by(Tenant.created_at.desc()), page=page, per_page=100) if tenants.items is None or len(tenants.items) == 0: break diff --git a/api/services/tag_service.py b/api/services/tag_service.py index 4674335ba8..db7ed3d5c3 100644 --- a/api/services/tag_service.py +++ b/api/services/tag_service.py @@ -1,5 +1,6 @@ import uuid +import sqlalchemy as sa from flask_login import current_user from sqlalchemy import func, select from werkzeug.exceptions import NotFound @@ -18,7 +19,7 @@ class TagService: .where(Tag.type == tag_type, Tag.tenant_id == current_tenant_id) ) if keyword: - query = query.where(db.and_(Tag.name.ilike(f"%{keyword}%"))) + query = query.where(sa.and_(Tag.name.ilike(f"%{keyword}%"))) query = query.group_by(Tag.id, Tag.type, Tag.name, Tag.created_at) results: list = 
query.order_by(Tag.created_at.desc()).all() return results diff --git a/api/tasks/document_indexing_sync_task.py b/api/tasks/document_indexing_sync_task.py index 10da9a9af4..4c1f38c3bb 100644 --- a/api/tasks/document_indexing_sync_task.py +++ b/api/tasks/document_indexing_sync_task.py @@ -2,6 +2,7 @@ import logging import time import click +import sqlalchemy as sa from celery import shared_task from sqlalchemy import select @@ -51,7 +52,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str): data_source_binding = ( db.session.query(DataSourceOauthBinding) .where( - db.and_( + sa.and_( DataSourceOauthBinding.tenant_id == document.tenant_id, DataSourceOauthBinding.provider == "notion", DataSourceOauthBinding.disabled == False, From a4acc64afd87d07fc1522a17f26bd287802ecc42 Mon Sep 17 00:00:00 2001 From: quicksand <quicksandzn@gmail.com> Date: Wed, 24 Sep 2025 10:09:35 +0800 Subject: [PATCH 025/126] fix(api):LLM node losing Flask context during parallel iterations (#26098) --- .../nodes/iteration/iteration_node.py | 36 +++++++++++-------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index 593281c9b5..1a417b5739 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -1,9 +1,11 @@ +import contextvars import logging from collections.abc import Generator, Mapping, Sequence from concurrent.futures import Future, ThreadPoolExecutor, as_completed from datetime import UTC, datetime from typing import TYPE_CHECKING, Any, NewType, cast +from flask import Flask, current_app from typing_extensions import TypeIs from core.variables import IntegerVariable, NoneSegment @@ -35,6 +37,7 @@ from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.iteration.entities import ErrorHandleMode, IterationNodeData from libs.datetime_utils import naive_utc_now +from libs.flask_utils import preserve_flask_contexts from .exc import ( InvalidIteratorValueError, @@ -239,6 +242,8 @@ class IterationNode(Node): self._execute_single_iteration_parallel, index=index, item=item, + flask_app=current_app._get_current_object(), # type: ignore + context_vars=contextvars.copy_context(), ) future_to_index[future] = index @@ -281,26 +286,29 @@ class IterationNode(Node): self, index: int, item: object, + flask_app: Flask, + context_vars: contextvars.Context, ) -> tuple[datetime, list[GraphNodeEventBase], object | None, int]: """Execute a single iteration in parallel mode and return results.""" - iter_start_at = datetime.now(UTC).replace(tzinfo=None) - events: list[GraphNodeEventBase] = [] - outputs_temp: list[object] = [] + with preserve_flask_contexts(flask_app=flask_app, context_vars=context_vars): + iter_start_at = datetime.now(UTC).replace(tzinfo=None) + events: list[GraphNodeEventBase] = [] + outputs_temp: list[object] = [] - graph_engine = self._create_graph_engine(index, item) + graph_engine = self._create_graph_engine(index, item) - # Collect events instead of yielding them directly - for event in self._run_single_iter( - variable_pool=graph_engine.graph_runtime_state.variable_pool, - outputs=outputs_temp, - graph_engine=graph_engine, - ): - events.append(event) + # Collect events instead of yielding them directly + for event in self._run_single_iter( + variable_pool=graph_engine.graph_runtime_state.variable_pool, + outputs=outputs_temp, 
+ graph_engine=graph_engine, + ): + events.append(event) - # Get the output value from the temporary outputs list - output_value = outputs_temp[0] if outputs_temp else None + # Get the output value from the temporary outputs list + output_value = outputs_temp[0] if outputs_temp else None - return iter_start_at, events, output_value, graph_engine.graph_runtime_state.total_tokens + return iter_start_at, events, output_value, graph_engine.graph_runtime_state.total_tokens def _handle_iteration_success( self, From 9b360592920a69c1da8201be94211110ffa1e3de Mon Sep 17 00:00:00 2001 From: yangzheli <43645580+yangzheli@users.noreply.github.com> Date: Wed, 24 Sep 2025 15:53:59 +0800 Subject: [PATCH 026/126] fix(search-input): ensure proper value extraction in composition end handler (#26147) --- web/app/components/base/search-input/index.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/app/components/base/search-input/index.tsx b/web/app/components/base/search-input/index.tsx index cf9bc1c376..3330b55330 100644 --- a/web/app/components/base/search-input/index.tsx +++ b/web/app/components/base/search-input/index.tsx @@ -53,7 +53,7 @@ const SearchInput: FC<SearchInputProps> = ({ }} onCompositionEnd={(e) => { isComposing.current = false - onChange(e.data) + onChange(e.currentTarget.value) }} onFocus={() => setFocus(true)} onBlur={() => setFocus(false)} From 960bb8a9b4421232029a35a3c2ef8d9c09dc82c3 Mon Sep 17 00:00:00 2001 From: Jyong <76649700+JohnJyong@users.noreply.github.com> Date: Wed, 24 Sep 2025 21:32:37 +0800 Subject: [PATCH 027/126] delete end_user check (#26187) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/controllers/service_api/dataset/document.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py index d26c64fe36..e01bc8940c 100644 --- a/api/controllers/service_api/dataset/document.py +++ b/api/controllers/service_api/dataset/document.py @@ -30,7 +30,6 @@ from extensions.ext_database import db from fields.document_fields import document_fields, document_status_fields from libs.login import current_user from models.dataset import Dataset, Document, DocumentSegment -from models.model import EndUser from services.dataset_service import DatasetService, DocumentService from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig from services.file_service import FileService @@ -311,8 +310,6 @@ class DocumentAddByFileApi(DatasetApiResource): if not file.filename: raise FilenameNotExistsError - if not isinstance(current_user, EndUser): - raise ValueError("Invalid user account") if not current_user: raise ValueError("current_user is required") upload_file = FileService(db.engine).upload_file( @@ -406,9 +403,6 @@ class DocumentUpdateByFileApi(DatasetApiResource): if not current_user: raise ValueError("current_user is required") - if not isinstance(current_user, EndUser): - raise ValueError("Invalid user account") - try: upload_file = FileService(db.engine).upload_file( filename=file.filename, From e937c8c72e56ec8690c1790ff40cb4311bb63510 Mon Sep 17 00:00:00 2001 From: Blackoutta <37723456+Blackoutta@users.noreply.github.com> Date: Wed, 24 Sep 2025 22:14:50 +0800 Subject: [PATCH 028/126] improve: pooling httpx clients for requests to code sandbox and ssrf (#26052) --- api/.env.example | 7 ++ api/configs/feature/__init__.py | 35 ++++++++ .../helper/code_executor/code_executor.py | 39 ++++++--- 
api/core/helper/http_client_pooling.py | 59 +++++++++++++ api/core/helper/ssrf_proxy.py | 86 ++++++++++++------- docker/.env.example | 7 ++ docker/docker-compose.yaml | 7 ++ 7 files changed, 199 insertions(+), 41 deletions(-) create mode 100644 api/core/helper/http_client_pooling.py diff --git a/api/.env.example b/api/.env.example index 78a363e506..64e79bf0b8 100644 --- a/api/.env.example +++ b/api/.env.example @@ -408,6 +408,9 @@ SSRF_DEFAULT_TIME_OUT=5 SSRF_DEFAULT_CONNECT_TIME_OUT=5 SSRF_DEFAULT_READ_TIME_OUT=5 SSRF_DEFAULT_WRITE_TIME_OUT=5 +SSRF_POOL_MAX_CONNECTIONS=100 +SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20 +SSRF_POOL_KEEPALIVE_EXPIRY=5.0 BATCH_UPLOAD_LIMIT=10 KEYWORD_DATA_SOURCE_TYPE=database @@ -418,6 +421,10 @@ WORKFLOW_FILE_UPLOAD_LIMIT=10 # CODE EXECUTION CONFIGURATION CODE_EXECUTION_ENDPOINT=http://127.0.0.1:8194 CODE_EXECUTION_API_KEY=dify-sandbox +CODE_EXECUTION_SSL_VERIFY=True +CODE_EXECUTION_POOL_MAX_CONNECTIONS=100 +CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20 +CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0 CODE_MAX_NUMBER=9223372036854775807 CODE_MIN_NUMBER=-9223372036854775808 CODE_MAX_STRING_LENGTH=80000 diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index b17f30210c..e836059ca6 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -113,6 +113,21 @@ class CodeExecutionSandboxConfig(BaseSettings): default=10.0, ) + CODE_EXECUTION_POOL_MAX_CONNECTIONS: PositiveInt = Field( + description="Maximum number of concurrent connections for the code execution HTTP client", + default=100, + ) + + CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS: PositiveInt = Field( + description="Maximum number of persistent keep-alive connections for the code execution HTTP client", + default=20, + ) + + CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY: PositiveFloat | None = Field( + description="Keep-alive expiry in seconds for idle connections (set to None to disable)", + default=5.0, + ) + CODE_MAX_NUMBER: PositiveInt = Field( description="Maximum allowed numeric value in code execution", default=9223372036854775807, @@ -153,6 +168,11 @@ class CodeExecutionSandboxConfig(BaseSettings): default=1000, ) + CODE_EXECUTION_SSL_VERIFY: bool = Field( + description="Enable or disable SSL verification for code execution requests", + default=True, + ) + class PluginConfig(BaseSettings): """ @@ -404,6 +424,21 @@ class HttpConfig(BaseSettings): default=5, ) + SSRF_POOL_MAX_CONNECTIONS: PositiveInt = Field( + description="Maximum number of concurrent connections for the SSRF HTTP client", + default=100, + ) + + SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS: PositiveInt = Field( + description="Maximum number of persistent keep-alive connections for the SSRF HTTP client", + default=20, + ) + + SSRF_POOL_KEEPALIVE_EXPIRY: PositiveFloat | None = Field( + description="Keep-alive expiry in seconds for idle SSRF connections (set to None to disable)", + default=5.0, + ) + RESPECT_XFORWARD_HEADERS_ENABLED: bool = Field( description="Enable handling of X-Forwarded-For, X-Forwarded-Proto, and X-Forwarded-Port headers" " when the app is behind a single trusted reverse proxy.", diff --git a/api/core/helper/code_executor/code_executor.py b/api/core/helper/code_executor/code_executor.py index c44a8e1840..0c1d03dc13 100644 --- a/api/core/helper/code_executor/code_executor.py +++ b/api/core/helper/code_executor/code_executor.py @@ -4,7 +4,7 @@ from enum import StrEnum from threading import Lock from typing import Any -from httpx import Timeout, post +import httpx from pydantic import 
BaseModel from yarl import URL @@ -13,9 +13,17 @@ from core.helper.code_executor.javascript.javascript_transformer import NodeJsTe from core.helper.code_executor.jinja2.jinja2_transformer import Jinja2TemplateTransformer from core.helper.code_executor.python3.python3_transformer import Python3TemplateTransformer from core.helper.code_executor.template_transformer import TemplateTransformer +from core.helper.http_client_pooling import get_pooled_http_client logger = logging.getLogger(__name__) code_execution_endpoint_url = URL(str(dify_config.CODE_EXECUTION_ENDPOINT)) +CODE_EXECUTION_SSL_VERIFY = dify_config.CODE_EXECUTION_SSL_VERIFY +_CODE_EXECUTOR_CLIENT_LIMITS = httpx.Limits( + max_connections=dify_config.CODE_EXECUTION_POOL_MAX_CONNECTIONS, + max_keepalive_connections=dify_config.CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS, + keepalive_expiry=dify_config.CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY, +) +_CODE_EXECUTOR_CLIENT_KEY = "code_executor:http_client" class CodeExecutionError(Exception): @@ -38,6 +46,13 @@ class CodeLanguage(StrEnum): JAVASCRIPT = "javascript" +def _build_code_executor_client() -> httpx.Client: + return httpx.Client( + verify=CODE_EXECUTION_SSL_VERIFY, + limits=_CODE_EXECUTOR_CLIENT_LIMITS, + ) + + class CodeExecutor: dependencies_cache: dict[str, str] = {} dependencies_cache_lock = Lock() @@ -76,17 +91,21 @@ class CodeExecutor: "enable_network": True, } + timeout = httpx.Timeout( + connect=dify_config.CODE_EXECUTION_CONNECT_TIMEOUT, + read=dify_config.CODE_EXECUTION_READ_TIMEOUT, + write=dify_config.CODE_EXECUTION_WRITE_TIMEOUT, + pool=None, + ) + + client = get_pooled_http_client(_CODE_EXECUTOR_CLIENT_KEY, _build_code_executor_client) + try: - response = post( + response = client.post( str(url), json=data, headers=headers, - timeout=Timeout( - connect=dify_config.CODE_EXECUTION_CONNECT_TIMEOUT, - read=dify_config.CODE_EXECUTION_READ_TIMEOUT, - write=dify_config.CODE_EXECUTION_WRITE_TIMEOUT, - pool=None, - ), + timeout=timeout, ) if response.status_code == 503: raise CodeExecutionError("Code execution service is unavailable") @@ -106,8 +125,8 @@ class CodeExecutor: try: response_data = response.json() - except: - raise CodeExecutionError("Failed to parse response") + except Exception as e: + raise CodeExecutionError("Failed to parse response") from e if (code := response_data.get("code")) != 0: raise CodeExecutionError(f"Got error code: {code}. 
Got error msg: {response_data.get('message')}") diff --git a/api/core/helper/http_client_pooling.py b/api/core/helper/http_client_pooling.py new file mode 100644 index 0000000000..f4c3ff0e8b --- /dev/null +++ b/api/core/helper/http_client_pooling.py @@ -0,0 +1,59 @@ +"""HTTP client pooling utilities.""" + +from __future__ import annotations + +import atexit +import threading +from collections.abc import Callable + +import httpx + +ClientBuilder = Callable[[], httpx.Client] + + +class HttpClientPoolFactory: + """Thread-safe factory that maintains reusable HTTP client instances.""" + + def __init__(self) -> None: + self._clients: dict[str, httpx.Client] = {} + self._lock = threading.Lock() + + def get_or_create(self, key: str, builder: ClientBuilder) -> httpx.Client: + """Return a pooled client associated with ``key`` creating it on demand.""" + client = self._clients.get(key) + if client is not None: + return client + + with self._lock: + client = self._clients.get(key) + if client is None: + client = builder() + self._clients[key] = client + return client + + def close_all(self) -> None: + """Close all pooled clients and clear the pool.""" + with self._lock: + for client in self._clients.values(): + client.close() + self._clients.clear() + + +_factory = HttpClientPoolFactory() + + +def get_pooled_http_client(key: str, builder: ClientBuilder) -> httpx.Client: + """Return a pooled client for the given ``key`` using ``builder`` when missing.""" + return _factory.get_or_create(key, builder) + + +def close_all_pooled_clients() -> None: + """Close every client created through the pooling factory.""" + _factory.close_all() + + +def _register_shutdown_hook() -> None: + atexit.register(close_all_pooled_clients) + + +_register_shutdown_hook() diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py index cbb78939d2..0de026f3c7 100644 --- a/api/core/helper/ssrf_proxy.py +++ b/api/core/helper/ssrf_proxy.py @@ -8,27 +8,23 @@ import time import httpx from configs import dify_config +from core.helper.http_client_pooling import get_pooled_http_client logger = logging.getLogger(__name__) SSRF_DEFAULT_MAX_RETRIES = dify_config.SSRF_DEFAULT_MAX_RETRIES -http_request_node_ssl_verify = True # Default value for http_request_node_ssl_verify is True -try: - config_value = dify_config.HTTP_REQUEST_NODE_SSL_VERIFY - http_request_node_ssl_verify_lower = str(config_value).lower() - if http_request_node_ssl_verify_lower == "true": - http_request_node_ssl_verify = True - elif http_request_node_ssl_verify_lower == "false": - http_request_node_ssl_verify = False - else: - raise ValueError("Invalid value. 
HTTP_REQUEST_NODE_SSL_VERIFY should be 'True' or 'False'") -except NameError: - http_request_node_ssl_verify = True - BACKOFF_FACTOR = 0.5 STATUS_FORCELIST = [429, 500, 502, 503, 504] +_SSL_VERIFIED_POOL_KEY = "ssrf:verified" +_SSL_UNVERIFIED_POOL_KEY = "ssrf:unverified" +_SSRF_CLIENT_LIMITS = httpx.Limits( + max_connections=dify_config.SSRF_POOL_MAX_CONNECTIONS, + max_keepalive_connections=dify_config.SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS, + keepalive_expiry=dify_config.SSRF_POOL_KEEPALIVE_EXPIRY, +) + class MaxRetriesExceededError(ValueError): """Raised when the maximum number of retries is exceeded.""" @@ -36,6 +32,45 @@ class MaxRetriesExceededError(ValueError): pass +def _create_proxy_mounts() -> dict[str, httpx.HTTPTransport]: + return { + "http://": httpx.HTTPTransport( + proxy=dify_config.SSRF_PROXY_HTTP_URL, + ), + "https://": httpx.HTTPTransport( + proxy=dify_config.SSRF_PROXY_HTTPS_URL, + ), + } + + +def _build_ssrf_client(verify: bool) -> httpx.Client: + if dify_config.SSRF_PROXY_ALL_URL: + return httpx.Client( + proxy=dify_config.SSRF_PROXY_ALL_URL, + verify=verify, + limits=_SSRF_CLIENT_LIMITS, + ) + + if dify_config.SSRF_PROXY_HTTP_URL and dify_config.SSRF_PROXY_HTTPS_URL: + return httpx.Client( + mounts=_create_proxy_mounts(), + verify=verify, + limits=_SSRF_CLIENT_LIMITS, + ) + + return httpx.Client(verify=verify, limits=_SSRF_CLIENT_LIMITS) + + +def _get_ssrf_client(ssl_verify_enabled: bool) -> httpx.Client: + if not isinstance(ssl_verify_enabled, bool): + raise ValueError("SSRF client verify flag must be a boolean") + + return get_pooled_http_client( + _SSL_VERIFIED_POOL_KEY if ssl_verify_enabled else _SSL_UNVERIFIED_POOL_KEY, + lambda: _build_ssrf_client(verify=ssl_verify_enabled), + ) + + def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): if "allow_redirects" in kwargs: allow_redirects = kwargs.pop("allow_redirects") @@ -50,33 +85,22 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): write=dify_config.SSRF_DEFAULT_WRITE_TIME_OUT, ) - if "ssl_verify" not in kwargs: - kwargs["ssl_verify"] = http_request_node_ssl_verify - - ssl_verify = kwargs.pop("ssl_verify") + # prioritize per-call option, which can be switched on and off inside the HTTP node on the web UI + verify_option = kwargs.pop("ssl_verify", dify_config.HTTP_REQUEST_NODE_SSL_VERIFY) + client = _get_ssrf_client(verify_option) retries = 0 while retries <= max_retries: try: - if dify_config.SSRF_PROXY_ALL_URL: - with httpx.Client(proxy=dify_config.SSRF_PROXY_ALL_URL, verify=ssl_verify) as client: - response = client.request(method=method, url=url, **kwargs) - elif dify_config.SSRF_PROXY_HTTP_URL and dify_config.SSRF_PROXY_HTTPS_URL: - proxy_mounts = { - "http://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTP_URL, verify=ssl_verify), - "https://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTPS_URL, verify=ssl_verify), - } - with httpx.Client(mounts=proxy_mounts, verify=ssl_verify) as client: - response = client.request(method=method, url=url, **kwargs) - else: - with httpx.Client(verify=ssl_verify) as client: - response = client.request(method=method, url=url, **kwargs) + response = client.request(method=method, url=url, **kwargs) if response.status_code not in STATUS_FORCELIST: return response else: logger.warning( - "Received status code %s for URL %s which is in the force list", response.status_code, url + "Received status code %s for URL %s which is in the force list", + response.status_code, + url, ) except httpx.RequestError as e: diff 
--git a/docker/.env.example b/docker/.env.example
index 07b4088470..eebc18118f 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -859,6 +859,10 @@ OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
 # The sandbox service endpoint.
 CODE_EXECUTION_ENDPOINT=http://sandbox:8194
 CODE_EXECUTION_API_KEY=dify-sandbox
+CODE_EXECUTION_SSL_VERIFY=True
+CODE_EXECUTION_POOL_MAX_CONNECTIONS=100
+CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
+CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
 CODE_MAX_NUMBER=9223372036854775807
 CODE_MIN_NUMBER=-9223372036854775808
 CODE_MAX_DEPTH=5
@@ -1134,6 +1138,9 @@ SSRF_DEFAULT_TIME_OUT=5
 SSRF_DEFAULT_CONNECT_TIME_OUT=5
 SSRF_DEFAULT_READ_TIME_OUT=5
 SSRF_DEFAULT_WRITE_TIME_OUT=5
+SSRF_POOL_MAX_CONNECTIONS=100
+SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
+SSRF_POOL_KEEPALIVE_EXPIRY=5.0
 
 # ------------------------------
 # docker env var for specifying vector db type at startup

diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index dc94883b75..dd3d42c0f7 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -382,6 +382,10 @@ x-shared-env: &shared-api-worker-env
   OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES: ${OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES:-5}
   CODE_EXECUTION_ENDPOINT: ${CODE_EXECUTION_ENDPOINT:-http://sandbox:8194}
   CODE_EXECUTION_API_KEY: ${CODE_EXECUTION_API_KEY:-dify-sandbox}
+  CODE_EXECUTION_SSL_VERIFY: ${CODE_EXECUTION_SSL_VERIFY:-True}
+  CODE_EXECUTION_POOL_MAX_CONNECTIONS: ${CODE_EXECUTION_POOL_MAX_CONNECTIONS:-100}
+  CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS: ${CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS:-20}
+  CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY: ${CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY:-5.0}
   CODE_MAX_NUMBER: ${CODE_MAX_NUMBER:-9223372036854775807}
   CODE_MIN_NUMBER: ${CODE_MIN_NUMBER:--9223372036854775808}
   CODE_MAX_DEPTH: ${CODE_MAX_DEPTH:-5}
@@ -497,6 +501,9 @@ x-shared-env: &shared-api-worker-env
   SSRF_DEFAULT_CONNECT_TIME_OUT: ${SSRF_DEFAULT_CONNECT_TIME_OUT:-5}
   SSRF_DEFAULT_READ_TIME_OUT: ${SSRF_DEFAULT_READ_TIME_OUT:-5}
   SSRF_DEFAULT_WRITE_TIME_OUT: ${SSRF_DEFAULT_WRITE_TIME_OUT:-5}
+  SSRF_POOL_MAX_CONNECTIONS: ${SSRF_POOL_MAX_CONNECTIONS:-100}
+  SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS: ${SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS:-20}
+  SSRF_POOL_KEEPALIVE_EXPIRY: ${SSRF_POOL_KEEPALIVE_EXPIRY:-5.0}
   EXPOSE_NGINX_PORT: ${EXPOSE_NGINX_PORT:-80}
   EXPOSE_NGINX_SSL_PORT: ${EXPOSE_NGINX_SSL_PORT:-443}
   POSITION_TOOL_PINS: ${POSITION_TOOL_PINS:-}

From 6841a09667d87dc1ad4c4bbfbc6bcc79dd3aafe7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E3=83=AA=E3=82=A4=E3=83=8E=20Lin?= <sorphwer@gmail.com>
Date: Thu, 25 Sep 2025 10:39:44 +0800
Subject: [PATCH 029/126] fix: remote filename will be 'inline' if Content-Disposition: inline (#25877)

Fixed an issue where the extracted filename would be 'inline' when the
response header contained `Content-Disposition: inline` while retrieving
a file by URL.
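
The core of the fix: `Content-Disposition` is now parsed with werkzeug's
`parse_options_header` instead of naive string splitting, so a bare
disposition such as `inline` is no longer mistaken for a filename, and the
RFC 5987 `filename*` parameter takes precedence over plain `filename`. A
condensed, standalone sketch of the `_extract_filename` precedence rules
(the URLs and header values here are illustrative, adapted from the new
unit tests):

    import os
    import urllib.parse

    from werkzeug.http import parse_options_header

    def extract_filename(url: str, content_disposition: str | None) -> str | None:
        # Prefer the header: filename* (RFC 5987) wins over filename when both appear.
        filename = None
        if content_disposition:
            _, params = parse_options_header(content_disposition)
            filename = params.get("filename*") or params.get("filename")
        # A bare "inline"/"attachment" carries no name; fall back to the URL path.
        if not filename:
            filename = os.path.basename(urllib.parse.urlparse(url).path)
        return filename or None

    print(extract_filename("http://example.com/some/path/file.pdf", "inline"))
    # -> "file.pdf" (the old split-on-"filename=" parsing returned "inline" here)
    print(extract_filename("http://example.com/ignored", "attachment; filename*=UTF-8''%E6%B5%8B%E8%AF%95.jpg"))
    # -> "测试.jpg"

When neither the header nor the URL path yields a name, the patch falls
back to `uuid.uuid4().hex` plus an extension guessed from the MIME type,
as pinned down by the new test_no_filename_in_url_or_header_generates_uuid_bin case.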
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/factories/file_factory.py | 41 +++++-- .../unit_tests/factories/test_file_factory.py | 115 ++++++++++++++++++ 2 files changed, 147 insertions(+), 9 deletions(-) create mode 100644 api/tests/unit_tests/factories/test_file_factory.py diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py index 588168bd39..d66c757249 100644 --- a/api/factories/file_factory.py +++ b/api/factories/file_factory.py @@ -8,6 +8,7 @@ from typing import Any import httpx from sqlalchemy import select from sqlalchemy.orm import Session +from werkzeug.http import parse_options_header from constants import AUDIO_EXTENSIONS, DOCUMENT_EXTENSIONS, IMAGE_EXTENSIONS, VIDEO_EXTENSIONS from core.file import File, FileBelongsTo, FileTransferMethod, FileType, FileUploadConfig, helpers @@ -247,6 +248,25 @@ def _build_from_remote_url( ) +def _extract_filename(url_path: str, content_disposition: str | None) -> str | None: + filename = None + # Try to extract from Content-Disposition header first + if content_disposition: + _, params = parse_options_header(content_disposition) + # RFC 5987 https://datatracker.ietf.org/doc/html/rfc5987: filename* takes precedence over filename + filename = params.get("filename*") or params.get("filename") + # Fallback to URL path if no filename from header + if not filename: + filename = os.path.basename(url_path) + return filename or None + + +def _guess_mime_type(filename: str) -> str: + """Guess MIME type from filename, returning empty string if None.""" + guessed_mime, _ = mimetypes.guess_type(filename) + return guessed_mime or "" + + def _get_remote_file_info(url: str): file_size = -1 parsed_url = urllib.parse.urlparse(url) @@ -254,23 +274,26 @@ def _get_remote_file_info(url: str): filename = os.path.basename(url_path) # Initialize mime_type from filename as fallback - mime_type, _ = mimetypes.guess_type(filename) - if mime_type is None: - mime_type = "" + mime_type = _guess_mime_type(filename) resp = ssrf_proxy.head(url, follow_redirects=True) if resp.status_code == httpx.codes.OK: - if content_disposition := resp.headers.get("Content-Disposition"): - filename = str(content_disposition.split("filename=")[-1].strip('"')) - # Re-guess mime_type from updated filename - mime_type, _ = mimetypes.guess_type(filename) - if mime_type is None: - mime_type = "" + content_disposition = resp.headers.get("Content-Disposition") + extracted_filename = _extract_filename(url_path, content_disposition) + if extracted_filename: + filename = extracted_filename + mime_type = _guess_mime_type(filename) file_size = int(resp.headers.get("Content-Length", file_size)) # Fallback to Content-Type header if mime_type is still empty if not mime_type: mime_type = resp.headers.get("Content-Type", "").split(";")[0].strip() + if not filename: + extension = mimetypes.guess_extension(mime_type) or ".bin" + filename = f"{uuid.uuid4().hex}{extension}" + if not mime_type: + mime_type = _guess_mime_type(filename) + return mime_type, filename, file_size diff --git a/api/tests/unit_tests/factories/test_file_factory.py b/api/tests/unit_tests/factories/test_file_factory.py new file mode 100644 index 0000000000..777fe5a6e7 --- /dev/null +++ b/api/tests/unit_tests/factories/test_file_factory.py @@ -0,0 +1,115 @@ +import re + +import pytest + +from factories.file_factory import _get_remote_file_info + + +class _FakeResponse: + def __init__(self, 
status_code: int, headers: dict[str, str]): + self.status_code = status_code + self.headers = headers + + +def _mock_head(monkeypatch: pytest.MonkeyPatch, headers: dict[str, str], status_code: int = 200): + def _fake_head(url: str, follow_redirects: bool = True): + return _FakeResponse(status_code=status_code, headers=headers) + + monkeypatch.setattr("factories.file_factory.ssrf_proxy.head", _fake_head) + + +class TestGetRemoteFileInfo: + """Tests for _get_remote_file_info focusing on filename extraction rules.""" + + def test_inline_no_filename(self, monkeypatch: pytest.MonkeyPatch): + _mock_head( + monkeypatch, + { + "Content-Disposition": "inline", + "Content-Type": "application/pdf", + "Content-Length": "123", + }, + ) + mime_type, filename, size = _get_remote_file_info("http://example.com/some/path/file.pdf") + assert filename == "file.pdf" + assert mime_type == "application/pdf" + assert size == 123 + + def test_attachment_no_filename(self, monkeypatch: pytest.MonkeyPatch): + _mock_head( + monkeypatch, + { + "Content-Disposition": "attachment", + "Content-Type": "application/octet-stream", + "Content-Length": "456", + }, + ) + mime_type, filename, size = _get_remote_file_info("http://example.com/downloads/data.bin") + assert filename == "data.bin" + assert mime_type == "application/octet-stream" + assert size == 456 + + def test_attachment_quoted_space_filename(self, monkeypatch: pytest.MonkeyPatch): + _mock_head( + monkeypatch, + { + "Content-Disposition": 'attachment; filename="file name.jpg"', + "Content-Type": "image/jpeg", + "Content-Length": "789", + }, + ) + mime_type, filename, size = _get_remote_file_info("http://example.com/ignored") + assert filename == "file name.jpg" + assert mime_type == "image/jpeg" + assert size == 789 + + def test_attachment_filename_star_percent20(self, monkeypatch: pytest.MonkeyPatch): + _mock_head( + monkeypatch, + { + "Content-Disposition": "attachment; filename*=UTF-8''file%20name.jpg", + "Content-Type": "image/jpeg", + }, + ) + mime_type, filename, _ = _get_remote_file_info("http://example.com/ignored") + assert filename == "file name.jpg" + assert mime_type == "image/jpeg" + + def test_attachment_filename_star_chinese(self, monkeypatch: pytest.MonkeyPatch): + _mock_head( + monkeypatch, + { + "Content-Disposition": "attachment; filename*=UTF-8''%E6%B5%8B%E8%AF%95%E6%96%87%E4%BB%B6.jpg", + "Content-Type": "image/jpeg", + }, + ) + mime_type, filename, _ = _get_remote_file_info("http://example.com/ignored") + assert filename == "测试文件.jpg" + assert mime_type == "image/jpeg" + + def test_filename_from_url_when_no_header(self, monkeypatch: pytest.MonkeyPatch): + _mock_head( + monkeypatch, + { + # No Content-Disposition + "Content-Type": "text/plain", + "Content-Length": "12", + }, + ) + mime_type, filename, size = _get_remote_file_info("http://example.com/static/file.txt") + assert filename == "file.txt" + assert mime_type == "text/plain" + assert size == 12 + + def test_no_filename_in_url_or_header_generates_uuid_bin(self, monkeypatch: pytest.MonkeyPatch): + _mock_head( + monkeypatch, + { + "Content-Disposition": "inline", + "Content-Type": "application/octet-stream", + }, + ) + mime_type, filename, _ = _get_remote_file_info("http://example.com/test/") + # Should generate a random hex filename with .bin extension + assert re.match(r"^[0-9a-f]{32}\.bin$", filename) is not None + assert mime_type == "application/octet-stream" From f104839672ccf111b2799fc31a85870e5e997b7d Mon Sep 17 00:00:00 2001 From: "Junyan Qin (Chin)" <rockchinq@gmail.com> Date: 
Thu, 25 Sep 2025 15:33:27 +0800 Subject: [PATCH 030/126] perf: provide X-Dify-Version for marketplace api access (#26210) --- api/core/helper/marketplace.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/api/core/helper/marketplace.py b/api/core/helper/marketplace.py index 89dae4808f..10f304c087 100644 --- a/api/core/helper/marketplace.py +++ b/api/core/helper/marketplace.py @@ -23,7 +23,7 @@ def batch_fetch_plugin_manifests(plugin_ids: list[str]) -> Sequence[MarketplaceP return [] url = str(marketplace_api_url / "api/v1/plugins/batch") - response = httpx.post(url, json={"plugin_ids": plugin_ids}) + response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version}) response.raise_for_status() return [MarketplacePluginDeclaration(**plugin) for plugin in response.json()["data"]["plugins"]] @@ -36,7 +36,7 @@ def batch_fetch_plugin_manifests_ignore_deserialization_error( return [] url = str(marketplace_api_url / "api/v1/plugins/batch") - response = httpx.post(url, json={"plugin_ids": plugin_ids}) + response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version}) response.raise_for_status() result: list[MarketplacePluginDeclaration] = [] for plugin in response.json()["data"]["plugins"]: From 915023b8095316a2ce93994a0fc425f3cd4bf102 Mon Sep 17 00:00:00 2001 From: zxhlyh <jasonapring2015@outlook.com> Date: Thu, 25 Sep 2025 18:02:43 +0800 Subject: [PATCH 031/126] Chore/remove add node restrict of workflow (#26218) Co-authored-by: -LAN- <laipz8200@outlook.com> --- api/.env.example | 1 - api/configs/feature/__init__.py | 5 - api/controllers/console/app/workflow.py | 19 --- .../rag_pipeline/rag_pipeline_workflow.py | 17 -- api/tests/integration_tests/.env.example | 1 - .../unit_tests/configs/test_dify_config.py | 2 - docker/.env.example | 1 - docker/docker-compose.yaml | 1 - .../rag-pipeline/hooks/use-pipeline-config.ts | 10 -- .../workflow-app/hooks/use-workflow-init.ts | 7 - web/app/components/workflow/constants.ts | 2 - .../workflow/hooks/use-nodes-interactions.ts | 45 ++--- .../components/workflow/hooks/use-workflow.ts | 54 +----- web/app/components/workflow/index.tsx | 2 - web/app/components/workflow/limit-tips.tsx | 39 ----- .../nodes/_base/components/next-step/add.tsx | 9 +- .../nodes/_base/components/node-handle.tsx | 7 +- .../workflow/store/workflow/workflow-slice.ts | 8 - web/app/components/workflow/utils/workflow.ts | 156 ------------------ 19 files changed, 17 insertions(+), 369 deletions(-) delete mode 100644 web/app/components/workflow/limit-tips.tsx diff --git a/api/.env.example b/api/.env.example index 64e79bf0b8..d53de3779b 100644 --- a/api/.env.example +++ b/api/.env.example @@ -468,7 +468,6 @@ INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000 WORKFLOW_MAX_EXECUTION_STEPS=500 WORKFLOW_MAX_EXECUTION_TIME=1200 WORKFLOW_CALL_MAX_DEPTH=5 -WORKFLOW_PARALLEL_DEPTH_LIMIT=3 MAX_VARIABLE_SIZE=204800 # GraphEngine Worker Pool Configuration diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index e836059ca6..363cf4e2b5 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -577,11 +577,6 @@ class WorkflowConfig(BaseSettings): default=5, ) - WORKFLOW_PARALLEL_DEPTH_LIMIT: PositiveInt = Field( - description="Maximum allowed depth for nested parallel executions", - default=3, - ) - MAX_VARIABLE_SIZE: PositiveInt = Field( description="Maximum size in bytes for a single variable in workflows. 
Default to 200 KB.", default=200 * 1024, diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index e70765546c..1f5cbbeca5 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -9,7 +9,6 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services -from configs import dify_config from controllers.console import api, console_ns from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync from controllers.console.app.wraps import get_app_model @@ -797,24 +796,6 @@ class ConvertToWorkflowApi(Resource): } -@console_ns.route("/apps/<uuid:app_id>/workflows/draft/config") -class WorkflowConfigApi(Resource): - """Resource for workflow configuration.""" - - @api.doc("get_workflow_config") - @api.doc(description="Get workflow configuration") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Workflow configuration retrieved successfully") - @setup_required - @login_required - @account_initialization_required - @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) - def get(self, app_model: App): - return { - "parallel_depth_limit": dify_config.WORKFLOW_PARALLEL_DEPTH_LIMIT, - } - - @console_ns.route("/apps/<uuid:app_id>/workflows") class PublishedAllWorkflowApi(Resource): @api.doc("get_all_published_workflows") diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py index d00be3a573..01ddb8a871 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py @@ -9,7 +9,6 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services -from configs import dify_config from controllers.console import api from controllers.console.app.error import ( ConversationCompletedError, @@ -609,18 +608,6 @@ class DefaultRagPipelineBlockConfigApi(Resource): return rag_pipeline_service.get_default_block_config(node_type=block_type, filters=filters) -class RagPipelineConfigApi(Resource): - """Resource for rag pipeline configuration.""" - - @setup_required - @login_required - @account_initialization_required - def get(self, pipeline_id): - return { - "parallel_depth_limit": dify_config.WORKFLOW_PARALLEL_DEPTH_LIMIT, - } - - class PublishedAllRagPipelineApi(Resource): @setup_required @login_required @@ -985,10 +972,6 @@ api.add_resource( DraftRagPipelineApi, "/rag/pipelines/<uuid:pipeline_id>/workflows/draft", ) -api.add_resource( - RagPipelineConfigApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/config", -) api.add_resource( DraftRagPipelineRunApi, "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/run", diff --git a/api/tests/integration_tests/.env.example b/api/tests/integration_tests/.env.example index 92df93fb13..23a0ecf714 100644 --- a/api/tests/integration_tests/.env.example +++ b/api/tests/integration_tests/.env.example @@ -167,7 +167,6 @@ INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000 WORKFLOW_MAX_EXECUTION_STEPS=500 WORKFLOW_MAX_EXECUTION_TIME=1200 WORKFLOW_CALL_MAX_DEPTH=5 -WORKFLOW_PARALLEL_DEPTH_LIMIT=3 MAX_VARIABLE_SIZE=204800 # App configuration diff --git a/api/tests/unit_tests/configs/test_dify_config.py b/api/tests/unit_tests/configs/test_dify_config.py index fbe14f1cb5..f4e3d97719 100644 --- 
a/api/tests/unit_tests/configs/test_dify_config.py +++ b/api/tests/unit_tests/configs/test_dify_config.py @@ -40,8 +40,6 @@ def test_dify_config(monkeypatch: pytest.MonkeyPatch): # annotated field with configured value assert config.HTTP_REQUEST_MAX_WRITE_TIMEOUT == 30 - assert config.WORKFLOW_PARALLEL_DEPTH_LIMIT == 3 - # values from pyproject.toml assert Version(config.project.version) >= Version("1.0.0") diff --git a/docker/.env.example b/docker/.env.example index eebc18118f..c0f084796e 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -881,7 +881,6 @@ WORKFLOW_MAX_EXECUTION_STEPS=500 WORKFLOW_MAX_EXECUTION_TIME=1200 WORKFLOW_CALL_MAX_DEPTH=5 MAX_VARIABLE_SIZE=204800 -WORKFLOW_PARALLEL_DEPTH_LIMIT=3 WORKFLOW_FILE_UPLOAD_LIMIT=10 # GraphEngine Worker Pool Configuration diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index dd3d42c0f7..2617f84e7d 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -402,7 +402,6 @@ x-shared-env: &shared-api-worker-env WORKFLOW_MAX_EXECUTION_TIME: ${WORKFLOW_MAX_EXECUTION_TIME:-1200} WORKFLOW_CALL_MAX_DEPTH: ${WORKFLOW_CALL_MAX_DEPTH:-5} MAX_VARIABLE_SIZE: ${MAX_VARIABLE_SIZE:-204800} - WORKFLOW_PARALLEL_DEPTH_LIMIT: ${WORKFLOW_PARALLEL_DEPTH_LIMIT:-3} WORKFLOW_FILE_UPLOAD_LIMIT: ${WORKFLOW_FILE_UPLOAD_LIMIT:-10} GRAPH_ENGINE_MIN_WORKERS: ${GRAPH_ENGINE_MIN_WORKERS:-1} GRAPH_ENGINE_MAX_WORKERS: ${GRAPH_ENGINE_MAX_WORKERS:-10} diff --git a/web/app/components/rag-pipeline/hooks/use-pipeline-config.ts b/web/app/components/rag-pipeline/hooks/use-pipeline-config.ts index 5f0daf29ce..38168d1e93 100644 --- a/web/app/components/rag-pipeline/hooks/use-pipeline-config.ts +++ b/web/app/components/rag-pipeline/hooks/use-pipeline-config.ts @@ -14,16 +14,6 @@ export const usePipelineConfig = () => { const pipelineId = useStore(s => s.pipelineId) const workflowStore = useWorkflowStore() - const handleUpdateWorkflowConfig = useCallback((config: Record<string, any>) => { - const { setWorkflowConfig } = workflowStore.getState() - - setWorkflowConfig(config) - }, [workflowStore]) - useWorkflowConfig( - pipelineId ? 
`/rag/pipelines/${pipelineId}/workflows/draft/config` : '', - handleUpdateWorkflowConfig, - ) - const handleUpdateNodesDefaultConfigs = useCallback((nodesDefaultConfigs: Record<string, any> | Record<string, any>[]) => { const { setNodesDefaultConfigs } = workflowStore.getState() let res: Record<string, any> = {} diff --git a/web/app/components/workflow-app/hooks/use-workflow-init.ts b/web/app/components/workflow-app/hooks/use-workflow-init.ts index e0c341d087..fadd2007bc 100644 --- a/web/app/components/workflow-app/hooks/use-workflow-init.ts +++ b/web/app/components/workflow-app/hooks/use-workflow-init.ts @@ -33,13 +33,6 @@ export const useWorkflowInit = () => { workflowStore.setState({ appId: appDetail.id, appName: appDetail.name }) }, [appDetail.id, workflowStore]) - const handleUpdateWorkflowConfig = useCallback((config: Record<string, any>) => { - const { setWorkflowConfig } = workflowStore.getState() - - setWorkflowConfig(config) - }, [workflowStore]) - useWorkflowConfig(`/apps/${appDetail.id}/workflows/draft/config`, handleUpdateWorkflowConfig) - const handleUpdateWorkflowFileUploadConfig = useCallback((config: FileUploadConfigResponse) => { const { setFileUploadConfig } = workflowStore.getState() setFileUploadConfig(config) diff --git a/web/app/components/workflow/constants.ts b/web/app/components/workflow/constants.ts index 875d2acf8f..a8c6a458fc 100644 --- a/web/app/components/workflow/constants.ts +++ b/web/app/components/workflow/constants.ts @@ -35,8 +35,6 @@ export const NODE_LAYOUT_HORIZONTAL_PADDING = 60 export const NODE_LAYOUT_VERTICAL_PADDING = 60 export const NODE_LAYOUT_MIN_DISTANCE = 100 -export const PARALLEL_DEPTH_LIMIT = 3 - export const RETRIEVAL_OUTPUT_STRUCT = `{ "content": "", "title": "", diff --git a/web/app/components/workflow/hooks/use-nodes-interactions.ts b/web/app/components/workflow/hooks/use-nodes-interactions.ts index 4000ce5c7b..c721442d86 100644 --- a/web/app/components/workflow/hooks/use-nodes-interactions.ts +++ b/web/app/components/workflow/hooks/use-nodes-interactions.ts @@ -70,7 +70,7 @@ export const useNodesInteractions = () => { const reactflow = useReactFlow() const { store: workflowHistoryStore } = useWorkflowHistoryStore() const { handleSyncWorkflowDraft } = useNodesSyncDraft() - const { checkNestedParallelLimit, getAfterNodesInSameBranch } = useWorkflow() + const { getAfterNodesInSameBranch } = useWorkflow() const { getNodesReadOnly } = useNodesReadOnly() const { getWorkflowReadOnly } = useWorkflowReadOnly() const { handleSetHelpline } = useHelpline() @@ -436,21 +436,13 @@ export const useNodesInteractions = () => { draft.push(newEdge) }) - if (checkNestedParallelLimit(newNodes, newEdges, targetNode)) { - setNodes(newNodes) - setEdges(newEdges) + setNodes(newNodes) + setEdges(newEdges) - handleSyncWorkflowDraft() - saveStateToHistory(WorkflowHistoryEvent.NodeConnect, { - nodeId: targetNode?.id, - }) - } - else { - const { setConnectingNodePayload, setEnteringNodePayload } - = workflowStore.getState() - setConnectingNodePayload(undefined) - setEnteringNodePayload(undefined) - } + handleSyncWorkflowDraft() + saveStateToHistory(WorkflowHistoryEvent.NodeConnect, { + nodeId: targetNode?.id, + }) }, [ getNodesReadOnly, @@ -458,7 +450,6 @@ export const useNodesInteractions = () => { workflowStore, handleSyncWorkflowDraft, saveStateToHistory, - checkNestedParallelLimit, ], ) @@ -934,13 +925,8 @@ export const useNodesInteractions = () => { if (newEdge) draft.push(newEdge) }) - if (checkNestedParallelLimit(newNodes, newEdges, prevNode)) { - 
setNodes(newNodes) - setEdges(newEdges) - } - else { - return false - } + setNodes(newNodes) + setEdges(newEdges) } if (!prevNodeId && nextNodeId) { const nextNodeIndex = nodes.findIndex(node => node.id === nextNodeId) @@ -1087,17 +1073,11 @@ export const useNodesInteractions = () => { draft.push(newEdge) }) - if (checkNestedParallelLimit(newNodes, newEdges, nextNode)) { - setNodes(newNodes) - setEdges(newEdges) - } - else { - return false - } + setNodes(newNodes) + setEdges(newEdges) } else { - if (checkNestedParallelLimit(newNodes, edges)) setNodes(newNodes) - else return false + setNodes(newNodes) } } if (prevNodeId && nextNodeId) { @@ -1297,7 +1277,6 @@ export const useNodesInteractions = () => { saveStateToHistory, workflowStore, getAfterNodesInSameBranch, - checkNestedParallelLimit, nodesMetaDataMap, ], ) diff --git a/web/app/components/workflow/hooks/use-workflow.ts b/web/app/components/workflow/hooks/use-workflow.ts index 1fc1eedffa..02a2f09d63 100644 --- a/web/app/components/workflow/hooks/use-workflow.ts +++ b/web/app/components/workflow/hooks/use-workflow.ts @@ -2,7 +2,6 @@ import { useCallback, } from 'react' import { uniqBy } from 'lodash-es' -import { useTranslation } from 'react-i18next' import { getIncomers, getOutgoers, @@ -24,9 +23,7 @@ import { useStore, useWorkflowStore, } from '../store' -import { getParallelInfo } from '../utils' import { - PARALLEL_DEPTH_LIMIT, SUPPORT_OUTPUT_VARS_NODE, } from '../constants' import type { IterationNodeType } from '../nodes/iteration/types' @@ -44,7 +41,6 @@ import { import { CUSTOM_ITERATION_START_NODE } from '@/app/components/workflow/nodes/iteration-start/constants' import { CUSTOM_LOOP_START_NODE } from '@/app/components/workflow/nodes/loop-start/constants' import { basePath } from '@/utils/var' -import { MAX_PARALLEL_LIMIT } from '@/config' import { useNodesMetaData } from '.' 
export const useIsChatMode = () => { @@ -54,9 +50,7 @@ export const useIsChatMode = () => { } export const useWorkflow = () => { - const { t } = useTranslation() const store = useStoreApi() - const workflowStore = useWorkflowStore() const { getAvailableBlocks } = useAvailableBlocks() const { nodesMap } = useNodesMetaData() @@ -290,20 +284,6 @@ export const useWorkflow = () => { return isUsed }, [isVarUsedInNodes]) - const checkParallelLimit = useCallback((nodeId: string, nodeHandle = 'source') => { - const { - edges, - } = store.getState() - const connectedEdges = edges.filter(edge => edge.source === nodeId && edge.sourceHandle === nodeHandle) - if (connectedEdges.length > MAX_PARALLEL_LIMIT - 1) { - const { setShowTips } = workflowStore.getState() - setShowTips(t('workflow.common.parallelTip.limit', { num: MAX_PARALLEL_LIMIT })) - return false - } - - return true - }, [store, workflowStore, t]) - const getRootNodesById = useCallback((nodeId: string) => { const { getNodes, @@ -374,33 +354,6 @@ export const useWorkflow = () => { return startNodes }, [nodesMap, getRootNodesById]) - const checkNestedParallelLimit = useCallback((nodes: Node[], edges: Edge[], targetNode?: Node) => { - const startNodes = getStartNodes(nodes, targetNode) - - for (let i = 0; i < startNodes.length; i++) { - const { - parallelList, - hasAbnormalEdges, - } = getParallelInfo(startNodes[i], nodes, edges) - const { workflowConfig } = workflowStore.getState() - - if (hasAbnormalEdges) - return false - - for (let i = 0; i < parallelList.length; i++) { - const parallel = parallelList[i] - - if (parallel.depth > (workflowConfig?.parallel_depth_limit || PARALLEL_DEPTH_LIMIT)) { - const { setShowTips } = workflowStore.getState() - setShowTips(t('workflow.common.parallelTip.depthLimit', { num: (workflowConfig?.parallel_depth_limit || PARALLEL_DEPTH_LIMIT) })) - return false - } - } - } - - return true - }, [t, workflowStore, getStartNodes]) - const isValidConnection = useCallback(({ source, sourceHandle, target }: Connection) => { const { edges, @@ -410,9 +363,6 @@ export const useWorkflow = () => { const sourceNode: Node = nodes.find(node => node.id === source)! const targetNode: Node = nodes.find(node => node.id === target)! 
- if (!checkParallelLimit(source!, sourceHandle || 'source')) - return false - if (sourceNode.type === CUSTOM_NOTE_NODE || targetNode.type === CUSTOM_NOTE_NODE) return false @@ -445,7 +395,7 @@ export const useWorkflow = () => { } return !hasCycle(targetNode) - }, [store, checkParallelLimit, getAvailableBlocks]) + }, [store, getAvailableBlocks]) return { getNodeById, @@ -457,8 +407,6 @@ export const useWorkflow = () => { isVarUsedInNodes, removeUsedVarInNodes, isNodeVarsUsedInNodes, - checkParallelLimit, - checkNestedParallelLimit, isValidConnection, getBeforeNodeById, getIterationNodeChildren, diff --git a/web/app/components/workflow/index.tsx b/web/app/components/workflow/index.tsx index 1c0c6d4545..75c4d51390 100644 --- a/web/app/components/workflow/index.tsx +++ b/web/app/components/workflow/index.tsx @@ -71,7 +71,6 @@ import PanelContextmenu from './panel-contextmenu' import NodeContextmenu from './node-contextmenu' import SelectionContextmenu from './selection-contextmenu' import SyncingDataModal from './syncing-data-modal' -import LimitTips from './limit-tips' import { setupScrollToNodeListener } from './utils/node-navigation' import { useStore, @@ -378,7 +377,6 @@ export const Workflow: FC<WorkflowProps> = memo(({ /> ) } - <LimitTips /> {children} <ReactFlow nodeTypes={nodeTypes} diff --git a/web/app/components/workflow/limit-tips.tsx b/web/app/components/workflow/limit-tips.tsx deleted file mode 100644 index f0181bc6c9..0000000000 --- a/web/app/components/workflow/limit-tips.tsx +++ /dev/null @@ -1,39 +0,0 @@ -import { - RiAlertFill, - RiCloseLine, -} from '@remixicon/react' -import { useStore } from './store' -import ActionButton from '@/app/components/base/action-button' - -const LimitTips = () => { - const showTips = useStore(s => s.showTips) - const setShowTips = useStore(s => s.setShowTips) - - if (!showTips) - return null - - return ( - <div className='absolute bottom-16 left-1/2 z-[9] flex h-10 -translate-x-1/2 items-center rounded-xl border border-components-panel-border bg-components-panel-bg-blur p-2 shadow-md'> - <div - className='absolute inset-0 rounded-xl opacity-[0.4]' - style={{ - background: 'linear-gradient(92deg, rgba(247, 144, 9, 0.25) 0%, rgba(255, 255, 255, 0.00) 100%)', - }} - ></div> - <div className='flex h-5 w-5 items-center justify-center'> - <RiAlertFill className='h-4 w-4 text-text-warning-secondary' /> - </div> - <div className='system-xs-medium mx-1 px-1 text-text-primary'> - {showTips} - </div> - <ActionButton - className='z-[1]' - onClick={() => setShowTips('')} - > - <RiCloseLine className='h-4 w-4' /> - </ActionButton> - </div> - ) -} - -export default LimitTips diff --git a/web/app/components/workflow/nodes/_base/components/next-step/add.tsx b/web/app/components/workflow/nodes/_base/components/next-step/add.tsx index 4add079fa2..601bc8ea75 100644 --- a/web/app/components/workflow/nodes/_base/components/next-step/add.tsx +++ b/web/app/components/workflow/nodes/_base/components/next-step/add.tsx @@ -12,7 +12,6 @@ import { useAvailableBlocks, useNodesInteractions, useNodesReadOnly, - useWorkflow, } from '@/app/components/workflow/hooks' import BlockSelector from '@/app/components/workflow/block-selector' import type { @@ -39,7 +38,6 @@ const Add = ({ const { handleNodeAdd } = useNodesInteractions() const { nodesReadOnly } = useNodesReadOnly() const { availableNextBlocks } = useAvailableBlocks(nodeData.type, nodeData.isInIteration || nodeData.isInLoop) - const { checkParallelLimit } = useWorkflow() const handleSelect = 
useCallback<OnSelectBlock>((type, toolDefaultValue) => { handleNodeAdd( @@ -52,14 +50,11 @@ const Add = ({ prevNodeSourceHandle: sourceHandle, }, ) - }, [nodeId, sourceHandle, handleNodeAdd]) + }, [handleNodeAdd]) const handleOpenChange = useCallback((newOpen: boolean) => { - if (newOpen && !checkParallelLimit(nodeId, sourceHandle)) - return - setOpen(newOpen) - }, [checkParallelLimit, nodeId, sourceHandle]) + }, []) const tip = useMemo(() => { if (isFailBranch) diff --git a/web/app/components/workflow/nodes/_base/components/node-handle.tsx b/web/app/components/workflow/nodes/_base/components/node-handle.tsx index 907c3b2c07..d1d79a0faa 100644 --- a/web/app/components/workflow/nodes/_base/components/node-handle.tsx +++ b/web/app/components/workflow/nodes/_base/components/node-handle.tsx @@ -22,7 +22,6 @@ import { useIsChatMode, useNodesInteractions, useNodesReadOnly, - useWorkflow, } from '../../../hooks' import { useStore, @@ -132,7 +131,6 @@ export const NodeSourceHandle = memo(({ const { availableNextBlocks } = useAvailableBlocks(data.type, data.isInIteration || data.isInLoop) const isConnectable = !!availableNextBlocks.length const isChatMode = useIsChatMode() - const { checkParallelLimit } = useWorkflow() const connected = data._connectedSourceHandleIds?.includes(handleId) const handleOpenChange = useCallback((v: boolean) => { @@ -140,9 +138,8 @@ export const NodeSourceHandle = memo(({ }, []) const handleHandleClick = useCallback((e: MouseEvent) => { e.stopPropagation() - if (checkParallelLimit(id, handleId)) - setOpen(v => !v) - }, [checkParallelLimit, id, handleId]) + setOpen(v => !v) + }, []) const handleSelect = useCallback((type: BlockEnum, toolDefaultValue?: ToolDefaultValue) => { handleNodeAdd( { diff --git a/web/app/components/workflow/store/workflow/workflow-slice.ts b/web/app/components/workflow/store/workflow/workflow-slice.ts index 02a4db4c17..91dac42adb 100644 --- a/web/app/components/workflow/store/workflow/workflow-slice.ts +++ b/web/app/components/workflow/store/workflow/workflow-slice.ts @@ -29,10 +29,6 @@ export type WorkflowSliceShape = { setControlPromptEditorRerenderKey: (controlPromptEditorRerenderKey: number) => void showImportDSLModal: boolean setShowImportDSLModal: (showImportDSLModal: boolean) => void - showTips: string - setShowTips: (showTips: string) => void - workflowConfig?: Record<string, any> - setWorkflowConfig: (workflowConfig: Record<string, any>) => void fileUploadConfig?: FileUploadConfigResponse setFileUploadConfig: (fileUploadConfig: FileUploadConfigResponse) => void } @@ -59,10 +55,6 @@ export const createWorkflowSlice: StateCreator<WorkflowSliceShape> = set => ({ setControlPromptEditorRerenderKey: controlPromptEditorRerenderKey => set(() => ({ controlPromptEditorRerenderKey })), showImportDSLModal: false, setShowImportDSLModal: showImportDSLModal => set(() => ({ showImportDSLModal })), - showTips: '', - setShowTips: showTips => set(() => ({ showTips })), - workflowConfig: undefined, - setWorkflowConfig: workflowConfig => set(() => ({ workflowConfig })), fileUploadConfig: undefined, setFileUploadConfig: fileUploadConfig => set(() => ({ fileUploadConfig })), }) diff --git a/web/app/components/workflow/utils/workflow.ts b/web/app/components/workflow/utils/workflow.ts index fd0c30e5cf..48cb819086 100644 --- a/web/app/components/workflow/utils/workflow.ts +++ b/web/app/components/workflow/utils/workflow.ts @@ -1,12 +1,8 @@ import { - getConnectedEdges, - getIncomers, getOutgoers, } from 'reactflow' import { v4 as uuid4 } from 'uuid' import { - 
groupBy, - isEqual, uniqBy, } from 'lodash-es' import type { @@ -168,158 +164,6 @@ export const changeNodesAndEdgesId = (nodes: Node[], edges: Edge[]) => { return [newNodes, newEdges] as [Node[], Edge[]] } -type ParallelInfoItem = { - parallelNodeId: string - depth: number - isBranch?: boolean -} -type NodeParallelInfo = { - parallelNodeId: string - edgeHandleId: string - depth: number -} -type NodeHandle = { - node: Node - handle: string -} -type NodeStreamInfo = { - upstreamNodes: Set<string> - downstreamEdges: Set<string> -} -export const getParallelInfo = (startNode: Node, nodes: Node[], edges: Edge[]) => { - if (!startNode) - throw new Error('Start node not found') - - const parallelList = [] as ParallelInfoItem[] - const nextNodeHandles = [{ node: startNode, handle: 'source' }] - let hasAbnormalEdges = false - - const traverse = (firstNodeHandle: NodeHandle) => { - const nodeEdgesSet = {} as Record<string, Set<string>> - const totalEdgesSet = new Set<string>() - const nextHandles = [firstNodeHandle] - const streamInfo = {} as Record<string, NodeStreamInfo> - const parallelListItem = { - parallelNodeId: '', - depth: 0, - } as ParallelInfoItem - const nodeParallelInfoMap = {} as Record<string, NodeParallelInfo> - nodeParallelInfoMap[firstNodeHandle.node.id] = { - parallelNodeId: '', - edgeHandleId: '', - depth: 0, - } - - while (nextHandles.length) { - const currentNodeHandle = nextHandles.shift()! - const { node: currentNode, handle: currentHandle = 'source' } = currentNodeHandle - const currentNodeHandleKey = currentNode.id - const connectedEdges = edges.filter(edge => edge.source === currentNode.id && edge.sourceHandle === currentHandle) - const connectedEdgesLength = connectedEdges.length - const outgoers = nodes.filter(node => connectedEdges.some(edge => edge.target === node.id)) - const incomers = getIncomers(currentNode, nodes, edges) - - if (!streamInfo[currentNodeHandleKey]) { - streamInfo[currentNodeHandleKey] = { - upstreamNodes: new Set<string>(), - downstreamEdges: new Set<string>(), - } - } - - if (nodeEdgesSet[currentNodeHandleKey]?.size > 0 && incomers.length > 1) { - const newSet = new Set<string>() - for (const item of totalEdgesSet) { - if (!streamInfo[currentNodeHandleKey].downstreamEdges.has(item)) - newSet.add(item) - } - if (isEqual(nodeEdgesSet[currentNodeHandleKey], newSet)) { - parallelListItem.depth = nodeParallelInfoMap[currentNode.id].depth - nextNodeHandles.push({ node: currentNode, handle: currentHandle }) - break - } - } - - if (nodeParallelInfoMap[currentNode.id].depth > parallelListItem.depth) - parallelListItem.depth = nodeParallelInfoMap[currentNode.id].depth - - outgoers.forEach((outgoer) => { - const outgoerConnectedEdges = getConnectedEdges([outgoer], edges).filter(edge => edge.source === outgoer.id) - const sourceEdgesGroup = groupBy(outgoerConnectedEdges, 'sourceHandle') - const incomers = getIncomers(outgoer, nodes, edges) - - if (outgoers.length > 1 && incomers.length > 1) - hasAbnormalEdges = true - - Object.keys(sourceEdgesGroup).forEach((sourceHandle) => { - nextHandles.push({ node: outgoer, handle: sourceHandle }) - }) - if (!outgoerConnectedEdges.length) - nextHandles.push({ node: outgoer, handle: 'source' }) - - const outgoerKey = outgoer.id - if (!nodeEdgesSet[outgoerKey]) - nodeEdgesSet[outgoerKey] = new Set<string>() - - if (nodeEdgesSet[currentNodeHandleKey]) { - for (const item of nodeEdgesSet[currentNodeHandleKey]) - nodeEdgesSet[outgoerKey].add(item) - } - - if (!streamInfo[outgoerKey]) { - streamInfo[outgoerKey] = { - 
upstreamNodes: new Set<string>(), - downstreamEdges: new Set<string>(), - } - } - - if (!nodeParallelInfoMap[outgoer.id]) { - nodeParallelInfoMap[outgoer.id] = { - ...nodeParallelInfoMap[currentNode.id], - } - } - - if (connectedEdgesLength > 1) { - const edge = connectedEdges.find(edge => edge.target === outgoer.id)! - nodeEdgesSet[outgoerKey].add(edge.id) - totalEdgesSet.add(edge.id) - - streamInfo[currentNodeHandleKey].downstreamEdges.add(edge.id) - streamInfo[outgoerKey].upstreamNodes.add(currentNodeHandleKey) - - for (const item of streamInfo[currentNodeHandleKey].upstreamNodes) - streamInfo[item].downstreamEdges.add(edge.id) - - if (!parallelListItem.parallelNodeId) - parallelListItem.parallelNodeId = currentNode.id - - const prevDepth = nodeParallelInfoMap[currentNode.id].depth + 1 - const currentDepth = nodeParallelInfoMap[outgoer.id].depth - - nodeParallelInfoMap[outgoer.id].depth = Math.max(prevDepth, currentDepth) - } - else { - for (const item of streamInfo[currentNodeHandleKey].upstreamNodes) - streamInfo[outgoerKey].upstreamNodes.add(item) - - nodeParallelInfoMap[outgoer.id].depth = nodeParallelInfoMap[currentNode.id].depth - } - }) - } - - parallelList.push(parallelListItem) - } - - while (nextNodeHandles.length) { - const nodeHandle = nextNodeHandles.shift()! - traverse(nodeHandle) - } - - return { - parallelList, - hasAbnormalEdges, - } -} - export const hasErrorHandleNode = (nodeType?: BlockEnum) => { return nodeType === BlockEnum.LLM || nodeType === BlockEnum.Tool || nodeType === BlockEnum.HttpRequest || nodeType === BlockEnum.Code } From eab6f6540924d51c86184b120c62b0f30bc065c1 Mon Sep 17 00:00:00 2001 From: Wood <tuiskuwood@outlook.com> Date: Thu, 25 Sep 2025 22:43:00 +0800 Subject: [PATCH 032/126] Fix array-only filtering in List Operator picker; remove file children fallback and align child types. (#26240) --- .../nodes/_base/components/variable/utils.ts | 38 +++++++++---------- .../variable/var-reference-vars.tsx | 9 +++-- .../workflow/nodes/list-operator/panel.tsx | 1 + 3 files changed, 23 insertions(+), 25 deletions(-) diff --git a/web/app/components/workflow/nodes/_base/components/variable/utils.ts b/web/app/components/workflow/nodes/_base/components/variable/utils.ts index d3621d5050..10919e198b 100644 --- a/web/app/components/workflow/nodes/_base/components/variable/utils.ts +++ b/web/app/components/workflow/nodes/_base/components/variable/utils.ts @@ -42,6 +42,7 @@ import type { RAGPipelineVariable } from '@/models/pipeline' import { AGENT_OUTPUT_STRUCT, + FILE_STRUCT, HTTP_REQUEST_OUTPUT_STRUCT, KNOWLEDGE_RETRIEVAL_OUTPUT_STRUCT, LLM_OUTPUT_STRUCT, @@ -138,6 +139,10 @@ export const varTypeToStructType = (type: VarType): Type => { [VarType.boolean]: Type.boolean, [VarType.object]: Type.object, [VarType.array]: Type.array, + [VarType.arrayString]: Type.array, + [VarType.arrayNumber]: Type.array, + [VarType.arrayObject]: Type.array, + [VarType.arrayFile]: Type.array, } as any )[type] || Type.string ) @@ -282,15 +287,6 @@ const findExceptVarInObject = ( children: filteredObj.children, } }) - - if (isFile && Array.isArray(childrenResult)) { - if (childrenResult.length === 0) { - childrenResult = OUTPUT_FILE_SUB_VARIABLES.map(key => ({ - variable: key, - type: key === 'size' ? VarType.number : VarType.string, - })) - } - } } else { childrenResult = [] @@ -586,17 +582,15 @@ const formatItem = ( variable: outputKey, type: output.type === 'array' - ? (`Array[${ - output.items?.type - ? 
output.items.type.slice(0, 1).toLocaleUpperCase() - + output.items.type.slice(1) - : 'Unknown' + ? (`Array[${output.items?.type + ? output.items.type.slice(0, 1).toLocaleUpperCase() + + output.items.type.slice(1) + : 'Unknown' }]` as VarType) - : (`${ - output.type - ? output.type.slice(0, 1).toLocaleUpperCase() - + output.type.slice(1) - : 'Unknown' + : (`${output.type + ? output.type.slice(0, 1).toLocaleUpperCase() + + output.type.slice(1) + : 'Unknown' }` as VarType), }) }, @@ -690,9 +684,10 @@ const formatItem = ( const children = (() => { if (isFile) { return OUTPUT_FILE_SUB_VARIABLES.map((key) => { + const def = FILE_STRUCT.find(c => c.variable === key) return { variable: key, - type: key === 'size' ? VarType.number : VarType.string, + type: def?.type || VarType.string, } }) } @@ -714,9 +709,10 @@ const formatItem = ( if (isFile) { return { children: OUTPUT_FILE_SUB_VARIABLES.map((key) => { + const def = FILE_STRUCT.find(c => c.variable === key) return { variable: key, - type: key === 'size' ? VarType.number : VarType.string, + type: def?.type || VarType.string, } }), } diff --git a/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx b/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx index 9b6ade246c..614d01a11e 100644 --- a/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx +++ b/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx @@ -18,7 +18,6 @@ import { Type } from '../../../llm/types' import PickerStructurePanel from '@/app/components/workflow/nodes/_base/components/variable/object-child-tree-panel/picker' import { isSpecialVar, varTypeToStructType } from './utils' import type { Field } from '@/app/components/workflow/nodes/llm/types' -import { FILE_STRUCT } from '@/app/components/workflow/constants' import { noop } from 'lodash-es' import { CodeAssistant, MagicEdit } from '@/app/components/base/icons/src/vender/line/general' import ManageInputField from './manage-input-field' @@ -106,8 +105,9 @@ const Item: FC<ItemProps> = ({ const objStructuredOutput: StructuredOutput | null = useMemo(() => { if (!isObj) return null - const properties: Record<string, Field> = {}; - (isFile ? 
FILE_STRUCT : (itemData.children as Var[])).forEach((c) => { + const properties: Record<string, Field> = {} + const childrenVars = (itemData.children as Var[]) || [] + childrenVars.forEach((c) => { properties[c.variable] = { type: varTypeToStructType(c.type), } @@ -120,7 +120,7 @@ const Item: FC<ItemProps> = ({ additionalProperties: false, }, } - }, [isFile, isObj, itemData.children]) + }, [isObj, itemData.children]) const structuredOutput = (() => { if (isStructureOutput) @@ -448,4 +448,5 @@ const VarReferenceVars: FC<Props> = ({ </> ) } + export default React.memo(VarReferenceVars) diff --git a/web/app/components/workflow/nodes/list-operator/panel.tsx b/web/app/components/workflow/nodes/list-operator/panel.tsx index 9a89629f09..e76befcac0 100644 --- a/web/app/components/workflow/nodes/list-operator/panel.tsx +++ b/web/app/components/workflow/nodes/list-operator/panel.tsx @@ -55,6 +55,7 @@ const Panel: FC<NodePanelProps<ListFilterNodeType>> = ({ value={inputs.variable || []} onChange={handleVarChanges} filterVar={filterVar} + isSupportFileVar={false} typePlaceHolder='Array' /> </Field> From fb8114792a9803f987d45bfb6e12e7eec82e2425 Mon Sep 17 00:00:00 2001 From: Wood <tuiskuwood@outlook.com> Date: Thu, 25 Sep 2025 22:43:26 +0800 Subject: [PATCH 033/126] =?UTF-8?q?fix:=20sync=20FileUploader=20context=20?= =?UTF-8?q?with=20props=20to=20fix=20inconsistent=20file=20parameter=20sta?= =?UTF-8?q?te=20in=20=E2=80=9CView=20cached=20variables=E2=80=9D.=20(#2619?= =?UTF-8?q?9)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../components/base/file-uploader/store.tsx | 31 +++++++++++++++++-- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/web/app/components/base/file-uploader/store.tsx b/web/app/components/base/file-uploader/store.tsx index cddfdf6f27..7f7cfd5693 100644 --- a/web/app/components/base/file-uploader/store.tsx +++ b/web/app/components/base/file-uploader/store.tsx @@ -1,6 +1,7 @@ import { createContext, useContext, + useEffect, useRef, } from 'react' import { @@ -18,13 +19,11 @@ type Shape = { export const createFileStore = ( value: FileEntity[] = [], - onChange?: (files: FileEntity[]) => void, ) => { return create<Shape>(set => ({ files: value ? [...value] : [], setFiles: (files) => { set({ files }) - onChange?.(files) }, })) } @@ -55,9 +54,35 @@ export const FileContextProvider = ({ onChange, }: FileProviderProps) => { const storeRef = useRef<FileStore | undefined>(undefined) + const onChangeRef = useRef<FileProviderProps['onChange']>(onChange) + const isSyncingRef = useRef(false) if (!storeRef.current) - storeRef.current = createFileStore(value, onChange) + storeRef.current = createFileStore(value) + + // keep latest onChange + useEffect(() => { + onChangeRef.current = onChange + }, [onChange]) + + // subscribe to store changes and call latest onChange + useEffect(() => { + const store = storeRef.current! + const unsubscribe = store.subscribe((state: Shape) => { + if (isSyncingRef.current) return + onChangeRef.current?.(state.files) + }) + return unsubscribe + }, []) + + // sync external value into internal store when value changes + useEffect(() => { + const store = storeRef.current! + const nextFiles = value ? 
[...value] : [] + isSyncingRef.current = true + store.setState({ files: nextFiles }) + isSyncingRef.current = false + }, [value]) return ( <FileContext.Provider value={storeRef.current}> From 0cac330bc24fd831a778280203eddcc5a80fa2f9 Mon Sep 17 00:00:00 2001 From: lyzno1 <92089059+lyzno1@users.noreply.github.com> Date: Thu, 25 Sep 2025 22:43:37 +0800 Subject: [PATCH 034/126] fix: add echarts and zrender to transpilePackages for ESM compatibility (#26208) --- web/next.config.js | 1 + 1 file changed, 1 insertion(+) diff --git a/web/next.config.js b/web/next.config.js index 9c5e331f34..6a7a7a798d 100644 --- a/web/next.config.js +++ b/web/next.config.js @@ -91,6 +91,7 @@ const remoteImageURLs = [hasSetWebPrefix ? new URL(`${process.env.NEXT_PUBLIC_WE /** @type {import('next').NextConfig} */ const nextConfig = { basePath: process.env.NEXT_PUBLIC_BASE_PATH || '', + transpilePackages: ['echarts', 'zrender'], turbopack: { rules: codeInspectorPlugin({ bundler: 'turbopack' From 9b83b0aaddb8dda05b25a6149e3ea72922e623bd Mon Sep 17 00:00:00 2001 From: Masahiro Hiramori <contact@mshr-h.com> Date: Thu, 25 Sep 2025 23:49:54 +0900 Subject: [PATCH 035/126] chore: fix inaccurate translation in ja-JP (#26243) --- web/i18n/ja-JP/dataset-pipeline.ts | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/web/i18n/ja-JP/dataset-pipeline.ts b/web/i18n/ja-JP/dataset-pipeline.ts index ea3296840a..b261d88ae4 100644 --- a/web/i18n/ja-JP/dataset-pipeline.ts +++ b/web/i18n/ja-JP/dataset-pipeline.ts @@ -4,12 +4,12 @@ const translation = { title: '空白の知識パイプライン', description: 'データ処理と構造を完全に制御できるカスタムパイプラインをゼロから作成します。', }, - backToKnowledge: '知識に戻る', + backToKnowledge: 'ナレッジベースに戻る', caution: '注意', importDSL: 'DSLファイルからインポートする', errorTip: 'ナレッジベースの作成に失敗しました', - createKnowledge: '知識を創造する', - successTip: '知識ベースが正常に作成されました', + createKnowledge: 'ナレッジベースを作成する', + successTip: 'ナレッジベースが正常に作成されました', }, templates: { customized: 'カスタマイズされた', @@ -21,10 +21,10 @@ const translation = { preview: 'プレビュー', dataSource: 'データソース', editInfo: '情報を編集する', - exportPipeline: '輸出パイプライン', + exportPipeline: 'パイプラインをエクスポートする', saveAndProcess: '保存して処理する', backToDataSource: 'データソースに戻る', - useTemplate: 'この知識パイプラインを使用してください', + useTemplate: 'このナレッジパイプラインを使用してください', process: 'プロセス', }, deletePipeline: { @@ -37,7 +37,7 @@ const translation = { tip: '<CustomLink>ドキュメントに移動</CustomLink>して、ドキュメントを追加または管理してください。', }, error: { - message: '知識パイプラインの公開に失敗しました', + message: 'ナレッジパイプラインの公開に失敗しました', }, }, publishTemplate: { @@ -147,19 +147,19 @@ const translation = { content: 'この操作は永久的です。以前の方法に戻すことはできません。変換することを確認してください。', }, warning: 'この操作は元に戻せません。', - title: '知識パイプラインに変換する', + title: 'ナレッジパイプラインに変換する', successMessage: 'データセットをパイプラインに正常に変換しました', errorMessage: 'データセットをパイプラインに変換できませんでした', - descriptionChunk1: '既存の知識ベースを文書処理のためにナレッジパイプラインを使用するように変換できます。', + descriptionChunk1: '既存のナレッジベースを文書処理のためにナレッジパイプラインを使用するように変換できます。', descriptionChunk2: '— よりオープンで柔軟なアプローチを採用し、私たちのマーケットプレイスからのプラグインへのアクセスを提供します。これにより、すべての将来のドキュメントに新しい処理方法が適用されることになります。', }, - knowledgeNameAndIcon: '知識の名前とアイコン', + knowledgeNameAndIcon: 'ナレッジの名前とアイコン', inputField: '入力フィールド', pipelineNameAndIcon: 'パイプライン名とアイコン', knowledgePermissions: '権限', knowledgeNameAndIconPlaceholder: 'ナレッジベースの名前を入力してください', editPipelineInfo: 'パイプライン情報を編集する', - knowledgeDescription: '知識の説明', + knowledgeDescription: 'ナレッジベースの説明', knowledgeDescriptionPlaceholder: 'このナレッジベースに何が含まれているかを説明してください。詳細な説明は、AIがデータセットの内容により正確にアクセスできるようにします。空の場合、Difyはデフォルトのヒット戦略を使用します。(オプション)', } From 
e682749d0398093acce233e5f1566224724b34f6 Mon Sep 17 00:00:00 2001 From: heyszt <270985384@qq.com> Date: Thu, 25 Sep 2025 22:51:15 +0800 Subject: [PATCH 036/126] aliyun_trace: unify the span attribute & compatible CMS 2.0 endpoint (#26194) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> --- .../advanced_chat/generate_task_pipeline.py | 2 +- api/core/ops/aliyun_trace/aliyun_trace.py | 472 +++++++++--------- .../aliyun_trace/data_exporter/traceclient.py | 66 ++- .../entities/aliyun_trace_entity.py | 17 +- api/core/ops/aliyun_trace/entities/semconv.py | 75 ++- api/core/ops/aliyun_trace/utils.py | 95 ++++ api/core/ops/entities/config_entity.py | 3 +- .../unit_tests/core/ops/test_config_entity.py | 27 +- 8 files changed, 441 insertions(+), 316 deletions(-) create mode 100644 api/core/ops/aliyun_trace/utils.py diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index 71588870fa..e021b0aca7 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -551,7 +551,7 @@ class AdvancedChatAppGenerateTaskPipeline: total_steps=validated_state.node_run_steps, outputs=event.outputs, exceptions_count=event.exceptions_count, - conversation_id=None, + conversation_id=self._conversation_id, trace_manager=trace_manager, external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), ) diff --git a/api/core/ops/aliyun_trace/aliyun_trace.py b/api/core/ops/aliyun_trace/aliyun_trace.py index 7e817a6bff..c0727326ce 100644 --- a/api/core/ops/aliyun_trace/aliyun_trace.py +++ b/api/core/ops/aliyun_trace/aliyun_trace.py @@ -1,38 +1,28 @@ -import json import logging from collections.abc import Sequence -from urllib.parse import urljoin -from opentelemetry.trace import Link, Status, StatusCode -from sqlalchemy import select -from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.orm import sessionmaker from core.ops.aliyun_trace.data_exporter.traceclient import ( TraceClient, + build_endpoint, convert_datetime_to_nanoseconds, convert_to_span_id, convert_to_trace_id, - create_link, generate_span_id, ) -from core.ops.aliyun_trace.entities.aliyun_trace_entity import SpanData +from core.ops.aliyun_trace.entities.aliyun_trace_entity import SpanData, TraceMetadata from core.ops.aliyun_trace.entities.semconv import ( GEN_AI_COMPLETION, - GEN_AI_FRAMEWORK, GEN_AI_MODEL_NAME, GEN_AI_PROMPT, GEN_AI_PROMPT_TEMPLATE_TEMPLATE, GEN_AI_PROMPT_TEMPLATE_VARIABLE, GEN_AI_RESPONSE_FINISH_REASON, - GEN_AI_SESSION_ID, - GEN_AI_SPAN_KIND, GEN_AI_SYSTEM, GEN_AI_USAGE_INPUT_TOKENS, GEN_AI_USAGE_OUTPUT_TOKENS, GEN_AI_USAGE_TOTAL_TOKENS, - GEN_AI_USER_ID, - INPUT_VALUE, - OUTPUT_VALUE, RETRIEVAL_DOCUMENT, RETRIEVAL_QUERY, TOOL_DESCRIPTION, @@ -40,6 +30,15 @@ from core.ops.aliyun_trace.entities.semconv import ( TOOL_PARAMETERS, GenAISpanKind, ) +from core.ops.aliyun_trace.utils import ( + create_common_span_attributes, + create_links_from_trace_id, + create_status_from_error, + extract_retrieval_documents, + get_user_id_from_message_data, + get_workflow_node_status, + serialize_json_data, +) from core.ops.base_trace_instance import BaseTraceInstance from core.ops.entities.config_entity import AliyunConfig from core.ops.entities.trace_entity import ( @@ -52,12 +51,11 @@ from core.ops.entities.trace_entity import ( ToolTraceInfo, WorkflowTraceInfo, ) 
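# --- Illustrative sketch, not part of the patch: the endpoint routing that the
# build_endpoint helper, added below in data_exporter/traceclient.py, performs.
# Hosts containing "log.aliyuncs.com" are treated as CMS 2.0 and get the
# /api/v1/traces path; all other hosts are treated as xtrace and get
# /api/otlp/traces. "LICENSE_KEY" is a placeholder value; the two base URLs are
# taken from the patch's own endpoint-format test cases.
from core.ops.aliyun_trace.data_exporter.traceclient import build_endpoint

cms_endpoint = build_endpoint(
    "https://proj-xtrace-123456-cn-heyuan.cn-heyuan.log.aliyuncs.com/apm/trace/opentelemetry",
    "LICENSE_KEY",
)
# cms_endpoint ends with "adapt_LICENSE_KEY/api/v1/traces"

xtrace_endpoint = build_endpoint("http://tracing-cn-heyuan.arms.aliyuncs.com", "LICENSE_KEY")
# xtrace_endpoint ends with "adapt_LICENSE_KEY/api/otlp/traces"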
-from core.rag.models.document import Document from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository from core.workflow.entities import WorkflowNodeExecution -from core.workflow.enums import NodeType, WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus +from core.workflow.enums import NodeType, WorkflowNodeExecutionMetadataKey from extensions.ext_database import db -from models import Account, App, EndUser, TenantAccountJoin, WorkflowNodeExecutionTriggeredFrom +from models import WorkflowNodeExecutionTriggeredFrom logger = logging.getLogger(__name__) @@ -68,8 +66,7 @@ class AliyunDataTrace(BaseTraceInstance): aliyun_config: AliyunConfig, ): super().__init__(aliyun_config) - base_url = aliyun_config.endpoint.rstrip("/") - endpoint = urljoin(base_url, f"adapt_{aliyun_config.license_key}/api/otlp/traces") + endpoint = build_endpoint(aliyun_config.endpoint, aliyun_config.license_key) self.trace_client = TraceClient(service_name=aliyun_config.app_name, endpoint=endpoint) def trace(self, trace_info: BaseTraceInfo): @@ -95,423 +92,422 @@ class AliyunDataTrace(BaseTraceInstance): try: return self.trace_client.get_project_url() except Exception as e: - logger.info("Aliyun get run url failed: %s", str(e), exc_info=True) - raise ValueError(f"Aliyun get run url failed: {str(e)}") + logger.info("Aliyun get project url failed: %s", str(e), exc_info=True) + raise ValueError(f"Aliyun get project url failed: {str(e)}") def workflow_trace(self, trace_info: WorkflowTraceInfo): - trace_id = convert_to_trace_id(trace_info.workflow_run_id) - links = [] - if trace_info.trace_id: - links.append(create_link(trace_id_str=trace_info.trace_id)) - workflow_span_id = convert_to_span_id(trace_info.workflow_run_id, "workflow") - self.add_workflow_span(trace_id, workflow_span_id, trace_info, links) + trace_metadata = TraceMetadata( + trace_id=convert_to_trace_id(trace_info.workflow_run_id), + workflow_span_id=convert_to_span_id(trace_info.workflow_run_id, "workflow"), + session_id=trace_info.metadata.get("conversation_id") or "", + user_id=str(trace_info.metadata.get("user_id") or ""), + links=create_links_from_trace_id(trace_info.trace_id), + ) + + self.add_workflow_span(trace_info, trace_metadata) workflow_node_executions = self.get_workflow_node_executions(trace_info) for node_execution in workflow_node_executions: - node_span = self.build_workflow_node_span(node_execution, trace_id, trace_info, workflow_span_id) + node_span = self.build_workflow_node_span(node_execution, trace_info, trace_metadata) self.trace_client.add_span(node_span) def message_trace(self, trace_info: MessageTraceInfo): message_data = trace_info.message_data if message_data is None: return + message_id = trace_info.message_id + user_id = get_user_id_from_message_data(message_data) + status = create_status_from_error(trace_info.error) - user_id = message_data.from_account_id - if message_data.from_end_user_id: - end_user_data: EndUser | None = ( - db.session.query(EndUser).where(EndUser.id == message_data.from_end_user_id).first() - ) - if end_user_data is not None: - user_id = end_user_data.session_id + trace_metadata = TraceMetadata( + trace_id=convert_to_trace_id(message_id), + workflow_span_id=0, + session_id=trace_info.metadata.get("conversation_id") or "", + user_id=user_id, + links=create_links_from_trace_id(trace_info.trace_id), + ) - status: Status = Status(StatusCode.OK) - if trace_info.error: - status = Status(StatusCode.ERROR, trace_info.error) - - trace_id = convert_to_trace_id(message_id) - links = [] - if 
trace_info.trace_id: - links.append(create_link(trace_id_str=trace_info.trace_id)) + inputs_json = serialize_json_data(trace_info.inputs) + outputs_str = str(trace_info.outputs) message_span_id = convert_to_span_id(message_id, "message") message_span = SpanData( - trace_id=trace_id, + trace_id=trace_metadata.trace_id, parent_span_id=None, span_id=message_span_id, name="message", start_time=convert_datetime_to_nanoseconds(trace_info.start_time), end_time=convert_datetime_to_nanoseconds(trace_info.end_time), - attributes={ - GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id") or "", - GEN_AI_USER_ID: str(user_id), - GEN_AI_SPAN_KIND: GenAISpanKind.CHAIN.value, - GEN_AI_FRAMEWORK: "dify", - INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False), - OUTPUT_VALUE: str(trace_info.outputs), - }, + attributes=create_common_span_attributes( + session_id=trace_metadata.session_id, + user_id=trace_metadata.user_id, + span_kind=GenAISpanKind.CHAIN, + inputs=inputs_json, + outputs=outputs_str, + ), status=status, - links=links, + links=trace_metadata.links, ) self.trace_client.add_span(message_span) - app_model_config = getattr(trace_info.message_data, "app_model_config", {}) + app_model_config = getattr(message_data, "app_model_config", {}) pre_prompt = getattr(app_model_config, "pre_prompt", "") - inputs_data = getattr(trace_info.message_data, "inputs", {}) + inputs_data = getattr(message_data, "inputs", {}) + llm_span = SpanData( - trace_id=trace_id, + trace_id=trace_metadata.trace_id, parent_span_id=message_span_id, span_id=convert_to_span_id(message_id, "llm"), name="llm", start_time=convert_datetime_to_nanoseconds(trace_info.start_time), end_time=convert_datetime_to_nanoseconds(trace_info.end_time), attributes={ - GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id") or "", - GEN_AI_USER_ID: str(user_id), - GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value, - GEN_AI_FRAMEWORK: "dify", + **create_common_span_attributes( + session_id=trace_metadata.session_id, + user_id=trace_metadata.user_id, + span_kind=GenAISpanKind.LLM, + inputs=inputs_json, + outputs=outputs_str, + ), GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name") or "", GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider") or "", GEN_AI_USAGE_INPUT_TOKENS: str(trace_info.message_tokens), GEN_AI_USAGE_OUTPUT_TOKENS: str(trace_info.answer_tokens), GEN_AI_USAGE_TOTAL_TOKENS: str(trace_info.total_tokens), - GEN_AI_PROMPT_TEMPLATE_VARIABLE: json.dumps(inputs_data, ensure_ascii=False), + GEN_AI_PROMPT_TEMPLATE_VARIABLE: serialize_json_data(inputs_data), GEN_AI_PROMPT_TEMPLATE_TEMPLATE: pre_prompt, - GEN_AI_PROMPT: json.dumps(trace_info.inputs, ensure_ascii=False), - GEN_AI_COMPLETION: str(trace_info.outputs), - INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False), - OUTPUT_VALUE: str(trace_info.outputs), + GEN_AI_PROMPT: inputs_json, + GEN_AI_COMPLETION: outputs_str, }, status=status, + links=trace_metadata.links, ) self.trace_client.add_span(llm_span) def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo): if trace_info.message_data is None: return + message_id = trace_info.message_id - trace_id = convert_to_trace_id(message_id) - links = [] - if trace_info.trace_id: - links.append(create_link(trace_id_str=trace_info.trace_id)) + trace_metadata = TraceMetadata( + trace_id=convert_to_trace_id(message_id), + workflow_span_id=0, + session_id=trace_info.metadata.get("conversation_id") or "", + user_id=str(trace_info.metadata.get("user_id") or ""), + 
links=create_links_from_trace_id(trace_info.trace_id), + ) documents_data = extract_retrieval_documents(trace_info.documents) + documents_json = serialize_json_data(documents_data) + inputs_str = str(trace_info.inputs) + dataset_retrieval_span = SpanData( - trace_id=trace_id, + trace_id=trace_metadata.trace_id, parent_span_id=convert_to_span_id(message_id, "message"), span_id=generate_span_id(), name="dataset_retrieval", start_time=convert_datetime_to_nanoseconds(trace_info.start_time), end_time=convert_datetime_to_nanoseconds(trace_info.end_time), attributes={ - GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value, - GEN_AI_FRAMEWORK: "dify", - RETRIEVAL_QUERY: str(trace_info.inputs), - RETRIEVAL_DOCUMENT: json.dumps(documents_data, ensure_ascii=False), - INPUT_VALUE: str(trace_info.inputs), - OUTPUT_VALUE: json.dumps(documents_data, ensure_ascii=False), + **create_common_span_attributes( + session_id=trace_metadata.session_id, + user_id=trace_metadata.user_id, + span_kind=GenAISpanKind.RETRIEVER, + inputs=inputs_str, + outputs=documents_json, + ), + RETRIEVAL_QUERY: inputs_str, + RETRIEVAL_DOCUMENT: documents_json, }, - links=links, + links=trace_metadata.links, ) self.trace_client.add_span(dataset_retrieval_span) def tool_trace(self, trace_info: ToolTraceInfo): if trace_info.message_data is None: return + message_id = trace_info.message_id + status = create_status_from_error(trace_info.error) - status: Status = Status(StatusCode.OK) - if trace_info.error: - status = Status(StatusCode.ERROR, trace_info.error) + trace_metadata = TraceMetadata( + trace_id=convert_to_trace_id(message_id), + workflow_span_id=0, + session_id=trace_info.metadata.get("conversation_id") or "", + user_id=str(trace_info.metadata.get("user_id") or ""), + links=create_links_from_trace_id(trace_info.trace_id), + ) - trace_id = convert_to_trace_id(message_id) - links = [] - if trace_info.trace_id: - links.append(create_link(trace_id_str=trace_info.trace_id)) + tool_config_json = serialize_json_data(trace_info.tool_config) + tool_inputs_json = serialize_json_data(trace_info.tool_inputs) + inputs_json = serialize_json_data(trace_info.inputs) tool_span = SpanData( - trace_id=trace_id, + trace_id=trace_metadata.trace_id, parent_span_id=convert_to_span_id(message_id, "message"), span_id=generate_span_id(), name=trace_info.tool_name, start_time=convert_datetime_to_nanoseconds(trace_info.start_time), end_time=convert_datetime_to_nanoseconds(trace_info.end_time), attributes={ - GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value, - GEN_AI_FRAMEWORK: "dify", + **create_common_span_attributes( + session_id=trace_metadata.session_id, + user_id=trace_metadata.user_id, + span_kind=GenAISpanKind.TOOL, + inputs=inputs_json, + outputs=str(trace_info.tool_outputs), + ), TOOL_NAME: trace_info.tool_name, - TOOL_DESCRIPTION: json.dumps(trace_info.tool_config, ensure_ascii=False), - TOOL_PARAMETERS: json.dumps(trace_info.tool_inputs, ensure_ascii=False), - INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False), - OUTPUT_VALUE: str(trace_info.tool_outputs), + TOOL_DESCRIPTION: tool_config_json, + TOOL_PARAMETERS: tool_inputs_json, }, status=status, - links=links, + links=trace_metadata.links, ) self.trace_client.add_span(tool_span) def get_workflow_node_executions(self, trace_info: WorkflowTraceInfo) -> Sequence[WorkflowNodeExecution]: - # through workflow_run_id get all_nodes_execution using repository - session_factory = sessionmaker(bind=db.engine) - # Find the app's creator account - with Session(db.engine, expire_on_commit=False) as 
session: - # Get the app to find its creator - app_id = trace_info.metadata.get("app_id") - if not app_id: - raise ValueError("No app_id found in trace_info metadata") - app_stmt = select(App).where(App.id == app_id) - app = session.scalar(app_stmt) - if not app: - raise ValueError(f"App with id {app_id} not found") + app_id = trace_info.metadata.get("app_id") + if not app_id: + raise ValueError("No app_id found in trace_info metadata") - if not app.created_by: - raise ValueError(f"App with id {app_id} has no creator (created_by is None)") - account_stmt = select(Account).where(Account.id == app.created_by) - service_account = session.scalar(account_stmt) - if not service_account: - raise ValueError(f"Creator account with id {app.created_by} not found for app {app_id}") - current_tenant = ( - session.query(TenantAccountJoin).filter_by(account_id=service_account.id, current=True).first() - ) - if not current_tenant: - raise ValueError(f"Current tenant not found for account {service_account.id}") - service_account.set_tenant_id(current_tenant.tenant_id) + service_account = self.get_service_account_with_tenant(app_id) + + session_factory = sessionmaker(bind=db.engine) workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository( session_factory=session_factory, user=service_account, app_id=app_id, triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN, ) - # Get all executions for this workflow run - workflow_node_executions = workflow_node_execution_repository.get_by_workflow_run( - workflow_run_id=trace_info.workflow_run_id - ) - return workflow_node_executions + + return workflow_node_execution_repository.get_by_workflow_run(workflow_run_id=trace_info.workflow_run_id) def build_workflow_node_span( - self, node_execution: WorkflowNodeExecution, trace_id: int, trace_info: WorkflowTraceInfo, workflow_span_id: int + self, node_execution: WorkflowNodeExecution, trace_info: WorkflowTraceInfo, trace_metadata: TraceMetadata ): try: if node_execution.node_type == NodeType.LLM: - node_span = self.build_workflow_llm_span(trace_id, workflow_span_id, trace_info, node_execution) + node_span = self.build_workflow_llm_span(trace_info, node_execution, trace_metadata) elif node_execution.node_type == NodeType.KNOWLEDGE_RETRIEVAL: - node_span = self.build_workflow_retrieval_span(trace_id, workflow_span_id, trace_info, node_execution) + node_span = self.build_workflow_retrieval_span(trace_info, node_execution, trace_metadata) elif node_execution.node_type == NodeType.TOOL: - node_span = self.build_workflow_tool_span(trace_id, workflow_span_id, trace_info, node_execution) + node_span = self.build_workflow_tool_span(trace_info, node_execution, trace_metadata) else: - node_span = self.build_workflow_task_span(trace_id, workflow_span_id, trace_info, node_execution) + node_span = self.build_workflow_task_span(trace_info, node_execution, trace_metadata) return node_span except Exception as e: logger.debug("Error occurred in build_workflow_node_span: %s", e, exc_info=True) return None - def get_workflow_node_status(self, node_execution: WorkflowNodeExecution) -> Status: - span_status: Status = Status(StatusCode.UNSET) - if node_execution.status == WorkflowNodeExecutionStatus.SUCCEEDED: - span_status = Status(StatusCode.OK) - elif node_execution.status in [WorkflowNodeExecutionStatus.FAILED, WorkflowNodeExecutionStatus.EXCEPTION]: - span_status = Status(StatusCode.ERROR, str(node_execution.error)) - return span_status - def build_workflow_task_span( - self, trace_id: int, workflow_span_id: 
int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + self, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution, trace_metadata: TraceMetadata ) -> SpanData: + inputs_json = serialize_json_data(node_execution.inputs) + outputs_json = serialize_json_data(node_execution.outputs) return SpanData( - trace_id=trace_id, - parent_span_id=workflow_span_id, + trace_id=trace_metadata.trace_id, + parent_span_id=trace_metadata.workflow_span_id, span_id=convert_to_span_id(node_execution.id, "node"), name=node_execution.title, start_time=convert_datetime_to_nanoseconds(node_execution.created_at), end_time=convert_datetime_to_nanoseconds(node_execution.finished_at), - attributes={ - GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id") or "", - GEN_AI_SPAN_KIND: GenAISpanKind.TASK.value, - GEN_AI_FRAMEWORK: "dify", - INPUT_VALUE: json.dumps(node_execution.inputs, ensure_ascii=False), - OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False), - }, - status=self.get_workflow_node_status(node_execution), + attributes=create_common_span_attributes( + session_id=trace_metadata.session_id, + user_id=trace_metadata.user_id, + span_kind=GenAISpanKind.TASK, + inputs=inputs_json, + outputs=outputs_json, + ), + status=get_workflow_node_status(node_execution), + links=trace_metadata.links, ) def build_workflow_tool_span( - self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + self, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution, trace_metadata: TraceMetadata ) -> SpanData: tool_des = {} if node_execution.metadata: tool_des = node_execution.metadata.get(WorkflowNodeExecutionMetadataKey.TOOL_INFO, {}) + + inputs_json = serialize_json_data(node_execution.inputs or {}) + outputs_json = serialize_json_data(node_execution.outputs) + return SpanData( - trace_id=trace_id, - parent_span_id=workflow_span_id, + trace_id=trace_metadata.trace_id, + parent_span_id=trace_metadata.workflow_span_id, span_id=convert_to_span_id(node_execution.id, "node"), name=node_execution.title, start_time=convert_datetime_to_nanoseconds(node_execution.created_at), end_time=convert_datetime_to_nanoseconds(node_execution.finished_at), attributes={ - GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value, - GEN_AI_FRAMEWORK: "dify", + **create_common_span_attributes( + session_id=trace_metadata.session_id, + user_id=trace_metadata.user_id, + span_kind=GenAISpanKind.TOOL, + inputs=inputs_json, + outputs=outputs_json, + ), TOOL_NAME: node_execution.title, - TOOL_DESCRIPTION: json.dumps(tool_des, ensure_ascii=False), - TOOL_PARAMETERS: json.dumps(node_execution.inputs or {}, ensure_ascii=False), - INPUT_VALUE: json.dumps(node_execution.inputs or {}, ensure_ascii=False), - OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False), + TOOL_DESCRIPTION: serialize_json_data(tool_des), + TOOL_PARAMETERS: inputs_json, }, - status=self.get_workflow_node_status(node_execution), + status=get_workflow_node_status(node_execution), + links=trace_metadata.links, ) def build_workflow_retrieval_span( - self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + self, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution, trace_metadata: TraceMetadata ) -> SpanData: - input_value = "" - if node_execution.inputs: - input_value = str(node_execution.inputs.get("query", "")) - output_value = "" - if node_execution.outputs: - output_value = 
json.dumps(node_execution.outputs.get("result", []), ensure_ascii=False) + input_value = str(node_execution.inputs.get("query", "")) if node_execution.inputs else "" + output_value = serialize_json_data(node_execution.outputs.get("result", [])) if node_execution.outputs else "" + return SpanData( - trace_id=trace_id, - parent_span_id=workflow_span_id, + trace_id=trace_metadata.trace_id, + parent_span_id=trace_metadata.workflow_span_id, span_id=convert_to_span_id(node_execution.id, "node"), name=node_execution.title, start_time=convert_datetime_to_nanoseconds(node_execution.created_at), end_time=convert_datetime_to_nanoseconds(node_execution.finished_at), attributes={ - GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value, - GEN_AI_FRAMEWORK: "dify", + **create_common_span_attributes( + session_id=trace_metadata.session_id, + user_id=trace_metadata.user_id, + span_kind=GenAISpanKind.RETRIEVER, + inputs=input_value, + outputs=output_value, + ), RETRIEVAL_QUERY: input_value, RETRIEVAL_DOCUMENT: output_value, - INPUT_VALUE: input_value, - OUTPUT_VALUE: output_value, }, - status=self.get_workflow_node_status(node_execution), + status=get_workflow_node_status(node_execution), + links=trace_metadata.links, ) def build_workflow_llm_span( - self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + self, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution, trace_metadata: TraceMetadata ) -> SpanData: process_data = node_execution.process_data or {} outputs = node_execution.outputs or {} usage_data = process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {}) + + prompts_json = serialize_json_data(process_data.get("prompts", [])) + text_output = str(outputs.get("text", "")) + return SpanData( - trace_id=trace_id, - parent_span_id=workflow_span_id, + trace_id=trace_metadata.trace_id, + parent_span_id=trace_metadata.workflow_span_id, span_id=convert_to_span_id(node_execution.id, "node"), name=node_execution.title, start_time=convert_datetime_to_nanoseconds(node_execution.created_at), end_time=convert_datetime_to_nanoseconds(node_execution.finished_at), attributes={ - GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id") or "", - GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value, - GEN_AI_FRAMEWORK: "dify", + **create_common_span_attributes( + session_id=trace_metadata.session_id, + user_id=trace_metadata.user_id, + span_kind=GenAISpanKind.LLM, + inputs=prompts_json, + outputs=text_output, + ), GEN_AI_MODEL_NAME: process_data.get("model_name") or "", GEN_AI_SYSTEM: process_data.get("model_provider") or "", GEN_AI_USAGE_INPUT_TOKENS: str(usage_data.get("prompt_tokens", 0)), GEN_AI_USAGE_OUTPUT_TOKENS: str(usage_data.get("completion_tokens", 0)), GEN_AI_USAGE_TOTAL_TOKENS: str(usage_data.get("total_tokens", 0)), - GEN_AI_PROMPT: json.dumps(process_data.get("prompts", []), ensure_ascii=False), - GEN_AI_COMPLETION: str(outputs.get("text", "")), + GEN_AI_PROMPT: prompts_json, + GEN_AI_COMPLETION: text_output, GEN_AI_RESPONSE_FINISH_REASON: outputs.get("finish_reason") or "", - INPUT_VALUE: json.dumps(process_data.get("prompts", []), ensure_ascii=False), - OUTPUT_VALUE: str(outputs.get("text", "")), }, - status=self.get_workflow_node_status(node_execution), + status=get_workflow_node_status(node_execution), + links=trace_metadata.links, ) - def add_workflow_span( - self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, links: Sequence[Link] - ): + def add_workflow_span(self, trace_info: 
WorkflowTraceInfo, trace_metadata: TraceMetadata): message_span_id = None if trace_info.message_id: message_span_id = convert_to_span_id(trace_info.message_id, "message") - user_id = trace_info.metadata.get("user_id") - status: Status = Status(StatusCode.OK) - if trace_info.error: - status = Status(StatusCode.ERROR, trace_info.error) - if message_span_id: # chatflow + status = create_status_from_error(trace_info.error) + + inputs_json = serialize_json_data(trace_info.workflow_run_inputs) + outputs_json = serialize_json_data(trace_info.workflow_run_outputs) + + if message_span_id: message_span = SpanData( - trace_id=trace_id, + trace_id=trace_metadata.trace_id, parent_span_id=None, span_id=message_span_id, name="message", start_time=convert_datetime_to_nanoseconds(trace_info.start_time), end_time=convert_datetime_to_nanoseconds(trace_info.end_time), - attributes={ - GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id") or "", - GEN_AI_USER_ID: str(user_id), - GEN_AI_SPAN_KIND: GenAISpanKind.CHAIN.value, - GEN_AI_FRAMEWORK: "dify", - INPUT_VALUE: trace_info.workflow_run_inputs.get("sys.query") or "", - OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False), - }, + attributes=create_common_span_attributes( + session_id=trace_metadata.session_id, + user_id=trace_metadata.user_id, + span_kind=GenAISpanKind.CHAIN, + inputs=trace_info.workflow_run_inputs.get("sys.query") or "", + outputs=outputs_json, + ), status=status, - links=links, + links=trace_metadata.links, ) self.trace_client.add_span(message_span) workflow_span = SpanData( - trace_id=trace_id, + trace_id=trace_metadata.trace_id, parent_span_id=message_span_id, - span_id=workflow_span_id, + span_id=trace_metadata.workflow_span_id, name="workflow", start_time=convert_datetime_to_nanoseconds(trace_info.start_time), end_time=convert_datetime_to_nanoseconds(trace_info.end_time), - attributes={ - GEN_AI_USER_ID: str(user_id), - GEN_AI_SPAN_KIND: GenAISpanKind.CHAIN.value, - GEN_AI_FRAMEWORK: "dify", - INPUT_VALUE: json.dumps(trace_info.workflow_run_inputs, ensure_ascii=False), - OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False), - }, + attributes=create_common_span_attributes( + session_id=trace_metadata.session_id, + user_id=trace_metadata.user_id, + span_kind=GenAISpanKind.CHAIN, + inputs=inputs_json, + outputs=outputs_json, + ), status=status, - links=links, + links=trace_metadata.links, ) self.trace_client.add_span(workflow_span) def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo): message_id = trace_info.message_id - status: Status = Status(StatusCode.OK) - if trace_info.error: - status = Status(StatusCode.ERROR, trace_info.error) + status = create_status_from_error(trace_info.error) - trace_id = convert_to_trace_id(message_id) - links = [] - if trace_info.trace_id: - links.append(create_link(trace_id_str=trace_info.trace_id)) + trace_metadata = TraceMetadata( + trace_id=convert_to_trace_id(message_id), + workflow_span_id=0, + session_id=trace_info.metadata.get("conversation_id") or "", + user_id=str(trace_info.metadata.get("user_id") or ""), + links=create_links_from_trace_id(trace_info.trace_id), + ) + + inputs_json = serialize_json_data(trace_info.inputs) + suggested_question_json = serialize_json_data(trace_info.suggested_question) suggested_question_span = SpanData( - trace_id=trace_id, + trace_id=trace_metadata.trace_id, parent_span_id=convert_to_span_id(message_id, "message"), span_id=convert_to_span_id(message_id, "suggested_question"), 
name="suggested_question", start_time=convert_datetime_to_nanoseconds(trace_info.start_time), end_time=convert_datetime_to_nanoseconds(trace_info.end_time), attributes={ - GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value, - GEN_AI_FRAMEWORK: "dify", + **create_common_span_attributes( + session_id=trace_metadata.session_id, + user_id=trace_metadata.user_id, + span_kind=GenAISpanKind.LLM, + inputs=inputs_json, + outputs=suggested_question_json, + ), GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name") or "", GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider") or "", - GEN_AI_PROMPT: json.dumps(trace_info.inputs, ensure_ascii=False), - GEN_AI_COMPLETION: json.dumps(trace_info.suggested_question, ensure_ascii=False), - INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False), - OUTPUT_VALUE: json.dumps(trace_info.suggested_question, ensure_ascii=False), + GEN_AI_PROMPT: inputs_json, + GEN_AI_COMPLETION: suggested_question_json, }, status=status, - links=links, + links=trace_metadata.links, ) self.trace_client.add_span(suggested_question_span) - - -def extract_retrieval_documents(documents: list[Document]): - documents_data = [] - for document in documents: - document_data = { - "content": document.page_content, - "metadata": { - "dataset_id": document.metadata.get("dataset_id"), - "doc_id": document.metadata.get("doc_id"), - "document_id": document.metadata.get("document_id"), - }, - "score": document.metadata.get("score"), - } - documents_data.append(document_data) - return documents_data diff --git a/api/core/ops/aliyun_trace/data_exporter/traceclient.py b/api/core/ops/aliyun_trace/data_exporter/traceclient.py index baaf9fd9f6..f54405b5de 100644 --- a/api/core/ops/aliyun_trace/data_exporter/traceclient.py +++ b/api/core/ops/aliyun_trace/data_exporter/traceclient.py @@ -7,6 +7,8 @@ import uuid from collections import deque from collections.abc import Sequence from datetime import datetime +from typing import Final +from urllib.parse import urljoin import httpx from opentelemetry import trace as trace_api @@ -20,8 +22,12 @@ from opentelemetry.trace import Link, SpanContext, TraceFlags from configs import dify_config from core.ops.aliyun_trace.entities.aliyun_trace_entity import SpanData -INVALID_SPAN_ID = 0x0000000000000000 -INVALID_TRACE_ID = 0x00000000000000000000000000000000 +INVALID_SPAN_ID: Final[int] = 0x0000000000000000 +INVALID_TRACE_ID: Final[int] = 0x00000000000000000000000000000000 +DEFAULT_TIMEOUT: Final[int] = 5 +DEFAULT_MAX_QUEUE_SIZE: Final[int] = 1000 +DEFAULT_SCHEDULE_DELAY_SEC: Final[int] = 5 +DEFAULT_MAX_EXPORT_BATCH_SIZE: Final[int] = 50 logger = logging.getLogger(__name__) @@ -31,9 +37,9 @@ class TraceClient: self, service_name: str, endpoint: str, - max_queue_size: int = 1000, - schedule_delay_sec: int = 5, - max_export_batch_size: int = 50, + max_queue_size: int = DEFAULT_MAX_QUEUE_SIZE, + schedule_delay_sec: int = DEFAULT_SCHEDULE_DELAY_SEC, + max_export_batch_size: int = DEFAULT_MAX_EXPORT_BATCH_SIZE, ): self.endpoint = endpoint self.resource = Resource( @@ -63,9 +69,9 @@ class TraceClient: def export(self, spans: Sequence[ReadableSpan]): self.exporter.export(spans) - def api_check(self): + def api_check(self) -> bool: try: - response = httpx.head(self.endpoint, timeout=5) + response = httpx.head(self.endpoint, timeout=DEFAULT_TIMEOUT) if response.status_code == 405: return True else: @@ -75,12 +81,13 @@ class TraceClient: logger.debug("AliyunTrace API check failed: %s", str(e)) raise ValueError(f"AliyunTrace API check failed: {str(e)}") - def 
get_project_url(self): + def get_project_url(self) -> str: return "https://arms.console.aliyun.com/#/llm" - def add_span(self, span_data: SpanData): + def add_span(self, span_data: SpanData | None) -> None: if span_data is None: return + span: ReadableSpan = self.span_builder.build_span(span_data) with self.condition: if len(self.queue) == self.max_queue_size: @@ -92,14 +99,14 @@ class TraceClient: if len(self.queue) >= self.max_export_batch_size: self.condition.notify() - def _worker(self): + def _worker(self) -> None: while not self.done: with self.condition: if len(self.queue) < self.max_export_batch_size and not self.done: self.condition.wait(timeout=self.schedule_delay_sec) self._export_batch() - def _export_batch(self): + def _export_batch(self) -> None: spans_to_export: list[ReadableSpan] = [] with self.condition: while len(spans_to_export) < self.max_export_batch_size and self.queue: @@ -111,7 +118,7 @@ class TraceClient: except Exception as e: logger.debug("Error exporting spans: %s", e) - def shutdown(self): + def shutdown(self) -> None: with self.condition: self.done = True self.condition.notify_all() @@ -121,7 +128,7 @@ class TraceClient: class SpanBuilder: - def __init__(self, resource): + def __init__(self, resource: Resource) -> None: self.resource = resource self.instrumentation_scope = InstrumentationScope( __name__, @@ -167,8 +174,12 @@ class SpanBuilder: def create_link(trace_id_str: str) -> Link: - placeholder_span_id = 0x0000000000000000 - trace_id = int(trace_id_str, 16) + placeholder_span_id = INVALID_SPAN_ID + try: + trace_id = int(trace_id_str, 16) + except ValueError as e: + raise ValueError(f"Invalid trace ID format: {trace_id_str}") from e + span_context = SpanContext( trace_id=trace_id, span_id=placeholder_span_id, is_remote=False, trace_flags=TraceFlags(TraceFlags.SAMPLED) ) @@ -184,26 +195,29 @@ def generate_span_id() -> int: def convert_to_trace_id(uuid_v4: str | None) -> int: + if uuid_v4 is None: + raise ValueError("UUID cannot be None") try: uuid_obj = uuid.UUID(uuid_v4) return uuid_obj.int - except Exception as e: - raise ValueError(f"Invalid UUID input: {e}") + except ValueError as e: + raise ValueError(f"Invalid UUID input: {uuid_v4}") from e def convert_string_to_id(string: str | None) -> int: if not string: return generate_span_id() hash_bytes = hashlib.sha256(string.encode("utf-8")).digest() - id = int.from_bytes(hash_bytes[:8], byteorder="big", signed=False) - return id + return int.from_bytes(hash_bytes[:8], byteorder="big", signed=False) def convert_to_span_id(uuid_v4: str | None, span_type: str) -> int: + if uuid_v4 is None: + raise ValueError("UUID cannot be None") try: uuid_obj = uuid.UUID(uuid_v4) - except Exception as e: - raise ValueError(f"Invalid UUID input: {e}") + except ValueError as e: + raise ValueError(f"Invalid UUID input: {uuid_v4}") from e combined_key = f"{uuid_obj.hex}-{span_type}" return convert_string_to_id(combined_key) @@ -212,5 +226,11 @@ def convert_datetime_to_nanoseconds(start_time_a: datetime | None) -> int | None if start_time_a is None: return None timestamp_in_seconds = start_time_a.timestamp() - timestamp_in_nanoseconds = int(timestamp_in_seconds * 1e9) - return timestamp_in_nanoseconds + return int(timestamp_in_seconds * 1e9) + + +def build_endpoint(base_url: str, license_key: str) -> str: + if "log.aliyuncs.com" in base_url: # cms2.0 endpoint + return urljoin(base_url, f"adapt_{license_key}/api/v1/traces") + else: # xtrace endpoint + return urljoin(base_url, f"adapt_{license_key}/api/otlp/traces") diff --git 
a/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py b/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py index f3dcbc5b8f..0ee71fc23f 100644 --- a/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py +++ b/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py @@ -1,18 +1,33 @@ from collections.abc import Sequence +from dataclasses import dataclass +from typing import Any from opentelemetry import trace as trace_api from opentelemetry.sdk.trace import Event, Status, StatusCode from pydantic import BaseModel, Field +@dataclass +class TraceMetadata: + """Metadata for trace operations, containing common attributes for all spans in a trace.""" + + trace_id: int + workflow_span_id: int + session_id: str + user_id: str + links: list[trace_api.Link] + + class SpanData(BaseModel): + """Data model for span information in Aliyun trace system.""" + model_config = {"arbitrary_types_allowed": True} trace_id: int = Field(..., description="The unique identifier for the trace.") parent_span_id: int | None = Field(None, description="The ID of the parent span, if any.") span_id: int = Field(..., description="The unique identifier for this span.") name: str = Field(..., description="The name of the span.") - attributes: dict[str, str] = Field(default_factory=dict, description="Attributes associated with the span.") + attributes: dict[str, Any] = Field(default_factory=dict, description="Attributes associated with the span.") events: Sequence[Event] = Field(default_factory=list, description="Events recorded in the span.") links: Sequence[trace_api.Link] = Field(default_factory=list, description="Links to other spans.") status: Status = Field(default=Status(StatusCode.UNSET), description="The status of the span.") diff --git a/api/core/ops/aliyun_trace/entities/semconv.py b/api/core/ops/aliyun_trace/entities/semconv.py index c9427c776a..7a22db21e2 100644 --- a/api/core/ops/aliyun_trace/entities/semconv.py +++ b/api/core/ops/aliyun_trace/entities/semconv.py @@ -1,56 +1,37 @@ from enum import StrEnum +from typing import Final -# public -GEN_AI_SESSION_ID = "gen_ai.session.id" +# Public attributes +GEN_AI_SESSION_ID: Final[str] = "gen_ai.session.id" +GEN_AI_USER_ID: Final[str] = "gen_ai.user.id" +GEN_AI_USER_NAME: Final[str] = "gen_ai.user.name" +GEN_AI_SPAN_KIND: Final[str] = "gen_ai.span.kind" +GEN_AI_FRAMEWORK: Final[str] = "gen_ai.framework" -GEN_AI_USER_ID = "gen_ai.user.id" +# Chain attributes +INPUT_VALUE: Final[str] = "input.value" +OUTPUT_VALUE: Final[str] = "output.value" -GEN_AI_USER_NAME = "gen_ai.user.name" +# Retriever attributes +RETRIEVAL_QUERY: Final[str] = "retrieval.query" +RETRIEVAL_DOCUMENT: Final[str] = "retrieval.document" -GEN_AI_SPAN_KIND = "gen_ai.span.kind" +# LLM attributes +GEN_AI_MODEL_NAME: Final[str] = "gen_ai.model_name" +GEN_AI_SYSTEM: Final[str] = "gen_ai.system" +GEN_AI_USAGE_INPUT_TOKENS: Final[str] = "gen_ai.usage.input_tokens" +GEN_AI_USAGE_OUTPUT_TOKENS: Final[str] = "gen_ai.usage.output_tokens" +GEN_AI_USAGE_TOTAL_TOKENS: Final[str] = "gen_ai.usage.total_tokens" +GEN_AI_PROMPT_TEMPLATE_TEMPLATE: Final[str] = "gen_ai.prompt_template.template" +GEN_AI_PROMPT_TEMPLATE_VARIABLE: Final[str] = "gen_ai.prompt_template.variable" +GEN_AI_PROMPT: Final[str] = "gen_ai.prompt" +GEN_AI_COMPLETION: Final[str] = "gen_ai.completion" +GEN_AI_RESPONSE_FINISH_REASON: Final[str] = "gen_ai.response.finish_reason" -GEN_AI_FRAMEWORK = "gen_ai.framework" - - -# Chain -INPUT_VALUE = "input.value" - -OUTPUT_VALUE = "output.value" - - -# Retriever -RETRIEVAL_QUERY = 
"retrieval.query" - -RETRIEVAL_DOCUMENT = "retrieval.document" - - -# LLM -GEN_AI_MODEL_NAME = "gen_ai.model_name" - -GEN_AI_SYSTEM = "gen_ai.system" - -GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens" - -GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens" - -GEN_AI_USAGE_TOTAL_TOKENS = "gen_ai.usage.total_tokens" - -GEN_AI_PROMPT_TEMPLATE_TEMPLATE = "gen_ai.prompt_template.template" - -GEN_AI_PROMPT_TEMPLATE_VARIABLE = "gen_ai.prompt_template.variable" - -GEN_AI_PROMPT = "gen_ai.prompt" - -GEN_AI_COMPLETION = "gen_ai.completion" - -GEN_AI_RESPONSE_FINISH_REASON = "gen_ai.response.finish_reason" - -# Tool -TOOL_NAME = "tool.name" - -TOOL_DESCRIPTION = "tool.description" - -TOOL_PARAMETERS = "tool.parameters" +# Tool attributes +TOOL_NAME: Final[str] = "tool.name" +TOOL_DESCRIPTION: Final[str] = "tool.description" +TOOL_PARAMETERS: Final[str] = "tool.parameters" class GenAISpanKind(StrEnum): diff --git a/api/core/ops/aliyun_trace/utils.py b/api/core/ops/aliyun_trace/utils.py new file mode 100644 index 0000000000..2ec9e75dcd --- /dev/null +++ b/api/core/ops/aliyun_trace/utils.py @@ -0,0 +1,95 @@ +import json +from typing import Any + +from opentelemetry.trace import Link, Status, StatusCode + +from core.ops.aliyun_trace.entities.semconv import ( + GEN_AI_FRAMEWORK, + GEN_AI_SESSION_ID, + GEN_AI_SPAN_KIND, + GEN_AI_USER_ID, + INPUT_VALUE, + OUTPUT_VALUE, + GenAISpanKind, +) +from core.rag.models.document import Document +from core.workflow.entities import WorkflowNodeExecution +from core.workflow.enums import WorkflowNodeExecutionStatus +from extensions.ext_database import db +from models import EndUser + +# Constants +DEFAULT_JSON_ENSURE_ASCII = False +DEFAULT_FRAMEWORK_NAME = "dify" + + +def get_user_id_from_message_data(message_data) -> str: + user_id = message_data.from_account_id + if message_data.from_end_user_id: + end_user_data: EndUser | None = ( + db.session.query(EndUser).where(EndUser.id == message_data.from_end_user_id).first() + ) + if end_user_data is not None: + user_id = end_user_data.session_id + return user_id + + +def create_status_from_error(error: str | None) -> Status: + if error: + return Status(StatusCode.ERROR, error) + return Status(StatusCode.OK) + + +def get_workflow_node_status(node_execution: WorkflowNodeExecution) -> Status: + if node_execution.status == WorkflowNodeExecutionStatus.SUCCEEDED: + return Status(StatusCode.OK) + if node_execution.status in [WorkflowNodeExecutionStatus.FAILED, WorkflowNodeExecutionStatus.EXCEPTION]: + return Status(StatusCode.ERROR, str(node_execution.error)) + return Status(StatusCode.UNSET) + + +def create_links_from_trace_id(trace_id: str | None) -> list[Link]: + from core.ops.aliyun_trace.data_exporter.traceclient import create_link + + links = [] + if trace_id: + links.append(create_link(trace_id_str=trace_id)) + return links + + +def extract_retrieval_documents(documents: list[Document]) -> list[dict[str, Any]]: + documents_data = [] + for document in documents: + document_data = { + "content": document.page_content, + "metadata": { + "dataset_id": document.metadata.get("dataset_id"), + "doc_id": document.metadata.get("doc_id"), + "document_id": document.metadata.get("document_id"), + }, + "score": document.metadata.get("score"), + } + documents_data.append(document_data) + return documents_data + + +def serialize_json_data(data: Any, ensure_ascii: bool = DEFAULT_JSON_ENSURE_ASCII) -> str: + return json.dumps(data, ensure_ascii=ensure_ascii) + + +def create_common_span_attributes( + session_id: str = "", + 
user_id: str = "", + span_kind: str = GenAISpanKind.CHAIN, + framework: str = DEFAULT_FRAMEWORK_NAME, + inputs: str = "", + outputs: str = "", +) -> dict[str, Any]: + return { + GEN_AI_SESSION_ID: session_id, + GEN_AI_USER_ID: user_id, + GEN_AI_SPAN_KIND: span_kind, + GEN_AI_FRAMEWORK: framework, + INPUT_VALUE: inputs, + OUTPUT_VALUE: outputs, + } diff --git a/api/core/ops/entities/config_entity.py b/api/core/ops/entities/config_entity.py index 851a77fbc1..4ba6eb0780 100644 --- a/api/core/ops/entities/config_entity.py +++ b/api/core/ops/entities/config_entity.py @@ -191,7 +191,8 @@ class AliyunConfig(BaseTracingConfig): @field_validator("endpoint") @classmethod def endpoint_validator(cls, v, info: ValidationInfo): - return cls.validate_endpoint_url(v, "https://tracing-analysis-dc-hz.aliyuncs.com") + # aliyun uses two URL formats, which may include a URL path + return validate_url_with_path(v, "https://tracing-analysis-dc-hz.aliyuncs.com") OPS_FILE_PATH = "ops_trace/" diff --git a/api/tests/unit_tests/core/ops/test_config_entity.py b/api/tests/unit_tests/core/ops/test_config_entity.py index 1dc380ad0b..2cbff54c42 100644 --- a/api/tests/unit_tests/core/ops/test_config_entity.py +++ b/api/tests/unit_tests/core/ops/test_config_entity.py @@ -329,20 +329,20 @@ class TestAliyunConfig: assert config.endpoint == "https://tracing-analysis-dc-hz.aliyuncs.com" def test_endpoint_validation_with_path(self): - """Test endpoint validation normalizes URL by removing path""" + """Test endpoint validation preserves path for Aliyun endpoints""" config = AliyunConfig( license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com/api/v1/traces" ) - assert config.endpoint == "https://tracing-analysis-dc-hz.aliyuncs.com" + assert config.endpoint == "https://tracing-analysis-dc-hz.aliyuncs.com/api/v1/traces" def test_endpoint_validation_invalid_scheme(self): """Test endpoint validation rejects invalid schemes""" - with pytest.raises(ValidationError, match="URL scheme must be one of"): + with pytest.raises(ValidationError, match="URL must start with https:// or http://"): AliyunConfig(license_key="test_license", endpoint="ftp://invalid.tracing-analysis-dc-hz.aliyuncs.com") def test_endpoint_validation_no_scheme(self): """Test endpoint validation rejects URLs without scheme""" - with pytest.raises(ValidationError, match="URL scheme must be one of"): + with pytest.raises(ValidationError, match="URL must start with https:// or http://"): AliyunConfig(license_key="test_license", endpoint="invalid.tracing-analysis-dc-hz.aliyuncs.com") def test_license_key_required(self): @@ -350,6 +350,23 @@ class TestAliyunConfig: with pytest.raises(ValidationError): AliyunConfig(license_key="", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com") + def test_valid_endpoint_format_examples(self): + """Test valid endpoint format examples from comments""" + valid_endpoints = [ + # cms2.0 public endpoint + "https://proj-xtrace-123456-cn-heyuan.cn-heyuan.log.aliyuncs.com/apm/trace/opentelemetry", + # cms2.0 intranet endpoint + "https://proj-xtrace-123456-cn-heyuan.cn-heyuan-intranet.log.aliyuncs.com/apm/trace/opentelemetry", + # xtrace public endpoint + "http://tracing-cn-heyuan.arms.aliyuncs.com", + # xtrace intranet endpoint + "http://tracing-cn-heyuan-internal.arms.aliyuncs.com", + ] + + for endpoint in valid_endpoints: + config = AliyunConfig(license_key="test_license", endpoint=endpoint) + assert config.endpoint == endpoint + class TestConfigIntegration: """Integration tests for configuration classes""" @@ -382,7 
+399,7 @@ class TestConfigIntegration:
         assert arize_config.endpoint == "https://arize.com"
         assert phoenix_with_path_config.endpoint == "https://app.phoenix.arize.com/s/dify-integration"
         assert phoenix_without_path_config.endpoint == "https://app.phoenix.arize.com"
-        assert aliyun_config.endpoint == "https://tracing-analysis-dc-hz.aliyuncs.com"
+        assert aliyun_config.endpoint == "https://tracing-analysis-dc-hz.aliyuncs.com/api/v1/traces"

     def test_project_default_values(self):
         """Test that project default values are set correctly"""

From 89affe3139c58f7beb2330bbcd9354da809bd081 Mon Sep 17 00:00:00 2001
From: hjlarry <hjlarry@163.com>
Date: Fri, 26 Sep 2025 09:20:33 +0800
Subject: [PATCH 037/126] fix: keep the opened node panel from being affected
 by remote collaborative updates

---
 .../core/collaboration-manager.ts             | 26 +++++++++++++++++--
 1 file changed, 24 insertions(+), 2 deletions(-)

diff --git a/web/app/components/workflow/collaboration/core/collaboration-manager.ts b/web/app/components/workflow/collaboration/core/collaboration-manager.ts
index dcffab9043..22c99ab208 100644
--- a/web/app/components/workflow/collaboration/core/collaboration-manager.ts
+++ b/web/app/components/workflow/collaboration/core/collaboration-manager.ts
@@ -180,7 +180,7 @@ export class CollaborationManager {
       onPush: (isUndo, range, event) => {
         console.log('UndoManager onPush:', { isUndo, range, event })
         // Store current selection state when an operation is pushed
-        const selectedNode = this.reactFlowStore?.getState().getNodes().find((n: Node) => n.data.selected)
+        const selectedNode = this.reactFlowStore?.getState().getNodes().find((n: Node) => n.data?.selected)

         // Emit event to update UI button states when new operation is pushed
         setTimeout(() => {
@@ -680,7 +680,29 @@ export class CollaborationManager {
       requestAnimationFrame(() => {
         // Get ReactFlow's native setters, not the collaborative ones
         const state = this.reactFlowStore.getState()
-        const updatedNodes = Array.from(this.nodesMap.values())
+        const previousNodes: Node[] = state.getNodes()
+        const selectedIds = new Set(
+          previousNodes
+            .filter(node => node.data?.selected)
+            .map(node => node.id),
+        )
+
+        const updatedNodes = Array
+          .from(this.nodesMap.values())
+          .map((node: Node) => {
+            const clonedNode: Node = {
+              ...node,
+              data: {
+                ...(node.data || {}),
+              },
+            }
+
+            if (selectedIds.has(clonedNode.id))
+              clonedNode.data.selected = true
+
+            return clonedNode
+          })
+
         console.log('Updating React nodes from subscription')

         // Call ReactFlow's native setter directly to avoid triggering collaboration
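The merge in the hunk above is what keeps a locally opened node panel from closing when remote collaborators replace the node list. A minimal standalone sketch of the same technique, assuming a simplified NodeLike shape in place of ReactFlow's Node type (NodeLike and mergeRemoteNodes are not part of the patch):

type NodeLike = { id: string; data?: { selected?: boolean } }

function mergeRemoteNodes(localNodes: NodeLike[], remoteNodes: NodeLike[]): NodeLike[] {
  // Remember which nodes the local user has selected before the remote
  // snapshot overwrites the list.
  const selectedIds = new Set(
    localNodes.filter(n => n.data?.selected).map(n => n.id),
  )

  return remoteNodes.map((node) => {
    // Clone each node so the shared collaboration state is never mutated in place,
    // then re-apply the local selection flag on top of the remote data.
    const cloned = { ...node, data: { ...(node.data || {}) } }
    if (selectedIds.has(cloned.id))
      cloned.data.selected = true
    return cloned
  })
}

Cloning before re-applying the flag also keeps the local-only selected bit from being written back into the shared node map.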
From cc54363c277b4f39fcc60ca6dea95360931713ce Mon Sep 17 00:00:00 2001
From: hjlarry <hjlarry@163.com>
Date: Fri, 26 Sep 2025 10:48:00 +0800
Subject: [PATCH 038/126] fix: sync the prompt editor with external value
 changes

---
 web/app/components/base/prompt-editor/index.tsx | 25 +++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/web/app/components/base/prompt-editor/index.tsx b/web/app/components/base/prompt-editor/index.tsx
index 50fdc1f920..0b73a7b8c9 100644
--- a/web/app/components/base/prompt-editor/index.tsx
+++ b/web/app/components/base/prompt-editor/index.tsx
@@ -2,6 +2,7 @@ import type { FC } from 'react'
 import React, { useEffect } from 'react'
+import { useLexicalComposerContext } from '@lexical/react/LexicalComposerContext'
 import type {
   EditorState,
 } from 'lexical'
@@ -80,6 +81,29 @@ import {
 import { useEventEmitterContextContext } from '@/context/event-emitter'
 import cn from '@/utils/classnames'

+const ValueSyncPlugin: FC<{ value?: string }> = ({ value }) => {
+  const [editor] = useLexicalComposerContext()
+
+  useEffect(() => {
+    if (value === undefined)
+      return
+
+    const incomingValue = value ?? ''
+    const shouldUpdate = editor.getEditorState().read(() => {
+      const currentText = $getRoot().getChildren().map(node => node.getTextContent()).join('\n')
+      return currentText !== incomingValue
+    })
+
+    if (!shouldUpdate)
+      return
+
+    const editorState = editor.parseEditorState(textToEditorState(incomingValue))
+    editor.setEditorState(editorState)
+  }, [editor, value])
+
+  return null
+}
+
 export type PromptEditorProps = {
   instanceId?: string
   compact?: boolean
@@ -293,6 +317,7 @@ const PromptEditor: FC<PromptEditorProps> = ({
             <VariableValueBlock />
           )
         }
+        <ValueSyncPlugin value={value} />
         <OnChangePlugin onChange={handleEditorChange} />
         <OnBlurBlock onBlur={onBlur} onFocus={onFocus} />
         <UpdateBlock instanceId={instanceId} />
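Both the ValueSyncPlugin above and the key-value hook in the next patch rely on the same guard: serialize the current internal state, compare it with the incoming external value, and only overwrite when they differ, which breaks the value -> onChange -> value feedback loop. A minimal sketch of that guard, with a hypothetical SyncableEditor interface standing in for the Lexical and list-state APIs used in the patches:

interface SyncableEditor {
  getText(): string
  setText(text: string): void
}

function syncExternalValue(editor: SyncableEditor, value?: string) {
  // undefined means the caller is not controlling the value;
  // leave the editor's own state untouched.
  if (value === undefined)
    return

  // Skip the write when the editor already holds the incoming value, so a
  // programmatic sync never clobbers in-flight edits or re-triggers onChange.
  if (editor.getText() === value)
    return

  editor.setText(value)
}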
strToKeyValueList(value) : [] + const currentValue = stringifyList(prev, noFilter) + const targetValue = stringifyList(targetItems, noFilter) + if (currentValue === targetValue) + return prev + return normalizeList(targetItems) + }) + }, [value, noFilter]) const addItem = useCallback(() => { setList([...list, { id: uniqueId(UNIQUE_ID_PREFIX), key: '', value: '', }]) - }, [list]) + }, [list, setList]) const [isKeyValueEdit, { toggle: toggleIsKeyValueEdit, From 7a74b5ee3e015723e6d3e91ffea5ce7c91f0d4cb Mon Sep 17 00:00:00 2001 From: hjlarry <hjlarry@163.com> Date: Fri, 26 Sep 2025 14:04:50 +0800 Subject: [PATCH 040/126] fix add child node resize parent node size --- .../nodes/iteration/use-interactions.ts | 27 +++++++----------- .../workflow/nodes/loop/use-interactions.ts | 28 +++++++------------ 2 files changed, 20 insertions(+), 35 deletions(-) diff --git a/web/app/components/workflow/nodes/iteration/use-interactions.ts b/web/app/components/workflow/nodes/iteration/use-interactions.ts index 35767f2b62..5d61d0267f 100644 --- a/web/app/components/workflow/nodes/iteration/use-interactions.ts +++ b/web/app/components/workflow/nodes/iteration/use-interactions.ts @@ -1,7 +1,6 @@ import { useCallback } from 'react' import produce from 'immer' import { useTranslation } from 'react-i18next' -import { useStoreApi } from 'reactflow' import type { BlockEnum, ChildNodeTypeCount, @@ -16,19 +15,16 @@ import { } from '../../constants' import { CUSTOM_ITERATION_START_NODE } from '../iteration-start/constants' import { useNodesMetaData } from '@/app/components/workflow/hooks' +import { useCollaborativeWorkflow } from '@/app/components/workflow/hooks/use-collaborative-workflow' export const useNodeIterationInteractions = () => { const { t } = useTranslation() - const store = useStoreApi() const { nodesMap: nodesMetaDataMap } = useNodesMetaData() + const collaborativeWorkflow = useCollaborativeWorkflow() const handleNodeIterationRerender = useCallback((nodeId: string) => { - const { - getNodes, - setNodes, - } = store.getState() + const { nodes, setNodes } = collaborativeWorkflow.getState() - const nodes = getNodes() const currentNode = nodes.find(n => n.id === nodeId)! const childrenNodes = nodes.filter(n => n.parentId === nodeId) let rightNode: Node @@ -72,11 +68,10 @@ export const useNodeIterationInteractions = () => { setNodes(newNodes) } - }, [store]) + }, [collaborativeWorkflow]) const handleNodeIterationChildDrag = useCallback((node: Node) => { - const { getNodes } = store.getState() - const nodes = getNodes() + const { nodes } = collaborativeWorkflow.getState() const restrictPosition: { x?: number; y?: number } = { x: undefined, y: undefined } @@ -98,21 +93,19 @@ export const useNodeIterationInteractions = () => { return { restrictPosition, } - }, [store]) + }, [collaborativeWorkflow]) const handleNodeIterationChildSizeChange = useCallback((nodeId: string) => { - const { getNodes } = store.getState() - const nodes = getNodes() + const { nodes } = collaborativeWorkflow.getState() const currentNode = nodes.find(n => n.id === nodeId)! 
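    // NOTE (hedged, inferred from call sites rather than a confirmed API):
    // `useCollaborativeWorkflow()` appears to return a Zustand-style store
    // handle whose state exposes at least `nodes: Node[]` and
    // `setNodes: (nodes: Node[]) => void`. Reading `nodes` through `getState()`
    // at call time measures child geometry against the collaboration-synced
    // node list instead of a ReactFlow local store snapshot.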
const parentId = currentNode.parentId if (parentId) handleNodeIterationRerender(parentId) - }, [store, handleNodeIterationRerender]) + }, [collaborativeWorkflow, handleNodeIterationRerender]) const handleNodeIterationChildrenCopy = useCallback((nodeId: string, newNodeId: string, idMapping: Record<string, string>) => { - const { getNodes } = store.getState() - const nodes = getNodes() + const { nodes } = collaborativeWorkflow.getState() const childrenNodes = nodes.filter(n => n.parentId === nodeId && n.type !== CUSTOM_ITERATION_START_NODE) const newIdMapping = { ...idMapping } const childNodeTypeCount: ChildNodeTypeCount = {} @@ -154,7 +147,7 @@ export const useNodeIterationInteractions = () => { copyChildren, newIdMapping, } - }, [store, t]) + }, [collaborativeWorkflow, t]) return { handleNodeIterationRerender, diff --git a/web/app/components/workflow/nodes/loop/use-interactions.ts b/web/app/components/workflow/nodes/loop/use-interactions.ts index 532de56e54..8e8622a554 100644 --- a/web/app/components/workflow/nodes/loop/use-interactions.ts +++ b/web/app/components/workflow/nodes/loop/use-interactions.ts @@ -1,6 +1,5 @@ import { useCallback } from 'react' import produce from 'immer' -import { useStoreApi } from 'reactflow' import type { BlockEnum, Node, @@ -14,18 +13,14 @@ import { } from '../../constants' import { CUSTOM_LOOP_START_NODE } from '../loop-start/constants' import { useNodesMetaData } from '@/app/components/workflow/hooks' +import { useCollaborativeWorkflow } from '@/app/components/workflow/hooks/use-collaborative-workflow' export const useNodeLoopInteractions = () => { - const store = useStoreApi() + const collaborativeWorkflow = useCollaborativeWorkflow() const { nodesMap: nodesMetaDataMap } = useNodesMetaData() const handleNodeLoopRerender = useCallback((nodeId: string) => { - const { - getNodes, - setNodes, - } = store.getState() - - const nodes = getNodes() + const { nodes, setNodes } = collaborativeWorkflow.getState() const currentNode = nodes.find(n => n.id === nodeId)! const childrenNodes = nodes.filter(n => n.parentId === nodeId) let rightNode: Node @@ -69,11 +64,10 @@ export const useNodeLoopInteractions = () => { setNodes(newNodes) } - }, [store]) + }, [collaborativeWorkflow]) const handleNodeLoopChildDrag = useCallback((node: Node) => { - const { getNodes } = store.getState() - const nodes = getNodes() + const { nodes } = collaborativeWorkflow.getState() const restrictPosition: { x?: number; y?: number } = { x: undefined, y: undefined } @@ -95,21 +89,19 @@ export const useNodeLoopInteractions = () => { return { restrictPosition, } - }, [store]) + }, [collaborativeWorkflow]) const handleNodeLoopChildSizeChange = useCallback((nodeId: string) => { - const { getNodes } = store.getState() - const nodes = getNodes() + const { nodes } = collaborativeWorkflow.getState() const currentNode = nodes.find(n => n.id === nodeId)! 
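    // Hedged usage sketch (caller names are illustrative, not from the codebase):
    //   const { handleNodeLoopChildSizeChange } = useNodeLoopInteractions()
    //   const onChildResized = (nodeId: string) => handleNodeLoopChildSizeChange(nodeId)
    // i.e. a resize callback passes the child id so the parent loop node can
    // re-measure itself from the shared node list.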
const parentId = currentNode.parentId if (parentId) handleNodeLoopRerender(parentId) - }, [store, handleNodeLoopRerender]) + }, [collaborativeWorkflow, handleNodeLoopRerender]) const handleNodeLoopChildrenCopy = useCallback((nodeId: string, newNodeId: string) => { - const { getNodes } = store.getState() - const nodes = getNodes() + const { nodes } = collaborativeWorkflow.getState() const childrenNodes = nodes.filter(n => n.parentId === nodeId && n.type !== CUSTOM_LOOP_START_NODE) return childrenNodes.map((child, index) => { @@ -140,7 +132,7 @@ export const useNodeLoopInteractions = () => { newNode.id = `${newNodeId}${newNode.id + index}` return newNode }) - }, [store, nodesMetaDataMap]) + }, [collaborativeWorkflow, nodesMetaDataMap]) return { handleNodeLoopRerender, From 54ae43ef470db99e257284d63b3c1c5172d09f66 Mon Sep 17 00:00:00 2001 From: hjlarry <hjlarry@163.com> Date: Fri, 26 Sep 2025 14:07:34 +0800 Subject: [PATCH 041/126] sync children node data --- .../core/collaboration-manager.ts | 48 ++++++++++++++++++- 1 file changed, 47 insertions(+), 1 deletion(-) diff --git a/web/app/components/workflow/collaboration/core/collaboration-manager.ts b/web/app/components/workflow/collaboration/core/collaboration-manager.ts index 22c99ab208..41c1fdd811 100644 --- a/web/app/components/workflow/collaboration/core/collaboration-manager.ts +++ b/web/app/components/workflow/collaboration/core/collaboration-manager.ts @@ -540,7 +540,8 @@ export class CollaborationManager { const oldNodesMap = new Map(oldNodes.map(node => [node.id, node])) const newNodesMap = new Map(newNodes.map(node => [node.id, node])) - const shouldSyncDataKey = (key: string) => !key.startsWith('_') && key !== 'selected' + const syncDataAllowList = new Set(['_children']) + const shouldSyncDataKey = (key: string) => (syncDataAllowList.has(key) || !key.startsWith('_')) && key !== 'selected' // Delete removed nodes oldNodes.forEach((oldNode) => { @@ -549,6 +550,44 @@ export class CollaborationManager { }) // Add or update nodes with fine-grained sync for data properties + const copyOptionalNodeProps = (source: Node, target: any) => { + const optionalProps: Array<keyof Node | keyof any> = [ + 'parentId', + 'positionAbsolute', + 'extent', + 'zIndex', + 'draggable', + 'selectable', + 'dragHandle', + 'dragging', + 'connectable', + 'expandParent', + 'focusable', + 'hidden', + 'style', + 'className', + 'ariaLabel', + 'markerStart', + 'markerEnd', + 'resizing', + 'deletable', + ] + + optionalProps.forEach((prop) => { + const value = (source as any)[prop] + if (value === undefined) { + if (prop in target) + delete target[prop] + return + } + + if (value !== null && typeof value === 'object') + target[prop as string] = JSON.parse(JSON.stringify(value)) + else + target[prop as string] = value + }) + } + newNodes.forEach((newNode) => { const oldNode = oldNodesMap.get(newNode.id) @@ -565,6 +604,8 @@ export class CollaborationManager { data: {}, } + copyOptionalNodeProps(newNode, nodeData) + // Clone data properties, excluding private ones Object.entries(newNode.data).forEach(([key, value]) => { if (shouldSyncDataKey(key) && value !== undefined) @@ -592,6 +633,9 @@ export class CollaborationManager { if (oldNode.height !== newNode.height) updatedNode.height = newNode.height + // Ensure optional node props stay in sync + copyOptionalNodeProps(newNode, updatedNode) + // Ensure data object exists if (!updatedNode.data) updatedNode.data = {} @@ -632,6 +676,8 @@ export class CollaborationManager { data: {}, } + copyOptionalNodeProps(newNode, nodeData) + 
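        // Hedged note on the clone below: JSON.parse(JSON.stringify(value))
        // deep-copies plain data but silently drops functions and `undefined`
        // fields and turns Dates into ISO strings; this is safe only on the
        // assumption that synced node data is plain, JSON-serializable state.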
Object.entries(newNode.data).forEach(([key, value]) => { if (shouldSyncDataKey(key) && value !== undefined) nodeData.data[key] = JSON.parse(JSON.stringify(value)) From 3bb67885ef91e1995589b43a855820aca4b3c1be Mon Sep 17 00:00:00 2001 From: quicksand <quicksandzn@gmail.com> Date: Fri, 26 Sep 2025 16:16:55 +0800 Subject: [PATCH 042/126] =?UTF-8?q?fix(api):=20resolve=20error=20in=20agen?= =?UTF-8?q?t=E2=80=91strategy=20prompt=20generator=20(#26278)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/core/llm_generator/llm_generator.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index e07d0ec14e..e64ac25ab1 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -28,7 +28,6 @@ from core.ops.ops_trace_manager import TraceQueueManager, TraceTask from core.ops.utils import measure_time from core.prompt.utils.prompt_template_parser import PromptTemplateParser from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey -from core.workflow.node_events import AgentLogEvent from extensions.ext_database import db from extensions.ext_storage import storage from models import App, Message, WorkflowNodeExecutionModel @@ -462,19 +461,18 @@ class LLMGenerator: ) def agent_log_of(node_execution: WorkflowNodeExecutionModel) -> Sequence: - raw_agent_log = node_execution.execution_metadata_dict.get(WorkflowNodeExecutionMetadataKey.AGENT_LOG) + raw_agent_log = node_execution.execution_metadata_dict.get(WorkflowNodeExecutionMetadataKey.AGENT_LOG, []) if not raw_agent_log: return [] - parsed: Sequence[AgentLogEvent] = json.loads(raw_agent_log) - def dict_of_event(event: AgentLogEvent): - return { - "status": event.status, - "error": event.error, - "data": event.data, + return [ + { + "status": event["status"], + "error": event["error"], + "data": event["data"], } - - return [dict_of_event(event) for event in parsed] + for event in raw_agent_log + ] inputs = last_run.load_full_inputs(session, storage) last_run_dict = { From 2a0abc51b1ee4e02db60a8884e52459fb5a553ea Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Fri, 26 Sep 2025 16:18:47 +0800 Subject: [PATCH 043/126] =?UTF-8?q?minor:=20fix=20translation=20with=20the?= =?UTF-8?q?=20key=20value=20uses=20=E3=80=8C=E3=83=8A=E3=83=AC=E3=83=83?= =?UTF-8?q?=E3=82=B8=E3=81=AE=E5=90=8D=E5=89=8D=E3=81=A8=E3=82=A2=E3=82=A4?= =?UTF-8?q?=E3=82=B3=E3=83=B3=E3=80=8D=20while=20the=20rest=20of=20the=20f?= =?UTF-8?q?ile=20uses=20=E3=80=8C=E3=83=8A=E3=83=AC=E3=83=83=E3=82=B8?= =?UTF-8?q?=E3=83=99=E3=83=BC=E3=82=B9=E3=80=8D=20(#26270)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> --- web/i18n/ja-JP/dataset-pipeline.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/i18n/ja-JP/dataset-pipeline.ts b/web/i18n/ja-JP/dataset-pipeline.ts index b261d88ae4..6450131933 100644 --- a/web/i18n/ja-JP/dataset-pipeline.ts +++ b/web/i18n/ja-JP/dataset-pipeline.ts @@ -153,7 +153,7 @@ const translation = { descriptionChunk1: '既存のナレッジベースを文書処理のためにナレッジパイプラインを使用するように変換できます。', descriptionChunk2: '— よりオープンで柔軟なアプローチを採用し、私たちのマーケットプレイスからのプラグインへのアクセスを提供します。これにより、すべての将来のドキュメントに新しい処理方法が適用されることになります。', }, - knowledgeNameAndIcon: 'ナレッジの名前とアイコン', + knowledgeNameAndIcon: 
'ナレッジベースの名前とアイコン', inputField: '入力フィールド', pipelineNameAndIcon: 'パイプライン名とアイコン', knowledgePermissions: '権限', From e6d4331994c1c5e9c9920fd84dcf4f2723138735 Mon Sep 17 00:00:00 2001 From: lyzno1 <92089059+lyzno1@users.noreply.github.com> Date: Fri, 26 Sep 2025 16:20:12 +0800 Subject: [PATCH 044/126] refactor(web): simplify lint scripts, remove duplicates and standardize naming (#26259) --- AGENTS.md | 2 +- web/package.json | 9 +++------ 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 44f7b30360..b214258069 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -38,7 +38,7 @@ uv run --directory api basedpyright # Type checking ```bash cd web pnpm lint # Run ESLint -pnpm eslint-fix # Fix ESLint issues +pnpm lint:fix # Fix ESLint issues pnpm test # Run Jest tests ``` diff --git a/web/package.json b/web/package.json index 78e62c9aa7..cf49429659 100644 --- a/web/package.json +++ b/web/package.json @@ -24,12 +24,9 @@ "build:docker": "next build && node scripts/optimize-standalone.js", "start": "cp -r .next/static .next/standalone/.next/static && cp -r public .next/standalone/public && cross-env PORT=$npm_config_port HOSTNAME=$npm_config_host node .next/standalone/server.js", "lint": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache", - "lint-only-show-error": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache --quiet", - "fix": "eslint --concurrency=auto --fix .", - "eslint": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache", - "eslint-fix": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix", - "eslint-fix-only-show-error": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix --quiet", - "eslint-complexity": "eslint --concurrency=auto --rule 'complexity: [error, {max: 15}]' --quiet", + "lint:fix": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix", + "lint:quiet": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache --quiet", + "lint:complexity": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache --rule 'complexity: [error, {max: 15}]' --quiet", "prepare": "cd ../ && node -e \"if (process.env.NODE_ENV !== 'production'){process.exit(1)} \" || husky ./web/.husky", "gen-icons": "node ./app/components/base/icons/script.mjs", "uglify-embed": "node ./bin/uglify-embed", From 46375aacdb42a53af24e2ac6f2e5f4777c0392d3 Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Fri, 26 Sep 2025 17:21:00 +0900 Subject: [PATCH 045/126] fmt first (#26221) --- .github/workflows/autofix.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/autofix.yml b/.github/workflows/autofix.yml index 068ba686fa..ef69e08da9 100644 --- a/.github/workflows/autofix.yml +++ b/.github/workflows/autofix.yml @@ -15,10 +15,12 @@ jobs: # Use uv to ensure we have the same ruff version in CI and locally. - uses: astral-sh/setup-uv@v6 with: - python-version: "3.12" + python-version: "3.11" - run: | cd api uv sync --dev + # fmt first to avoid line too long + uv run ruff format .. # Fix lint errors uv run ruff check --fix . 
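          # Hedged note: the "# Format code" step below is assumed to re-run
          # `uv run ruff format .`, so any lint autofixes applied by
          # `ruff check --fix` are reformatted before the workflow commits them.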
# Format code From 0c1ec35244a361d368cad7df8d48a8e243060767 Mon Sep 17 00:00:00 2001 From: Cluas <Cluas@live.cn> Date: Fri, 26 Sep 2025 16:23:16 +0800 Subject: [PATCH 046/126] fix: resolve UUID parsing error for default user session lookup (#26109) --- api/controllers/inner_api/plugin/wraps.py | 28 +++++++++++++---------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/api/controllers/inner_api/plugin/wraps.py b/api/controllers/inner_api/plugin/wraps.py index 04102c49f3..b683aa3160 100644 --- a/api/controllers/inner_api/plugin/wraps.py +++ b/api/controllers/inner_api/plugin/wraps.py @@ -24,20 +24,14 @@ def get_user(tenant_id: str, user_id: str | None) -> EndUser: NOTE: user_id is not trusted, it could be maliciously set to any value. As a result, it could only be considered as an end user id. """ + if not user_id: + user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID.value + is_anonymous = user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID.value try: with Session(db.engine) as session: - if not user_id: - user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID.value + user_model = None - user_model = ( - session.query(EndUser) - .where( - EndUser.id == user_id, - EndUser.tenant_id == tenant_id, - ) - .first() - ) - if not user_model: + if is_anonymous: user_model = ( session.query(EndUser) .where( @@ -46,11 +40,21 @@ def get_user(tenant_id: str, user_id: str | None) -> EndUser: ) .first() ) + else: + user_model = ( + session.query(EndUser) + .where( + EndUser.id == user_id, + EndUser.tenant_id == tenant_id, + ) + .first() + ) + if not user_model: user_model = EndUser( tenant_id=tenant_id, type="service_api", - is_anonymous=user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID.value, + is_anonymous=is_anonymous, session_id=user_id, ) session.add(user_model) From 319ecdd312864bfbceb2aa07ac3447798d691ece Mon Sep 17 00:00:00 2001 From: Yongtao Huang <yongtaoh2022@gmail.com> Date: Sat, 27 Sep 2025 09:50:32 +0800 Subject: [PATCH 047/126] Fix: avoid mutating node props (#26266) Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- web/app/components/workflow/nodes/iteration/node.tsx | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/web/app/components/workflow/nodes/iteration/node.tsx b/web/app/components/workflow/nodes/iteration/node.tsx index 0232c8755f..59b96b1e2d 100644 --- a/web/app/components/workflow/nodes/iteration/node.tsx +++ b/web/app/components/workflow/nodes/iteration/node.tsx @@ -2,6 +2,7 @@ import type { FC } from 'react' import { memo, useEffect, + useState, } from 'react' import { Background, @@ -27,19 +28,20 @@ const Node: FC<NodeProps<IterationNodeType>> = ({ const nodesInitialized = useNodesInitialized() const { handleNodeIterationRerender } = useNodeIterationInteractions() const { t } = useTranslation() + const [showTips, setShowTips] = useState(data._isShowTips) useEffect(() => { if (nodesInitialized) handleNodeIterationRerender(id) - if (data.is_parallel && data._isShowTips) { + if (data.is_parallel && showTips) { Toast.notify({ type: 'warning', message: t(`${i18nPrefix}.answerNodeWarningDesc`), duration: 5000, }) - data._isShowTips = false + setShowTips(false) } - }, [nodesInitialized, id, handleNodeIterationRerender, data, t]) + }, [nodesInitialized, id, handleNodeIterationRerender, data.is_parallel, showTips, t]) return ( <div className={cn( From 4da93ba579ed129c54b62a56454b6457283b62e2 Mon Sep 17 00:00:00 2001 From: heyszt <270985384@qq.com> Date: Sat, 27 Sep 
2025 09:51:23 +0800 Subject: [PATCH 048/126] update gen_ai semconv for aliyun trace (#26288) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/core/ops/aliyun_trace/aliyun_trace.py | 40 ++++---- api/core/ops/aliyun_trace/entities/semconv.py | 9 +- api/core/ops/aliyun_trace/utils.py | 95 +++++++++++++++++++ 3 files changed, 123 insertions(+), 21 deletions(-) diff --git a/api/core/ops/aliyun_trace/aliyun_trace.py b/api/core/ops/aliyun_trace/aliyun_trace.py index c0727326ce..a7d8576d8d 100644 --- a/api/core/ops/aliyun_trace/aliyun_trace.py +++ b/api/core/ops/aliyun_trace/aliyun_trace.py @@ -14,12 +14,12 @@ from core.ops.aliyun_trace.data_exporter.traceclient import ( from core.ops.aliyun_trace.entities.aliyun_trace_entity import SpanData, TraceMetadata from core.ops.aliyun_trace.entities.semconv import ( GEN_AI_COMPLETION, - GEN_AI_MODEL_NAME, + GEN_AI_INPUT_MESSAGE, + GEN_AI_OUTPUT_MESSAGE, GEN_AI_PROMPT, - GEN_AI_PROMPT_TEMPLATE_TEMPLATE, - GEN_AI_PROMPT_TEMPLATE_VARIABLE, + GEN_AI_PROVIDER_NAME, + GEN_AI_REQUEST_MODEL, GEN_AI_RESPONSE_FINISH_REASON, - GEN_AI_SYSTEM, GEN_AI_USAGE_INPUT_TOKENS, GEN_AI_USAGE_OUTPUT_TOKENS, GEN_AI_USAGE_TOTAL_TOKENS, @@ -35,6 +35,9 @@ from core.ops.aliyun_trace.utils import ( create_links_from_trace_id, create_status_from_error, extract_retrieval_documents, + format_input_messages, + format_output_messages, + format_retrieval_documents, get_user_id_from_message_data, get_workflow_node_status, serialize_json_data, @@ -151,10 +154,6 @@ class AliyunDataTrace(BaseTraceInstance): ) self.trace_client.add_span(message_span) - app_model_config = getattr(message_data, "app_model_config", {}) - pre_prompt = getattr(app_model_config, "pre_prompt", "") - inputs_data = getattr(message_data, "inputs", {}) - llm_span = SpanData( trace_id=trace_metadata.trace_id, parent_span_id=message_span_id, @@ -170,13 +169,11 @@ class AliyunDataTrace(BaseTraceInstance): inputs=inputs_json, outputs=outputs_str, ), - GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name") or "", - GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider") or "", + GEN_AI_REQUEST_MODEL: trace_info.metadata.get("ls_model_name") or "", + GEN_AI_PROVIDER_NAME: trace_info.metadata.get("ls_provider") or "", GEN_AI_USAGE_INPUT_TOKENS: str(trace_info.message_tokens), GEN_AI_USAGE_OUTPUT_TOKENS: str(trace_info.answer_tokens), GEN_AI_USAGE_TOTAL_TOKENS: str(trace_info.total_tokens), - GEN_AI_PROMPT_TEMPLATE_VARIABLE: serialize_json_data(inputs_data), - GEN_AI_PROMPT_TEMPLATE_TEMPLATE: pre_prompt, GEN_AI_PROMPT: inputs_json, GEN_AI_COMPLETION: outputs_str, }, @@ -364,6 +361,10 @@ class AliyunDataTrace(BaseTraceInstance): input_value = str(node_execution.inputs.get("query", "")) if node_execution.inputs else "" output_value = serialize_json_data(node_execution.outputs.get("result", [])) if node_execution.outputs else "" + retrieval_documents = node_execution.outputs.get("result", []) if node_execution.outputs else [] + semantic_retrieval_documents = format_retrieval_documents(retrieval_documents) + semantic_retrieval_documents_json = serialize_json_data(semantic_retrieval_documents) + return SpanData( trace_id=trace_metadata.trace_id, parent_span_id=trace_metadata.workflow_span_id, @@ -380,7 +381,7 @@ class AliyunDataTrace(BaseTraceInstance): outputs=output_value, ), RETRIEVAL_QUERY: input_value, - RETRIEVAL_DOCUMENT: output_value, + RETRIEVAL_DOCUMENT: semantic_retrieval_documents_json, }, status=get_workflow_node_status(node_execution), links=trace_metadata.links, @@ -396,6 
+397,9 @@ class AliyunDataTrace(BaseTraceInstance): prompts_json = serialize_json_data(process_data.get("prompts", [])) text_output = str(outputs.get("text", "")) + gen_ai_input_message = format_input_messages(process_data) + gen_ai_output_message = format_output_messages(outputs) + return SpanData( trace_id=trace_metadata.trace_id, parent_span_id=trace_metadata.workflow_span_id, @@ -411,14 +415,16 @@ class AliyunDataTrace(BaseTraceInstance): inputs=prompts_json, outputs=text_output, ), - GEN_AI_MODEL_NAME: process_data.get("model_name") or "", - GEN_AI_SYSTEM: process_data.get("model_provider") or "", + GEN_AI_REQUEST_MODEL: process_data.get("model_name") or "", + GEN_AI_PROVIDER_NAME: process_data.get("model_provider") or "", GEN_AI_USAGE_INPUT_TOKENS: str(usage_data.get("prompt_tokens", 0)), GEN_AI_USAGE_OUTPUT_TOKENS: str(usage_data.get("completion_tokens", 0)), GEN_AI_USAGE_TOTAL_TOKENS: str(usage_data.get("total_tokens", 0)), GEN_AI_PROMPT: prompts_json, GEN_AI_COMPLETION: text_output, GEN_AI_RESPONSE_FINISH_REASON: outputs.get("finish_reason") or "", + GEN_AI_INPUT_MESSAGE: gen_ai_input_message, + GEN_AI_OUTPUT_MESSAGE: gen_ai_output_message, }, status=get_workflow_node_status(node_execution), links=trace_metadata.links, @@ -502,8 +508,8 @@ class AliyunDataTrace(BaseTraceInstance): inputs=inputs_json, outputs=suggested_question_json, ), - GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name") or "", - GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider") or "", + GEN_AI_REQUEST_MODEL: trace_info.metadata.get("ls_model_name") or "", + GEN_AI_PROVIDER_NAME: trace_info.metadata.get("ls_provider") or "", GEN_AI_PROMPT: inputs_json, GEN_AI_COMPLETION: suggested_question_json, }, diff --git a/api/core/ops/aliyun_trace/entities/semconv.py b/api/core/ops/aliyun_trace/entities/semconv.py index 7a22db21e2..c823fcab8a 100644 --- a/api/core/ops/aliyun_trace/entities/semconv.py +++ b/api/core/ops/aliyun_trace/entities/semconv.py @@ -17,17 +17,18 @@ RETRIEVAL_QUERY: Final[str] = "retrieval.query" RETRIEVAL_DOCUMENT: Final[str] = "retrieval.document" # LLM attributes -GEN_AI_MODEL_NAME: Final[str] = "gen_ai.model_name" -GEN_AI_SYSTEM: Final[str] = "gen_ai.system" +GEN_AI_REQUEST_MODEL: Final[str] = "gen_ai.request.model" +GEN_AI_PROVIDER_NAME: Final[str] = "gen_ai.provider.name" GEN_AI_USAGE_INPUT_TOKENS: Final[str] = "gen_ai.usage.input_tokens" GEN_AI_USAGE_OUTPUT_TOKENS: Final[str] = "gen_ai.usage.output_tokens" GEN_AI_USAGE_TOTAL_TOKENS: Final[str] = "gen_ai.usage.total_tokens" -GEN_AI_PROMPT_TEMPLATE_TEMPLATE: Final[str] = "gen_ai.prompt_template.template" -GEN_AI_PROMPT_TEMPLATE_VARIABLE: Final[str] = "gen_ai.prompt_template.variable" GEN_AI_PROMPT: Final[str] = "gen_ai.prompt" GEN_AI_COMPLETION: Final[str] = "gen_ai.completion" GEN_AI_RESPONSE_FINISH_REASON: Final[str] = "gen_ai.response.finish_reason" +GEN_AI_INPUT_MESSAGE: Final[str] = "gen_ai.input.messages" +GEN_AI_OUTPUT_MESSAGE: Final[str] = "gen_ai.output.messages" + # Tool attributes TOOL_NAME: Final[str] = "tool.name" TOOL_DESCRIPTION: Final[str] = "tool.description" diff --git a/api/core/ops/aliyun_trace/utils.py b/api/core/ops/aliyun_trace/utils.py index 2ec9e75dcd..7f68889e92 100644 --- a/api/core/ops/aliyun_trace/utils.py +++ b/api/core/ops/aliyun_trace/utils.py @@ -1,4 +1,5 @@ import json +from collections.abc import Mapping from typing import Any from opentelemetry.trace import Link, Status, StatusCode @@ -93,3 +94,97 @@ def create_common_span_attributes( INPUT_VALUE: inputs, OUTPUT_VALUE: outputs, } + + +def 
format_retrieval_documents(retrieval_documents: list) -> list: + try: + if not isinstance(retrieval_documents, list): + return [] + + semantic_documents = [] + for doc in retrieval_documents: + if not isinstance(doc, dict): + continue + + metadata = doc.get("metadata", {}) + content = doc.get("content", "") + title = doc.get("title", "") + score = metadata.get("score", 0.0) + document_id = metadata.get("document_id", "") + + semantic_metadata = {} + if title: + semantic_metadata["title"] = title + if metadata.get("source"): + semantic_metadata["source"] = metadata["source"] + elif metadata.get("_source"): + semantic_metadata["source"] = metadata["_source"] + if metadata.get("doc_metadata"): + doc_metadata = metadata["doc_metadata"] + if isinstance(doc_metadata, dict): + semantic_metadata.update(doc_metadata) + + semantic_doc = { + "document": {"content": content, "metadata": semantic_metadata, "score": score, "id": document_id} + } + semantic_documents.append(semantic_doc) + + return semantic_documents + except Exception: + return [] + + +def format_input_messages(process_data: Mapping[str, Any]) -> str: + try: + if not isinstance(process_data, dict): + return serialize_json_data([]) + + prompts = process_data.get("prompts", []) + if not prompts: + return serialize_json_data([]) + + valid_roles = {"system", "user", "assistant", "tool"} + input_messages = [] + for prompt in prompts: + if not isinstance(prompt, dict): + continue + + role = prompt.get("role", "") + text = prompt.get("text", "") + + if not role or role not in valid_roles: + continue + + if text: + message = {"role": role, "parts": [{"type": "text", "content": text}]} + input_messages.append(message) + + return serialize_json_data(input_messages) + except Exception: + return serialize_json_data([]) + + +def format_output_messages(outputs: Mapping[str, Any]) -> str: + try: + if not isinstance(outputs, dict): + return serialize_json_data([]) + + text = outputs.get("text", "") + finish_reason = outputs.get("finish_reason", "") + + if not text: + return serialize_json_data([]) + + valid_finish_reasons = {"stop", "length", "content_filter", "tool_call", "error"} + if finish_reason not in valid_finish_reasons: + finish_reason = "stop" + + output_message = { + "role": "assistant", + "parts": [{"type": "text", "content": text}], + "finish_reason": finish_reason, + } + + return serialize_json_data([output_message]) + except Exception: + return serialize_json_data([]) From 4906eeac189f959ae45d7db0230c8bef07a35a31 Mon Sep 17 00:00:00 2001 From: -LAN- <laipz8200@outlook.com> Date: Sat, 27 Sep 2025 10:05:30 +0800 Subject: [PATCH 049/126] chore: streamline AGENTS.md guidance (#26308) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- AGENTS.md | 89 +++++++++++++++++-------------------------------------- 1 file changed, 28 insertions(+), 61 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index b214258069..5859cd1bd9 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -4,84 +4,51 @@ Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management. 
-The codebase consists of: +The codebase is split into: -- **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture -- **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19 +- **Backend API** (`/api`): Python Flask application organized with Domain-Driven Design +- **Frontend Web** (`/web`): Next.js 15 application using TypeScript and React 19 - **Docker deployment** (`/docker`): Containerized deployment configurations -## Development Commands +## Backend Workflow -### Backend (API) +- Run backend CLI commands through `uv run --project api <command>`. -All Python commands must be prefixed with `uv run --project api`: +- Backend QA gate requires passing `make lint`, `make type-check`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh` before review. -```bash -# Start development servers -./dev/start-api # Start API server -./dev/start-worker # Start Celery worker +- Use Makefile targets for linting and formatting; `make lint` and `make type-check` cover the required checks. -# Run tests -uv run --project api pytest # Run all tests -uv run --project api pytest tests/unit_tests/ # Unit tests only -uv run --project api pytest tests/integration_tests/ # Integration tests +- Integration tests are CI-only and are not expected to run in the local environment. -# Code quality -./dev/reformat # Run all formatters and linters -uv run --project api ruff check --fix ./ # Fix linting issues -uv run --project api ruff format ./ # Format code -uv run --directory api basedpyright # Type checking -``` - -### Frontend (Web) +## Frontend Workflow ```bash cd web -pnpm lint # Run ESLint -pnpm lint:fix # Fix ESLint issues -pnpm test # Run Jest tests +pnpm lint +pnpm lint:fix +pnpm test ``` -## Testing Guidelines +## Testing & Quality Practices -### Backend Testing +- Follow TDD: red → green → refactor. +- Use `pytest` for backend tests with Arrange-Act-Assert structure. +- Enforce strong typing; avoid `Any` and prefer explicit type annotations. +- Write self-documenting code; only add comments that explain intent. -- Use `pytest` for all backend tests -- Write tests first (TDD approach) -- Test structure: Arrange-Act-Assert +## Language Style -## Code Style Requirements +- **Python**: Keep type hints on functions and attributes, and implement relevant special methods (e.g., `__repr__`, `__str__`). +- **TypeScript**: Use the strict config, lean on ESLint + Prettier workflows, and avoid `any` types. -### Python +## General Practices -- Use type hints for all functions and class attributes -- No `Any` types unless absolutely necessary -- Implement special methods (`__repr__`, `__str__`) appropriately +- Prefer editing existing files; add new documentation only when requested. +- Inject dependencies through constructors and preserve clean architecture boundaries. +- Handle errors with domain-specific exceptions at the correct layer. 
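As a hedged illustration of the Arrange-Act-Assert layout the new testing guidance asks for (the test below is hypothetical, not taken from Dify's suite):

```python
def test_document_title_is_normalized():
    # Arrange: build the raw input the unit under test receives
    raw_title = "  Getting Started  "

    # Act: run the behavior being verified
    normalized = raw_title.strip().lower()

    # Assert: check one observable outcome
    assert normalized == "getting started"
```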
-### TypeScript/JavaScript +## Project Conventions -- Strict TypeScript configuration -- ESLint with Prettier integration -- Avoid `any` type - -## Important Notes - -- **Environment Variables**: Always use UV for Python commands: `uv run --project api <command>` -- **Comments**: Only write meaningful comments that explain "why", not "what" -- **File Creation**: Always prefer editing existing files over creating new ones -- **Documentation**: Don't create documentation files unless explicitly requested -- **Code Quality**: Always run `./dev/reformat` before committing backend changes - -## Common Development Tasks - -### Adding a New API Endpoint - -1. Create controller in `/api/controllers/` -1. Add service logic in `/api/services/` -1. Update routes in controller's `__init__.py` -1. Write tests in `/api/tests/` - -## Project-Specific Conventions - -- All async tasks use Celery with Redis as broker -- **Internationalization**: Frontend supports multiple languages with English (`web/i18n/en-US/`) as the source. All user-facing text must use i18n keys, no hardcoded strings. Edit corresponding module files in `en-US/` directory for translations. +- Backend architecture adheres to DDD and Clean Architecture principles. +- Async work runs through Celery with Redis as the broker. +- Frontend user-facing strings must use `web/i18n/en-US/`; avoid hardcoded text. From 9a7245e1df4b9648e761f183396f12fbd828b589 Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Sat, 27 Sep 2025 11:54:25 +0900 Subject: [PATCH 050/126] rm assigned but unused (#25639) --- .../config/automatic/version-selector.tsx | 5 -- web/app/components/develop/code.tsx | 11 --- web/app/components/explore/sidebar/index.tsx | 12 --- .../members-page/operation/index.tsx | 4 +- .../variable/var-reference-vars.tsx | 76 ------------------- .../components/mail-and-password-auth.tsx | 2 - 6 files changed, 2 insertions(+), 108 deletions(-) diff --git a/web/app/components/app/configuration/config/automatic/version-selector.tsx b/web/app/components/app/configuration/config/automatic/version-selector.tsx index b5de8b3126..c3d3e1d91c 100644 --- a/web/app/components/app/configuration/config/automatic/version-selector.tsx +++ b/web/app/components/app/configuration/config/automatic/version-selector.tsx @@ -5,11 +5,6 @@ import cn from '@/utils/classnames' import { RiArrowDownSLine, RiCheckLine } from '@remixicon/react' import { useTranslation } from 'react-i18next' -type Option = { - label: string - value: number -} - type VersionSelectorProps = { versionLen: number; value: number; diff --git a/web/app/components/develop/code.tsx b/web/app/components/develop/code.tsx index ee67921031..eadc87a5ca 100644 --- a/web/app/components/develop/code.tsx +++ b/web/app/components/develop/code.tsx @@ -13,17 +13,6 @@ import classNames from '@/utils/classnames' import { writeTextToClipboard } from '@/utils/clipboard' import type { PropsWithChildren, ReactElement, ReactNode } from 'react' -const languageNames = { - js: 'JavaScript', - ts: 'TypeScript', - javascript: 'JavaScript', - typescript: 'TypeScript', - php: 'PHP', - python: 'Python', - ruby: 'Ruby', - go: 'Go', -} as { [key: string]: string } - type IChildrenProps = { children: React.ReactNode [key: string]: any diff --git a/web/app/components/explore/sidebar/index.tsx b/web/app/components/explore/sidebar/index.tsx index c5866c31d4..2173f0fcb7 100644 --- a/web/app/components/explore/sidebar/index.tsx +++ b/web/app/components/explore/sidebar/index.tsx @@ -26,18 +26,6 @@ const 
DiscoveryIcon = () => ( </svg> ) -const SelectedChatIcon = () => ( - <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> - <path fillRule="evenodd" clipRule="evenodd" d="M8.00016 1.3335C4.31826 1.3335 1.3335 4.31826 1.3335 8.00016C1.3335 8.88571 1.50651 9.7325 1.8212 10.5074C1.84962 10.5773 1.86597 10.6178 1.87718 10.6476L1.88058 10.6568L1.88016 10.66C1.87683 10.6846 1.87131 10.7181 1.86064 10.7821L1.46212 13.1732C1.44424 13.2803 1.42423 13.4001 1.41638 13.5041C1.40782 13.6176 1.40484 13.7981 1.48665 13.9888C1.58779 14.2246 1.77569 14.4125 2.0115 14.5137C2.20224 14.5955 2.38274 14.5925 2.49619 14.5839C2.60025 14.5761 2.72006 14.5561 2.82715 14.5382L5.2182 14.1397C5.28222 14.129 5.31576 14.1235 5.34036 14.1202L5.34353 14.1197L5.35274 14.1231C5.38258 14.1344 5.42298 14.1507 5.49297 14.1791C6.26783 14.4938 7.11462 14.6668 8.00016 14.6668C11.6821 14.6668 14.6668 11.6821 14.6668 8.00016C14.6668 4.31826 11.6821 1.3335 8.00016 1.3335ZM4.00016 8.00016C4.00016 7.44788 4.44788 7.00016 5.00016 7.00016C5.55245 7.00016 6.00016 7.44788 6.00016 8.00016C6.00016 8.55245 5.55245 9.00016 5.00016 9.00016C4.44788 9.00016 4.00016 8.55245 4.00016 8.00016ZM7.00016 8.00016C7.00016 7.44788 7.44788 7.00016 8.00016 7.00016C8.55245 7.00016 9.00016 7.44788 9.00016 8.00016C9.00016 8.55245 8.55245 9.00016 8.00016 9.00016C7.44788 9.00016 7.00016 8.55245 7.00016 8.00016ZM11.0002 7.00016C10.4479 7.00016 10.0002 7.44788 10.0002 8.00016C10.0002 8.55245 10.4479 9.00016 11.0002 9.00016C11.5524 9.00016 12.0002 8.55245 12.0002 8.00016C12.0002 7.44788 11.5524 7.00016 11.0002 7.00016Z" fill="#155EEF" /> - </svg> -) - -const ChatIcon = () => ( - <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> - <path d="M5 8H5.00667M8 8H8.00667M11 8H11.0067M8 14C11.3137 14 14 11.3137 14 8C14 4.68629 11.3137 2 8 2C4.68629 2 2 4.68629 2 8C2 8.7981 2.15582 9.5598 2.43871 10.2563C2.49285 10.3897 2.51992 10.4563 2.532 10.5102C2.54381 10.5629 2.54813 10.6019 2.54814 10.6559C2.54814 10.7111 2.53812 10.7713 2.51807 10.8916L2.12275 13.2635C2.08135 13.5119 2.06065 13.6361 2.09917 13.7259C2.13289 13.8045 2.19552 13.8671 2.27412 13.9008C2.36393 13.9393 2.48812 13.9186 2.73651 13.8772L5.10843 13.4819C5.22872 13.4619 5.28887 13.4519 5.34409 13.4519C5.3981 13.4519 5.43711 13.4562 5.48981 13.468C5.54369 13.4801 5.61035 13.5072 5.74366 13.5613C6.4402 13.8442 7.2019 14 8 14ZM5.33333 8C5.33333 8.1841 5.1841 8.33333 5 8.33333C4.81591 8.33333 4.66667 8.1841 4.66667 8C4.66667 7.81591 4.81591 7.66667 5 7.66667C5.1841 7.66667 5.33333 7.81591 5.33333 8ZM8.33333 8C8.33333 8.1841 8.1841 8.33333 8 8.33333C7.81591 8.33333 7.66667 8.1841 7.66667 8C7.66667 7.81591 7.81591 7.66667 8 7.66667C8.1841 7.66667 8.33333 7.81591 8.33333 8ZM11.3333 8C11.3333 8.1841 11.1841 8.33333 11 8.33333C10.8159 8.33333 10.6667 8.1841 10.6667 8C10.6667 7.81591 10.8159 7.66667 11 7.66667C11.1841 7.66667 11.3333 7.81591 11.3333 8Z" stroke="#344054" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round" /> - </svg> -) - export type IExploreSideBarProps = { controlUpdateInstalledApps: number } diff --git a/web/app/components/header/account-setting/members-page/operation/index.tsx b/web/app/components/header/account-setting/members-page/operation/index.tsx index 46f842dd4b..b06ec63228 100644 --- a/web/app/components/header/account-setting/members-page/operation/index.tsx +++ b/web/app/components/header/account-setting/members-page/operation/index.tsx @@ -33,13 +33,13 @@ const Operation = ({ const roleList 
= useMemo(() => { if (operatorRole === 'owner') { return [ - ...['admin', 'editor', 'normal'], + 'admin', 'editor', 'normal', ...(datasetOperatorEnabled ? ['dataset_operator'] : []), ] } if (operatorRole === 'admin') { return [ - ...['editor', 'normal'], + 'editor', 'normal', ...(datasetOperatorEnabled ? ['dataset_operator'] : []), ] } diff --git a/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx b/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx index 614d01a11e..067dbf8652 100644 --- a/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx +++ b/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx @@ -24,18 +24,6 @@ import ManageInputField from './manage-input-field' import { VariableIconWithColor } from '@/app/components/workflow/nodes/_base/components/variable/variable-label' import { Variable02 } from '@/app/components/base/icons/src/vender/solid/development' -type ObjectChildrenProps = { - nodeId: string - title: string - data: Var[] - objPath: string[] - onChange: (value: ValueSelector, item: Var) => void - onHovering?: (value: boolean) => void - itemWidth?: number - isSupportFileVar?: boolean - preferSchemaType?: boolean -} - type ItemProps = { nodeId: string title: string @@ -54,8 +42,6 @@ type ItemProps = { preferSchemaType?: boolean } -const objVarTypes = [VarType.object, VarType.file] - const Item: FC<ItemProps> = ({ nodeId, title, @@ -240,68 +226,6 @@ const Item: FC<ItemProps> = ({ ) } -const ObjectChildren: FC<ObjectChildrenProps> = ({ - title, - nodeId, - objPath, - data, - onChange, - onHovering, - itemWidth, - isSupportFileVar, - preferSchemaType, -}) => { - const currObjPath = objPath - const itemRef = useRef<HTMLDivElement>(null) - const [isItemHovering, setIsItemHovering] = useState(false) - useHover(itemRef, { - onChange: (hovering) => { - if (hovering) { - setIsItemHovering(true) - } - else { - setTimeout(() => { - setIsItemHovering(false) - }, 100) - } - }, - }) - const [isChildrenHovering, setIsChildrenHovering] = useState(false) - const isHovering = isItemHovering || isChildrenHovering - useEffect(() => { - onHovering && onHovering(isHovering) - }, [isHovering]) - useEffect(() => { - onHovering && onHovering(isItemHovering) - }, [isItemHovering]) - // absolute top-[-2px] - return ( - <div ref={itemRef} className=' space-y-1 rounded-lg border border-gray-200 bg-white shadow-lg' style={{ - right: itemWidth ? 
itemWidth - 10 : 215, - minWidth: 252, - }}> - <div className='flex h-[22px] items-center px-3 text-xs font-normal text-gray-700'><span className='text-gray-500'>{title}.</span>{currObjPath.join('.')}</div> - { - (data && data.length > 0) - && data.map((v, i) => ( - <Item - key={i} - nodeId={nodeId} - title={title} - objPath={objPath} - itemData={v} - onChange={onChange} - onHovering={setIsChildrenHovering} - isSupportFileVar={isSupportFileVar} - isException={v.isException} - preferSchemaType={preferSchemaType} - /> - )) - } - </div> - ) -} - type Props = { hideSearch?: boolean searchBoxClassName?: string diff --git a/web/app/signin/components/mail-and-password-auth.tsx b/web/app/signin/components/mail-and-password-auth.tsx index cccbd3a6ee..aaadc0b197 100644 --- a/web/app/signin/components/mail-and-password-auth.tsx +++ b/web/app/signin/components/mail-and-password-auth.tsx @@ -19,8 +19,6 @@ type MailAndPasswordAuthProps = { allowRegistration: boolean } -const passwordRegex = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/ - export default function MailAndPasswordAuth({ isInvite, isEmailSetup, allowRegistration }: MailAndPasswordAuthProps) { const { t } = useTranslation() const { locale } = useContext(I18NContext) From ff34969f217064d6d4fd414f9b332c44e7f24b02 Mon Sep 17 00:00:00 2001 From: crazywoola <100913391+crazywoola@users.noreply.github.com> Date: Sat, 27 Sep 2025 10:57:45 +0800 Subject: [PATCH 051/126] Chore/add sec report (#26313) --- .github/ISSUE_TEMPLATE/config.yml | 3 +++ .../CONTRIBUTING_CN.md | 2 +- .../CONTRIBUTING_DE.md | 2 +- .../CONTRIBUTING_ES.md | 2 +- .../CONTRIBUTING_FR.md | 2 +- .../CONTRIBUTING_JA.md | 2 +- .../CONTRIBUTING_KR.md | 2 +- .../CONTRIBUTING_PT.md | 2 +- .../CONTRIBUTING_TR.md | 2 +- .../CONTRIBUTING_TW.md | 2 +- .../CONTRIBUTING_VI.md | 2 +- README.md | 24 +++++++++---------- README_AR.md => README/README_AR.md | 8 +++---- README_BN.md => README/README_BN.md | 6 ++--- README_CN.md => README/README_CN.md | 10 ++++---- README_DE.md => README/README_DE.md | 8 +++---- README_ES.md => README/README_ES.md | 10 ++++---- README_FR.md => README/README_FR.md | 10 ++++---- README_JA.md => README/README_JA.md | 10 ++++---- README_KL.md => README/README_KL.md | 6 ++--- README_KR.md => README/README_KR.md | 8 +++---- README_PT.md => README/README_PT.md | 8 +++---- README_SI.md => README/README_SI.md | 6 ++--- README_TR.md => README/README_TR.md | 8 +++---- README_TW.md => README/README_TW.md | 8 +++---- README_VI.md => README/README_VI.md | 8 +++---- 26 files changed, 82 insertions(+), 79 deletions(-) rename CONTRIBUTING_CN.md => CONTRIBUTING/CONTRIBUTING_CN.md (96%) rename CONTRIBUTING_DE.md => CONTRIBUTING/CONTRIBUTING_DE.md (96%) rename CONTRIBUTING_ES.md => CONTRIBUTING/CONTRIBUTING_ES.md (96%) rename CONTRIBUTING_FR.md => CONTRIBUTING/CONTRIBUTING_FR.md (96%) rename CONTRIBUTING_JA.md => CONTRIBUTING/CONTRIBUTING_JA.md (96%) rename CONTRIBUTING_KR.md => CONTRIBUTING/CONTRIBUTING_KR.md (96%) rename CONTRIBUTING_PT.md => CONTRIBUTING/CONTRIBUTING_PT.md (96%) rename CONTRIBUTING_TR.md => CONTRIBUTING/CONTRIBUTING_TR.md (96%) rename CONTRIBUTING_TW.md => CONTRIBUTING/CONTRIBUTING_TW.md (96%) rename CONTRIBUTING_VI.md => CONTRIBUTING/CONTRIBUTING_VI.md (96%) rename README_AR.md => README/README_AR.md (97%) rename README_BN.md => README/README_BN.md (98%) rename README_CN.md => README/README_CN.md (97%) rename README_DE.md => README/README_DE.md (96%) rename README_ES.md => README/README_ES.md (97%) rename README_FR.md => README/README_FR.md (97%) rename README_JA.md => 
README/README_JA.md (97%) rename README_KL.md => README/README_KL.md (98%) rename README_KR.md => README/README_KR.md (97%) rename README_PT.md => README/README_PT.md (97%) rename README_SI.md => README/README_SI.md (97%) rename README_TR.md => README/README_TR.md (97%) rename README_TW.md => README/README_TW.md (97%) rename README_VI.md => README/README_VI.md (97%) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index c1666d24cf..859f499b8e 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,5 +1,8 @@ blank_issues_enabled: false contact_links: + - name: "\U0001F510 Security Vulnerabilities" + url: "https://github.com/langgenius/dify/security/advisories/new" + about: Report security vulnerabilities through GitHub Security Advisories to ensure responsible disclosure. 💡 Please do not report security vulnerabilities in public issues. - name: "\U0001F4A1 Model Providers & Plugins" url: "https://github.com/langgenius/dify-official-plugins/issues/new/choose" about: Report issues with official plugins or model providers, you will need to provide the plugin version and other relevant details. diff --git a/CONTRIBUTING_CN.md b/CONTRIBUTING/CONTRIBUTING_CN.md similarity index 96% rename from CONTRIBUTING_CN.md rename to CONTRIBUTING/CONTRIBUTING_CN.md index c278c8fd7a..8c52d8939c 100644 --- a/CONTRIBUTING_CN.md +++ b/CONTRIBUTING/CONTRIBUTING_CN.md @@ -6,7 +6,7 @@ 本指南和 Dify 一样在不断完善中。如果有任何滞后于项目实际情况的地方,恳请谅解,我们也欢迎任何改进建议。 -关于许可证,请花一分钟阅读我们简短的[许可和贡献者协议](./LICENSE)。同时也请遵循社区[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 +关于许可证,请花一分钟阅读我们简短的[许可和贡献者协议](../LICENSE)。同时也请遵循社区[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 ## 开始之前 diff --git a/CONTRIBUTING_DE.md b/CONTRIBUTING/CONTRIBUTING_DE.md similarity index 96% rename from CONTRIBUTING_DE.md rename to CONTRIBUTING/CONTRIBUTING_DE.md index f819e80bbb..c9e52c4fd7 100644 --- a/CONTRIBUTING_DE.md +++ b/CONTRIBUTING/CONTRIBUTING_DE.md @@ -6,7 +6,7 @@ Wir müssen wendig sein und schnell liefern, aber wir möchten auch sicherstelle Dieser Leitfaden ist, wie Dify selbst, in ständiger Entwicklung. Wir sind dankbar für Ihr Verständnis, falls er manchmal hinter dem eigentlichen Projekt zurückbleibt, und begrüßen jedes Feedback zur Verbesserung. -Bitte nehmen Sie sich einen Moment Zeit, um unsere [Lizenz- und Mitwirkungsvereinbarung](./LICENSE) zu lesen. Die Community hält sich außerdem an den [Verhaltenskodex](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +Bitte nehmen Sie sich einen Moment Zeit, um unsere [Lizenz- und Mitwirkungsvereinbarung](../LICENSE) zu lesen. Die Community hält sich außerdem an den [Verhaltenskodex](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Bevor Sie loslegen diff --git a/CONTRIBUTING_ES.md b/CONTRIBUTING/CONTRIBUTING_ES.md similarity index 96% rename from CONTRIBUTING_ES.md rename to CONTRIBUTING/CONTRIBUTING_ES.md index e19d958c65..764c678fb2 100644 --- a/CONTRIBUTING_ES.md +++ b/CONTRIBUTING/CONTRIBUTING_ES.md @@ -6,7 +6,7 @@ Necesitamos ser ágiles y enviar rápidamente dado donde estamos, pero también Esta guía, como Dify mismo, es un trabajo en constante progreso. Agradecemos mucho tu comprensión si a veces se queda atrás del proyecto real, y damos la bienvenida a cualquier comentario para que podamos mejorar. -En términos de licencia, por favor tómate un minuto para leer nuestro breve [Acuerdo de Licencia y Colaborador](./LICENSE). 
La comunidad también se adhiere al [código de conducta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +En términos de licencia, por favor tómate un minuto para leer nuestro breve [Acuerdo de Licencia y Colaborador](../LICENSE). La comunidad también se adhiere al [código de conducta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Antes de empezar diff --git a/CONTRIBUTING_FR.md b/CONTRIBUTING/CONTRIBUTING_FR.md similarity index 96% rename from CONTRIBUTING_FR.md rename to CONTRIBUTING/CONTRIBUTING_FR.md index 335e943fcd..8df491a0a0 100644 --- a/CONTRIBUTING_FR.md +++ b/CONTRIBUTING/CONTRIBUTING_FR.md @@ -6,7 +6,7 @@ Nous devons être agiles et livrer rapidement compte tenu de notre position, mai Ce guide, comme Dify lui-même, est un travail en constante évolution. Nous apprécions grandement votre compréhension si parfois il est en retard par rapport au projet réel, et nous accueillons tout commentaire pour nous aider à nous améliorer. -En termes de licence, veuillez prendre une minute pour lire notre bref [Accord de Licence et de Contributeur](./LICENSE). La communauté adhère également au [code de conduite](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +En termes de licence, veuillez prendre une minute pour lire notre bref [Accord de Licence et de Contributeur](../LICENSE). La communauté adhère également au [code de conduite](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Avant de vous lancer diff --git a/CONTRIBUTING_JA.md b/CONTRIBUTING/CONTRIBUTING_JA.md similarity index 96% rename from CONTRIBUTING_JA.md rename to CONTRIBUTING/CONTRIBUTING_JA.md index 2d0d79fc16..dd3d6cbfc5 100644 --- a/CONTRIBUTING_JA.md +++ b/CONTRIBUTING/CONTRIBUTING_JA.md @@ -6,7 +6,7 @@ Difyに貢献しようとお考えですか?素晴らしいですね。私た このガイドは、Dify自体と同様に、常に進化し続けています。実際のプロジェクトの進行状況と多少のずれが生じる場合もございますが、ご理解いただけますと幸いです。改善のためのフィードバックも歓迎いたします。 -ライセンスについては、[ライセンスと貢献者同意書](./LICENSE)をご一読ください。また、コミュニティは[行動規範](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)に従っています。 +ライセンスについては、[ライセンスと貢献者同意書](../LICENSE)をご一読ください。また、コミュニティは[行動規範](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)に従っています。 ## 始める前に diff --git a/CONTRIBUTING_KR.md b/CONTRIBUTING/CONTRIBUTING_KR.md similarity index 96% rename from CONTRIBUTING_KR.md rename to CONTRIBUTING/CONTRIBUTING_KR.md index 14b1c9a9ca..f94d5bfbc9 100644 --- a/CONTRIBUTING_KR.md +++ b/CONTRIBUTING/CONTRIBUTING_KR.md @@ -6,7 +6,7 @@ Dify에 기여하려고 하시는군요 - 정말 멋집니다, 당신이 무엇 이 가이드는 Dify 자체와 마찬가지로 끊임없이 진행 중인 작업입니다. 때로는 실제 프로젝트보다 뒤처질 수 있다는 점을 이해해 주시면 감사하겠으며, 개선을 위한 피드백은 언제든지 환영합니다. -라이센스 측면에서, 간략한 [라이센스 및 기여자 동의서](./LICENSE)를 읽어보는 시간을 가져주세요. 커뮤니티는 또한 [행동 강령](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)을 준수합니다. +라이센스 측면에서, 간략한 [라이센스 및 기여자 동의서](../LICENSE)를 읽어보는 시간을 가져주세요. 커뮤니티는 또한 [행동 강령](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)을 준수합니다. ## 시작하기 전에 diff --git a/CONTRIBUTING_PT.md b/CONTRIBUTING/CONTRIBUTING_PT.md similarity index 96% rename from CONTRIBUTING_PT.md rename to CONTRIBUTING/CONTRIBUTING_PT.md index aeabcad51f..2aec1e2196 100644 --- a/CONTRIBUTING_PT.md +++ b/CONTRIBUTING/CONTRIBUTING_PT.md @@ -6,7 +6,7 @@ Precisamos ser ágeis e entregar rapidamente considerando onde estamos, mas tamb Este guia, como o próprio Dify, é um trabalho em constante evolução. Agradecemos muito a sua compreensão se às vezes ele ficar atrasado em relação ao projeto real, e damos as boas-vindas a qualquer feedback para que possamos melhorar. 
-Em termos de licenciamento, por favor, dedique um minuto para ler nosso breve [Acordo de Licença e Contribuidor](./LICENSE). A comunidade também adere ao [código de conduta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +Em termos de licenciamento, por favor, dedique um minuto para ler nosso breve [Acordo de Licença e Contribuidor](../LICENSE). A comunidade também adere ao [código de conduta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Antes de começar diff --git a/CONTRIBUTING_TR.md b/CONTRIBUTING/CONTRIBUTING_TR.md similarity index 96% rename from CONTRIBUTING_TR.md rename to CONTRIBUTING/CONTRIBUTING_TR.md index d016802a53..1932a3ab34 100644 --- a/CONTRIBUTING_TR.md +++ b/CONTRIBUTING/CONTRIBUTING_TR.md @@ -6,7 +6,7 @@ Bulunduğumuz noktada çevik olmamız ve hızlı hareket etmemiz gerekiyor, anca Bu rehber, Dify'ın kendisi gibi, sürekli gelişen bir çalışmadır. Bazen gerçek projenin gerisinde kalırsa anlayışınız için çok minnettarız ve gelişmemize yardımcı olacak her türlü geri bildirimi memnuniyetle karşılıyoruz. -Lisanslama konusunda, lütfen kısa [Lisans ve Katkıda Bulunan Anlaşmamızı](./LICENSE) okumak için bir dakikanızı ayırın. Topluluk ayrıca [davranış kurallarına](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md) da uyar. +Lisanslama konusunda, lütfen kısa [Lisans ve Katkıda Bulunan Anlaşmamızı](../LICENSE) okumak için bir dakikanızı ayırın. Topluluk ayrıca [davranış kurallarına](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md) da uyar. ## Başlamadan Önce diff --git a/CONTRIBUTING_TW.md b/CONTRIBUTING/CONTRIBUTING_TW.md similarity index 96% rename from CONTRIBUTING_TW.md rename to CONTRIBUTING/CONTRIBUTING_TW.md index 5c4d7022fe..7fba220a22 100644 --- a/CONTRIBUTING_TW.md +++ b/CONTRIBUTING/CONTRIBUTING_TW.md @@ -6,7 +6,7 @@ 這份指南與 Dify 一樣,都在持續完善中。如果指南內容有落後於實際專案的情況,還請見諒,也歡迎提供改進建議。 -關於授權部分,請花點時間閱讀我們簡短的[授權和貢獻者協議](./LICENSE)。社群也需遵守[行為準則](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 +關於授權部分,請花點時間閱讀我們簡短的[授權和貢獻者協議](../LICENSE)。社群也需遵守[行為準則](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 ## 開始之前 diff --git a/CONTRIBUTING_VI.md b/CONTRIBUTING/CONTRIBUTING_VI.md similarity index 96% rename from CONTRIBUTING_VI.md rename to CONTRIBUTING/CONTRIBUTING_VI.md index 2ad431296a..b9844c4869 100644 --- a/CONTRIBUTING_VI.md +++ b/CONTRIBUTING/CONTRIBUTING_VI.md @@ -6,7 +6,7 @@ Chúng tôi cần phải nhanh nhẹn và triển khai nhanh chóng, nhưng cũn Hướng dẫn này, giống như Dify, đang được phát triển liên tục. Chúng tôi rất cảm kích sự thông cảm của bạn nếu đôi khi nó chưa theo kịp dự án thực tế, và hoan nghênh mọi phản hồi để cải thiện. -Về giấy phép, vui lòng dành chút thời gian đọc [Thỏa thuận Cấp phép và Người đóng góp](./LICENSE) ngắn gọn của chúng tôi. Cộng đồng cũng tuân theo [quy tắc ứng xử](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +Về giấy phép, vui lòng dành chút thời gian đọc [Thỏa thuận Cấp phép và Người đóng góp](../LICENSE) ngắn gọn của chúng tôi. Cộng đồng cũng tuân theo [quy tắc ứng xử](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). 
## Trước khi bắt đầu diff --git a/README.md b/README.md index 90da1d3def..8159057f55 100644 --- a/README.md +++ b/README.md @@ -40,18 +40,18 @@ <p align="center"> <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> - <a href="./README_TW.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a> - <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> - <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> - <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> - <a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a> - <a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a> - <a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a> - <a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a> - <a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a> - <a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a> - <a href="./README_DE.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a> - <a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a> + <a href="./README/README_TW.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a> + <a href="./README/README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> + <a href="./README/README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> + <a href="./README/README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> + <a href="./README/README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a> + <a href="./README/README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a> + <a href="./README/README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a> + <a href="./README/README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a> + <a href="./README/README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a> + <a href="./README/README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a> + <a href="./README/README_DE.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a> + <a href="./README/README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a> </p> Dify is an open-source platform for developing LLM applications. Its intuitive interface combines agentic AI workflows, RAG pipelines, agent capabilities, model management, observability features, and more—allowing you to quickly move from prototype to production. 
diff --git a/README_AR.md b/README/README_AR.md similarity index 97% rename from README_AR.md rename to README/README_AR.md index 2451757ab5..df29db73da 100644 --- a/README_AR.md +++ b/README/README_AR.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> <a href="https://cloud.dify.ai">Dify Cloud</a> · @@ -35,7 +35,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -185,7 +185,7 @@ docker compose up -d ## الرخصة -هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. +هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](../LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. ## الكشف عن الأمان @@ -193,4 +193,4 @@ docker compose up -d ## الرخصة -هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. +هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](../LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. diff --git a/README_BN.md b/README/README_BN.md similarity index 98% rename from README_BN.md rename to README/README_BN.md index ef24dea171..b0a64a6cfe 100644 --- a/README_BN.md +++ b/README/README_BN.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> 📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">ডিফাই ওয়ার্কফ্লো ফাইল আপলোড পরিচিতি: গুগল নোটবুক-এলএম পডকাস্ট পুনর্নির্মাণ</a> @@ -39,7 +39,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -203,4 +203,4 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন ## লাইসেন্স -এই রিপোজিটরিটি [ডিফাই ওপেন সোর্স লাইসেন্স](LICENSE) এর অধিনে , যা মূলত অ্যাপাচি ২.০, তবে কিছু অতিরিক্ত বিধিনিষেধ রয়েছে। +এই রিপোজিটরিটি [ডিফাই ওপেন সোর্স লাইসেন্স](../LICENSE) এর অধিনে , যা মূলত অ্যাপাচি ২.০, তবে কিছু অতিরিক্ত বিধিনিষেধ রয়েছে। diff --git a/README_CN.md b/README/README_CN.md similarity index 97% rename from README_CN.md rename to README/README_CN.md index 9aaebf4037..9501992bd2 100644 --- a/README_CN.md +++ b/README/README_CN.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <div align="center"> <a href="https://cloud.dify.ai">Dify 云服务</a> · @@ -35,7 +35,7 @@ </p> <div align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in 
English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -180,7 +180,7 @@ docker compose up -d ## Contributing -对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_CN.md)。 +对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_CN.md)。 同时,请考虑通过社交媒体、活动和会议来支持 Dify 的分享。 > 我们正在寻找贡献者来帮助将 Dify 翻译成除了中文和英文之外的其他语言。如果您有兴趣帮助,请参阅我们的[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)获取更多信息,并在我们的[Discord 社区服务器](https://discord.gg/8Tpq4AcN9c)的`global-users`频道中留言。 @@ -196,7 +196,7 @@ docker compose up -d 我们欢迎您为 Dify 做出贡献,以帮助改善 Dify。包括:提交代码、问题、新想法,或分享您基于 Dify 创建的有趣且有用的 AI 应用程序。同时,我们也欢迎您在不同的活动、会议和社交媒体上分享 Dify。 - [GitHub Discussion](https://github.com/langgenius/dify/discussions). 👉:分享您的应用程序并与社区交流。 -- [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](CONTRIBUTING.md)。 +- [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](../CONTRIBUTING.md)。 - [电子邮件支持](mailto:hello@dify.ai?subject=%5BGitHub%5DQuestions%20About%20Dify)。👉:关于使用 Dify.AI 的问题。 - [Discord](https://discord.gg/FngNHpbcY7)。👉:分享您的应用程序并与社区交流。 - [X(Twitter)](https://twitter.com/dify_ai)。👉:分享您的应用程序并与社区交流。 @@ -208,4 +208,4 @@ docker compose up -d ## License -本仓库遵循 [Dify Open Source License](LICENSE) 开源协议,该许可证本质上是 Apache 2.0,但有一些额外的限制。 +本仓库遵循 [Dify Open Source License](../LICENSE) 开源协议,该许可证本质上是 Apache 2.0,但有一些额外的限制。 diff --git a/README_DE.md b/README/README_DE.md similarity index 96% rename from README_DE.md rename to README/README_DE.md index a08fe63d4f..d1a5837ab4 100644 --- a/README_DE.md +++ b/README/README_DE.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> 📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">Einführung in Dify Workflow File Upload: Google NotebookLM Podcast nachbilden</a> @@ -39,7 +39,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -173,7 +173,7 @@ Stellen Sie Dify mit einem Klick in AKS bereit, indem Sie [Azure Devops Pipeline ## Contributing -Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_DE.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren. +Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_DE.md). 
Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren. > Wir suchen Mitwirkende, die dabei helfen, Dify in weitere Sprachen zu übersetzen – außer Mandarin oder Englisch. Wenn Sie Interesse an einer Mitarbeit haben, lesen Sie bitte die [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) für weitere Informationen und hinterlassen Sie einen Kommentar im `global-users`-Kanal unseres [Discord Community Servers](https://discord.gg/8Tpq4AcN9c). @@ -200,4 +200,4 @@ Um Ihre Privatsphäre zu schützen, vermeiden Sie es bitte, Sicherheitsprobleme ## Lizenz -Dieses Repository steht unter der [Dify Open Source License](LICENSE), die im Wesentlichen Apache 2.0 mit einigen zusätzlichen Einschränkungen ist. +Dieses Repository steht unter der [Dify Open Source License](../LICENSE), die im Wesentlichen Apache 2.0 mit einigen zusätzlichen Einschränkungen ist. diff --git a/README_ES.md b/README/README_ES.md similarity index 97% rename from README_ES.md rename to README/README_ES.md index d8fdbf54e6..60f0a06868 100644 --- a/README_ES.md +++ b/README/README_ES.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> <a href="https://cloud.dify.ai">Dify Cloud</a> · @@ -35,7 +35,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -170,7 +170,7 @@ Implementa Dify en AKS con un clic usando [Azure Devops Pipeline Helm Chart by @ ## Contribuir -Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_ES.md). +Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_ES.md). Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en eventos y conferencias. > Estamos buscando colaboradores para ayudar con la traducción de Dify a idiomas que no sean el mandarín o el inglés. Si estás interesado en ayudar, consulta el [README de i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para obtener más información y déjanos un comentario en el canal `global-users` de nuestro [Servidor de Comunidad en Discord](https://discord.gg/8Tpq4AcN9c). @@ -198,7 +198,7 @@ Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En ## Licencia -Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. +Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](../LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. ## Divulgación de Seguridad @@ -206,4 +206,4 @@ Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. 
En ## Licencia -Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. +Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](../LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. diff --git a/README_FR.md b/README/README_FR.md similarity index 97% rename from README_FR.md rename to README/README_FR.md index 7474ea50c2..a782bd16f8 100644 --- a/README_FR.md +++ b/README/README_FR.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> <a href="https://cloud.dify.ai">Dify Cloud</a> · @@ -35,7 +35,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -168,7 +168,7 @@ Déployez Dify sur AKS en un clic en utilisant [Azure Devops Pipeline Helm Chart ## Contribuer -Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_FR.md). +Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_FR.md). Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur les réseaux sociaux et lors d'événements et de conférences. > Nous recherchons des contributeurs pour aider à traduire Dify dans des langues autres que le mandarin ou l'anglais. Si vous êtes intéressé à aider, veuillez consulter le [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) pour plus d'informations, et laissez-nous un commentaire dans le canal `global-users` de notre [Serveur communautaire Discord](https://discord.gg/8Tpq4AcN9c). @@ -196,7 +196,7 @@ Pour protéger votre vie privée, veuillez éviter de publier des problèmes de ## Licence -Ce référentiel est disponible sous la [Licence open source Dify](LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. +Ce référentiel est disponible sous la [Licence open source Dify](../LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. ## Divulgation de sécurité @@ -204,4 +204,4 @@ Pour protéger votre vie privée, veuillez éviter de publier des problèmes de ## Licence -Ce référentiel est disponible sous la [Licence open source Dify](LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. +Ce référentiel est disponible sous la [Licence open source Dify](../LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. 
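The rename hunks above and below all make the same two mechanical changes: each localized README moves under README/, and its relative links gain one directory level (./LICENSE becomes ../LICENSE, ./images/... becomes ../images/...). A quick way to confirm no link was missed after such a move is to resolve every relative Markdown link against the file's new location. The sketch below is a hypothetical verification helper, not part of this patch; the README/ directory layout and the link pattern are assumptions drawn from the hunks shown here.

# check_relative_links.py -- hypothetical helper, not part of this patch
import re
from pathlib import Path

# Match Markdown links whose target is a relative path, e.g. [text](../LICENSE)
LINK_RE = re.compile(r"\[[^\]]*\]\((\.\.?/[^)#]+)\)")

for md in Path("README").glob("README_*.md"):
    for target in LINK_RE.findall(md.read_text(encoding="utf-8")):
        if not (md.parent / target).resolve().exists():
            print(f"{md}: broken relative link -> {target}")

Run from the repository root, this prints nothing when every rewritten link (../LICENSE, ../images/GitHub_README_if.png, ../README.md) still resolves.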
diff --git a/README_JA.md b/README/README_JA.md similarity index 97% rename from README_JA.md rename to README/README_JA.md index a782849f6e..23cd0e692b 100644 --- a/README_JA.md +++ b/README/README_JA.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> <a href="https://cloud.dify.ai">Dify Cloud</a> · @@ -35,7 +35,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -169,7 +169,7 @@ docker compose up -d ## 貢献 -コードに貢献したい方は、[Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_JA.md)を参照してください。 +コードに貢献したい方は、[Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_JA.md)を参照してください。 同時に、DifyをSNSやイベント、カンファレンスで共有してサポートしていただけると幸いです。 > Difyを英語または中国語以外の言語に翻訳してくれる貢献者を募集しています。興味がある場合は、詳細については[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)を参照してください。また、[Discordコミュニティサーバー](https://discord.gg/8Tpq4AcN9c)の`global-users`チャンネルにコメントを残してください。 @@ -183,10 +183,10 @@ docker compose up -d ## コミュニティ & お問い合わせ - [GitHub Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。 -- [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](CONTRIBUTING_JA.md)を参照してください +- [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](../CONTRIBUTING/CONTRIBUTING_JA.md)を参照してください - [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。 - [X(Twitter)](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。 ## ライセンス -このリポジトリは、Dify Open Source License にいくつかの追加制限を加えた[Difyオープンソースライセンス](LICENSE)の下で利用可能です。 +このリポジトリは、Dify Open Source License にいくつかの追加制限を加えた[Difyオープンソースライセンス](../LICENSE)の下で利用可能です。 diff --git a/README_KL.md b/README/README_KL.md similarity index 98% rename from README_KL.md rename to README/README_KL.md index 93da9a6140..cae02f56fe 100644 --- a/README_KL.md +++ b/README/README_KL.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> <a href="https://cloud.dify.ai">Dify Cloud</a> · @@ -35,7 +35,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -199,4 +199,4 @@ To protect your privacy, please avoid posting security issues on GitHub. Instead ## License -This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions. 
+This repository is available under the [Dify Open Source License](../LICENSE), which is essentially Apache 2.0 with a few additional restrictions. diff --git a/README_KR.md b/README/README_KR.md similarity index 97% rename from README_KR.md rename to README/README_KR.md index ec28cc0f61..e1a2a82677 100644 --- a/README_KR.md +++ b/README/README_KR.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> <a href="https://cloud.dify.ai">Dify 클라우드</a> · @@ -35,7 +35,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -162,7 +162,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ## 기여 -코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_KR.md)를 참조하세요. +코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_KR.md)를 참조하세요. 동시에 Dify를 소셜 미디어와 행사 및 컨퍼런스에 공유하여 지원하는 것을 고려해 주시기 바랍니다. > 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요. @@ -190,4 +190,4 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ## 라이선스 -이 저장소는 기본적으로 몇 가지 추가 제한 사항이 있는 Apache 2.0인 [Dify 오픈 소스 라이선스](LICENSE)에 따라 사용할 수 있습니다. +이 저장소는 기본적으로 몇 가지 추가 제한 사항이 있는 Apache 2.0인 [Dify 오픈 소스 라이선스](../LICENSE)에 따라 사용할 수 있습니다. diff --git a/README_PT.md b/README/README_PT.md similarity index 97% rename from README_PT.md rename to README/README_PT.md index da8f354a49..91132aade4 100644 --- a/README_PT.md +++ b/README/README_PT.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> 📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">Introduzindo o Dify Workflow com Upload de Arquivo: Recrie o Podcast Google NotebookLM</a> @@ -39,7 +39,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README em Inglês" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README em Inglês" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README em Espanhol" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -168,7 +168,7 @@ Implante o Dify no AKS com um clique usando [Azure Devops Pipeline Helm Chart by ## Contribuindo -Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_PT.md). +Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_PT.md). 
Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em eventos e conferências. > Estamos buscando contribuidores para ajudar na tradução do Dify para idiomas além de Mandarim e Inglês. Se você tiver interesse em ajudar, consulte o [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para mais informações e deixe-nos um comentário no canal `global-users` em nosso [Servidor da Comunidade no Discord](https://discord.gg/8Tpq4AcN9c). @@ -196,4 +196,4 @@ Para proteger sua privacidade, evite postar problemas de segurança no GitHub. E ## Licença -Este repositório está disponível sob a [Licença de Código Aberto Dify](LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais. +Este repositório está disponível sob a [Licença de Código Aberto Dify](../LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais. diff --git a/README_SI.md b/README/README_SI.md similarity index 97% rename from README_SI.md rename to README/README_SI.md index c20dc3484f..8cd78c065c 100644 --- a/README_SI.md +++ b/README/README_SI.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> 📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">Predstavljamo nalaganje datotek Dify Workflow: znova ustvarite Google NotebookLM Podcast</a> @@ -36,7 +36,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -196,4 +196,4 @@ Zaradi zaščite vaše zasebnosti se izogibajte objavljanju varnostnih vprašanj ## Licenca -To skladišče je na voljo pod [odprtokodno licenco Dify](LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami. +To skladišče je na voljo pod [odprtokodno licenco Dify](../LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami. 
diff --git a/README_TR.md b/README/README_TR.md similarity index 97% rename from README_TR.md rename to README/README_TR.md index 21df0d1605..9836c6be61 100644 --- a/README_TR.md +++ b/README/README_TR.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> <a href="https://cloud.dify.ai">Dify Bulut</a> · @@ -35,7 +35,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -161,7 +161,7 @@ Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.ter ## Katkıda Bulunma -Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_TR.md) bakabilirsiniz. +Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_TR.md) bakabilirsiniz. Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda paylaşarak desteklemeyi düşünün. > Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın. @@ -189,4 +189,4 @@ Gizliliğinizi korumak için, lütfen güvenlik sorunlarını GitHub'da paylaşm ## Lisans -Bu depo, temel olarak Apache 2.0 lisansı ve birkaç ek kısıtlama içeren [Dify Açık Kaynak Lisansı](LICENSE) altında kullanıma sunulmuştur. +Bu depo, temel olarak Apache 2.0 lisansı ve birkaç ek kısıtlama içeren [Dify Açık Kaynak Lisansı](../LICENSE) altında kullanıma sunulmuştur. 
diff --git a/README_TW.md b/README/README_TW.md similarity index 97% rename from README_TW.md rename to README/README_TW.md index 18d0724784..b9c0b81246 100644 --- a/README_TW.md +++ b/README/README_TW.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> 📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">介紹 Dify 工作流程檔案上傳功能:重現 Google NotebookLM Podcast</a> @@ -39,7 +39,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_TW.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> @@ -173,7 +173,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 貢獻 -對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_TW.md)。 +對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_TW.md)。 同時,也請考慮透過在社群媒體和各種活動與會議上分享 Dify 來支持我們。 > 我們正在尋找貢獻者協助將 Dify 翻譯成中文和英文以外的語言。如果您有興趣幫忙,請查看 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) 獲取更多資訊,並在我們的 [Discord 社群伺服器](https://discord.gg/8Tpq4AcN9c) 的 `global-users` 頻道留言給我們。 @@ -201,4 +201,4 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 授權條款 -本代碼庫採用 [Dify 開源授權](LICENSE),這基本上是 Apache 2.0 授權加上一些額外限制條款。 +本代碼庫採用 [Dify 開源授權](../LICENSE),這基本上是 Apache 2.0 授權加上一些額外限制條款。 diff --git a/README_VI.md b/README/README_VI.md similarity index 97% rename from README_VI.md rename to README/README_VI.md index 6d5305fb75..22d74eb31d 100644 --- a/README_VI.md +++ b/README/README_VI.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](./images/GitHub_README_if.png) +![cover-v5-optimized](../images/GitHub_README_if.png) <p align="center"> <a href="https://cloud.dify.ai">Dify Cloud</a> · @@ -35,7 +35,7 @@ </p> <p align="center"> - <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> + <a href="../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a> <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a> <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a> <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a> @@ -162,7 +162,7 @@ Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure De ## Đóng góp -Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_VI.md) của chúng tôi. +Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_VI.md) của chúng tôi. Đồng thời, vui lòng xem xét hỗ trợ Dify bằng cách chia sẻ nó trên mạng xã hội và tại các sự kiện và hội nghị. > Chúng tôi đang tìm kiếm người đóng góp để giúp dịch Dify sang các ngôn ngữ khác ngoài tiếng Trung hoặc tiếng Anh. 
Nếu bạn quan tâm đến việc giúp đỡ, vui lòng xem [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) để biết thêm thông tin và để lại bình luận cho chúng tôi trong kênh `global-users` của [Máy chủ Cộng đồng Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi. @@ -190,4 +190,4 @@ Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure De ## Giấy phép -Kho lưu trữ này có sẵn theo [Giấy phép Mã nguồn Mở Dify](LICENSE), về cơ bản là Apache 2.0 với một vài hạn chế bổ sung. +Kho lưu trữ này có sẵn theo [Giấy phép Mã nguồn Mở Dify](../LICENSE), về cơ bản là Apache 2.0 với một vài hạn chế bổ sung. From 0e4f19eee01393da757b583c87f0701c2009b2de Mon Sep 17 00:00:00 2001 From: Timo <57227498+EchterTimo@users.noreply.github.com> Date: Sat, 27 Sep 2025 15:03:05 +0200 Subject: [PATCH 052/126] Fix ChatClient.audio_to_text files keyword to make it work (#26317) Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- sdks/python-client/dify_client/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdks/python-client/dify_client/client.py b/sdks/python-client/dify_client/client.py index 791cb98a1b..201391eae9 100644 --- a/sdks/python-client/dify_client/client.py +++ b/sdks/python-client/dify_client/client.py @@ -139,9 +139,9 @@ class ChatClient(DifyClient): data = {"user": user} return self._send_request("DELETE", f"/conversations/{conversation_id}", data) - def audio_to_text(self, audio_file: dict, user: str): + def audio_to_text(self, audio_file: IO[bytes] | tuple, user: str): data = {"user": user} - files = {"audio_file": audio_file} + files = {"file": audio_file} return self._send_request_with_files("POST", "/audio-to-text", data, files) From 043ec46c331b4ad82d73916f7a9491ecc8d877fb Mon Sep 17 00:00:00 2001 From: "Junyan Qin (Chin)" <rockchinq@gmail.com> Date: Sun, 28 Sep 2025 10:26:11 +0800 Subject: [PATCH 053/126] perf: distribute concurrent plugin auto upgrade tasks (#26282) --- api/schedule/check_upgradable_plugin_task.py | 33 ++++++++++++++------ 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/api/schedule/check_upgradable_plugin_task.py b/api/schedule/check_upgradable_plugin_task.py index 08a5cfce79..a9ad27b059 100644 --- a/api/schedule/check_upgradable_plugin_task.py +++ b/api/schedule/check_upgradable_plugin_task.py @@ -1,3 +1,4 @@ +import math import time import click @@ -8,6 +9,7 @@ from models.account import TenantPluginAutoUpgradeStrategy from tasks.process_tenant_plugin_autoupgrade_check_task import process_tenant_plugin_autoupgrade_check_task AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60 # 15 minutes +MAX_CONCURRENT_CHECK_TASKS = 20 @app.celery.task(queue="plugin") @@ -30,15 +32,28 @@ def check_upgradable_plugin_task(): .all() ) - for strategy in strategies: - process_tenant_plugin_autoupgrade_check_task.delay( - strategy.tenant_id, - strategy.strategy_setting, - strategy.upgrade_time_of_day, - strategy.upgrade_mode, - strategy.exclude_plugins, - strategy.include_plugins, - ) + total_strategies = len(strategies) + click.echo(click.style(f"Total strategies: {total_strategies}", fg="green")) + + batch_chunk_count = math.ceil( + total_strategies / MAX_CONCURRENT_CHECK_TASKS + ) # make sure all strategies are checked in this interval + batch_interval_time = (AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL / batch_chunk_count) if batch_chunk_count > 0 else 0 + + for i in range(0, total_strategies, 
MAX_CONCURRENT_CHECK_TASKS):
+        batch_strategies = strategies[i : i + MAX_CONCURRENT_CHECK_TASKS]
+        for strategy in batch_strategies:
+            process_tenant_plugin_autoupgrade_check_task.delay(
+                strategy.tenant_id,
+                strategy.strategy_setting,
+                strategy.upgrade_time_of_day,
+                strategy.upgrade_mode,
+                strategy.exclude_plugins,
+                strategy.include_plugins,
+            )
+
+        if batch_interval_time > 0.0001:  # skip sleeps shorter than 0.1 ms
+            time.sleep(batch_interval_time)
 
     end_at = time.perf_counter()
     click.echo(

From 244c132656c50bd7e8f88ac049ee8b6499477188 Mon Sep 17 00:00:00 2001
From: goofy <38034027+goofy-z@users.noreply.github.com>
Date: Sun, 28 Sep 2025 10:44:20 +0800
Subject: [PATCH 054/126] fix compatibility problem caused by tool node
 attribute 'tool_node_version' judgment error (#26274)

---
 api/core/workflow/nodes/agent/agent_node.py | 2 +-
 api/core/workflow/nodes/tool/tool_node.py   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/api/core/workflow/nodes/agent/agent_node.py b/api/core/workflow/nodes/agent/agent_node.py
index ec05805879..a01686a4b8 100644
--- a/api/core/workflow/nodes/agent/agent_node.py
+++ b/api/core/workflow/nodes/agent/agent_node.py
@@ -288,7 +288,7 @@ class AgentNode(Node):
         # But for backward compatibility with historical data
         # this version field judgment is still preserved here.
         runtime_variable_pool: VariablePool | None = None
-        if node_data.version != "1" or node_data.tool_node_version != "1":
+        if node_data.version != "1" or node_data.tool_node_version is not None:
             runtime_variable_pool = variable_pool
         tool_runtime = ToolManager.get_agent_tool_runtime(
             self.tenant_id, self.app_id, entity, self.invoke_from, runtime_variable_pool
         )
diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py
index 5f2abcd378..ce1a879ff1 100644
--- a/api/core/workflow/nodes/tool/tool_node.py
+++ b/api/core/workflow/nodes/tool/tool_node.py
@@ -79,7 +79,7 @@ class ToolNode(Node):
         # But for backward compatibility with historical data
         # this version field judgment is still preserved here.
variable_pool: VariablePool | None = None - if node_data.version != "1" or node_data.tool_node_version != "1": + if node_data.version != "1" or node_data.tool_node_version is not None: variable_pool = self.graph_runtime_state.variable_pool tool_runtime = ToolManager.get_workflow_tool_runtime( self.tenant_id, self.app_id, self._node_id, self._node_data, self.invoke_from, variable_pool From 095c56a646943bb3eeaa6aa091d3dccb1e0b7edb Mon Sep 17 00:00:00 2001 From: -LAN- <laipz8200@outlook.com> Date: Sun, 28 Sep 2025 13:37:06 +0800 Subject: [PATCH 055/126] refactor(router): apply ns.route style (#26339) --- api/controllers/console/__init__.py | 122 +++--------------- api/controllers/console/app/app_import.py | 5 + .../console/auth/data_source_bearer_auth.py | 10 +- .../console/auth/email_register.py | 10 +- .../console/auth/forgot_password.py | 5 - api/controllers/console/auth/login.py | 16 +-- api/controllers/console/auth/oauth_server.py | 12 +- api/controllers/console/billing/billing.py | 8 +- api/controllers/console/billing/compliance.py | 6 +- .../console/datasets/data_source.py | 26 ++-- .../console/datasets/datasets_document.py | 27 +--- .../console/datasets/datasets_segments.py | 42 +++--- api/controllers/console/datasets/metadata.py | 14 +- .../datasets/rag_pipeline/datasource_auth.py | 61 ++------- .../datasource_content_preview.py | 9 +- .../datasets/rag_pipeline/rag_pipeline.py | 24 +--- api/controllers/console/explore/audio.py | 10 ++ api/controllers/console/explore/completion.py | 18 +++ .../console/explore/conversation.py | 22 ++++ api/controllers/console/explore/message.py | 18 +++ api/controllers/console/explore/workflow.py | 4 + api/controllers/console/files.py | 5 + api/controllers/console/remote_files.py | 4 + api/controllers/console/spec.py | 7 +- 24 files changed, 182 insertions(+), 303 deletions(-) diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py index ee02ff3937..621f5066e4 100644 --- a/api/controllers/console/__init__.py +++ b/api/controllers/console/__init__.py @@ -1,31 +1,10 @@ +from importlib import import_module + from flask import Blueprint from flask_restx import Namespace from libs.external_api import ExternalApi -from .app.app_import import AppImportApi, AppImportCheckDependenciesApi, AppImportConfirmApi -from .explore.audio import ChatAudioApi, ChatTextApi -from .explore.completion import ChatApi, ChatStopApi, CompletionApi, CompletionStopApi -from .explore.conversation import ( - ConversationApi, - ConversationListApi, - ConversationPinApi, - ConversationRenameApi, - ConversationUnPinApi, -) -from .explore.message import ( - MessageFeedbackApi, - MessageListApi, - MessageMoreLikeThisApi, - MessageSuggestedQuestionApi, -) -from .explore.workflow import ( - InstalledAppWorkflowRunApi, - InstalledAppWorkflowTaskStopApi, -) -from .files import FileApi, FilePreviewApi, FileSupportTypeApi -from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi - bp = Blueprint("console", __name__, url_prefix="/console/api") api = ExternalApi( @@ -35,23 +14,23 @@ api = ExternalApi( description="Console management APIs for app configuration, monitoring, and administration", ) -# Create namespace console_ns = Namespace("console", description="Console management API operations", path="/") -# File -api.add_resource(FileApi, "/files/upload") -api.add_resource(FilePreviewApi, "/files/<uuid:file_id>/preview") -api.add_resource(FileSupportTypeApi, "/files/support-type") +RESOURCE_MODULES = ( + "controllers.console.app.app_import", + 
"controllers.console.explore.audio", + "controllers.console.explore.completion", + "controllers.console.explore.conversation", + "controllers.console.explore.message", + "controllers.console.explore.workflow", + "controllers.console.files", + "controllers.console.remote_files", +) -# Remote files -api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>") -api.add_resource(RemoteFileUploadApi, "/remote-files/upload") - -# Import App -api.add_resource(AppImportApi, "/apps/imports") -api.add_resource(AppImportConfirmApi, "/apps/imports/<string:import_id>/confirm") -api.add_resource(AppImportCheckDependenciesApi, "/apps/imports/<string:app_id>/check-dependencies") +for module_name in RESOURCE_MODULES: + import_module(module_name) +# Ensure resource modules are imported so route decorators are evaluated. # Import other controllers from . import ( admin, @@ -150,77 +129,6 @@ from .workspace import ( workspace, ) -# Explore Audio -api.add_resource(ChatAudioApi, "/installed-apps/<uuid:installed_app_id>/audio-to-text", endpoint="installed_app_audio") -api.add_resource(ChatTextApi, "/installed-apps/<uuid:installed_app_id>/text-to-audio", endpoint="installed_app_text") - -# Explore Completion -api.add_resource( - CompletionApi, "/installed-apps/<uuid:installed_app_id>/completion-messages", endpoint="installed_app_completion" -) -api.add_resource( - CompletionStopApi, - "/installed-apps/<uuid:installed_app_id>/completion-messages/<string:task_id>/stop", - endpoint="installed_app_stop_completion", -) -api.add_resource( - ChatApi, "/installed-apps/<uuid:installed_app_id>/chat-messages", endpoint="installed_app_chat_completion" -) -api.add_resource( - ChatStopApi, - "/installed-apps/<uuid:installed_app_id>/chat-messages/<string:task_id>/stop", - endpoint="installed_app_stop_chat_completion", -) - -# Explore Conversation -api.add_resource( - ConversationRenameApi, - "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/name", - endpoint="installed_app_conversation_rename", -) -api.add_resource( - ConversationListApi, "/installed-apps/<uuid:installed_app_id>/conversations", endpoint="installed_app_conversations" -) -api.add_resource( - ConversationApi, - "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>", - endpoint="installed_app_conversation", -) -api.add_resource( - ConversationPinApi, - "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/pin", - endpoint="installed_app_conversation_pin", -) -api.add_resource( - ConversationUnPinApi, - "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/unpin", - endpoint="installed_app_conversation_unpin", -) - - -# Explore Message -api.add_resource(MessageListApi, "/installed-apps/<uuid:installed_app_id>/messages", endpoint="installed_app_messages") -api.add_resource( - MessageFeedbackApi, - "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/feedbacks", - endpoint="installed_app_message_feedback", -) -api.add_resource( - MessageMoreLikeThisApi, - "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/more-like-this", - endpoint="installed_app_more_like_this", -) -api.add_resource( - MessageSuggestedQuestionApi, - "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/suggested-questions", - endpoint="installed_app_suggested_question", -) -# Explore Workflow -api.add_resource(InstalledAppWorkflowRunApi, "/installed-apps/<uuid:installed_app_id>/workflows/run") -api.add_resource( - InstalledAppWorkflowTaskStopApi, 
"/installed-apps/<uuid:installed_app_id>/workflows/tasks/<string:task_id>/stop" -) - api.add_namespace(console_ns) __all__ = [ diff --git a/api/controllers/console/app/app_import.py b/api/controllers/console/app/app_import.py index aee93a8814..c14f597c25 100644 --- a/api/controllers/console/app/app_import.py +++ b/api/controllers/console/app/app_import.py @@ -20,7 +20,10 @@ from services.app_dsl_service import AppDslService, ImportStatus from services.enterprise.enterprise_service import EnterpriseService from services.feature_service import FeatureService +from .. import console_ns + +@console_ns.route("/apps/imports") class AppImportApi(Resource): @setup_required @login_required @@ -74,6 +77,7 @@ class AppImportApi(Resource): return result.model_dump(mode="json"), 200 +@console_ns.route("/apps/imports/<string:import_id>/confirm") class AppImportConfirmApi(Resource): @setup_required @login_required @@ -98,6 +102,7 @@ class AppImportConfirmApi(Resource): return result.model_dump(mode="json"), 200 +@console_ns.route("/apps/imports/<string:app_id>/check-dependencies") class AppImportCheckDependenciesApi(Resource): @setup_required @login_required diff --git a/api/controllers/console/auth/data_source_bearer_auth.py b/api/controllers/console/auth/data_source_bearer_auth.py index 796e6916cc..207303b212 100644 --- a/api/controllers/console/auth/data_source_bearer_auth.py +++ b/api/controllers/console/auth/data_source_bearer_auth.py @@ -2,7 +2,7 @@ from flask_login import current_user from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.auth.error import ApiKeyAuthFailedError from libs.login import login_required from services.auth.api_key_auth_service import ApiKeyAuthService @@ -10,6 +10,7 @@ from services.auth.api_key_auth_service import ApiKeyAuthService from ..wraps import account_initialization_required, setup_required +@console_ns.route("/api-key-auth/data-source") class ApiKeyAuthDataSource(Resource): @setup_required @login_required @@ -33,6 +34,7 @@ class ApiKeyAuthDataSource(Resource): return {"sources": []} +@console_ns.route("/api-key-auth/data-source/binding") class ApiKeyAuthDataSourceBinding(Resource): @setup_required @login_required @@ -54,6 +56,7 @@ class ApiKeyAuthDataSourceBinding(Resource): return {"result": "success"}, 200 +@console_ns.route("/api-key-auth/data-source/<uuid:binding_id>") class ApiKeyAuthDataSourceBindingDelete(Resource): @setup_required @login_required @@ -66,8 +69,3 @@ class ApiKeyAuthDataSourceBindingDelete(Resource): ApiKeyAuthService.delete_provider_auth(current_user.current_tenant_id, binding_id) return {"result": "success"}, 204 - - -api.add_resource(ApiKeyAuthDataSource, "/api-key-auth/data-source") -api.add_resource(ApiKeyAuthDataSourceBinding, "/api-key-auth/data-source/binding") -api.add_resource(ApiKeyAuthDataSourceBindingDelete, "/api-key-auth/data-source/<uuid:binding_id>") diff --git a/api/controllers/console/auth/email_register.py b/api/controllers/console/auth/email_register.py index 91de19a78a..d3613d9183 100644 --- a/api/controllers/console/auth/email_register.py +++ b/api/controllers/console/auth/email_register.py @@ -5,7 +5,7 @@ from sqlalchemy.orm import Session from configs import dify_config from constants.languages import languages -from controllers.console import api +from controllers.console import console_ns from controllers.console.auth.error import ( EmailAlreadyInUseError, EmailCodeError, @@ 
-25,6 +25,7 @@ from services.billing_service import BillingService from services.errors.account import AccountNotFoundError, AccountRegisterError +@console_ns.route("/email-register/send-email") class EmailRegisterSendEmailApi(Resource): @setup_required @email_password_login_enabled @@ -52,6 +53,7 @@ class EmailRegisterSendEmailApi(Resource): return {"result": "success", "data": token} +@console_ns.route("/email-register/validity") class EmailRegisterCheckApi(Resource): @setup_required @email_password_login_enabled @@ -92,6 +94,7 @@ class EmailRegisterCheckApi(Resource): return {"is_valid": True, "email": token_data.get("email"), "token": new_token} +@console_ns.route("/email-register") class EmailRegisterResetApi(Resource): @setup_required @email_password_login_enabled @@ -148,8 +151,3 @@ class EmailRegisterResetApi(Resource): raise AccountInFreezeError() return account - - -api.add_resource(EmailRegisterSendEmailApi, "/email-register/send-email") -api.add_resource(EmailRegisterCheckApi, "/email-register/validity") -api.add_resource(EmailRegisterResetApi, "/email-register") diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py index 36ccb1d562..704bcf8fb8 100644 --- a/api/controllers/console/auth/forgot_password.py +++ b/api/controllers/console/auth/forgot_password.py @@ -221,8 +221,3 @@ class ForgotPasswordResetApi(Resource): TenantService.create_tenant_member(tenant, account, role="owner") account.current_tenant = tenant tenant_was_created.send(tenant) - - -api.add_resource(ForgotPasswordSendEmailApi, "/forgot-password") -api.add_resource(ForgotPasswordCheckApi, "/forgot-password/validity") -api.add_resource(ForgotPasswordResetApi, "/forgot-password/resets") diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index 3b35ab3c23..ba614aa828 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -7,7 +7,7 @@ from flask_restx import Resource, reqparse import services from configs import dify_config from constants.languages import languages -from controllers.console import api +from controllers.console import console_ns from controllers.console.auth.error import ( AuthenticationFailedError, EmailCodeError, @@ -34,6 +34,7 @@ from services.errors.workspace import WorkSpaceNotAllowedCreateError, Workspaces from services.feature_service import FeatureService +@console_ns.route("/login") class LoginApi(Resource): """Resource for user login.""" @@ -91,6 +92,7 @@ class LoginApi(Resource): return {"result": "success", "data": token_pair.model_dump()} +@console_ns.route("/logout") class LogoutApi(Resource): @setup_required def get(self): @@ -102,6 +104,7 @@ class LogoutApi(Resource): return {"result": "success"} +@console_ns.route("/reset-password") class ResetPasswordSendEmailApi(Resource): @setup_required @email_password_login_enabled @@ -130,6 +133,7 @@ class ResetPasswordSendEmailApi(Resource): return {"result": "success", "data": token} +@console_ns.route("/email-code-login") class EmailCodeLoginSendEmailApi(Resource): @setup_required def post(self): @@ -162,6 +166,7 @@ class EmailCodeLoginSendEmailApi(Resource): return {"result": "success", "data": token} +@console_ns.route("/email-code-login/validity") class EmailCodeLoginApi(Resource): @setup_required def post(self): @@ -218,6 +223,7 @@ class EmailCodeLoginApi(Resource): return {"result": "success", "data": token_pair.model_dump()} +@console_ns.route("/refresh-token") class RefreshTokenApi(Resource): def 
post(self): parser = reqparse.RequestParser() @@ -229,11 +235,3 @@ class RefreshTokenApi(Resource): return {"result": "success", "data": new_token_pair.model_dump()} except Exception as e: return {"result": "fail", "data": str(e)}, 401 - - -api.add_resource(LoginApi, "/login") -api.add_resource(LogoutApi, "/logout") -api.add_resource(EmailCodeLoginSendEmailApi, "/email-code-login") -api.add_resource(EmailCodeLoginApi, "/email-code-login/validity") -api.add_resource(ResetPasswordSendEmailApi, "/reset-password") -api.add_resource(RefreshTokenApi, "/refresh-token") diff --git a/api/controllers/console/auth/oauth_server.py b/api/controllers/console/auth/oauth_server.py index a54c1443f8..46281860ae 100644 --- a/api/controllers/console/auth/oauth_server.py +++ b/api/controllers/console/auth/oauth_server.py @@ -14,7 +14,7 @@ from models.account import Account from models.model import OAuthProviderApp from services.oauth_server import OAUTH_ACCESS_TOKEN_EXPIRES_IN, OAuthGrantType, OAuthServerService -from .. import api +from .. import console_ns P = ParamSpec("P") R = TypeVar("R") @@ -86,6 +86,7 @@ def oauth_server_access_token_required(view: Callable[Concatenate[T, OAuthProvid return decorated +@console_ns.route("/oauth/provider") class OAuthServerAppApi(Resource): @setup_required @oauth_server_client_id_required @@ -108,6 +109,7 @@ class OAuthServerAppApi(Resource): ) +@console_ns.route("/oauth/provider/authorize") class OAuthServerUserAuthorizeApi(Resource): @setup_required @login_required @@ -125,6 +127,7 @@ class OAuthServerUserAuthorizeApi(Resource): ) +@console_ns.route("/oauth/provider/token") class OAuthServerUserTokenApi(Resource): @setup_required @oauth_server_client_id_required @@ -180,6 +183,7 @@ class OAuthServerUserTokenApi(Resource): ) +@console_ns.route("/oauth/provider/account") class OAuthServerUserAccountApi(Resource): @setup_required @oauth_server_client_id_required @@ -194,9 +198,3 @@ class OAuthServerUserAccountApi(Resource): "timezone": account.timezone, } ) - - -api.add_resource(OAuthServerAppApi, "/oauth/provider") -api.add_resource(OAuthServerUserAuthorizeApi, "/oauth/provider/authorize") -api.add_resource(OAuthServerUserTokenApi, "/oauth/provider/token") -api.add_resource(OAuthServerUserAccountApi, "/oauth/provider/account") diff --git a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py index 39fc7dec6b..fa89f45122 100644 --- a/api/controllers/console/billing/billing.py +++ b/api/controllers/console/billing/billing.py @@ -1,12 +1,13 @@ from flask_restx import Resource, reqparse -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required from libs.login import current_user, login_required from models.model import Account from services.billing_service import BillingService +@console_ns.route("/billing/subscription") class Subscription(Resource): @setup_required @login_required @@ -26,6 +27,7 @@ class Subscription(Resource): ) +@console_ns.route("/billing/invoices") class Invoices(Resource): @setup_required @login_required @@ -36,7 +38,3 @@ class Invoices(Resource): BillingService.is_tenant_owner_or_admin(current_user) assert current_user.current_tenant_id is not None return BillingService.get_invoices(current_user.email, current_user.current_tenant_id) - - -api.add_resource(Subscription, "/billing/subscription") -api.add_resource(Invoices, "/billing/invoices") diff --git 
a/api/controllers/console/billing/compliance.py b/api/controllers/console/billing/compliance.py index 4bc073f679..e489b48c82 100644 --- a/api/controllers/console/billing/compliance.py +++ b/api/controllers/console/billing/compliance.py @@ -6,10 +6,11 @@ from libs.helper import extract_remote_ip from libs.login import login_required from services.billing_service import BillingService -from .. import api +from .. import console_ns from ..wraps import account_initialization_required, only_edition_cloud, setup_required +@console_ns.route("/compliance/download") class ComplianceApi(Resource): @setup_required @login_required @@ -30,6 +31,3 @@ class ComplianceApi(Resource): ip=ip_address, device_info=device_info, ) - - -api.add_resource(ComplianceApi, "/compliance/download") diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py index 3a9530af84..370e0c0d14 100644 --- a/api/controllers/console/datasets/data_source.py +++ b/api/controllers/console/datasets/data_source.py @@ -9,7 +9,7 @@ from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import NotFound -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, setup_required from core.datasource.entities.datasource_entities import DatasourceProviderType, OnlineDocumentPagesMessage from core.datasource.online_document.online_document_plugin import OnlineDocumentDatasourcePlugin @@ -27,6 +27,10 @@ from services.datasource_provider_service import DatasourceProviderService from tasks.document_indexing_sync_task import document_indexing_sync_task +@console_ns.route( + "/data-source/integrates", + "/data-source/integrates/<uuid:binding_id>/<string:action>", +) class DataSourceApi(Resource): @setup_required @login_required @@ -109,6 +113,7 @@ class DataSourceApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/notion/pre-import/pages") class DataSourceNotionListApi(Resource): @setup_required @login_required @@ -196,6 +201,10 @@ class DataSourceNotionListApi(Resource): return {"notion_info": {**workspace_info, "pages": pages}}, 200 +@console_ns.route( + "/notion/workspaces/<uuid:workspace_id>/pages/<uuid:page_id>/<string:page_type>/preview", + "/datasets/notion-indexing-estimate", +) class DataSourceNotionApi(Resource): @setup_required @login_required @@ -269,6 +278,7 @@ class DataSourceNotionApi(Resource): return response.model_dump(), 200 +@console_ns.route("/datasets/<uuid:dataset_id>/notion/sync") class DataSourceNotionDatasetSyncApi(Resource): @setup_required @login_required @@ -285,6 +295,7 @@ class DataSourceNotionDatasetSyncApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/notion/sync") class DataSourceNotionDocumentSyncApi(Resource): @setup_required @login_required @@ -301,16 +312,3 @@ class DataSourceNotionDocumentSyncApi(Resource): raise NotFound("Document not found.") document_indexing_sync_task.delay(dataset_id_str, document_id_str) return {"result": "success"}, 200 - - -api.add_resource(DataSourceApi, "/data-source/integrates", "/data-source/integrates/<uuid:binding_id>/<string:action>") -api.add_resource(DataSourceNotionListApi, "/notion/pre-import/pages") -api.add_resource( - DataSourceNotionApi, - "/notion/workspaces/<uuid:workspace_id>/pages/<uuid:page_id>/<string:page_type>/preview", - "/datasets/notion-indexing-estimate", -) 
-api.add_resource(DataSourceNotionDatasetSyncApi, "/datasets/<uuid:dataset_id>/notion/sync") -api.add_resource( - DataSourceNotionDocumentSyncApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/notion/sync" -) diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index e6f5daa87b..6aaede0fb3 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -1114,6 +1114,7 @@ class WebsiteDocumentSyncApi(DocumentResource): return {"result": "success"}, 200 +@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/pipeline-execution-log") class DocumentPipelineExecutionLogApi(DocumentResource): @setup_required @login_required @@ -1147,29 +1148,3 @@ class DocumentPipelineExecutionLogApi(DocumentResource): "input_data": log.input_data, "datasource_node_id": log.datasource_node_id, }, 200 - - -api.add_resource(GetProcessRuleApi, "/datasets/process-rule") -api.add_resource(DatasetDocumentListApi, "/datasets/<uuid:dataset_id>/documents") -api.add_resource(DatasetInitApi, "/datasets/init") -api.add_resource( - DocumentIndexingEstimateApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-estimate" -) -api.add_resource(DocumentBatchIndexingEstimateApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-estimate") -api.add_resource(DocumentBatchIndexingStatusApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-status") -api.add_resource(DocumentIndexingStatusApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status") -api.add_resource(DocumentApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>") -api.add_resource( - DocumentProcessingApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/<string:action>" -) -api.add_resource(DocumentMetadataApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata") -api.add_resource(DocumentStatusApi, "/datasets/<uuid:dataset_id>/documents/status/<string:action>/batch") -api.add_resource(DocumentPauseApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/pause") -api.add_resource(DocumentRecoverApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/resume") -api.add_resource(DocumentRetryApi, "/datasets/<uuid:dataset_id>/retry") -api.add_resource(DocumentRenameApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/rename") - -api.add_resource(WebsiteDocumentSyncApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/website-sync") -api.add_resource( - DocumentPipelineExecutionLogApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/pipeline-execution-log" -) diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py index 463fd2d7ec..ba552821d2 100644 --- a/api/controllers/console/datasets/datasets_segments.py +++ b/api/controllers/console/datasets/datasets_segments.py @@ -7,7 +7,7 @@ from sqlalchemy import select from werkzeug.exceptions import Forbidden, NotFound import services -from controllers.console import api +from controllers.console import console_ns from controllers.console.app.error import ProviderNotInitializeError from controllers.console.datasets.error import ( ChildChunkDeleteIndexError, @@ -37,6 +37,7 @@ from services.errors.chunk import ChildChunkIndexingError as ChildChunkIndexingS from tasks.batch_create_segment_to_index_task import 
batch_create_segment_to_index_task +@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments") class DatasetDocumentSegmentListApi(Resource): @setup_required @login_required @@ -139,6 +140,7 @@ class DatasetDocumentSegmentListApi(Resource): return {"result": "success"}, 204 +@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment/<string:action>") class DatasetDocumentSegmentApi(Resource): @setup_required @login_required @@ -193,6 +195,7 @@ class DatasetDocumentSegmentApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment") class DatasetDocumentSegmentAddApi(Resource): @setup_required @login_required @@ -244,6 +247,7 @@ class DatasetDocumentSegmentAddApi(Resource): return {"data": marshal(segment, segment_fields), "doc_form": document.doc_form}, 200 +@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>") class DatasetDocumentSegmentUpdateApi(Resource): @setup_required @login_required @@ -345,6 +349,10 @@ class DatasetDocumentSegmentUpdateApi(Resource): return {"result": "success"}, 204 +@console_ns.route( + "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/batch_import", + "/datasets/batch_import_status/<uuid:job_id>", +) class DatasetDocumentSegmentBatchImportApi(Resource): @setup_required @login_required @@ -393,7 +401,9 @@ class DatasetDocumentSegmentBatchImportApi(Resource): @setup_required @login_required @account_initialization_required - def get(self, job_id): + def get(self, job_id=None, dataset_id=None, document_id=None): + if job_id is None: + raise NotFound("The job does not exist.") job_id = str(job_id) indexing_cache_key = f"segment_batch_import_{job_id}" cache_result = redis_client.get(indexing_cache_key) @@ -403,6 +413,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource): return {"job_id": job_id, "job_status": cache_result.decode()}, 200 +@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>/child_chunks") class ChildChunkAddApi(Resource): @setup_required @login_required @@ -553,6 +564,9 @@ class ChildChunkAddApi(Resource): return {"data": marshal(child_chunks, child_chunk_fields)}, 200 +@console_ns.route( + "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>/child_chunks/<uuid:child_chunk_id>" +) class ChildChunkUpdateApi(Resource): @setup_required @login_required @@ -666,27 +680,3 @@ class ChildChunkUpdateApi(Resource): except ChildChunkIndexingServiceError as e: raise ChildChunkIndexingError(str(e)) return {"data": marshal(child_chunk, child_chunk_fields)}, 200 - - -api.add_resource(DatasetDocumentSegmentListApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments") -api.add_resource( - DatasetDocumentSegmentApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment/<string:action>" -) -api.add_resource(DatasetDocumentSegmentAddApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment") -api.add_resource( - DatasetDocumentSegmentUpdateApi, - "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>", -) -api.add_resource( - DatasetDocumentSegmentBatchImportApi, - "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/batch_import", - "/datasets/batch_import_status/<uuid:job_id>", -) -api.add_resource( - ChildChunkAddApi, - 
"/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>/child_chunks", -) -api.add_resource( - ChildChunkUpdateApi, - "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>/child_chunks/<uuid:child_chunk_id>", -) diff --git a/api/controllers/console/datasets/metadata.py b/api/controllers/console/datasets/metadata.py index 21ab5e4fe1..53dc80eaa5 100644 --- a/api/controllers/console/datasets/metadata.py +++ b/api/controllers/console/datasets/metadata.py @@ -4,7 +4,7 @@ from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import NotFound -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required from fields.dataset_fields import dataset_metadata_fields from libs.login import login_required @@ -16,6 +16,7 @@ from services.entities.knowledge_entities.knowledge_entities import ( from services.metadata_service import MetadataService +@console_ns.route("/datasets/<uuid:dataset_id>/metadata") class DatasetMetadataCreateApi(Resource): @setup_required @login_required @@ -50,6 +51,7 @@ class DatasetMetadataCreateApi(Resource): return MetadataService.get_dataset_metadatas(dataset), 200 +@console_ns.route("/datasets/<uuid:dataset_id>/metadata/<uuid:metadata_id>") class DatasetMetadataApi(Resource): @setup_required @login_required @@ -87,6 +89,7 @@ class DatasetMetadataApi(Resource): return {"result": "success"}, 204 +@console_ns.route("/datasets/metadata/built-in") class DatasetMetadataBuiltInFieldApi(Resource): @setup_required @login_required @@ -97,6 +100,7 @@ class DatasetMetadataBuiltInFieldApi(Resource): return {"fields": built_in_fields}, 200 +@console_ns.route("/datasets/<uuid:dataset_id>/metadata/built-in/<string:action>") class DatasetMetadataBuiltInFieldActionApi(Resource): @setup_required @login_required @@ -116,6 +120,7 @@ class DatasetMetadataBuiltInFieldActionApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/datasets/<uuid:dataset_id>/documents/metadata") class DocumentMetadataEditApi(Resource): @setup_required @login_required @@ -136,10 +141,3 @@ class DocumentMetadataEditApi(Resource): MetadataService.update_documents_metadata(dataset, metadata_args) return {"result": "success"}, 200 - - -api.add_resource(DatasetMetadataCreateApi, "/datasets/<uuid:dataset_id>/metadata") -api.add_resource(DatasetMetadataApi, "/datasets/<uuid:dataset_id>/metadata/<uuid:metadata_id>") -api.add_resource(DatasetMetadataBuiltInFieldApi, "/datasets/metadata/built-in") -api.add_resource(DatasetMetadataBuiltInFieldActionApi, "/datasets/<uuid:dataset_id>/metadata/built-in/<string:action>") -api.add_resource(DocumentMetadataEditApi, "/datasets/<uuid:dataset_id>/documents/metadata") diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py index 1a845cf326..154d9e646b 100644 --- a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py +++ b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py @@ -5,7 +5,7 @@ from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden, NotFound from configs import dify_config -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import ( account_initialization_required, setup_required, @@ -19,6 +19,7 @@ from 
services.datasource_provider_service import DatasourceProviderService from services.plugin.oauth_service import OAuthProxyService +@console_ns.route("/oauth/plugin/<path:provider_id>/datasource/get-authorization-url") class DatasourcePluginOAuthAuthorizationUrl(Resource): @setup_required @login_required @@ -68,6 +69,7 @@ class DatasourcePluginOAuthAuthorizationUrl(Resource): return response +@console_ns.route("/oauth/plugin/<path:provider_id>/datasource/callback") class DatasourceOAuthCallback(Resource): @setup_required def get(self, provider_id: str): @@ -123,6 +125,7 @@ class DatasourceOAuthCallback(Resource): return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback") +@console_ns.route("/auth/plugin/datasource/<path:provider_id>") class DatasourceAuth(Resource): @setup_required @login_required @@ -165,6 +168,7 @@ class DatasourceAuth(Resource): return {"result": datasources}, 200 +@console_ns.route("/auth/plugin/datasource/<path:provider_id>/delete") class DatasourceAuthDeleteApi(Resource): @setup_required @login_required @@ -188,6 +192,7 @@ class DatasourceAuthDeleteApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/auth/plugin/datasource/<path:provider_id>/update") class DatasourceAuthUpdateApi(Resource): @setup_required @login_required @@ -213,6 +218,7 @@ class DatasourceAuthUpdateApi(Resource): return {"result": "success"}, 201 +@console_ns.route("/auth/plugin/datasource/list") class DatasourceAuthListApi(Resource): @setup_required @login_required @@ -225,6 +231,7 @@ class DatasourceAuthListApi(Resource): return {"result": jsonable_encoder(datasources)}, 200 +@console_ns.route("/auth/plugin/datasource/default-list") class DatasourceHardCodeAuthListApi(Resource): @setup_required @login_required @@ -237,6 +244,7 @@ class DatasourceHardCodeAuthListApi(Resource): return {"result": jsonable_encoder(datasources)}, 200 +@console_ns.route("/auth/plugin/datasource/<path:provider_id>/custom-client") class DatasourceAuthOauthCustomClient(Resource): @setup_required @login_required @@ -271,6 +279,7 @@ class DatasourceAuthOauthCustomClient(Resource): return {"result": "success"}, 200 +@console_ns.route("/auth/plugin/datasource/<path:provider_id>/default") class DatasourceAuthDefaultApi(Resource): @setup_required @login_required @@ -291,6 +300,7 @@ class DatasourceAuthDefaultApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/auth/plugin/datasource/<path:provider_id>/update-name") class DatasourceUpdateProviderNameApi(Resource): @setup_required @login_required @@ -311,52 +321,3 @@ class DatasourceUpdateProviderNameApi(Resource): credential_id=args["credential_id"], ) return {"result": "success"}, 200 - - -api.add_resource( - DatasourcePluginOAuthAuthorizationUrl, - "/oauth/plugin/<path:provider_id>/datasource/get-authorization-url", -) -api.add_resource( - DatasourceOAuthCallback, - "/oauth/plugin/<path:provider_id>/datasource/callback", -) -api.add_resource( - DatasourceAuth, - "/auth/plugin/datasource/<path:provider_id>", -) - -api.add_resource( - DatasourceAuthUpdateApi, - "/auth/plugin/datasource/<path:provider_id>/update", -) - -api.add_resource( - DatasourceAuthDeleteApi, - "/auth/plugin/datasource/<path:provider_id>/delete", -) - -api.add_resource( - DatasourceAuthListApi, - "/auth/plugin/datasource/list", -) - -api.add_resource( - DatasourceHardCodeAuthListApi, - "/auth/plugin/datasource/default-list", -) - -api.add_resource( - DatasourceAuthOauthCustomClient, - "/auth/plugin/datasource/<path:provider_id>/custom-client", -) - -api.add_resource( - 
DatasourceAuthDefaultApi, - "/auth/plugin/datasource/<path:provider_id>/default", -) - -api.add_resource( - DatasourceUpdateProviderNameApi, - "/auth/plugin/datasource/<path:provider_id>/update-name", -) diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py index 05fa681a33..6c04cc877a 100644 --- a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py +++ b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py @@ -4,7 +4,7 @@ from flask_restx import ( # type: ignore ) from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import account_initialization_required, setup_required from libs.login import current_user, login_required @@ -13,6 +13,7 @@ from models.dataset import Pipeline from services.rag_pipeline.rag_pipeline import RagPipelineService +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/preview") class DataSourceContentPreviewApi(Resource): @setup_required @login_required @@ -49,9 +50,3 @@ class DataSourceContentPreviewApi(Resource): credential_id=args.get("credential_id"), ) return preview_content, 200 - - -api.add_resource( - DataSourceContentPreviewApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/preview", -) diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py index f04b0e04c3..6641911243 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py @@ -4,7 +4,7 @@ from flask import request from flask_restx import Resource, reqparse from sqlalchemy.orm import Session -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import ( account_initialization_required, enterprise_license_required, @@ -32,6 +32,7 @@ def _validate_description_length(description): return description +@console_ns.route("/rag/pipeline/templates") class PipelineTemplateListApi(Resource): @setup_required @login_required @@ -45,6 +46,7 @@ class PipelineTemplateListApi(Resource): return pipeline_templates, 200 +@console_ns.route("/rag/pipeline/templates/<string:template_id>") class PipelineTemplateDetailApi(Resource): @setup_required @login_required @@ -57,6 +59,7 @@ class PipelineTemplateDetailApi(Resource): return pipeline_template, 200 +@console_ns.route("/rag/pipeline/customized/templates/<string:template_id>") class CustomizedPipelineTemplateApi(Resource): @setup_required @login_required @@ -112,6 +115,7 @@ class CustomizedPipelineTemplateApi(Resource): return {"data": template.yaml_content}, 200 +@console_ns.route("/rag/pipelines/<string:pipeline_id>/customized/publish") class PublishCustomizedPipelineTemplateApi(Resource): @setup_required @login_required @@ -144,21 +148,3 @@ class PublishCustomizedPipelineTemplateApi(Resource): rag_pipeline_service = RagPipelineService() rag_pipeline_service.publish_customized_pipeline_template(pipeline_id, args) return {"result": "success"} - - -api.add_resource( - PipelineTemplateListApi, - "/rag/pipeline/templates", -) -api.add_resource( - PipelineTemplateDetailApi, - "/rag/pipeline/templates/<string:template_id>", -) 
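(Editorial aside.) One subtlety of the multi-rule form shows up in the `DatasetDocumentSegmentBatchImportApi` hunk earlier in this commit: when a single Resource is bound to URL rules with different path parameters, Flask passes only the parameters matched by whichever rule fired, so every parameter must be optional and guarded for `None`. A hedged sketch of that shape, with invented route paths and names:

```python
from flask import Flask
from flask_restx import Api, Namespace, Resource
from werkzeug.exceptions import NotFound

app = Flask(__name__)
api = Api(app)
ns = Namespace("console", path="/console/api")
api.add_namespace(ns)


@ns.route(
    "/jobs/<string:job_id>/status",        # rule 1: only job_id is matched
    "/datasets/<string:dataset_id>/jobs",  # rule 2: only dataset_id is matched
)
class JobStatusApi(Resource):
    def get(self, job_id: str | None = None, dataset_id: str | None = None):
        # Mirrors the None guard added in the patch: a request via rule 2
        # arrives without job_id, so reject it instead of crashing on str(None).
        if job_id is None:
            raise NotFound("The job does not exist.")
        return {"job_id": job_id, "job_status": "running"}, 200
```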
-api.add_resource( - CustomizedPipelineTemplateApi, - "/rag/pipeline/customized/templates/<string:template_id>", -) -api.add_resource( - PublishCustomizedPipelineTemplateApi, - "/rag/pipelines/<string:pipeline_id>/customized/publish", -) diff --git a/api/controllers/console/explore/audio.py b/api/controllers/console/explore/audio.py index dc275fe18a..7c20fb49d8 100644 --- a/api/controllers/console/explore/audio.py +++ b/api/controllers/console/explore/audio.py @@ -26,9 +26,15 @@ from services.errors.audio import ( UnsupportedAudioTypeServiceError, ) +from .. import console_ns + logger = logging.getLogger(__name__) +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/audio-to-text", + endpoint="installed_app_audio", +) class ChatAudioApi(InstalledAppResource): def post(self, installed_app): app_model = installed_app.app @@ -65,6 +71,10 @@ class ChatAudioApi(InstalledAppResource): raise InternalServerError() +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/text-to-audio", + endpoint="installed_app_text", +) class ChatTextApi(InstalledAppResource): def post(self, installed_app): from flask_restx import reqparse diff --git a/api/controllers/console/explore/completion.py b/api/controllers/console/explore/completion.py index a99708b7cd..1102b815eb 100644 --- a/api/controllers/console/explore/completion.py +++ b/api/controllers/console/explore/completion.py @@ -33,10 +33,16 @@ from models.model import AppMode from services.app_generate_service import AppGenerateService from services.errors.llm import InvokeRateLimitError +from .. import console_ns + logger = logging.getLogger(__name__) # define completion api for user +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/completion-messages", + endpoint="installed_app_completion", +) class CompletionApi(InstalledAppResource): def post(self, installed_app): app_model = installed_app.app @@ -87,6 +93,10 @@ class CompletionApi(InstalledAppResource): raise InternalServerError() +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/completion-messages/<string:task_id>/stop", + endpoint="installed_app_stop_completion", +) class CompletionStopApi(InstalledAppResource): def post(self, installed_app, task_id): app_model = installed_app.app @@ -100,6 +110,10 @@ class CompletionStopApi(InstalledAppResource): return {"result": "success"}, 200 +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/chat-messages", + endpoint="installed_app_chat_completion", +) class ChatApi(InstalledAppResource): def post(self, installed_app): app_model = installed_app.app @@ -153,6 +167,10 @@ class ChatApi(InstalledAppResource): raise InternalServerError() +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/chat-messages/<string:task_id>/stop", + endpoint="installed_app_stop_chat_completion", +) class ChatStopApi(InstalledAppResource): def post(self, installed_app, task_id): app_model = installed_app.app diff --git a/api/controllers/console/explore/conversation.py b/api/controllers/console/explore/conversation.py index 1aef9c544d..feabea2524 100644 --- a/api/controllers/console/explore/conversation.py +++ b/api/controllers/console/explore/conversation.py @@ -16,7 +16,13 @@ from services.conversation_service import ConversationService from services.errors.conversation import ConversationNotExistsError, LastConversationNotExistsError from services.web_conversation_service import WebConversationService +from .. 
import console_ns + +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/conversations", + endpoint="installed_app_conversations", +) class ConversationListApi(InstalledAppResource): @marshal_with(conversation_infinite_scroll_pagination_fields) def get(self, installed_app): @@ -52,6 +58,10 @@ class ConversationListApi(InstalledAppResource): raise NotFound("Last Conversation Not Exists.") +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>", + endpoint="installed_app_conversation", +) class ConversationApi(InstalledAppResource): def delete(self, installed_app, c_id): app_model = installed_app.app @@ -70,6 +80,10 @@ class ConversationApi(InstalledAppResource): return {"result": "success"}, 204 +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/name", + endpoint="installed_app_conversation_rename", +) class ConversationRenameApi(InstalledAppResource): @marshal_with(simple_conversation_fields) def post(self, installed_app, c_id): @@ -95,6 +109,10 @@ class ConversationRenameApi(InstalledAppResource): raise NotFound("Conversation Not Exists.") +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/pin", + endpoint="installed_app_conversation_pin", +) class ConversationPinApi(InstalledAppResource): def patch(self, installed_app, c_id): app_model = installed_app.app @@ -114,6 +132,10 @@ class ConversationPinApi(InstalledAppResource): return {"result": "success"} +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/unpin", + endpoint="installed_app_conversation_unpin", +) class ConversationUnPinApi(InstalledAppResource): def patch(self, installed_app, c_id): app_model = installed_app.app diff --git a/api/controllers/console/explore/message.py b/api/controllers/console/explore/message.py index c46c1c1f4f..b045e47846 100644 --- a/api/controllers/console/explore/message.py +++ b/api/controllers/console/explore/message.py @@ -36,9 +36,15 @@ from services.errors.message import ( ) from services.message_service import MessageService +from .. 
import console_ns + logger = logging.getLogger(__name__) +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/messages", + endpoint="installed_app_messages", +) class MessageListApi(InstalledAppResource): @marshal_with(message_infinite_scroll_pagination_fields) def get(self, installed_app): @@ -66,6 +72,10 @@ class MessageListApi(InstalledAppResource): raise NotFound("First Message Not Exists.") +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/feedbacks", + endpoint="installed_app_message_feedback", +) class MessageFeedbackApi(InstalledAppResource): def post(self, installed_app, message_id): app_model = installed_app.app @@ -93,6 +103,10 @@ class MessageFeedbackApi(InstalledAppResource): return {"result": "success"} +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/more-like-this", + endpoint="installed_app_more_like_this", +) class MessageMoreLikeThisApi(InstalledAppResource): def get(self, installed_app, message_id): app_model = installed_app.app @@ -139,6 +153,10 @@ class MessageMoreLikeThisApi(InstalledAppResource): raise InternalServerError() +@console_ns.route( + "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/suggested-questions", + endpoint="installed_app_suggested_question", +) class MessageSuggestedQuestionApi(InstalledAppResource): def get(self, installed_app, message_id): app_model = installed_app.app diff --git a/api/controllers/console/explore/workflow.py b/api/controllers/console/explore/workflow.py index 61e0f1b36a..e32f2814eb 100644 --- a/api/controllers/console/explore/workflow.py +++ b/api/controllers/console/explore/workflow.py @@ -27,9 +27,12 @@ from models.model import AppMode, InstalledApp from services.app_generate_service import AppGenerateService from services.errors.llm import InvokeRateLimitError +from .. import console_ns + logger = logging.getLogger(__name__) +@console_ns.route("/installed-apps/<uuid:installed_app_id>/workflows/run") class InstalledAppWorkflowRunApi(InstalledAppResource): def post(self, installed_app: InstalledApp): """ @@ -70,6 +73,7 @@ class InstalledAppWorkflowRunApi(InstalledAppResource): raise InternalServerError() +@console_ns.route("/installed-apps/<uuid:installed_app_id>/workflows/tasks/<string:task_id>/stop") class InstalledAppWorkflowTaskStopApi(InstalledAppResource): def post(self, installed_app: InstalledApp, task_id: str): """ diff --git a/api/controllers/console/files.py b/api/controllers/console/files.py index 105f802878..34f186e2f0 100644 --- a/api/controllers/console/files.py +++ b/api/controllers/console/files.py @@ -26,9 +26,12 @@ from libs.login import login_required from models import Account from services.file_service import FileService +from . 
import console_ns + PREVIEW_WORDS_LIMIT = 3000 +@console_ns.route("/files/upload") class FileApi(Resource): @setup_required @login_required @@ -88,6 +91,7 @@ class FileApi(Resource): return upload_file, 201 +@console_ns.route("/files/<uuid:file_id>/preview") class FilePreviewApi(Resource): @setup_required @login_required @@ -98,6 +102,7 @@ class FilePreviewApi(Resource): return {"content": text} +@console_ns.route("/files/support-type") class FileSupportTypeApi(Resource): @setup_required @login_required diff --git a/api/controllers/console/remote_files.py b/api/controllers/console/remote_files.py index dd4f34b9bd..7aaf807fb0 100644 --- a/api/controllers/console/remote_files.py +++ b/api/controllers/console/remote_files.py @@ -19,7 +19,10 @@ from fields.file_fields import file_fields_with_signed_url, remote_file_info_fie from models.account import Account from services.file_service import FileService +from . import console_ns + +@console_ns.route("/remote-files/<path:url>") class RemoteFileInfoApi(Resource): @marshal_with(remote_file_info_fields) def get(self, url): @@ -35,6 +38,7 @@ class RemoteFileInfoApi(Resource): } +@console_ns.route("/remote-files/upload") class RemoteFileUploadApi(Resource): @marshal_with(file_fields_with_signed_url) def post(self): diff --git a/api/controllers/console/spec.py b/api/controllers/console/spec.py index ca54715fe0..1795e2d172 100644 --- a/api/controllers/console/spec.py +++ b/api/controllers/console/spec.py @@ -2,7 +2,6 @@ import logging from flask_restx import Resource -from controllers.console import api from controllers.console.wraps import ( account_initialization_required, setup_required, @@ -10,9 +9,12 @@ from controllers.console.wraps import ( from core.schemas.schema_manager import SchemaManager from libs.login import login_required +from . 
import console_ns + logger = logging.getLogger(__name__) +@console_ns.route("/spec/schema-definitions") class SpecSchemaDefinitionsApi(Resource): @setup_required @login_required @@ -30,6 +32,3 @@ class SpecSchemaDefinitionsApi(Resource): logger.exception("Failed to get schema definitions from local registry") # Return empty array as fallback return [], 200 - - -api.add_resource(SpecSchemaDefinitionsApi, "/spec/schema-definitions") From 87c41c88a3f58844bc5e473c580113761f8aa572 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=9D=9E=E6=B3=95=E6=93=8D=E4=BD=9C?= <hjlarry@163.com> Date: Sun, 28 Sep 2025 13:37:28 +0800 Subject: [PATCH 056/126] fix: some display-related issues (#26335) --- .../builtin_tool/providers/code/_assets/icon.svg | 2 +- .../components/panel/input-field/index.tsx | 2 +- web/i18n/zh-Hant/dataset-pipeline.ts | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/api/core/tools/builtin_tool/providers/code/_assets/icon.svg b/api/core/tools/builtin_tool/providers/code/_assets/icon.svg index b986ed9426..154726a081 100644 --- a/api/core/tools/builtin_tool/providers/code/_assets/icon.svg +++ b/api/core/tools/builtin_tool/providers/code/_assets/icon.svg @@ -1 +1 @@ -<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg" class="w-3.5 h-3.5" data-icon="Code" aria-hidden="true"><g id="icons/code"><path id="Vector (Stroke)" fill-rule="evenodd" clip-rule="evenodd" d="M8.32593 1.69675C8.67754 1.78466 8.89132 2.14096 8.80342 2.49257L6.47009 11.8259C6.38218 12.1775 6.02588 12.3913 5.67427 12.3034C5.32265 12.2155 5.10887 11.8592 5.19678 11.5076L7.53011 2.17424C7.61801 1.82263 7.97431 1.60885 8.32593 1.69675ZM3.96414 4.20273C4.22042 4.45901 4.22042 4.87453 3.96413 5.13081L2.45578 6.63914C2.45577 6.63915 2.45578 6.63914 2.45578 6.63914C2.25645 6.83851 2.25643 7.16168 2.45575 7.36103C2.45574 7.36103 2.45576 7.36104 2.45575 7.36103L3.96413 8.86936C4.22041 9.12564 4.22042 9.54115 3.96414 9.79744C3.70787 10.0537 3.29235 10.0537 3.03607 9.79745L1.52769 8.28913C0.815811 7.57721 0.815803 6.42302 1.52766 5.7111L3.03606 4.20272C3.29234 3.94644 3.70786 3.94644 3.96414 4.20273ZM10.0361 4.20273C10.2923 3.94644 10.7078 3.94644 10.9641 4.20272L12.4725 5.71108C13.1843 6.423 13.1844 7.57717 12.4725 8.28909L10.9641 9.79745C10.7078 10.0537 10.2923 10.0537 10.036 9.79744C9.77977 9.54115 9.77978 9.12564 10.0361 8.86936L11.5444 7.36107C11.7437 7.16172 11.7438 6.83854 11.5444 6.63917C11.5444 6.63915 11.5445 6.63918 11.5444 6.63917L10.0361 5.13081C9.77978 4.87453 9.77978 4.45901 10.0361 4.20273Z" fill="currentColor"></path></g></svg> \ No newline at end of file +<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg" class="w-3.5 h-3.5" data-icon="Code" aria-hidden="true"><g id="icons/code"><path id="Vector (Stroke)" fill-rule="evenodd" clip-rule="evenodd" d="M8.32593 1.69675C8.67754 1.78466 8.89132 2.14096 8.80342 2.49257L6.47009 11.8259C6.38218 12.1775 6.02588 12.3913 5.67427 12.3034C5.32265 12.2155 5.10887 11.8592 5.19678 11.5076L7.53011 2.17424C7.61801 1.82263 7.97431 1.60885 8.32593 1.69675ZM3.96414 4.20273C4.22042 4.45901 4.22042 4.87453 3.96413 5.13081L2.45578 6.63914C2.45577 6.63915 2.45578 6.63914 2.45578 6.63914C2.25645 6.83851 2.25643 7.16168 2.45575 7.36103C2.45574 7.36103 2.45576 7.36104 2.45575 7.36103L3.96413 8.86936C4.22041 9.12564 4.22042 9.54115 3.96414 9.79744C3.70787 10.0537 3.29235 10.0537 3.03607 9.79745L1.52769 8.28913C0.815811 7.57721 0.815803 6.42302 1.52766 5.7111L3.03606 4.20272C3.29234 
3.94644 3.70786 3.94644 3.96414 4.20273ZM10.0361 4.20273C10.2923 3.94644 10.7078 3.94644 10.9641 4.20272L12.4725 5.71108C13.1843 6.423 13.1844 7.57717 12.4725 8.28909L10.9641 9.79745C10.7078 10.0537 10.2923 10.0537 10.036 9.79744C9.77977 9.54115 9.77978 9.12564 10.0361 8.86936L11.5444 7.36107C11.7437 7.16172 11.7438 6.83854 11.5444 6.63917C11.5444 6.63915 11.5445 6.63918 11.5444 6.63917L10.0361 5.13081C9.77978 4.87453 9.77978 4.45901 10.0361 4.20273Z" fill="#2e90fa"></path></g></svg> \ No newline at end of file diff --git a/web/app/components/rag-pipeline/components/panel/input-field/index.tsx b/web/app/components/rag-pipeline/components/panel/input-field/index.tsx index eec18bb471..da00433f30 100644 --- a/web/app/components/rag-pipeline/components/panel/input-field/index.tsx +++ b/web/app/components/rag-pipeline/components/panel/input-field/index.tsx @@ -102,7 +102,7 @@ const InputFieldPanel = () => { return ( <div className='mr-1 flex h-full w-[400px] flex-col rounded-2xl border-y-[0.5px] border-l-[0.5px] border-components-panel-border bg-components-panel-bg-alt shadow-xl shadow-shadow-shadow-5'> <div className='flex shrink-0 items-center p-4 pb-0'> - <div className='system-xl-semibold grow'> + <div className='system-xl-semibold grow text-text-primary'> {t('datasetPipeline.inputFieldPanel.title')} </div> <Button diff --git a/web/i18n/zh-Hant/dataset-pipeline.ts b/web/i18n/zh-Hant/dataset-pipeline.ts index afaff92dcb..588bf4cc3f 100644 --- a/web/i18n/zh-Hant/dataset-pipeline.ts +++ b/web/i18n/zh-Hant/dataset-pipeline.ts @@ -15,17 +15,17 @@ const translation = { customized: '客製化', }, operations: { - convert: '化', + convert: '轉換', saveAndProcess: '儲存和處理', - choose: '選', + choose: '選擇', useTemplate: '使用此知識管道', dataSource: '資料來源', editInfo: '編輯資訊', - process: '過程', + process: '處理', backToDataSource: '返回資料來源', - exportPipeline: '匯出管線', - details: '詳', - preview: '預展', + exportPipeline: '匯出知識流水線', + details: '詳情', + preview: '預覽', }, deletePipeline: { title: '您確定要刪除此管線範本嗎?', From 36406cd62f181e37061a8e58e890d224129ce17f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=9D=9E=E6=B3=95=E6=93=8D=E4=BD=9C?= <hjlarry@163.com> Date: Sun, 28 Sep 2025 13:37:42 +0800 Subject: [PATCH 057/126] chore: time from now i18n support (#26328) --- .../datasets/list/dataset-card/index.tsx | 12 ++--- .../rag-pipeline-header/publisher/popup.tsx | 2 +- web/app/components/workflow/hooks/index.ts | 1 - .../hooks/use-format-time-from-now.ts | 12 ----- web/hooks/use-format-time-from-now.ts | 47 ++++++++++++++++++- 5 files changed, 50 insertions(+), 24 deletions(-) delete mode 100644 web/app/components/workflow/hooks/use-format-time-from-now.ts diff --git a/web/app/components/datasets/list/dataset-card/index.tsx b/web/app/components/datasets/list/dataset-card/index.tsx index f95e7b2199..db8ee0226d 100644 --- a/web/app/components/datasets/list/dataset-card/index.tsx +++ b/web/app/components/datasets/list/dataset-card/index.tsx @@ -11,9 +11,6 @@ import cn from '@/utils/classnames' import { useHover } from 'ahooks' import { RiFileTextFill, RiMoreFill, RiRobot2Fill } from '@remixicon/react' import Tooltip from '@/app/components/base/tooltip' -import { useGetLanguage } from '@/context/i18n' -import dayjs from 'dayjs' -import relativeTime from 'dayjs/plugin/relativeTime' import { checkIsUsedInApp, deleteDataset } from '@/service/datasets' import RenameDatasetModal from '../../rename-modal' import Confirm from '@/app/components/base/confirm' @@ -24,7 +21,7 @@ import AppIcon from '@/app/components/base/app-icon' import CornerLabel 
from '@/app/components/base/corner-label' import { DOC_FORM_ICON_WITH_BG, DOC_FORM_TEXT } from '@/models/datasets' import { useExportPipelineDSL } from '@/service/use-pipeline' -dayjs.extend(relativeTime) +import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now' const EXTERNAL_PROVIDER = 'external' @@ -87,10 +84,7 @@ const DatasetCard = ({ return t('dataset.partialEnabled', { count: dataset.document_count, num: availableDocCount }) }, [t, dataset.document_count, dataset.total_available_documents]) - const language = useGetLanguage() - const formatTimeFromNow = useCallback((time: number) => { - return dayjs(time * 1_000).locale(language === 'zh_Hans' ? 'zh-cn' : language.replace('_', '-')).fromNow() - }, [language]) + const { formatTimeFromNow } = useFormatTimeFromNow() const openRenameModal = useCallback(() => { setShowRenameModal(true) @@ -269,7 +263,7 @@ const DatasetCard = ({ </Tooltip> )} <span className='system-xs-regular text-divider-deep'>/</span> - <span className='system-xs-regular'>{`${t('dataset.updated')} ${formatTimeFromNow(dataset.updated_at)}`}</span> + <span className='system-xs-regular'>{`${t('dataset.updated')} ${formatTimeFromNow(dataset.updated_at * 1000)}`}</span> </div> <div className='absolute right-2 top-2 z-[5] hidden group-hover:block'> <CustomPopover diff --git a/web/app/components/rag-pipeline/components/rag-pipeline-header/publisher/popup.tsx b/web/app/components/rag-pipeline/components/rag-pipeline-header/publisher/popup.tsx index c50f027e99..42ca643cb0 100644 --- a/web/app/components/rag-pipeline/components/rag-pipeline-header/publisher/popup.tsx +++ b/web/app/components/rag-pipeline/components/rag-pipeline-header/publisher/popup.tsx @@ -21,7 +21,6 @@ import { import Button from '@/app/components/base/button' import { useChecklistBeforePublish, - useFormatTimeFromNow, } from '@/app/components/workflow/hooks' import Divider from '@/app/components/base/divider' import { getKeyboardKeyCodeBySystem, getKeyboardKeyNameBySystem } from '@/app/components/workflow/utils' @@ -47,6 +46,7 @@ import { SparklesSoft } from '@/app/components/base/icons/src/public/common' import { useModalContextSelector } from '@/context/modal-context' import Link from 'next/link' import { useDatasetApiAccessUrl } from '@/hooks/use-api-access-url' +import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now' const PUBLISH_SHORTCUT = ['ctrl', '⇧', 'P'] diff --git a/web/app/components/workflow/hooks/index.ts b/web/app/components/workflow/hooks/index.ts index 49ec8c0072..1dbba6b0e2 100644 --- a/web/app/components/workflow/hooks/index.ts +++ b/web/app/components/workflow/hooks/index.ts @@ -22,4 +22,3 @@ export * from './use-DSL' export * from './use-inspect-vars-crud' export * from './use-set-workflow-vars-with-value' export * from './use-workflow-search' -export * from './use-format-time-from-now' diff --git a/web/app/components/workflow/hooks/use-format-time-from-now.ts b/web/app/components/workflow/hooks/use-format-time-from-now.ts deleted file mode 100644 index b2b521557f..0000000000 --- a/web/app/components/workflow/hooks/use-format-time-from-now.ts +++ /dev/null @@ -1,12 +0,0 @@ -import dayjs from 'dayjs' -import { useCallback } from 'react' -import { useI18N } from '@/context/i18n' - -export const useFormatTimeFromNow = () => { - const { locale } = useI18N() - const formatTimeFromNow = useCallback((time: number) => { - return dayjs(time).locale(locale === 'zh-Hans' ? 
'zh-cn' : locale).fromNow() - }, [locale]) - - return { formatTimeFromNow } -} diff --git a/web/hooks/use-format-time-from-now.ts b/web/hooks/use-format-time-from-now.ts index 82704252ac..db3be93df2 100644 --- a/web/hooks/use-format-time-from-now.ts +++ b/web/hooks/use-format-time-from-now.ts @@ -2,14 +2,59 @@ import dayjs from 'dayjs' import relativeTime from 'dayjs/plugin/relativeTime' import { useCallback } from 'react' import { useI18N } from '@/context/i18n' +import type { Locale } from '@/i18n-config' +import 'dayjs/locale/de' +import 'dayjs/locale/es' +import 'dayjs/locale/fa' +import 'dayjs/locale/fr' +import 'dayjs/locale/hi' +import 'dayjs/locale/id' +import 'dayjs/locale/it' +import 'dayjs/locale/ja' +import 'dayjs/locale/ko' +import 'dayjs/locale/pl' +import 'dayjs/locale/pt-br' +import 'dayjs/locale/ro' +import 'dayjs/locale/ru' +import 'dayjs/locale/sl' +import 'dayjs/locale/th' +import 'dayjs/locale/tr' +import 'dayjs/locale/uk' +import 'dayjs/locale/vi' import 'dayjs/locale/zh-cn' +import 'dayjs/locale/zh-tw' dayjs.extend(relativeTime) +const localeMap: Record<Locale, string> = { + 'en-US': 'en', + 'zh-Hans': 'zh-cn', + 'zh-Hant': 'zh-tw', + 'pt-BR': 'pt-br', + 'es-ES': 'es', + 'fr-FR': 'fr', + 'de-DE': 'de', + 'ja-JP': 'ja', + 'ko-KR': 'ko', + 'ru-RU': 'ru', + 'it-IT': 'it', + 'th-TH': 'th', + 'id-ID': 'id', + 'uk-UA': 'uk', + 'vi-VN': 'vi', + 'ro-RO': 'ro', + 'pl-PL': 'pl', + 'hi-IN': 'hi', + 'tr-TR': 'tr', + 'fa-IR': 'fa', + 'sl-SI': 'sl', +} + export const useFormatTimeFromNow = () => { const { locale } = useI18N() const formatTimeFromNow = useCallback((time: number) => { - return dayjs(time).locale(locale === 'zh-Hans' ? 'zh-cn' : locale).fromNow() + const dayjsLocale = localeMap[locale] ?? 'en' + return dayjs(time).locale(dayjsLocale).fromNow() }, [locale]) return { formatTimeFromNow } From 272102c06da6243901f1acc1fb2b9e90aa48968c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=9D=9E=E6=B3=95=E6=93=8D=E4=BD=9C?= <hjlarry@163.com> Date: Sun, 28 Sep 2025 13:37:51 +0800 Subject: [PATCH 058/126] doc: fix graph engine readme (#26337) --- api/core/workflow/README.md | 6 +++--- api/core/workflow/graph_engine/layers/README.md | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/core/workflow/README.md b/api/core/workflow/README.md index bef19ba90b..72f5dbe1e2 100644 --- a/api/core/workflow/README.md +++ b/api/core/workflow/README.md @@ -60,8 +60,8 @@ Extensible middleware for cross-cutting concerns: ```python engine = GraphEngine(graph) -engine.add_layer(DebugLoggingLayer(level="INFO")) -engine.add_layer(ExecutionLimitsLayer(max_nodes=100)) +engine.layer(DebugLoggingLayer(level="INFO")) +engine.layer(ExecutionLimitsLayer(max_nodes=100)) ``` ### Event-Driven Architecture @@ -117,7 +117,7 @@ The codebase enforces strict layering via import-linter: 1. Create class inheriting from `Layer` base 1. Override lifecycle methods: `on_graph_start()`, `on_event()`, `on_graph_end()` -1. Add to engine via `engine.add_layer()` +1. 
Add to engine via `engine.layer()` ### Debugging Workflow Execution diff --git a/api/core/workflow/graph_engine/layers/README.md b/api/core/workflow/graph_engine/layers/README.md index 8ee35baec0..17845ee1f0 100644 --- a/api/core/workflow/graph_engine/layers/README.md +++ b/api/core/workflow/graph_engine/layers/README.md @@ -30,7 +30,7 @@ debug_layer = DebugLoggingLayer( ) engine = GraphEngine(graph) -engine.add_layer(debug_layer) +engine.layer(debug_layer) engine.run() ``` From beb1448441708b8f03254f886c115a202d02540e Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Sun, 28 Sep 2025 13:43:43 +0800 Subject: [PATCH 059/126] [Chore/Refactor] Add missing 'type' attribute on 'button' components (#26249) Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: asukaminato0721 <30024051+asukaminato0721@users.noreply.github.com> --- .../document-detail-navigation-fix.test.tsx | 2 +- web/app/components/app-sidebar/app-info.tsx | 2 +- .../app-sidebar/sidebar-animation-issues.spec.tsx | 4 ++-- .../components/app/annotation/header-opts/index.tsx | 8 ++++---- web/app/components/app/create-app-modal/index.tsx | 2 +- web/app/components/apps/app-card.tsx | 12 +++++++----- web/app/components/apps/new-app-card.tsx | 5 +++-- .../components/base/app-icon-picker/ImageInput.tsx | 2 +- web/app/components/base/app-icon-picker/index.tsx | 2 +- web/app/components/base/audio-btn/index.tsx | 2 +- .../components/base/audio-gallery/AudioPlayer.tsx | 2 +- web/app/components/base/chat/chat/content-switch.tsx | 4 ++-- .../base/date-and-time-picker/calendar/item.tsx | 2 +- .../base/date-and-time-picker/date-picker/header.tsx | 6 +++--- .../year-and-month-picker/header.tsx | 2 +- web/app/components/base/mermaid/index.tsx | 2 +- web/app/components/base/pagination/pagination.tsx | 4 ++-- web/app/components/base/select/locale-signin.tsx | 2 +- web/app/components/base/select/locale.tsx | 2 +- web/app/components/base/theme-selector.tsx | 6 +++--- web/app/components/base/toast/index.spec.tsx | 4 ++-- .../components/base/video-gallery/VideoPlayer.tsx | 6 +++--- .../pricing/plans/self-hosted-plan-item/button.tsx | 2 +- .../list/template-card/edit-pipeline-info.tsx | 2 +- web/app/components/datasets/create/website/index.tsx | 6 +++--- .../components/datasets/documents/detail/index.tsx | 2 +- .../datasets/documents/detail/metadata/index.tsx | 2 +- web/app/components/datasets/documents/operations.tsx | 2 +- web/app/components/develop/doc.tsx | 4 ++-- .../visual-editor/schema-node.tsx | 1 + web/app/components/workflow/run/tracing-panel.tsx | 2 +- web/service/demo/index.tsx | 2 +- 32 files changed, 56 insertions(+), 52 deletions(-) diff --git a/web/__tests__/document-detail-navigation-fix.test.tsx b/web/__tests__/document-detail-navigation-fix.test.tsx index 200ed09ea9..a358744998 100644 --- a/web/__tests__/document-detail-navigation-fix.test.tsx +++ b/web/__tests__/document-detail-navigation-fix.test.tsx @@ -54,7 +54,7 @@ const DocumentDetailWithFix = ({ datasetId, documentId }: { datasetId: string; d return ( <div data-testid="document-detail-fixed"> - <button data-testid="back-button-fixed" onClick={backToPrev}> + <button type="button" data-testid="back-button-fixed" onClick={backToPrev}> Back to Documents </button> <div data-testid="document-info"> diff --git a/web/app/components/app-sidebar/app-info.tsx b/web/app/components/app-sidebar/app-info.tsx index d22577c9ad..baf52946df 100644 --- a/web/app/components/app-sidebar/app-info.tsx +++ 
b/web/app/components/app-sidebar/app-info.tsx @@ -260,7 +260,7 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx return ( <div> {!onlyShowDetail && ( - <button + <button type="button" onClick={() => { if (isCurrentWorkspaceEditor) setOpen(v => !v) diff --git a/web/app/components/app-sidebar/sidebar-animation-issues.spec.tsx b/web/app/components/app-sidebar/sidebar-animation-issues.spec.tsx index 7c5a7ec21f..54dde5fbd4 100644 --- a/web/app/components/app-sidebar/sidebar-animation-issues.spec.tsx +++ b/web/app/components/app-sidebar/sidebar-animation-issues.spec.tsx @@ -51,7 +51,7 @@ const MockSidebarToggleButton = ({ expand, onToggle }: { expand: boolean; onTogg className="shrink-0 px-4 py-3" data-testid="toggle-section" > - <button + <button type="button" className='flex h-6 w-6 cursor-pointer items-center justify-center' onClick={onToggle} data-testid="toggle-button" @@ -66,7 +66,7 @@ const MockSidebarToggleButton = ({ expand, onToggle }: { expand: boolean; onTogg const MockAppInfo = ({ expand }: { expand: boolean }) => { return ( <div data-testid="app-info" data-expand={expand}> - <button className='block w-full'> + <button type="button" className='block w-full'> {/* Container with layout mode switching - reproduces issue #3 */} <div className={`flex rounded-lg ${expand ? 'flex-col gap-2 p-2 pb-2.5' : 'items-start justify-center gap-1 p-1'}`}> {/* Icon container with justify-between to flex-col switch - reproduces issue #3 */} diff --git a/web/app/components/app/annotation/header-opts/index.tsx b/web/app/components/app/annotation/header-opts/index.tsx index 8c0ae37c8e..024f75867c 100644 --- a/web/app/components/app/annotation/header-opts/index.tsx +++ b/web/app/components/app/annotation/header-opts/index.tsx @@ -100,7 +100,7 @@ const HeaderOptions: FC<Props> = ({ const Operations = () => { return ( <div className="w-full py-1"> - <button className='mx-1 flex h-9 w-[calc(100%_-_8px)] cursor-pointer items-center space-x-2 rounded-lg px-3 py-2 hover:bg-components-panel-on-panel-item-bg-hover disabled:opacity-50' onClick={() => { + <button type="button" className='mx-1 flex h-9 w-[calc(100%_-_8px)] cursor-pointer items-center space-x-2 rounded-lg px-3 py-2 hover:bg-components-panel-on-panel-item-bg-hover disabled:opacity-50' onClick={() => { setShowBulkImportModal(true) }}> <FilePlus02 className='h-4 w-4 text-text-tertiary' /> @@ -135,17 +135,17 @@ const HeaderOptions: FC<Props> = ({ ...list.map(item => [item.question, item.answer]), ]} > - <button disabled={annotationUnavailable} className='mx-1 flex h-9 w-[calc(100%_-_8px)] cursor-pointer items-center space-x-2 rounded-lg px-3 py-2 hover:bg-components-panel-on-panel-item-bg-hover disabled:opacity-50'> + <button type="button" disabled={annotationUnavailable} className='mx-1 flex h-9 w-[calc(100%_-_8px)] cursor-pointer items-center space-x-2 rounded-lg px-3 py-2 hover:bg-components-panel-on-panel-item-bg-hover disabled:opacity-50'> <span className='system-sm-regular grow text-left text-text-secondary'>CSV</span> </button> </CSVDownloader> - <button disabled={annotationUnavailable} className={cn('mx-1 flex h-9 w-[calc(100%_-_8px)] cursor-pointer items-center space-x-2 rounded-lg px-3 py-2 hover:bg-components-panel-on-panel-item-bg-hover disabled:opacity-50', '!border-0')} onClick={JSONLOutput}> + <button type="button" disabled={annotationUnavailable} className={cn('mx-1 flex h-9 w-[calc(100%_-_8px)] cursor-pointer items-center space-x-2 rounded-lg px-3 py-2 hover:bg-components-panel-on-panel-item-bg-hover 
disabled:opacity-50', '!border-0')} onClick={JSONLOutput}> <span className='system-sm-regular grow text-left text-text-secondary'>JSONL</span> </button> </MenuItems> </Transition> </Menu> - <button + <button type="button" onClick={handleClearAll} className='mx-1 flex h-9 w-[calc(100%_-_8px)] cursor-pointer items-center space-x-2 rounded-lg px-3 py-2 text-red-600 hover:bg-red-50 disabled:opacity-50' > diff --git a/web/app/components/app/create-app-modal/index.tsx b/web/app/components/app/create-app-modal/index.tsx index cd73874c2c..3a07e6e0a1 100644 --- a/web/app/components/app/create-app-modal/index.tsx +++ b/web/app/components/app/create-app-modal/index.tsx @@ -141,7 +141,7 @@ function CreateApp({ onClose, onSuccess, onCreateFromTemplate, defaultAppMode }: </div> <div> <div className='mb-2 flex items-center'> - <button + <button type="button" className='flex cursor-pointer items-center border-0 bg-transparent p-0' onClick={() => setIsAppTypeExpanded(!isAppTypeExpanded)} > diff --git a/web/app/components/apps/app-card.tsx b/web/app/components/apps/app-card.tsx index e96793ff72..cd3495e3c6 100644 --- a/web/app/components/apps/app-card.tsx +++ b/web/app/components/apps/app-card.tsx @@ -263,16 +263,17 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => { <span className='system-sm-regular text-text-secondary'>{t('app.editApp')}</span> </button> <Divider className="my-1" /> - <button className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickDuplicate}> + <button type="button" className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickDuplicate}> <span className='system-sm-regular text-text-secondary'>{t('app.duplicate')}</span> </button> - <button className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickExport}> + <button type="button" className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickExport}> <span className='system-sm-regular text-text-secondary'>{t('app.export')}</span> </button> {(app.mode === 'completion' || app.mode === 'chat') && ( <> <Divider className="my-1" /> <button + type="button" className='mx-1 flex h-8 cursor-pointer items-center rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickSwitch} > @@ -284,14 +285,14 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => { (!systemFeatures.webapp_auth.enabled) ? 
<> <Divider className="my-1" /> - <button className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickInstalledApp}> + <button type="button" className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickInstalledApp}> <span className='system-sm-regular text-text-secondary'>{t('app.openInExplore')}</span> </button> </> : !(isGettingUserCanAccessApp || !userCanAccessApp?.result) && ( <> <Divider className="my-1" /> - <button className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickInstalledApp}> + <button type="button" className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickInstalledApp}> <span className='system-sm-regular text-text-secondary'>{t('app.openInExplore')}</span> </button> </> @@ -300,13 +301,14 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => { <Divider className="my-1" /> { systemFeatures.webapp_auth.enabled && isCurrentWorkspaceEditor && <> - <button className='mx-1 flex h-8 cursor-pointer items-center rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickAccessControl}> + <button type="button" className='mx-1 flex h-8 cursor-pointer items-center rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickAccessControl}> <span className='text-sm leading-5 text-text-secondary'>{t('app.accessControl')}</span> </button> <Divider className='my-1' /> </> } <button + type="button" className='group mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 py-[6px] hover:bg-state-destructive-hover' onClick={onClickDelete} > diff --git a/web/app/components/apps/new-app-card.tsx b/web/app/components/apps/new-app-card.tsx index 6ceeb47982..7a10bc8527 100644 --- a/web/app/components/apps/new-app-card.tsx +++ b/web/app/components/apps/new-app-card.tsx @@ -59,15 +59,16 @@ const CreateAppCard = ({ > <div className='grow rounded-t-xl p-2'> <div className='px-6 pb-1 pt-2 text-xs font-medium leading-[18px] text-text-tertiary'>{t('app.createApp')}</div> - <button className='mb-1 flex w-full cursor-pointer items-center rounded-lg px-6 py-[7px] text-[13px] font-medium leading-[18px] text-text-tertiary hover:bg-state-base-hover hover:text-text-secondary' onClick={() => setShowNewAppModal(true)}> + <button type="button" className='mb-1 flex w-full cursor-pointer items-center rounded-lg px-6 py-[7px] text-[13px] font-medium leading-[18px] text-text-tertiary hover:bg-state-base-hover hover:text-text-secondary' onClick={() => setShowNewAppModal(true)}> <FilePlus01 className='mr-2 h-4 w-4 shrink-0' /> {t('app.newApp.startFromBlank')} </button> - <button className='flex w-full cursor-pointer items-center rounded-lg px-6 py-[7px] text-[13px] font-medium leading-[18px] text-text-tertiary hover:bg-state-base-hover hover:text-text-secondary' onClick={() => setShowNewAppTemplateDialog(true)}> + <button type="button" className='flex w-full cursor-pointer items-center rounded-lg px-6 py-[7px] text-[13px] font-medium leading-[18px] text-text-tertiary hover:bg-state-base-hover hover:text-text-secondary' onClick={() => setShowNewAppTemplateDialog(true)}> <FilePlus02 className='mr-2 h-4 w-4 shrink-0' /> {t('app.newApp.startFromTemplate')} </button> <button + type="button" onClick={() => setShowCreateFromDSLModal(true)} className='flex w-full cursor-pointer items-center rounded-lg px-6 py-[7px] text-[13px] font-medium leading-[18px] text-text-tertiary 
hover:bg-state-base-hover hover:text-text-secondary'> <FileArrow01 className='mr-2 h-4 w-4 shrink-0' /> diff --git a/web/app/components/base/app-icon-picker/ImageInput.tsx b/web/app/components/base/app-icon-picker/ImageInput.tsx index 8d9ca50763..a074c8afac 100644 --- a/web/app/components/base/app-icon-picker/ImageInput.tsx +++ b/web/app/components/base/app-icon-picker/ImageInput.tsx @@ -106,7 +106,7 @@ const ImageInput: FC<UploaderProps> = ({ <ImagePlus className="pointer-events-none mb-3 h-[30px] w-[30px]" /> <div className="mb-[2px] text-sm font-medium"> <span className="pointer-events-none">{t('common.imageInput.dropImageHere')} </span> - <button className="text-components-button-primary-bg" onClick={() => inputRef.current?.click()}>{t('common.imageInput.browse')}</button> + <button type="button" className="text-components-button-primary-bg" onClick={() => inputRef.current?.click()}>{t('common.imageInput.browse')}</button> <input ref={inputRef} type="file" className="hidden" onClick={e => ((e.target as HTMLInputElement).value = '')} diff --git a/web/app/components/base/app-icon-picker/index.tsx b/web/app/components/base/app-icon-picker/index.tsx index a8de07bf6b..3deb6a6c8f 100644 --- a/web/app/components/base/app-icon-picker/index.tsx +++ b/web/app/components/base/app-icon-picker/index.tsx @@ -117,7 +117,7 @@ const AppIconPicker: FC<AppIconPickerProps> = ({ {!DISABLE_UPLOAD_IMAGE_AS_ICON && <div className="w-full p-2 pb-0"> <div className='flex items-center justify-center gap-2 rounded-xl bg-background-body p-1 text-text-primary'> {tabs.map(tab => ( - <button + <button type="button" key={tab.key} className={cn( 'system-sm-medium flex h-8 flex-1 shrink-0 items-center justify-center rounded-lg p-2 text-text-tertiary', diff --git a/web/app/components/base/audio-btn/index.tsx b/web/app/components/base/audio-btn/index.tsx index 2a54a8ed73..d83a2beb91 100644 --- a/web/app/components/base/audio-btn/index.tsx +++ b/web/app/components/base/audio-btn/index.tsx @@ -85,7 +85,7 @@ const AudioBtn = ({ <Tooltip popupContent={tooltipContent} > - <button + <button type="button" disabled={audioState === 'loading'} className={`box-border flex h-6 w-6 cursor-pointer items-center justify-center ${isAudition ? 'p-0.5' : 'rounded-md bg-white p-0'}`} onClick={handleToggle} diff --git a/web/app/components/base/audio-gallery/AudioPlayer.tsx b/web/app/components/base/audio-gallery/AudioPlayer.tsx index 67ded638a1..cad7adac02 100644 --- a/web/app/components/base/audio-gallery/AudioPlayer.tsx +++ b/web/app/components/base/audio-gallery/AudioPlayer.tsx @@ -288,7 +288,7 @@ const AudioPlayer: React.FC<AudioPlayerProps> = ({ src }) => { return ( <div className='flex h-9 min-w-[240px] max-w-[420px] items-end gap-2 rounded-[10px] border border-components-panel-border-subtle bg-components-chat-input-audio-bg-alt p-2 shadow-xs backdrop-blur-sm'> <audio ref={audioRef} src={src} preload="auto"/> - <button className='inline-flex shrink-0 cursor-pointer items-center justify-center border-none text-text-accent transition-all hover:text-text-accent-secondary disabled:text-components-button-primary-bg-disabled' onClick={togglePlay} disabled={!isAudioAvailable}> + <button type="button" className='inline-flex shrink-0 cursor-pointer items-center justify-center border-none text-text-accent transition-all hover:text-text-accent-secondary disabled:text-components-button-primary-bg-disabled' onClick={togglePlay} disabled={!isAudioAvailable}> {isPlaying ? 
( <RiPauseCircleFill className='h-5 w-5' /> diff --git a/web/app/components/base/chat/chat/content-switch.tsx b/web/app/components/base/chat/chat/content-switch.tsx index cf428f4cb4..948c08186f 100644 --- a/web/app/components/base/chat/chat/content-switch.tsx +++ b/web/app/components/base/chat/chat/content-switch.tsx @@ -16,7 +16,7 @@ export default function ContentSwitch({ return ( count && count > 1 && currentIndex !== undefined && ( <div className="flex items-center justify-center pt-3.5 text-sm"> - <button + <button type="button" className={`${prevDisabled ? 'opacity-30' : 'opacity-100'}`} disabled={prevDisabled} onClick={() => !prevDisabled && switchSibling('prev')} @@ -26,7 +26,7 @@ export default function ContentSwitch({ <span className="px-2 text-xs text-text-primary"> {currentIndex + 1} / {count} </span> - <button + <button type="button" className={`${nextDisabled ? 'opacity-30' : 'opacity-100'}`} disabled={nextDisabled} onClick={() => !nextDisabled && switchSibling('next')} diff --git a/web/app/components/base/date-and-time-picker/calendar/item.tsx b/web/app/components/base/date-and-time-picker/calendar/item.tsx index 20e0b84aa4..1da8b9b3b5 100644 --- a/web/app/components/base/date-and-time-picker/calendar/item.tsx +++ b/web/app/components/base/date-and-time-picker/calendar/item.tsx @@ -13,7 +13,7 @@ const Item: FC<CalendarItemProps> = ({ const isToday = date.isSame(dayjs(), 'date') return ( - <button + <button type="button" onClick={() => onClick(date)} className={cn( 'system-sm-medium relative flex items-center justify-center rounded-lg px-1 py-2', diff --git a/web/app/components/base/date-and-time-picker/date-picker/header.tsx b/web/app/components/base/date-and-time-picker/date-picker/header.tsx index 2631cdb5bc..80b7110e50 100644 --- a/web/app/components/base/date-and-time-picker/date-picker/header.tsx +++ b/web/app/components/base/date-and-time-picker/date-picker/header.tsx @@ -14,7 +14,7 @@ const Header: FC<DatePickerHeaderProps> = ({ return ( <div className='mx-2 mt-2 flex items-center'> <div className='flex-1'> - <button + <button type="button" onClick={handleOpenYearMonthPicker} className='system-md-semibold flex items-center gap-x-0.5 rounded-lg px-2 py-1.5 text-text-primary hover:bg-state-base-hover' > @@ -22,13 +22,13 @@ const Header: FC<DatePickerHeaderProps> = ({ <RiArrowDownSLine className='h-4 w-4 text-text-tertiary' /> </button> </div> - <button + <button type="button" onClick={onClickPrevMonth} className='rounded-lg p-1.5 hover:bg-state-base-hover' > <RiArrowUpSLine className='h-[18px] w-[18px] text-text-secondary' /> </button> - <button + <button type="button" onClick={onClickNextMonth} className='rounded-lg p-1.5 hover:bg-state-base-hover' > diff --git a/web/app/components/base/date-and-time-picker/year-and-month-picker/header.tsx b/web/app/components/base/date-and-time-picker/year-and-month-picker/header.tsx index 63923e6f5c..86407d2326 100644 --- a/web/app/components/base/date-and-time-picker/year-and-month-picker/header.tsx +++ b/web/app/components/base/date-and-time-picker/year-and-month-picker/header.tsx @@ -13,7 +13,7 @@ const Header: FC<YearAndMonthPickerHeaderProps> = ({ return ( <div className='flex border-b-[0.5px] border-divider-regular p-2 pb-1'> {/* Year and Month */} - <button + <button type="button" onClick={onClick} className='system-md-semibold flex items-center gap-x-0.5 rounded-lg px-2 py-1.5 text-text-primary hover:bg-state-base-hover' > diff --git a/web/app/components/base/mermaid/index.tsx b/web/app/components/base/mermaid/index.tsx index 
c1deab6e09..9b324349f8 100644 --- a/web/app/components/base/mermaid/index.tsx +++ b/web/app/components/base/mermaid/index.tsx @@ -541,7 +541,7 @@ const Flowchart = (props: FlowchartProps) => { {svgString && ( <div className={themeClasses.mermaidDiv} style={{ objectFit: 'cover' }} onClick={handlePreviewClick}> <div className="absolute bottom-2 left-2 z-[100]"> - <button + <button type="button" onClick={(e) => { e.stopPropagation() toggleTheme() diff --git a/web/app/components/base/pagination/pagination.tsx b/web/app/components/base/pagination/pagination.tsx index 6b99dcf9c0..07ace7bcf2 100644 --- a/web/app/components/base/pagination/pagination.tsx +++ b/web/app/components/base/pagination/pagination.tsx @@ -30,7 +30,7 @@ export const PrevButton = ({ className, children, dataTestId, - as = <button />, + as = <button type="button" />, ...buttonProps }: ButtonProps) => { const pagination = React.useContext(PaginationContext) @@ -65,7 +65,7 @@ export const NextButton = ({ className, children, dataTestId, - as = <button />, + as = <button type="button" />, ...buttonProps }: ButtonProps) => { const pagination = React.useContext(PaginationContext) diff --git a/web/app/components/base/select/locale-signin.tsx b/web/app/components/base/select/locale-signin.tsx index 4ce6025edd..2d487c4be3 100644 --- a/web/app/components/base/select/locale-signin.tsx +++ b/web/app/components/base/select/locale-signin.tsx @@ -39,7 +39,7 @@ export default function LocaleSigninSelect({ <div className="max-h-96 overflow-y-auto px-1 py-1 [mask-image:linear-gradient(to_bottom,transparent_0px,black_8px,black_calc(100%-8px),transparent_100%)]"> {items.map((item) => { return <MenuItem key={item.value}> - <button + <button type="button" className={'group flex w-full items-center rounded-lg px-3 py-2 text-sm text-text-secondary data-[active]:bg-state-base-hover'} onClick={(evt) => { evt.preventDefault() diff --git a/web/app/components/base/select/locale.tsx b/web/app/components/base/select/locale.tsx index 8981f09ce3..cc5662f53b 100644 --- a/web/app/components/base/select/locale.tsx +++ b/web/app/components/base/select/locale.tsx @@ -39,7 +39,7 @@ export default function Select({ <div className="px-1 py-1 "> {items.map((item) => { return <MenuItem key={item.value}> - <button + <button type="button" className={'group flex w-full items-center rounded-lg px-3 py-2 text-sm text-text-secondary data-[active]:bg-state-base-hover'} onClick={(evt) => { evt.preventDefault() diff --git a/web/app/components/base/theme-selector.tsx b/web/app/components/base/theme-selector.tsx index 8dfe1d2602..c6978960be 100644 --- a/web/app/components/base/theme-selector.tsx +++ b/web/app/components/base/theme-selector.tsx @@ -54,7 +54,7 @@ export default function ThemeSelector() { </PortalToFollowElemTrigger> <PortalToFollowElemContent className='z-[1000]'> <div className='flex w-[144px] flex-col items-start rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur p-1 shadow-lg'> - <button + <button type="button" className='flex w-full items-center gap-1 rounded-lg px-2 py-1.5 text-text-secondary hover:bg-state-base-hover' onClick={() => handleThemeChange('light')} > @@ -66,7 +66,7 @@ export default function ThemeSelector() { <RiCheckLine className='h-4 w-4 text-text-accent' /> </div>} </button> - <button + <button type="button" className='flex w-full items-center gap-1 rounded-lg px-2 py-1.5 text-text-secondary hover:bg-state-base-hover' onClick={() => handleThemeChange('dark')} > @@ -78,7 +78,7 @@ export default function 
ThemeSelector() { <RiCheckLine className='h-4 w-4 text-text-accent' /> </div>} </button> - <button + <button type="button" className='flex w-full items-center gap-1 rounded-lg px-2 py-1.5 text-text-secondary hover:bg-state-base-hover' onClick={() => handleThemeChange('system')} > diff --git a/web/app/components/base/toast/index.spec.tsx b/web/app/components/base/toast/index.spec.tsx index 2dac8d27ff..97540cf5b1 100644 --- a/web/app/components/base/toast/index.spec.tsx +++ b/web/app/components/base/toast/index.spec.tsx @@ -13,10 +13,10 @@ const TestComponent = () => { return ( <div> - <button onClick={() => notify({ message: 'Notification message', type: 'info' })}> + <button type="button" onClick={() => notify({ message: 'Notification message', type: 'info' })}> Show Toast </button> - <button onClick={close}>Close Toast</button> + <button type="button" onClick={close}>Close Toast</button> </div> ) } diff --git a/web/app/components/base/video-gallery/VideoPlayer.tsx b/web/app/components/base/video-gallery/VideoPlayer.tsx index d7c86a1af9..c2fcd6ee8d 100644 --- a/web/app/components/base/video-gallery/VideoPlayer.tsx +++ b/web/app/components/base/video-gallery/VideoPlayer.tsx @@ -234,13 +234,13 @@ const VideoPlayer: React.FC<VideoPlayerProps> = ({ src }) => { </div> <div className={styles.controlsContent}> <div className={styles.leftControls}> - <button className={styles.playPauseButton} onClick={togglePlayPause}> + <button type="button" className={styles.playPauseButton} onClick={togglePlayPause}> {isPlaying ? <PauseIcon /> : <PlayIcon />} </button> {!isSmallSize && (<span className={styles.time}>{formatTime(currentTime)} / {formatTime(duration)}</span>)} </div> <div className={styles.rightControls}> - <button className={styles.muteButton} onClick={toggleMute}> + <button type="button" className={styles.muteButton} onClick={toggleMute}> {isMuted ? 
<UnmuteIcon /> : <MuteIcon />} </button> {!isSmallSize && ( @@ -264,7 +264,7 @@ const VideoPlayer: React.FC<VideoPlayerProps> = ({ src }) => { </div> </div> )} - <button className={styles.fullscreenButton} onClick={toggleFullscreen}> + <button type="button" className={styles.fullscreenButton} onClick={toggleFullscreen}> <FullscreenIcon /> </button> </div> diff --git a/web/app/components/billing/pricing/plans/self-hosted-plan-item/button.tsx b/web/app/components/billing/pricing/plans/self-hosted-plan-item/button.tsx index 5308490d79..ffa4dbcb65 100644 --- a/web/app/components/billing/pricing/plans/self-hosted-plan-item/button.tsx +++ b/web/app/components/billing/pricing/plans/self-hosted-plan-item/button.tsx @@ -31,7 +31,7 @@ const Button = ({ }, [theme]) return ( - <button + <button type="button" className={cn( 'system-xl-semibold flex items-center gap-x-2 py-3 pl-5 pr-4', BUTTON_CLASSNAME[plan], diff --git a/web/app/components/datasets/create-from-pipeline/list/template-card/edit-pipeline-info.tsx b/web/app/components/datasets/create-from-pipeline/list/template-card/edit-pipeline-info.tsx index 411611bcfc..86e0c0d1ce 100644 --- a/web/app/components/datasets/create-from-pipeline/list/template-card/edit-pipeline-info.tsx +++ b/web/app/components/datasets/create-from-pipeline/list/template-card/edit-pipeline-info.tsx @@ -99,7 +99,7 @@ const EditPipelineInfo = ({ {t('datasetPipeline.editPipelineInfo')} </span> </div> - <button + <button type="button" className='absolute right-5 top-5 flex size-8 items-center justify-center' onClick={onClose} > diff --git a/web/app/components/datasets/create/website/index.tsx b/web/app/components/datasets/create/website/index.tsx index 80d6b52315..7190ca3228 100644 --- a/web/app/components/datasets/create/website/index.tsx +++ b/web/app/components/datasets/create/website/index.tsx @@ -61,7 +61,7 @@ const Website: FC<Props> = ({ {t('datasetCreation.stepOne.website.chooseProvider')} </div> <div className='flex space-x-2'> - {ENABLE_WEBSITE_JINAREADER && <button + {ENABLE_WEBSITE_JINAREADER && <button type="button" className={cn('flex items-center justify-center rounded-lg px-4 py-2', selectedProvider === DataSourceProvider.jinaReader ? 'system-sm-medium border-[1.5px] border-components-option-card-option-selected-border bg-components-option-card-option-selected-bg text-text-primary' @@ -76,7 +76,7 @@ const Website: FC<Props> = ({ <span className={cn(s.jinaLogo, 'mr-2')} /> <span>Jina Reader</span> </button>} - {ENABLE_WEBSITE_FIRECRAWL && <button + {ENABLE_WEBSITE_FIRECRAWL && <button type="button" className={cn('rounded-lg px-4 py-2', selectedProvider === DataSourceProvider.fireCrawl ? 'system-sm-medium border-[1.5px] border-components-option-card-option-selected-border bg-components-option-card-option-selected-bg text-text-primary' @@ -90,7 +90,7 @@ const Website: FC<Props> = ({ > 🔥 Firecrawl </button>} - {ENABLE_WEBSITE_WATERCRAWL && <button + {ENABLE_WEBSITE_WATERCRAWL && <button type="button" className={cn('flex items-center justify-center rounded-lg px-4 py-2', selectedProvider === DataSourceProvider.waterCrawl ? 
'system-sm-medium border-[1.5px] border-components-option-card-option-selected-border bg-components-option-card-option-selected-bg text-text-primary' diff --git a/web/app/components/datasets/documents/detail/index.tsx b/web/app/components/datasets/documents/detail/index.tsx index f23412db3c..b4f47253fb 100644 --- a/web/app/components/datasets/documents/detail/index.tsx +++ b/web/app/components/datasets/documents/detail/index.tsx @@ -200,7 +200,7 @@ const DocumentDetail: FC<DocumentDetailProps> = ({ datasetId, documentId }) => { onUpdate={handleOperate} className='!w-[200px]' /> - <button + <button type="button" className={style.layoutRightIcon} onClick={() => setShowMetadata(!showMetadata)} > diff --git a/web/app/components/datasets/documents/detail/metadata/index.tsx b/web/app/components/datasets/documents/detail/metadata/index.tsx index 54587dede6..4cb5fe97e9 100644 --- a/web/app/components/datasets/documents/detail/metadata/index.tsx +++ b/web/app/components/datasets/documents/detail/metadata/index.tsx @@ -107,7 +107,7 @@ const IconButton: FC<{ <Tooltip popupContent={metadataMap[type].text} > - <button className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}> + <button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}> <TypeIcon iconName={metadataMap[type].iconName || ''} className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`} diff --git a/web/app/components/datasets/documents/operations.tsx b/web/app/components/datasets/documents/operations.tsx index c1a27d3af8..4c23b700db 100644 --- a/web/app/components/datasets/documents/operations.tsx +++ b/web/app/components/datasets/documents/operations.tsx @@ -177,7 +177,7 @@ const Operations = ({ popupClassName='text-text-secondary system-xs-medium' needsDelay={false} > - <button + <button type="button" className={cn('mr-2 cursor-pointer rounded-lg', !isListScene ? 
'border-[0.5px] border-components-button-secondary-border bg-components-button-secondary-bg p-2 shadow-xs shadow-shadow-shadow-3 backdrop-blur-[5px] hover:border-components-button-secondary-border-hover hover:bg-components-button-secondary-bg-hover' diff --git a/web/app/components/develop/doc.tsx b/web/app/components/develop/doc.tsx index ef5e7022c1..82b6b00e44 100644 --- a/web/app/components/develop/doc.tsx +++ b/web/app/components/develop/doc.tsx @@ -168,7 +168,7 @@ const Doc = ({ appDetail }: IDocProps) => { <span className="text-xs font-medium uppercase tracking-wide text-text-tertiary"> {t('appApi.develop.toc')} </span> - <button + <button type="button" onClick={() => setIsTocExpanded(false)} className="group flex h-6 w-6 items-center justify-center rounded-md transition-colors hover:bg-state-base-hover" aria-label="Close" @@ -224,7 +224,7 @@ const Doc = ({ appDetail }: IDocProps) => { </nav> ) : ( - <button + <button type="button" onClick={() => setIsTocExpanded(true)} className="group flex h-11 w-11 items-center justify-center rounded-full border-[0.5px] border-components-panel-border bg-components-panel-bg shadow-lg transition-all duration-150 hover:bg-background-default-hover hover:shadow-xl" aria-label="Open table of contents" diff --git a/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/schema-node.tsx b/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/schema-node.tsx index 36671ab050..4c20232df4 100644 --- a/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/schema-node.tsx +++ b/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/schema-node.tsx @@ -99,6 +99,7 @@ const SchemaNode: FC<SchemaNodeProps> = ({ indentLeft[depth - 1], )}> <button + type="button" onClick={handleExpand} className='py-0.5 text-text-tertiary hover:text-text-accent' > diff --git a/web/app/components/workflow/run/tracing-panel.tsx b/web/app/components/workflow/run/tracing-panel.tsx index 2346b08c9e..22d49792b9 100644 --- a/web/app/components/workflow/run/tracing-panel.tsx +++ b/web/app/components/workflow/run/tracing-panel.tsx @@ -109,7 +109,7 @@ const TracingPanel: FC<TracingPanelProps> = ({ onMouseLeave={handleParallelMouseLeave} > <div className="mb-1 flex items-center"> - <button + <button type="button" onClick={() => toggleCollapse(node.id)} className={cn( 'mr-2 transition-colors', diff --git a/web/service/demo/index.tsx b/web/service/demo/index.tsx index 50889770bd..aa02968549 100644 --- a/web/service/demo/index.tsx +++ b/web/service/demo/index.tsx @@ -51,7 +51,7 @@ const Service: FC = () => { </div> <div> - <button onClick={handleCreateApp}>Click me to Create App</button> + <button type="button" onClick={handleCreateApp}>Click me to Create App</button> </div> <div> From a5387b304e5cb74bf803a6811ca8aa0747ef3e9d Mon Sep 17 00:00:00 2001 From: Yongtao Huang <yongtaoh2022@gmail.com> Date: Sun, 28 Sep 2025 13:44:14 +0800 Subject: [PATCH 060/126] Fix: use correct maxLength prop for verification code input (#26244) --- web/app/signup/check-code/page.tsx | 2 +- web/app/signup/components/input-mail.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/web/app/signup/check-code/page.tsx b/web/app/signup/check-code/page.tsx index 159965908b..540af74872 100644 --- a/web/app/signup/check-code/page.tsx +++ b/web/app/signup/check-code/page.tsx @@ -93,7 +93,7 @@ export default function CheckCode() { <form action=""> <label htmlFor="code" 
className='system-md-semibold mb-1 text-text-secondary'>{t('login.checkCode.verificationCode')}</label> - <Input value={code} onChange={e => setVerifyCode(e.target.value)} max-length={6} className='mt-1' placeholder={t('login.checkCode.verificationCodePlaceholder') as string} /> + <Input value={code} onChange={e => setVerifyCode(e.target.value)} maxLength={6} className='mt-1' placeholder={t('login.checkCode.verificationCodePlaceholder') as string} /> <Button loading={loading} disabled={loading} className='my-3 w-full' variant='primary' onClick={verify}>{t('login.checkCode.verify')}</Button> <Countdown onResend={resendCode} /> </form> diff --git a/web/app/signup/components/input-mail.tsx b/web/app/signup/components/input-mail.tsx index 4b0b0ec0b1..d2e7bca65b 100644 --- a/web/app/signup/components/input-mail.tsx +++ b/web/app/signup/components/input-mail.tsx @@ -1,5 +1,5 @@ 'use client' -import { noop } from 'lodash' +import { noop } from 'lodash-es' import Input from '@/app/components/base/input' import { useTranslation } from 'react-i18next' import { useContext } from 'use-context-selector' From 66196459d5a9737f65464f257ffa89011fa96c0f Mon Sep 17 00:00:00 2001 From: AkisAya <bchen5@trip.com> Date: Sun, 28 Sep 2025 13:44:51 +0800 Subject: [PATCH 061/126] fix db connection error in embed_documents() (#26196) --- api/core/rag/embedding/cached_embedding.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py index 5f94129a0c..c2f17cd148 100644 --- a/api/core/rag/embedding/cached_embedding.py +++ b/api/core/rag/embedding/cached_embedding.py @@ -42,6 +42,10 @@ class CacheEmbedding(Embeddings): text_embeddings[i] = embedding.get_embedding() else: embedding_queue_indices.append(i) + + # release database connection, because embedding may take a long time + db.session.close() + if embedding_queue_indices: embedding_queue_texts = [texts[i] for i in embedding_queue_indices] embedding_queue_embeddings = [] From e686cc9eabc82e1bfa97e7a0998ae67bd8be3099 Mon Sep 17 00:00:00 2001 From: lyzno1 <92089059+lyzno1@users.noreply.github.com> Date: Sun, 28 Sep 2025 17:45:33 +0800 Subject: [PATCH 062/126] refactor: standardize dataset-pipeline i18n terminology consistency (#26353) Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- web/i18n/ja-JP/dataset-pipeline.ts | 6 +++--- web/i18n/zh-Hans/dataset-pipeline.ts | 4 ++-- web/i18n/zh-Hant/dataset-pipeline.ts | 24 ++++++++++++------------ 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/web/i18n/ja-JP/dataset-pipeline.ts b/web/i18n/ja-JP/dataset-pipeline.ts index 6450131933..0dddb25356 100644 --- a/web/i18n/ja-JP/dataset-pipeline.ts +++ b/web/i18n/ja-JP/dataset-pipeline.ts @@ -1,7 +1,7 @@ const translation = { creation: { createFromScratch: { - title: '空白の知識パイプライン', + title: '空白のナレッジパイプライン', description: 'データ処理と構造を完全に制御できるカスタムパイプラインをゼロから作成します。', }, backToKnowledge: 'ナレッジベースに戻る', @@ -77,11 +77,11 @@ const translation = { inputFieldPanel: { uniqueInputs: { title: '各入口のユニークな入力', - tooltip: 'ユニークな入力は、選択したデータソースおよびその下流ノードにのみアクセス可能です。他のデータソースを選択する際、ユーザーはこれを記入する必要はありません。最初のステップ(データソース)には、データソース変数で参照される入力フィールドのみが表示されます。他のフィールドは、第二のステップ(ドキュメントの処理)で表示されます。', + tooltip: 'ユニークな入力は選択したデータソースとその下流ノードのみがアクセス可能です。他のデータソースを選択する際、ユーザーはこれを記入する必要がありません。データソース変数で参照される入力フィールドのみが最初のステップ(データソース)に表示され、他のフィールドは第二のステップ(ドキュメント処理)で表示されます。', }, globalInputs: { title: 'すべての入口に対するグローバル入力', - tooltip: 
'グローバル入力はすべてのノードで共有されます。ユーザーは任意のデータソースを選択するときにそれらを入力する必要があります。たとえば、区切り文字や最大チャンク長のようなフィールドは、複数のデータソースに一様に適用できます。データソース変数によって参照される入力フィールドのみが最初のステップ(データソース)に表示されます。他のフィールドは2番目のステップ(文書処理)に表示されます。', + tooltip: 'グローバル入力はすべてのノードで共有されます。ユーザーは任意のデータソースを選択する際にこれらを入力する必要があります。区切り文字や最大チャンク長などのフィールドは複数のデータソースに一様に適用できます。データソース変数で参照される入力フィールドのみが最初のステップ(データソース)に表示され、他のフィールドは第二のステップ(ドキュメント処理)に表示されます。', }, preview: { stepOneTitle: 'データソース', diff --git a/web/i18n/zh-Hans/dataset-pipeline.ts b/web/i18n/zh-Hans/dataset-pipeline.ts index 41c538ce20..7fbe8a0532 100644 --- a/web/i18n/zh-Hans/dataset-pipeline.ts +++ b/web/i18n/zh-Hans/dataset-pipeline.ts @@ -87,11 +87,11 @@ const translation = { description: '用户输入字段用于定义和收集知识流水线执行过程中所需的变量,用户可以自定义字段类型,并灵活配置输入,以满足不同数据源或文档处理的需求。', uniqueInputs: { title: '非共享输入', - tooltip: '非共享输入只能被选定的数据源及其下游节点访问。用户在选择其他数据源时不需要填写它。只有数据源变量引用的输入字段才会出现在第一步(数据源)中。所有其他字段将在第二步(Process Documents)中显示。', + tooltip: '非共享输入只能被选定的数据源及其下游节点访问。用户在选择其他数据源时不需要填写它。只有数据源变量引用的输入字段才会出现在第一步(数据源)中。所有其他字段将在第二步(处理文档)中显示。', }, globalInputs: { title: '全局共享输入', - tooltip: '全局共享输入在所有节点之间共享。用户在选择任何数据源时都需要填写它们。例如,像分隔符(delimiter)和最大块长度(Maximum Chunk Length)这样的字段可以跨多个数据源统一应用。只有数据源变量引用的输入字段才会出现在第一步(数据源)中。所有其他字段都显示在第二步(Process Documents)中。', + tooltip: '全局共享输入在所有节点之间共享。用户在选择任何数据源时都需要填写它们。例如,像分隔符和最大块长度这样的字段可以跨多个数据源统一应用。只有数据源变量引用的输入字段才会出现在第一步(数据源)中。所有其他字段都显示在第二步(处理文档)中。', }, addInputField: '添加输入字段', editInputField: '编辑输入字段', diff --git a/web/i18n/zh-Hant/dataset-pipeline.ts b/web/i18n/zh-Hant/dataset-pipeline.ts index 588bf4cc3f..5dc287a7c9 100644 --- a/web/i18n/zh-Hant/dataset-pipeline.ts +++ b/web/i18n/zh-Hant/dataset-pipeline.ts @@ -1,8 +1,8 @@ const translation = { creation: { createFromScratch: { - title: '空白知識管道', - description: '從頭開始建立自訂管道,並完全控制資料處理和結構。', + title: '空白知識流水線', + description: '從頭開始建立自訂流水線,並完全控制資料處理和結構。', }, caution: '小心', backToKnowledge: '返回知識', @@ -18,12 +18,12 @@ const translation = { convert: '轉換', saveAndProcess: '儲存和處理', choose: '選擇', - useTemplate: '使用此知識管道', + useTemplate: '使用此知識流水線', dataSource: '資料來源', editInfo: '編輯資訊', process: '處理', backToDataSource: '返回資料來源', - exportPipeline: '匯出知識流水線', + exportPipeline: '匯出流水線', details: '詳情', preview: '預覽', }, @@ -33,15 +33,15 @@ const translation = { }, publishPipeline: { success: { - message: '知識管道已發布', + message: '知識流水線已發布', }, error: { - message: '無法發佈知識管道', + message: '無法發佈知識流水線', }, }, publishTemplate: { success: { - message: '管道範本已發佈', + message: '流水線範本已發佈', tip: '您可以在建立頁面上使用此範本。', learnMore: '瞭解詳情', }, @@ -134,17 +134,17 @@ const translation = { title: '證實', content: '此動作是永久性的。您將無法恢復到以前的方法。請確認轉換。', }, - title: '轉換為知識管道', + title: '轉換為知識流水線', warning: '此動作無法復原。', descriptionChunk2: '— 一種更開放和靈活的方法,可以訪問我們市場中的插件。這會將新的處理方法套用至所有未來的文件。', - successMessage: '已成功將資料集轉換成管線', - errorMessage: '無法將資料集轉換成管線', - descriptionChunk1: '您現在可以轉換現有的知識庫,以使用知識管道進行文件處理', + successMessage: '已成功將資料集轉換成流水線', + errorMessage: '無法將資料集轉換成流水線', + descriptionChunk1: '您現在可以轉換現有的知識庫,以使用知識流水線進行文件處理', }, knowledgeDescription: '知識說明', knowledgeNameAndIconPlaceholder: '請輸入知識庫的名稱', knowledgeDescriptionPlaceholder: '描述此知識庫中的內容。詳細的描述使人工智慧能夠更準確地存取資料集的內容。如果為空,Dify 將使用預設命中策略。(選用)', - pipelineNameAndIcon: '管線名稱 & 圖示', + pipelineNameAndIcon: '流水線名稱 & 圖示', knowledgeNameAndIcon: '知識名稱和圖示', inputField: '輸入欄位', knowledgePermissions: '權限', From 36580221aabdf5cc85442e8e73f635e5a4e3e5d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=B9=9B=E9=9C=B2=E5=85=88=E7=94=9F?= <zhanluxianshen@163.com> Date: Sun, 28 Sep 2025 17:46:19 +0800 Subject: [PATCH 063/126] fix workflow variable split judge. 
(#26355) Signed-off-by: zhanluxianshen <zhanluxianshen@163.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- api/core/workflow/variable_loader.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/api/core/workflow/variable_loader.py b/api/core/workflow/variable_loader.py index a35215855e..1b31022495 100644 --- a/api/core/workflow/variable_loader.py +++ b/api/core/workflow/variable_loader.py @@ -66,8 +66,8 @@ def load_into_variable_pool( # NOTE(QuantumGhost): this logic needs to be in sync with # `WorkflowEntry.mapping_user_inputs_to_variable_pool`. node_variable_list = key.split(".") - if len(node_variable_list) < 1: - raise ValueError(f"Invalid variable key: {key}. It should have at least one element.") + if len(node_variable_list) < 2: + raise ValueError(f"Invalid variable key: {key}. It should have at least two elements.") if key in user_inputs: continue node_variable_key = ".".join(node_variable_list[1:]) From d00a72a4352a164c02ec3a4ca412c3899670356e Mon Sep 17 00:00:00 2001 From: -LAN- <laipz8200@outlook.com> Date: Sun, 28 Sep 2025 17:46:39 +0800 Subject: [PATCH 064/126] fix(graph_engine): block response nodes during streaming (#26364) --- .../workflow/graph_engine/response_coordinator/coordinator.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/api/core/workflow/graph_engine/response_coordinator/coordinator.py b/api/core/workflow/graph_engine/response_coordinator/coordinator.py index 985992f3f1..3db40c545e 100644 --- a/api/core/workflow/graph_engine/response_coordinator/coordinator.py +++ b/api/core/workflow/graph_engine/response_coordinator/coordinator.py @@ -212,10 +212,11 @@ class ResponseStreamCoordinator: edge = self._graph.edges[edge_id] source_node = self._graph.nodes[edge.tail] - # Check if node is a branch/container (original behavior) + # Check if node is a branch, container, or response node if source_node.execution_type in { NodeExecutionType.BRANCH, NodeExecutionType.CONTAINER, + NodeExecutionType.RESPONSE, } or source_node.blocks_variable_output(variable_selectors): blocking_edges.append(edge_id) From 2e914808eaf78919d9e5d67b76b84ede68ba3e64 Mon Sep 17 00:00:00 2001 From: -LAN- <laipz8200@outlook.com> Date: Sun, 28 Sep 2025 22:19:11 +0800 Subject: [PATCH 065/126] test(graph_engine): block response nodes during streaming (#26377) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../fixtures/workflow/test-answer-order.yml | 222 ++++++++++++++++++ .../test_answer_order_workflow.py | 28 +++ 2 files changed, 250 insertions(+) create mode 100644 api/tests/fixtures/workflow/test-answer-order.yml create mode 100644 api/tests/unit_tests/core/workflow/graph_engine/test_answer_order_workflow.py diff --git a/api/tests/fixtures/workflow/test-answer-order.yml b/api/tests/fixtures/workflow/test-answer-order.yml new file mode 100644 index 0000000000..3c6631aebb --- /dev/null +++ b/api/tests/fixtures/workflow/test-answer-order.yml @@ -0,0 +1,222 @@ +app: + description: 'this is a chatflow with 2 answer nodes. 
+ + + its outputs should look like: + + + ``` + + --- answer 1 --- + + + foo + + --- answer 2 --- + + + <llm''s outputs> + + ```' + icon: 🤖 + icon_background: '#FFEAD5' + mode: advanced-chat + name: test-answer-order + use_icon_as_answer_icon: false +dependencies: +- current_identifier: null + type: marketplace + value: + marketplace_plugin_unique_identifier: langgenius/openai:0.2.6@e2665624a156f52160927bceac9e169bd7e5ae6b936ae82575e14c90af390e6e + version: null +kind: app +version: 0.4.0 +workflow: + conversation_variables: [] + environment_variables: [] + features: + file_upload: + allowed_file_extensions: + - .JPG + - .JPEG + - .PNG + - .GIF + - .WEBP + - .SVG + allowed_file_types: + - image + allowed_file_upload_methods: + - local_file + - remote_url + enabled: false + fileUploadConfig: + audio_file_size_limit: 50 + batch_count_limit: 5 + file_size_limit: 15 + image_file_size_limit: 10 + video_file_size_limit: 100 + workflow_file_upload_limit: 10 + image: + enabled: false + number_limits: 3 + transfer_methods: + - local_file + - remote_url + number_limits: 3 + opening_statement: '' + retriever_resource: + enabled: true + sensitive_word_avoidance: + enabled: false + speech_to_text: + enabled: false + suggested_questions: [] + suggested_questions_after_answer: + enabled: false + text_to_speech: + enabled: false + language: '' + voice: '' + graph: + edges: + - data: + isInIteration: false + isInLoop: false + sourceType: answer + targetType: answer + id: 1759052466526-source-1759052469368-target + source: '1759052466526' + sourceHandle: source + target: '1759052469368' + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: false + isInLoop: false + sourceType: start + targetType: llm + id: 1759052439553-source-1759052580454-target + source: '1759052439553' + sourceHandle: source + target: '1759052580454' + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: false + isInLoop: false + sourceType: llm + targetType: answer + id: 1759052580454-source-1759052466526-target + source: '1759052580454' + sourceHandle: source + target: '1759052466526' + targetHandle: target + type: custom + zIndex: 0 + nodes: + - data: + selected: false + title: Start + type: start + variables: [] + height: 52 + id: '1759052439553' + position: + x: 30 + y: 242 + positionAbsolute: + x: 30 + y: 242 + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + answer: '--- answer 1 --- + + + foo + + ' + selected: false + title: Answer + type: answer + variables: [] + height: 100 + id: '1759052466526' + position: + x: 632 + y: 242 + positionAbsolute: + x: 632 + y: 242 + selected: true + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + answer: '--- answer 2 --- + + + {{#1759052580454.text#}} + + ' + selected: false + title: Answer 2 + type: answer + variables: [] + height: 103 + id: '1759052469368' + position: + x: 934 + y: 242 + positionAbsolute: + x: 934 + y: 242 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + context: + enabled: false + variable_selector: [] + model: + completion_params: + temperature: 0.7 + mode: chat + name: gpt-4o + provider: langgenius/openai/openai + prompt_template: + - id: 5c1d873b-06b2-4dce-939e-672882bbd7c0 + role: system + text: '' + - role: user + text: '{{#sys.query#}}' + selected: false + title: LLM + type: llm + vision: + enabled: false + height: 88 + id: '1759052580454' + position: + x: 332 + y: 242 + positionAbsolute: + x: 332 
y: 242 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 242 + viewport: + x: 126.2797574512839 + y: 289.55932160537446 + zoom: 1.0743222672006216 + rag_pipeline_variables: [] diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_answer_order_workflow.py b/api/tests/unit_tests/core/workflow/graph_engine/test_answer_order_workflow.py new file mode 100644 index 0000000000..6569439b56 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_answer_order_workflow.py @@ -0,0 +1,28 @@ +from .test_mock_config import MockConfigBuilder +from .test_table_runner import TableTestRunner, WorkflowTestCase + +LLM_NODE_ID = "1759052580454" + + +def test_answer_nodes_emit_in_order() -> None: + mock_config = ( + MockConfigBuilder() + .with_llm_response("unused default") + .with_node_output(LLM_NODE_ID, {"text": "mocked llm text"}) + .build() + ) + + expected_answer = "--- answer 1 ---\n\nfoo\n--- answer 2 ---\n\nmocked llm text\n" + + case = WorkflowTestCase( + fixture_path="test-answer-order", + query="", + expected_outputs={"answer": expected_answer}, + use_auto_mock=True, + mock_config=mock_config, + ) + + runner = TableTestRunner() + result = runner.run_test_case(case) + + assert result.success, result.error From 8d897153a5a0d0479d11eb5d4e218ebd28a6ee89 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Sun, 28 Sep 2025 23:42:22 +0800 Subject: [PATCH 066/126] [Chore/Refactor] Apply @console_ns.route decorators to RAG pipeline controllers (#26348) Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: asukaminato0721 <30024051+asukaminato0721@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: -LAN- <laipz8200@outlook.com> --- .../rag_pipeline/rag_pipeline_datasets.py | 8 +- .../rag_pipeline_draft_variable.py | 31 +--- .../rag_pipeline/rag_pipeline_import.py | 25 +--- .../rag_pipeline/rag_pipeline_workflow.py | 139 ++++-------------- 4 files changed, 42 insertions(+), 161 deletions(-) diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py index 34faa4ec85..c741bfbf82 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py @@ -4,7 +4,7 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden import services -from controllers.console import api +from controllers.console import console_ns from controllers.console.datasets.error import DatasetNameDuplicateError from controllers.console.wraps import ( account_initialization_required, @@ -32,6 +32,7 @@ def _validate_description_length(description): return description +@console_ns.route("/rag/pipeline/dataset") class CreateRagPipelineDatasetApi(Resource): @setup_required @login_required @@ -84,6 +85,7 @@ class CreateRagPipelineDatasetApi(Resource): return import_info, 201 +@console_ns.route("/rag/pipeline/empty-dataset") class CreateEmptyRagPipelineDatasetApi(Resource): @setup_required @login_required @@ -108,7 +110,3 @@ class CreateEmptyRagPipelineDatasetApi(Resource): ), ) return marshal(dataset, dataset_detail_fields), 201 - - -api.add_resource(CreateRagPipelineDatasetApi, "/rag/pipeline/dataset") -api.add_resource(CreateEmptyRagPipelineDatasetApi, "/rag/pipeline/empty-dataset") diff --git 
a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py index db07e7729a..38f75402a8 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py @@ -6,7 +6,7 @@ from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqpars from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.app.error import ( DraftWorkflowNotExist, ) @@ -111,6 +111,7 @@ def _api_prerequisite(f): return wrapper +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables") class RagPipelineVariableCollectionApi(Resource): @_api_prerequisite @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS) @@ -168,6 +169,7 @@ def validate_node_id(node_id: str) -> NoReturn | None: return None +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/variables") class RagPipelineNodeVariableCollectionApi(Resource): @_api_prerequisite @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) @@ -190,6 +192,7 @@ class RagPipelineNodeVariableCollectionApi(Resource): return Response("", 204) +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables/<uuid:variable_id>") class RagPipelineVariableApi(Resource): _PATCH_NAME_FIELD = "name" _PATCH_VALUE_FIELD = "value" @@ -284,6 +287,7 @@ class RagPipelineVariableApi(Resource): return Response("", 204) +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables/<uuid:variable_id>/reset") class RagPipelineVariableResetApi(Resource): @_api_prerequisite def put(self, pipeline: Pipeline, variable_id: str): @@ -325,6 +329,7 @@ def _get_variable_list(pipeline: Pipeline, node_id) -> WorkflowDraftVariableList return draft_vars +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/system-variables") class RagPipelineSystemVariableCollectionApi(Resource): @_api_prerequisite @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) @@ -332,6 +337,7 @@ class RagPipelineSystemVariableCollectionApi(Resource): return _get_variable_list(pipeline, SYSTEM_VARIABLE_NODE_ID) +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/environment-variables") class RagPipelineEnvironmentVariableCollectionApi(Resource): @_api_prerequisite def get(self, pipeline: Pipeline): @@ -364,26 +370,3 @@ class RagPipelineEnvironmentVariableCollectionApi(Resource): ) return {"items": env_vars_list} - - -api.add_resource( - RagPipelineVariableCollectionApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables", -) -api.add_resource( - RagPipelineNodeVariableCollectionApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/variables", -) -api.add_resource( - RagPipelineVariableApi, "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables/<uuid:variable_id>" -) -api.add_resource( - RagPipelineVariableResetApi, "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables/<uuid:variable_id>/reset" -) -api.add_resource( - RagPipelineSystemVariableCollectionApi, "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/system-variables" -) -api.add_resource( - RagPipelineEnvironmentVariableCollectionApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/environment-variables", -) diff --git 
a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py index a447f2848a..e0b918456b 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py @@ -5,7 +5,7 @@ from flask_restx import Resource, marshal_with, reqparse # type: ignore from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import ( account_initialization_required, @@ -20,6 +20,7 @@ from services.app_dsl_service import ImportStatus from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelineDslService +@console_ns.route("/rag/pipelines/imports") class RagPipelineImportApi(Resource): @setup_required @login_required @@ -66,6 +67,7 @@ class RagPipelineImportApi(Resource): return result.model_dump(mode="json"), 200 +@console_ns.route("/rag/pipelines/imports/<string:import_id>/confirm") class RagPipelineImportConfirmApi(Resource): @setup_required @login_required @@ -90,6 +92,7 @@ class RagPipelineImportConfirmApi(Resource): return result.model_dump(mode="json"), 200 +@console_ns.route("/rag/pipelines/imports/<string:pipeline_id>/check-dependencies") class RagPipelineImportCheckDependenciesApi(Resource): @setup_required @login_required @@ -107,6 +110,7 @@ class RagPipelineImportCheckDependenciesApi(Resource): return result.model_dump(mode="json"), 200 +@console_ns.route("/rag/pipelines/<string:pipeline_id>/exports") class RagPipelineExportApi(Resource): @setup_required @login_required @@ -128,22 +132,3 @@ class RagPipelineExportApi(Resource): ) return {"data": result}, 200 - - -# Import Rag Pipeline -api.add_resource( - RagPipelineImportApi, - "/rag/pipelines/imports", -) -api.add_resource( - RagPipelineImportConfirmApi, - "/rag/pipelines/imports/<string:import_id>/confirm", -) -api.add_resource( - RagPipelineImportCheckDependenciesApi, - "/rag/pipelines/imports/<string:pipeline_id>/check-dependencies", -) -api.add_resource( - RagPipelineExportApi, - "/rag/pipelines/<string:pipeline_id>/exports", -) diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py index 01ddb8a871..a75c121fbe 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py @@ -9,7 +9,7 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services -from controllers.console import api +from controllers.console import console_ns from controllers.console.app.error import ( ConversationCompletedError, DraftWorkflowNotExist, @@ -50,6 +50,7 @@ from services.rag_pipeline.rag_pipeline_transform_service import RagPipelineTran logger = logging.getLogger(__name__) +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft") class DraftRagPipelineApi(Resource): @setup_required @login_required @@ -147,6 +148,7 @@ class DraftRagPipelineApi(Resource): } +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/iteration/nodes/<string:node_id>/run") class RagPipelineDraftRunIterationNodeApi(Resource): @setup_required @login_required @@ -181,6 +183,7 @@ class RagPipelineDraftRunIterationNodeApi(Resource): raise 
InternalServerError() +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/loop/nodes/<string:node_id>/run") class RagPipelineDraftRunLoopNodeApi(Resource): @setup_required @login_required @@ -215,6 +218,7 @@ class RagPipelineDraftRunLoopNodeApi(Resource): raise InternalServerError() +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/run") class DraftRagPipelineRunApi(Resource): @setup_required @login_required @@ -249,6 +253,7 @@ class DraftRagPipelineRunApi(Resource): raise InvokeRateLimitHttpError(ex.description) +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/run") class PublishedRagPipelineRunApi(Resource): @setup_required @login_required @@ -369,6 +374,7 @@ class PublishedRagPipelineRunApi(Resource): # # return result # +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/run") class RagPipelinePublishedDatasourceNodeRunApi(Resource): @setup_required @login_required @@ -411,6 +417,7 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource): ) +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/nodes/<string:node_id>/run") class RagPipelineDraftDatasourceNodeRunApi(Resource): @setup_required @login_required @@ -453,6 +460,7 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource): ) +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/run") class RagPipelineDraftNodeRunApi(Resource): @setup_required @login_required @@ -486,6 +494,7 @@ class RagPipelineDraftNodeRunApi(Resource): return workflow_node_execution +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs/tasks/<string:task_id>/stop") class RagPipelineTaskStopApi(Resource): @setup_required @login_required @@ -504,6 +513,7 @@ class RagPipelineTaskStopApi(Resource): return {"result": "success"} +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/publish") class PublishedRagPipelineApi(Resource): @setup_required @login_required @@ -559,6 +569,7 @@ class PublishedRagPipelineApi(Resource): } +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/default-workflow-block-configs") class DefaultRagPipelineBlockConfigsApi(Resource): @setup_required @login_required @@ -577,6 +588,7 @@ class DefaultRagPipelineBlockConfigsApi(Resource): return rag_pipeline_service.get_default_block_configs() +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/default-workflow-block-configs/<string:block_type>") class DefaultRagPipelineBlockConfigApi(Resource): @setup_required @login_required @@ -608,6 +620,7 @@ class DefaultRagPipelineBlockConfigApi(Resource): return rag_pipeline_service.get_default_block_config(node_type=block_type, filters=filters) +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows") class PublishedAllRagPipelineApi(Resource): @setup_required @login_required @@ -656,6 +669,7 @@ class PublishedAllRagPipelineApi(Resource): } +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/<string:workflow_id>") class RagPipelineByIdApi(Resource): @setup_required @login_required @@ -713,6 +727,7 @@ class RagPipelineByIdApi(Resource): return workflow +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/processing/parameters") class PublishedRagPipelineSecondStepApi(Resource): @setup_required @login_required @@ -738,6 +753,7 @@ class PublishedRagPipelineSecondStepApi(Resource): } +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/pre-processing/parameters") 
class PublishedRagPipelineFirstStepApi(Resource): @setup_required @login_required @@ -763,6 +779,7 @@ class PublishedRagPipelineFirstStepApi(Resource): } +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/pre-processing/parameters") class DraftRagPipelineFirstStepApi(Resource): @setup_required @login_required @@ -788,6 +805,7 @@ class DraftRagPipelineFirstStepApi(Resource): } +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/processing/parameters") class DraftRagPipelineSecondStepApi(Resource): @setup_required @login_required @@ -814,6 +832,7 @@ class DraftRagPipelineSecondStepApi(Resource): } +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs") class RagPipelineWorkflowRunListApi(Resource): @setup_required @login_required @@ -835,6 +854,7 @@ class RagPipelineWorkflowRunListApi(Resource): return result +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs/<uuid:run_id>") class RagPipelineWorkflowRunDetailApi(Resource): @setup_required @login_required @@ -853,6 +873,7 @@ class RagPipelineWorkflowRunDetailApi(Resource): return workflow_run +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs/<uuid:run_id>/node-executions") class RagPipelineWorkflowRunNodeExecutionListApi(Resource): @setup_required @login_required @@ -876,6 +897,7 @@ class RagPipelineWorkflowRunNodeExecutionListApi(Resource): return {"data": node_executions} +@console_ns.route("/rag/pipelines/datasource-plugins") class DatasourceListApi(Resource): @setup_required @login_required @@ -891,6 +913,7 @@ class DatasourceListApi(Resource): return jsonable_encoder(RagPipelineManageService.list_rag_pipeline_datasources(tenant_id)) +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/last-run") class RagPipelineWorkflowLastRunApi(Resource): @setup_required @login_required @@ -912,6 +935,7 @@ class RagPipelineWorkflowLastRunApi(Resource): return node_exec +@console_ns.route("/rag/pipelines/transform/datasets/<uuid:dataset_id>") class RagPipelineTransformApi(Resource): @setup_required @login_required @@ -929,6 +953,7 @@ class RagPipelineTransformApi(Resource): return result +@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/variables-inspect") class RagPipelineDatasourceVariableApi(Resource): @setup_required @login_required @@ -958,6 +983,7 @@ class RagPipelineDatasourceVariableApi(Resource): return workflow_node_execution +@console_ns.route("/rag/pipelines/recommended-plugins") class RagPipelineRecommendedPluginApi(Resource): @setup_required @login_required @@ -966,114 +992,3 @@ class RagPipelineRecommendedPluginApi(Resource): rag_pipeline_service = RagPipelineService() recommended_plugins = rag_pipeline_service.get_recommended_plugins() return recommended_plugins - - -api.add_resource( - DraftRagPipelineApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft", -) -api.add_resource( - DraftRagPipelineRunApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/run", -) -api.add_resource( - PublishedRagPipelineRunApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/published/run", -) -api.add_resource( - RagPipelineTaskStopApi, - "/rag/pipelines/<uuid:pipeline_id>/workflow-runs/tasks/<string:task_id>/stop", -) -api.add_resource( - RagPipelineDraftNodeRunApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/run", -) -api.add_resource( - RagPipelinePublishedDatasourceNodeRunApi, - 
"/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/run", -) - -api.add_resource( - RagPipelineDraftDatasourceNodeRunApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/nodes/<string:node_id>/run", -) - -api.add_resource( - RagPipelineDraftRunIterationNodeApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/iteration/nodes/<string:node_id>/run", -) - -api.add_resource( - RagPipelineDraftRunLoopNodeApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/loop/nodes/<string:node_id>/run", -) - -api.add_resource( - PublishedRagPipelineApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/publish", -) -api.add_resource( - PublishedAllRagPipelineApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows", -) -api.add_resource( - DefaultRagPipelineBlockConfigsApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/default-workflow-block-configs", -) -api.add_resource( - DefaultRagPipelineBlockConfigApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/default-workflow-block-configs/<string:block_type>", -) -api.add_resource( - RagPipelineByIdApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/<string:workflow_id>", -) -api.add_resource( - RagPipelineWorkflowRunListApi, - "/rag/pipelines/<uuid:pipeline_id>/workflow-runs", -) -api.add_resource( - RagPipelineWorkflowRunDetailApi, - "/rag/pipelines/<uuid:pipeline_id>/workflow-runs/<uuid:run_id>", -) -api.add_resource( - RagPipelineWorkflowRunNodeExecutionListApi, - "/rag/pipelines/<uuid:pipeline_id>/workflow-runs/<uuid:run_id>/node-executions", -) -api.add_resource( - DatasourceListApi, - "/rag/pipelines/datasource-plugins", -) -api.add_resource( - PublishedRagPipelineSecondStepApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/published/processing/parameters", -) -api.add_resource( - PublishedRagPipelineFirstStepApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/published/pre-processing/parameters", -) -api.add_resource( - DraftRagPipelineSecondStepApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/processing/parameters", -) -api.add_resource( - DraftRagPipelineFirstStepApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/pre-processing/parameters", -) -api.add_resource( - RagPipelineWorkflowLastRunApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/last-run", -) -api.add_resource( - RagPipelineTransformApi, - "/rag/pipelines/transform/datasets/<uuid:dataset_id>", -) -api.add_resource( - RagPipelineDatasourceVariableApi, - "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/variables-inspect", -) - -api.add_resource( - RagPipelineRecommendedPluginApi, - "/rag/pipelines/recommended-plugins", -) From 029d5d36acd3b430be38fb17e6ba8fba0aded11e Mon Sep 17 00:00:00 2001 From: quicksand <quicksandzn@gmail.com> Date: Mon, 29 Sep 2025 11:55:06 +0800 Subject: [PATCH 067/126] Fix: Knowledge Base node crash when retrieval_model is null (#26397) --- web/app/components/workflow/nodes/knowledge-base/node.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web/app/components/workflow/nodes/knowledge-base/node.tsx b/web/app/components/workflow/nodes/knowledge-base/node.tsx index 32810f4d3a..29de1bce9e 100644 --- a/web/app/components/workflow/nodes/knowledge-base/node.tsx +++ b/web/app/components/workflow/nodes/knowledge-base/node.tsx @@ -28,9 +28,9 @@ const Node: FC<NodeProps<KnowledgeBaseNodeType>> = ({ data }) => { </div> <div className='system-xs-medium grow truncate text-right text-text-secondary' - title={data.retrieval_model.search_method} + 
title={data.retrieval_model?.search_method} > - {settingsDisplay[data.retrieval_model.search_method as keyof typeof settingsDisplay]} + {settingsDisplay[data.retrieval_model?.search_method as keyof typeof settingsDisplay]} </div> </div> </div> From cba2b9b2ad433cec06402109c1bc515f71358c3d Mon Sep 17 00:00:00 2001 From: "Junyan Qin (Chin)" <rockchinq@gmail.com> Date: Mon, 29 Sep 2025 12:57:30 +0800 Subject: [PATCH 068/126] fix: switch plugin auto upgrade cache to redis (#26356) --- api/README.md | 4 +- api/extensions/ext_celery.py | 1 + api/schedule/check_upgradable_plugin_task.py | 4 +- ...ss_tenant_plugin_autoupgrade_check_task.py | 108 ++++++++++++++---- 4 files changed, 93 insertions(+), 24 deletions(-) diff --git a/api/README.md b/api/README.md index 5ecf92a4f0..e75ea3d354 100644 --- a/api/README.md +++ b/api/README.md @@ -80,10 +80,10 @@ 1. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service. ```bash -uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation +uv run celery -A app.celery worker -P gevent -c 2 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation ``` -Addition, if you want to debug the celery scheduled tasks, you can use the following command in another terminal: +Additionally, if you want to debug the celery scheduled tasks, you can run the following command in another terminal to start the beat service: ```bash uv run celery -A app.celery beat diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index 585539e2ce..6d7d81ed87 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -145,6 +145,7 @@ def init_app(app: DifyApp) -> Celery: } if dify_config.ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK and dify_config.MARKETPLACE_ENABLED: imports.append("schedule.check_upgradable_plugin_task") + imports.append("tasks.process_tenant_plugin_autoupgrade_check_task") beat_schedule["check_upgradable_plugin_task"] = { "task": "schedule.check_upgradable_plugin_task.check_upgradable_plugin_task", "schedule": crontab(minute="*/15"), diff --git a/api/schedule/check_upgradable_plugin_task.py b/api/schedule/check_upgradable_plugin_task.py index a9ad27b059..0712100c01 100644 --- a/api/schedule/check_upgradable_plugin_task.py +++ b/api/schedule/check_upgradable_plugin_task.py @@ -6,7 +6,7 @@ import click import app from extensions.ext_database import db from models.account import TenantPluginAutoUpgradeStrategy -from tasks.process_tenant_plugin_autoupgrade_check_task import process_tenant_plugin_autoupgrade_check_task +from tasks import process_tenant_plugin_autoupgrade_check_task as check_task AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60 # 15 minutes MAX_CONCURRENT_CHECK_TASKS = 20 @@ -43,7 +43,7 @@ def check_upgradable_plugin_task(): for i in range(0, total_strategies, MAX_CONCURRENT_CHECK_TASKS): batch_strategies = strategies[i : i + MAX_CONCURRENT_CHECK_TASKS] for strategy in batch_strategies: - process_tenant_plugin_autoupgrade_check_task.delay( + check_task.process_tenant_plugin_autoupgrade_check_task.delay( strategy.tenant_id, strategy.strategy_setting, strategy.upgrade_time_of_day, diff --git a/api/tasks/process_tenant_plugin_autoupgrade_check_task.py b/api/tasks/process_tenant_plugin_autoupgrade_check_task.py index bae8f1c4db..124971e8e2 100644 --- a/api/tasks/process_tenant_plugin_autoupgrade_check_task.py +++ 
b/api/tasks/process_tenant_plugin_autoupgrade_check_task.py @@ -1,5 +1,5 @@ +import json import operator -import traceback import typing import click @@ -9,38 +9,106 @@ from core.helper import marketplace from core.helper.marketplace import MarketplacePluginDeclaration from core.plugin.entities.plugin import PluginInstallationSource from core.plugin.impl.plugin import PluginInstaller +from extensions.ext_redis import redis_client from models.account import TenantPluginAutoUpgradeStrategy RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3 +CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:" +CACHE_REDIS_TTL = 60 * 15 # 15 minutes -cached_plugin_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {} +def _get_redis_cache_key(plugin_id: str) -> str: + """Generate Redis cache key for plugin manifest.""" + return f"{CACHE_REDIS_KEY_PREFIX}{plugin_id}" + + +def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclaration, None, bool]: + """ + Get cached plugin manifest from Redis. + Returns: + - MarketplacePluginDeclaration: if found in cache + - None: if cached as not found (marketplace returned no result) + - False: if not in cache at all + """ + try: + key = _get_redis_cache_key(plugin_id) + cached_data = redis_client.get(key) + if cached_data is None: + return False + + cached_json = json.loads(cached_data) + if cached_json is None: + return None + + return MarketplacePluginDeclaration.model_validate(cached_json) + except Exception: + return False + + +def _set_cached_manifest(plugin_id: str, manifest: typing.Union[MarketplacePluginDeclaration, None]) -> None: + """ + Cache plugin manifest in Redis. + Args: + plugin_id: The plugin ID + manifest: The manifest to cache, or None if not found in marketplace + """ + try: + key = _get_redis_cache_key(plugin_id) + if manifest is None: + # Cache the fact that this plugin was not found + redis_client.setex(key, CACHE_REDIS_TTL, json.dumps(None)) + else: + # Cache the manifest data + redis_client.setex(key, CACHE_REDIS_TTL, manifest.model_dump_json()) + except Exception: + # If Redis fails, continue without caching + # traceback.print_exc() + pass def marketplace_batch_fetch_plugin_manifests( plugin_ids_plain_list: list[str], ) -> list[MarketplacePluginDeclaration]: - global cached_plugin_manifests - # return marketplace.batch_fetch_plugin_manifests(plugin_ids_plain_list) - not_included_plugin_ids = [ - plugin_id for plugin_id in plugin_ids_plain_list if plugin_id not in cached_plugin_manifests - ] - if not_included_plugin_ids: - manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_included_plugin_ids) + """Fetch plugin manifests with Redis caching support.""" + cached_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {} + not_cached_plugin_ids: list[str] = [] + + # Check Redis cache for each plugin + for plugin_id in plugin_ids_plain_list: + cached_result = _get_cached_manifest(plugin_id) + if cached_result is False: + # Not in cache, need to fetch + not_cached_plugin_ids.append(plugin_id) + else: + # Either found manifest or cached as None (not found in marketplace) + # At this point, cached_result is either MarketplacePluginDeclaration or None + if isinstance(cached_result, bool): + # This should never happen due to the if condition above, but for type safety + continue + cached_manifests[plugin_id] = cached_result + + # Fetch uncached plugins from marketplace + if not_cached_plugin_ids: + manifests = 
marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_cached_plugin_ids) + + # Cache the fetched manifests for manifest in manifests: - cached_plugin_manifests[manifest.plugin_id] = manifest + cached_manifests[manifest.plugin_id] = manifest + _set_cached_manifest(manifest.plugin_id, manifest) - if ( - len(manifests) == 0 - ): # this indicates that the plugin not found in marketplace, should set None in cache to prevent future check - for plugin_id in not_included_plugin_ids: - cached_plugin_manifests[plugin_id] = None + # Cache plugins that were not found in marketplace + fetched_plugin_ids = {manifest.plugin_id for manifest in manifests} + for plugin_id in not_cached_plugin_ids: + if plugin_id not in fetched_plugin_ids: + cached_manifests[plugin_id] = None + _set_cached_manifest(plugin_id, None) + # Build result list from cached manifests result: list[MarketplacePluginDeclaration] = [] for plugin_id in plugin_ids_plain_list: - final_manifest = cached_plugin_manifests.get(plugin_id) - if final_manifest is not None: - result.append(final_manifest) + cached_manifest: typing.Union[MarketplacePluginDeclaration, None] = cached_manifests.get(plugin_id) + if cached_manifest is not None: + result.append(cached_manifest) return result @@ -157,10 +225,10 @@ def process_tenant_plugin_autoupgrade_check_task( ) except Exception as e: click.echo(click.style(f"Error when upgrading plugin: {e}", fg="red")) - traceback.print_exc() + # traceback.print_exc() break except Exception as e: click.echo(click.style(f"Error when checking upgradable plugin: {e}", fg="red")) - traceback.print_exc() + # traceback.print_exc() return From bfbe6365555e8b02bf93b4f2084b3eb5aa54e78b Mon Sep 17 00:00:00 2001 From: hjlarry <hjlarry@163.com> Date: Fri, 26 Sep 2025 15:18:10 +0800 Subject: [PATCH 069/126] fix docker file websocket mode --- api/docker/entrypoint.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/api/docker/entrypoint.sh b/api/docker/entrypoint.sh index ac8fc7008f..05ad1d575b 100755 --- a/api/docker/entrypoint.sh +++ b/api/docker/entrypoint.sh @@ -38,8 +38,9 @@ elif [[ "${MODE}" == "beat" ]]; then exec celery -A app.celery beat --loglevel ${LOG_LEVEL:-INFO} else if [[ "${DEBUG}" == "true" ]]; then - # TODO: add socketio support - exec flask run --host=${DIFY_BIND_ADDRESS:-0.0.0.0} --port=${DIFY_PORT:-5001} --debug + export HOST=${DIFY_BIND_ADDRESS:-0.0.0.0} + export PORT=${DIFY_PORT:-5001} + exec python -m app else exec gunicorn \ --bind "${DIFY_BIND_ADDRESS:-0.0.0.0}:${DIFY_PORT:-5001}" \ From d2f05511708db20683e059c66f550aebcf3f373b Mon Sep 17 00:00:00 2001 From: KVOJJJin <jzongcode@gmail.com> Date: Mon, 29 Sep 2025 13:42:22 +0800 Subject: [PATCH 070/126] Fix plugin detail panel not show when installed plugins more than 100 (#26405) --- web/app/components/tools/provider-list.tsx | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/web/app/components/tools/provider-list.tsx b/web/app/components/tools/provider-list.tsx index d267b49c79..08a4aa0b5d 100644 --- a/web/app/components/tools/provider-list.tsx +++ b/web/app/components/tools/provider-list.tsx @@ -17,7 +17,7 @@ import CardMoreInfo from '@/app/components/plugins/card/card-more-info' import PluginDetailPanel from '@/app/components/plugins/plugin-detail-panel' import MCPList from './mcp' import { useAllToolProviders } from '@/service/use-tools' -import { useInstalledPluginList, useInvalidateInstalledPluginList } from '@/service/use-plugins' +import { useCheckInstalled, 
useInvalidateInstalledPluginList } from '@/service/use-plugins' import { useGlobalPublicStore } from '@/context/global-public-context' import { ToolTypeEnum } from '../workflow/block-selector/types' import { useMarketplace } from './marketplace/hooks' @@ -77,12 +77,14 @@ const ProviderList = () => { const currentProvider = useMemo<Collection | undefined>(() => { return filteredCollectionList.find(collection => collection.id === currentProviderId) }, [currentProviderId, filteredCollectionList]) - const { data: pluginList } = useInstalledPluginList() + const { data: checkedInstalledData } = useCheckInstalled({ + pluginIds: currentProvider?.plugin_id ? [currentProvider.plugin_id] : [], + enabled: !!currentProvider?.plugin_id, + }) const invalidateInstalledPluginList = useInvalidateInstalledPluginList() const currentPluginDetail = useMemo(() => { - const detail = pluginList?.plugins.find(plugin => plugin.plugin_id === currentProvider?.plugin_id) - return detail - }, [currentProvider?.plugin_id, pluginList?.plugins]) + return checkedInstalledData?.plugins?.[0] + }, [checkedInstalledData]) const toolListTailRef = useRef<HTMLDivElement>(null) const showMarketplacePanel = useCallback(() => { From 00f3a53f1ca970e4e96a7bd5d93433970231449a Mon Sep 17 00:00:00 2001 From: lyzno1 <92089059+lyzno1@users.noreply.github.com> Date: Mon, 29 Sep 2025 13:47:48 +0800 Subject: [PATCH 071/126] fix(turbopack): fix animation issues with unnamed animations in CSS modules (#26408) --- .../chat/chat/loading-anim/style.module.css | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/web/app/components/base/chat/chat/loading-anim/style.module.css b/web/app/components/base/chat/chat/loading-anim/style.module.css index b1371ec82a..d5a373df6f 100644 --- a/web/app/components/base/chat/chat/loading-anim/style.module.css +++ b/web/app/components/base/chat/chat/loading-anim/style.module.css @@ -1,6 +1,6 @@ .dot-flashing { position: relative; - animation: 1s infinite linear alternate; + animation: dot-flashing 1s infinite linear alternate; animation-delay: 0.5s; } @@ -10,7 +10,7 @@ display: inline-block; position: absolute; top: 0; - animation: 1s infinite linear alternate; + animation: dot-flashing 1s infinite linear alternate; } .dot-flashing::before { @@ -51,15 +51,21 @@ border-radius: 50%; background-color: #667085; color: #667085; - animation-name: dot-flashing; + animation: dot-flashing 1s infinite linear alternate; +} + +.text { + animation-delay: 0.5s; } .text::before { left: -7px; + animation-delay: 0s; } .text::after { left: 7px; + animation-delay: 1s; } .avatar, @@ -70,13 +76,19 @@ border-radius: 50%; background-color: #155EEF; color: #155EEF; - animation-name: dot-flashing-avatar; + animation: dot-flashing-avatar 1s infinite linear alternate; +} + +.avatar { + animation-delay: 0.5s; } .avatar::before { left: -5px; + animation-delay: 0s; } .avatar::after { left: 5px; + animation-delay: 1s; } From 10d51ada59fa45420122831889972558f96afc81 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Sep 2025 13:48:00 +0800 Subject: [PATCH 072/126] chore(deps): bump oracledb from 3.0.0 to 3.3.0 in /api (#26396) Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- api/pyproject.toml | 2 +- api/uv.lock | 26 +++++++++++++------------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/api/pyproject.toml b/api/pyproject.toml index 
012702edd2..4bf0b1cac6 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -207,7 +207,7 @@ vdb = [ "couchbase~=4.3.0", "elasticsearch==8.14.0", "opensearch-py==2.4.0", - "oracledb==3.0.0", + "oracledb==3.3.0", "pgvecto-rs[sqlalchemy]~=0.2.1", "pgvector==0.2.5", "pymilvus~=2.5.0", diff --git a/api/uv.lock b/api/uv.lock index 7ce71cd215..262ef3b31d 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1646,7 +1646,7 @@ vdb = [ { name = "elasticsearch", specifier = "==8.14.0" }, { name = "mo-vector", specifier = "~=0.1.13" }, { name = "opensearch-py", specifier = "==2.4.0" }, - { name = "oracledb", specifier = "==3.0.0" }, + { name = "oracledb", specifier = "==3.3.0" }, { name = "pgvecto-rs", extras = ["sqlalchemy"], specifier = "~=0.2.1" }, { name = "pgvector", specifier = "==0.2.5" }, { name = "pymilvus", specifier = "~=2.5.0" }, @@ -4079,23 +4079,23 @@ numpy = [ [[package]] name = "oracledb" -version = "3.0.0" +version = "3.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/39/712f797b75705c21148fa1d98651f63c2e5cc6876e509a0a9e2f5b406572/oracledb-3.0.0.tar.gz", hash = "sha256:64dc86ee5c032febc556798b06e7b000ef6828bb0252084f6addacad3363db85", size = 840431, upload-time = "2025-03-03T19:36:12.223Z" } +sdist = { url = "https://files.pythonhosted.org/packages/51/c9/fae18fa5d803712d188486f8e86ad4f4e00316793ca19745d7c11092c360/oracledb-3.3.0.tar.gz", hash = "sha256:e830d3544a1578296bcaa54c6e8c8ae10a58c7db467c528c4b27adbf9c8b4cb0", size = 811776, upload-time = "2025-07-29T22:34:10.489Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/bf/d872c4b3fc15cd3261fe0ea72b21d181700c92dbc050160e161654987062/oracledb-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:52daa9141c63dfa75c07d445e9bb7f69f43bfb3c5a173ecc48c798fe50288d26", size = 4312963, upload-time = "2025-03-03T19:36:32.576Z" }, - { url = "https://files.pythonhosted.org/packages/b1/ea/01ee29e76a610a53bb34fdc1030f04b7669c3f80b25f661e07850fc6160e/oracledb-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af98941789df4c6aaaf4338f5b5f6b7f2c8c3fe6f8d6a9382f177f350868747a", size = 2661536, upload-time = "2025-03-03T19:36:34.904Z" }, - { url = "https://files.pythonhosted.org/packages/3d/8e/ad380e34a46819224423b4773e58c350bc6269643c8969604097ced8c3bc/oracledb-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9812bb48865aaec35d73af54cd1746679f2a8a13cbd1412ab371aba2e39b3943", size = 2867461, upload-time = "2025-03-03T19:36:36.508Z" }, - { url = "https://files.pythonhosted.org/packages/96/09/ecc4384a27fd6e1e4de824ae9c160e4ad3aaebdaade5b4bdcf56a4d1ff63/oracledb-3.0.0-cp311-cp311-win32.whl", hash = "sha256:6c27fe0de64f2652e949eb05b3baa94df9b981a4a45fa7f8a991e1afb450c8e2", size = 1752046, upload-time = "2025-03-03T19:36:38.313Z" }, - { url = "https://files.pythonhosted.org/packages/62/e8/f34bde24050c6e55eeba46b23b2291f2dd7fd272fa8b322dcbe71be55778/oracledb-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:f922709672002f0b40997456f03a95f03e5712a86c61159951c5ce09334325e0", size = 2101210, upload-time = "2025-03-03T19:36:40.669Z" }, - { url = "https://files.pythonhosted.org/packages/6f/fc/24590c3a3d41e58494bd3c3b447a62835138e5f9b243d9f8da0cfb5da8dc/oracledb-3.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:acd0e747227dea01bebe627b07e958bf36588a337539f24db629dc3431d3f7eb", size = 4351993, upload-time = 
"2025-03-03T19:36:42.577Z" }, - { url = "https://files.pythonhosted.org/packages/b7/b6/1f3b0b7bb94d53e8857d77b2e8dbdf6da091dd7e377523e24b79dac4fd71/oracledb-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8b402f77c22af031cd0051aea2472ecd0635c1b452998f511aa08b7350c90a4", size = 2532640, upload-time = "2025-03-03T19:36:45.066Z" }, - { url = "https://files.pythonhosted.org/packages/72/1a/1815f6c086ab49c00921cf155ff5eede5267fb29fcec37cb246339a5ce4d/oracledb-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:378a27782e9a37918bd07a5a1427a77cb6f777d0a5a8eac9c070d786f50120ef", size = 2765949, upload-time = "2025-03-03T19:36:47.47Z" }, - { url = "https://files.pythonhosted.org/packages/33/8d/208900f8d372909792ee70b2daad3f7361181e55f2217c45ed9dff658b54/oracledb-3.0.0-cp312-cp312-win32.whl", hash = "sha256:54a28c2cb08316a527cd1467740a63771cc1c1164697c932aa834c0967dc4efc", size = 1709373, upload-time = "2025-03-03T19:36:49.67Z" }, - { url = "https://files.pythonhosted.org/packages/0c/5e/c21754f19c896102793c3afec2277e2180aa7d505e4d7fcca24b52d14e4f/oracledb-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8289bad6d103ce42b140e40576cf0c81633e344d56e2d738b539341eacf65624", size = 2056452, upload-time = "2025-03-03T19:36:51.363Z" }, + { url = "https://files.pythonhosted.org/packages/3f/35/95d9a502fdc48ce1ef3a513ebd027488353441e15aa0448619abb3d09d32/oracledb-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d9adb74f837838e21898d938e3a725cf73099c65f98b0b34d77146b453e945e0", size = 3963945, upload-time = "2025-07-29T22:34:28.633Z" }, + { url = "https://files.pythonhosted.org/packages/16/a7/8f1ef447d995bb51d9fdc36356697afeceb603932f16410c12d52b2df1a4/oracledb-3.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b063d1007882570f170ebde0f364e78d4a70c8f015735cc900663278b9ceef7", size = 2449385, upload-time = "2025-07-29T22:34:30.592Z" }, + { url = "https://files.pythonhosted.org/packages/b3/fa/6a78480450bc7d256808d0f38ade3385735fb5a90dab662167b4257dcf94/oracledb-3.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:187728f0a2d161676b8c581a9d8f15d9631a8fea1e628f6d0e9fa2f01280cd22", size = 2634943, upload-time = "2025-07-29T22:34:33.142Z" }, + { url = "https://files.pythonhosted.org/packages/5b/90/ea32b569a45fb99fac30b96f1ac0fb38b029eeebb78357bc6db4be9dde41/oracledb-3.3.0-cp311-cp311-win32.whl", hash = "sha256:920f14314f3402c5ab98f2efc5932e0547e9c0a4ca9338641357f73844e3e2b1", size = 1483549, upload-time = "2025-07-29T22:34:35.015Z" }, + { url = "https://files.pythonhosted.org/packages/81/55/ae60f72836eb8531b630299f9ed68df3fe7868c6da16f820a108155a21f9/oracledb-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:825edb97976468db1c7e52c78ba38d75ce7e2b71a2e88f8629bcf02be8e68a8a", size = 1834737, upload-time = "2025-07-29T22:34:36.824Z" }, + { url = "https://files.pythonhosted.org/packages/08/a8/f6b7809d70e98e113786d5a6f1294da81c046d2fa901ad656669fc5d7fae/oracledb-3.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9d25e37d640872731ac9b73f83cbc5fc4743cd744766bdb250488caf0d7696a8", size = 3943512, upload-time = "2025-07-29T22:34:39.237Z" }, + { url = "https://files.pythonhosted.org/packages/df/b9/8145ad8991f4864d3de4a911d439e5bc6cdbf14af448f3ab1e846a54210c/oracledb-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:b0bf7cdc2b668f939aa364f552861bc7a149d7cd3f3794730d43ef07613b2bf9", size = 2276258, upload-time = "2025-07-29T22:34:41.547Z" }, + { url = "https://files.pythonhosted.org/packages/56/bf/f65635ad5df17d6e4a2083182750bb136ac663ff0e9996ce59d77d200f60/oracledb-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe20540fde64a6987046807ea47af93be918fd70b9766b3eb803c01e6d4202e", size = 2458811, upload-time = "2025-07-29T22:34:44.648Z" }, + { url = "https://files.pythonhosted.org/packages/7d/30/e0c130b6278c10b0e6cd77a3a1a29a785c083c549676cf701c5d180b8e63/oracledb-3.3.0-cp312-cp312-win32.whl", hash = "sha256:db080be9345cbf9506ffdaea3c13d5314605355e76d186ec4edfa49960ffb813", size = 1445525, upload-time = "2025-07-29T22:34:46.603Z" }, + { url = "https://files.pythonhosted.org/packages/1a/5c/7254f5e1a33a5d6b8bf6813d4f4fdcf5c4166ec8a7af932d987879d5595c/oracledb-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:be81e3afe79f6c8ece79a86d6067ad1572d2992ce1c590a086f3755a09535eb4", size = 1789976, upload-time = "2025-07-29T22:34:48.5Z" }, ] [[package]] From c8c94ef87036100597ff848aa89f9603b2e29f2c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Sep 2025 13:48:24 +0800 Subject: [PATCH 073/126] chore(deps): bump cos-python-sdk-v5 from 1.9.30 to 1.9.38 in /api (#26395) Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- api/pyproject.toml | 2 +- api/uv.lock | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/api/pyproject.toml b/api/pyproject.toml index 4bf0b1cac6..485fb918ed 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -180,7 +180,7 @@ dev = [ storage = [ "azure-storage-blob==12.13.0", "bce-python-sdk~=0.9.23", - "cos-python-sdk-v5==1.9.30", + "cos-python-sdk-v5==1.9.38", "esdk-obs-python==3.24.6.1", "google-cloud-storage==2.16.0", "opendal~=0.46.0", diff --git a/api/uv.lock b/api/uv.lock index 262ef3b31d..87799977ca 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1076,7 +1076,7 @@ wheels = [ [[package]] name = "cos-python-sdk-v5" -version = "1.9.30" +version = "1.9.38" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "crcmod" }, @@ -1085,7 +1085,10 @@ dependencies = [ { name = "six" }, { name = "xmltodict" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c4/f2/be99b41433b33a76896680920fca621f191875ca410a66778015e47a501b/cos-python-sdk-v5-1.9.30.tar.gz", hash = "sha256:a23fd090211bf90883066d90cd74317860aa67c6d3aa80fe5e44b18c7e9b2a81", size = 108384, upload-time = "2024-06-14T08:02:37.063Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/3c/d208266fec7cc3221b449e236b87c3fc1999d5ac4379d4578480321cfecc/cos_python_sdk_v5-1.9.38.tar.gz", hash = "sha256:491a8689ae2f1a6f04dacba66a877b2c8d361456f9cfd788ed42170a1cbf7a9f", size = 98092, upload-time = "2025-07-22T07:56:20.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/c8/c9c156aa3bc7caba9b4f8a2b6abec3da6263215988f3fec0ea843f137a10/cos_python_sdk_v5-1.9.38-py3-none-any.whl", hash = "sha256:1d3dd3be2bd992b2e9c2dcd018e2596aa38eab022dbc86b4a5d14c8fc88370e6", size = 92601, upload-time = "2025-08-17T05:12:30.867Z" }, +] [[package]] name = "couchbase" @@ -1624,7 +1627,7 @@ dev = [ storage = [ { name = "azure-storage-blob", specifier = "==12.13.0" }, { name = "bce-python-sdk", specifier = "~=0.9.23" }, - { name = "cos-python-sdk-v5", specifier = 
"==1.9.30" }, + { name = "cos-python-sdk-v5", specifier = "==1.9.38" }, { name = "esdk-obs-python", specifier = "==3.24.6.1" }, { name = "google-cloud-storage", specifier = "==2.16.0" }, { name = "opendal", specifier = "~=0.46.0" }, From 756864c85b7bdd80b3a1d35c4e0d7e2cc39ac7c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Sep 2025 13:48:31 +0800 Subject: [PATCH 074/126] chore(deps-dev): bump @testing-library/jest-dom from 6.6.3 to 6.8.0 in /web (#26393) Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- web/package.json | 2 +- web/pnpm-lock.yaml | 31 ++++++++++++++++++------------- 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/web/package.json b/web/package.json index cf49429659..269ff8cba4 100644 --- a/web/package.json +++ b/web/package.json @@ -177,7 +177,7 @@ "@storybook/react": "8.5.0", "@storybook/test": "8.5.0", "@testing-library/dom": "^10.4.0", - "@testing-library/jest-dom": "^6.6.2", + "@testing-library/jest-dom": "^6.8.0", "@testing-library/react": "^16.0.1", "@types/crypto-js": "^4.2.2", "@types/dagre": "^0.7.52", diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index e47985fd71..38f120c126 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -450,8 +450,8 @@ importers: specifier: ^10.4.0 version: 10.4.0 '@testing-library/jest-dom': - specifier: ^6.6.2 - version: 6.6.3 + specifier: ^6.8.0 + version: 6.8.0 '@testing-library/react': specifier: ^16.0.1 version: 16.3.0(@testing-library/dom@10.4.0)(@types/react-dom@19.1.7(@types/react@19.1.11))(@types/react@19.1.11)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) @@ -593,8 +593,8 @@ importers: packages: - '@adobe/css-tools@4.4.3': - resolution: {integrity: sha512-VQKMkwriZbaOgVCby1UDY/LDk5fIjhQicCvVPFqfe+69fWaPWydbWJ3wRt59/YzIwda1I81loas3oCoHxnqvdA==} + '@adobe/css-tools@4.4.4': + resolution: {integrity: sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg==} '@alloc/quick-lru@5.2.0': resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==} @@ -3208,8 +3208,8 @@ packages: resolution: {integrity: sha512-xGGHpBXYSHUUr6XsKBfs85TWlYKpTc37cSBBVrXcib2MkHLboWlkClhWF37JKlDb9KEq3dHs+f2xR7XJEWGBxA==} engines: {node: '>=14', npm: '>=6', yarn: '>=1'} - '@testing-library/jest-dom@6.6.3': - resolution: {integrity: sha512-IteBhl4XqYNkM54f4ejhLRJiZNqcSCoXUOG2CPK7qbD322KjQozM4kHQOfkG2oln9b9HTYqs+Sae8vBATubxxA==} + '@testing-library/jest-dom@6.8.0': + resolution: {integrity: sha512-WgXcWzVM6idy5JaftTVC8Vs83NKRmGJz4Hqs4oyOuO2J4r/y79vvKZsb+CaGyCSEbUPI6OsewfPd0G1A0/TUZQ==} engines: {node: '>=14', npm: '>=6', yarn: '>=1'} '@testing-library/react@16.3.0': @@ -8216,6 +8216,10 @@ packages: resolution: {integrity: sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==} engines: {node: '>=12'} + strip-indent@4.1.0: + resolution: {integrity: sha512-OA95x+JPmL7kc7zCu+e+TeYxEiaIyndRx0OrBcK2QPPH09oAndr2ALvymxWA+Lx1PYYvFUm4O63pRkdJAaW96w==} + engines: {node: '>=12'} + strip-json-comments@3.1.1: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} @@ -9006,7 +9010,7 @@ packages: snapshots: - '@adobe/css-tools@4.4.3': {} + '@adobe/css-tools@4.4.4': {} '@alloc/quick-lru@5.2.0': {} @@ -12175,7 +12179,7 @@ snapshots: '@testing-library/jest-dom@6.5.0': 
dependencies: - '@adobe/css-tools': 4.4.3 + '@adobe/css-tools': 4.4.4 aria-query: 5.3.2 chalk: 3.0.0 css.escape: 1.5.1 @@ -12183,14 +12187,13 @@ snapshots: lodash: 4.17.21 redent: 3.0.0 - '@testing-library/jest-dom@6.6.3': + '@testing-library/jest-dom@6.8.0': dependencies: - '@adobe/css-tools': 4.4.3 + '@adobe/css-tools': 4.4.4 aria-query: 5.3.2 - chalk: 3.0.0 css.escape: 1.5.1 dom-accessibility-api: 0.6.3 - lodash: 4.17.21 + picocolors: 1.1.1 redent: 3.0.0 '@testing-library/react@16.3.0(@testing-library/dom@10.4.0)(@types/react-dom@19.1.7(@types/react@19.1.11))(@types/react@19.1.11)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': @@ -17474,7 +17477,7 @@ snapshots: '@types/resolve': 1.20.6 doctrine: 3.0.0 resolve: 1.22.10 - strip-indent: 4.0.0 + strip-indent: 4.1.0 transitivePeerDependencies: - supports-color @@ -18313,6 +18316,8 @@ snapshots: dependencies: min-indent: 1.0.1 + strip-indent@4.1.0: {} + strip-json-comments@3.1.1: {} style-loader@3.3.4(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)): From 30617feff85fa2a08090fe42eada4171661ffff3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Sep 2025 13:50:36 +0800 Subject: [PATCH 075/126] chore(deps-dev): bump @eslint/js from 9.31.0 to 9.36.0 in /web (#26392) Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- web/package.json | 2 +- web/pnpm-lock.yaml | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/web/package.json b/web/package.json index 269ff8cba4..f40c346f82 100644 --- a/web/package.json +++ b/web/package.json @@ -158,7 +158,7 @@ "@chromatic-com/storybook": "^3.1.0", "@eslint-react/eslint-plugin": "^1.15.0", "@eslint/eslintrc": "^3.1.0", - "@eslint/js": "^9.20.0", + "@eslint/js": "^9.36.0", "@faker-js/faker": "^9.0.3", "@happy-dom/jest-environment": "^17.4.4", "@mdx-js/loader": "^3.1.0", diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index 38f120c126..764202490e 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -393,8 +393,8 @@ importers: specifier: ^3.1.0 version: 3.3.1 '@eslint/js': - specifier: ^9.20.0 - version: 9.31.0 + specifier: ^9.36.0 + version: 9.36.0 '@faker-js/faker': specifier: ^9.0.3 version: 9.9.0 @@ -1660,14 +1660,14 @@ packages: resolution: {integrity: sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/js@9.31.0': - resolution: {integrity: sha512-LOm5OVt7D4qiKCqoiPbA7LWmI+tbw1VbTUowBcUMgQSuM6poJufkFkYDcQpo5KfgD39TnNySV26QjOh7VFpSyw==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/js@9.35.0': resolution: {integrity: sha512-30iXE9whjlILfWobBkNerJo+TXYsgVM5ERQwMcMKCHckHflCmf7wXDAHlARoWnh0s1U72WqlbeyE7iAcCzuCPw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@eslint/js@9.36.0': + resolution: {integrity: sha512-uhCbYtYynH30iZErszX78U+nR3pJU3RHGQ57NXy5QupD4SBVwDeU8TNBy+MjMngc1UyIW9noKqsRqfjQTBU2dw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@eslint/markdown@7.1.0': resolution: {integrity: sha512-Y+X1B1j+/zupKDVJfkKc8uYMjQkGzfnd8lt7vK3y8x9Br6H5dBuhAfFrQ6ff7HAMm/1BwgecyEiRFkYCWPRxmA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -10312,10 +10312,10 @@ snapshots: transitivePeerDependencies: - supports-color - '@eslint/js@9.31.0': {} - '@eslint/js@9.35.0': {} + '@eslint/js@9.36.0': {} + '@eslint/markdown@7.1.0': dependencies: '@eslint/core': 0.15.1 From 
70bc5ca7f436edb655aba52d558ca71a2e01e758 Mon Sep 17 00:00:00 2001 From: Timo <57227498+EchterTimo@users.noreply.github.com> Date: Mon, 29 Sep 2025 07:50:53 +0200 Subject: [PATCH 076/126] Add missing import "IO" in client.py (#26389) Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- sdks/python-client/dify_client/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdks/python-client/dify_client/client.py b/sdks/python-client/dify_client/client.py index 201391eae9..2154741e91 100644 --- a/sdks/python-client/dify_client/client.py +++ b/sdks/python-client/dify_client/client.py @@ -1,5 +1,5 @@ import json -from typing import Literal +from typing import IO, Literal import requests From 595df172a8526afa81de791e7cff4b00bec7089d Mon Sep 17 00:00:00 2001 From: zxhlyh <jasonapring2015@outlook.com> Date: Mon, 29 Sep 2025 13:51:48 +0800 Subject: [PATCH 077/126] fix: model list refresh when change credential (#26421) --- .../header/account-setting/model-provider-page/hooks.ts | 9 ++++++--- .../model-provider-page/model-auth/hooks/use-auth.ts | 6 +++--- .../provider-added-card/model-load-balancing-modal.tsx | 2 +- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/web/app/components/header/account-setting/model-provider-page/hooks.ts b/web/app/components/header/account-setting/model-provider-page/hooks.ts index b10aeeb47e..48dc609795 100644 --- a/web/app/components/header/account-setting/model-provider-page/hooks.ts +++ b/web/app/components/header/account-setting/model-provider-page/hooks.ts @@ -323,15 +323,18 @@ export const useRefreshModel = () => { const { eventEmitter } = useEventEmitterContextContext() const updateModelProviders = useUpdateModelProviders() const updateModelList = useUpdateModelList() - const handleRefreshModel = useCallback((provider: ModelProvider, configurationMethod: ConfigurationMethodEnum, CustomConfigurationModelFixedFields?: CustomConfigurationModelFixedFields) => { + const handleRefreshModel = useCallback(( + provider: ModelProvider, + CustomConfigurationModelFixedFields?: CustomConfigurationModelFixedFields, + refreshModelList?: boolean, + ) => { updateModelProviders() provider.supported_model_types.forEach((type) => { updateModelList(type) }) - if (configurationMethod === ConfigurationMethodEnum.customizableModel - && provider.custom_configuration.status === CustomConfigurationStatusEnum.active) { + if (refreshModelList && provider.custom_configuration.status === CustomConfigurationStatusEnum.active) { eventEmitter?.emit({ type: UPDATE_MODEL_PROVIDER_CUSTOM_MODEL_LIST, payload: provider.provider, diff --git a/web/app/components/header/account-setting/model-provider-page/model-auth/hooks/use-auth.ts b/web/app/components/header/account-setting/model-provider-page/model-auth/hooks/use-auth.ts index 14b21be7f7..3136a70563 100644 --- a/web/app/components/header/account-setting/model-provider-page/model-auth/hooks/use-auth.ts +++ b/web/app/components/header/account-setting/model-provider-page/model-auth/hooks/use-auth.ts @@ -90,7 +90,7 @@ export const useAuth = ( type: 'success', message: t('common.api.actionSuccess'), }) - handleRefreshModel(provider, configurationMethod, undefined) + handleRefreshModel(provider, undefined, true) } finally { handleSetDoingAction(false) @@ -125,7 +125,7 @@ export const useAuth = ( type: 'success', message: t('common.api.actionSuccess'), }) - handleRefreshModel(provider, configurationMethod, undefined) + handleRefreshModel(provider, undefined, true) 
        onRemove?.(pendingOperationCredentialId.current ?? '')
        closeConfirmDelete()
      }
@@ -147,7 +147,7 @@ export const useAuth = (
 
      if (res.result === 'success') {
        notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
-        handleRefreshModel(provider, configurationMethod, undefined)
+        handleRefreshModel(provider, undefined, !payload.credential_id)
      }
    }
    finally {
diff --git a/web/app/components/header/account-setting/model-provider-page/provider-added-card/model-load-balancing-modal.tsx b/web/app/components/header/account-setting/model-provider-page/provider-added-card/model-load-balancing-modal.tsx
index 070c2ee90f..090147897b 100644
--- a/web/app/components/header/account-setting/model-provider-page/provider-added-card/model-load-balancing-modal.tsx
+++ b/web/app/components/header/account-setting/model-provider-page/provider-added-card/model-load-balancing-modal.tsx
@@ -159,7 +159,7 @@ const ModelLoadBalancingModal = ({
      )
      if (res.result === 'success') {
        notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
-        handleRefreshModel(provider, configurateMethod, currentCustomConfigurationModelFixedFields)
+        handleRefreshModel(provider, currentCustomConfigurationModelFixedFields, false)
        onSave?.(provider.provider)
        onClose?.()
      }

From 4ba6de1116420eba5a288b5b2ebe55b794d919cb Mon Sep 17 00:00:00 2001
From: hjlarry <hjlarry@163.com>
Date: Mon, 29 Sep 2025 14:01:42 +0800
Subject: [PATCH 078/126] add more checks for leader session

---
 api/controllers/console/app/online_user.py | 43 +++++++++++++++++++---
 1 file changed, 38 insertions(+), 5 deletions(-)

diff --git a/api/controllers/console/app/online_user.py b/api/controllers/console/app/online_user.py
index f69ea32752..d3103a6666 100644
--- a/api/controllers/console/app/online_user.py
+++ b/api/controllers/console/app/online_user.py
@@ -99,20 +99,53 @@ def handle_disconnect(sid):
         broadcast_online_users(workflow_id)
 
 
-def get_or_set_leader(workflow_id, sid):
+def _clear_session_state(workflow_id: str, sid: str) -> None:
+    redis_client.hdel(f"workflow_online_users:{workflow_id}", sid)
+    redis_client.delete(f"ws_sid_map:{sid}")
+
+
+def _is_session_active(workflow_id: str, sid: str) -> bool:
+    if not sid:
+        return False
+
+    try:
+        if not sio.manager.is_connected(sid, "/"):
+            return False
+    except AttributeError:
+        return False
+
+    if not redis_client.hexists(f"workflow_online_users:{workflow_id}", sid):
+        return False
+
+    if not redis_client.exists(f"ws_sid_map:{sid}"):
+        return False
+
+    return True
+
+
+def get_or_set_leader(workflow_id: str, sid: str) -> str:
     """
-    Get current leader session or set this session as leader if no leader exists.
+    Get current leader session or set this session as leader if no valid leader exists.
     Returns the leader session id (sid).
     """
     leader_key = f"workflow_leader:{workflow_id}"
-    current_leader = redis_client.get(leader_key)
+    raw_leader = redis_client.get(leader_key)
+    current_leader = raw_leader.decode("utf-8") if isinstance(raw_leader, bytes) else raw_leader
+    leader_replaced = False
+
+    if current_leader and not _is_session_active(workflow_id, current_leader):
+        _clear_session_state(workflow_id, current_leader)
+        redis_client.delete(leader_key)
+        current_leader = None
+        leader_replaced = True
 
     if not current_leader:
-        # No leader exists, make this session the leader
         redis_client.set(leader_key, sid, ex=3600)  # Expire in 1 hour
+        if leader_replaced:
+            broadcast_leader_change(workflow_id, sid)
         return sid
 
-    return current_leader.decode("utf-8") if isinstance(current_leader, bytes) else current_leader
+    return current_leader
 
 
 def handle_leader_disconnect(workflow_id, disconnected_sid):

From af662b100bd8b74d23b730c2207806208b980850 Mon Sep 17 00:00:00 2001
From: JoJohanse <97782983+JoJohanse@users.noreply.github.com>
Date: Mon, 29 Sep 2025 14:35:05 +0800
Subject: [PATCH 079/126] Fixes #26332 Remove FILES_URL in default .yaml settings (#26410)

Co-authored-by: crazywoola <427733928@qq.com>
---
 docker/.env.example        | 2 +-
 docker/docker-compose.yaml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/docker/.env.example b/docker/.env.example
index c0f084796e..8c23bfc9b7 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -45,7 +45,7 @@ APP_WEB_URL=
 # Recommendation: use a dedicated domain (e.g., https://upload.example.com).
 # Alternatively, use http://<your-ip>:5001 or http://api:5001,
 # ensuring port 5001 is externally accessible (see docker-compose.yaml).
-FILES_URL=http://api:5001
+FILES_URL=
 # INTERNAL_FILES_URL is used for plugin daemon communication within Docker network.
 # Set this to the internal Docker service URL for proper plugin file access.
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 2617f84e7d..cc66f69550 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -10,7 +10,7 @@ x-shared-env: &shared-api-worker-env SERVICE_API_URL: ${SERVICE_API_URL:-} APP_API_URL: ${APP_API_URL:-} APP_WEB_URL: ${APP_WEB_URL:-} - FILES_URL: ${FILES_URL:-http://api:5001} + FILES_URL: ${FILES_URL:-} INTERNAL_FILES_URL: ${INTERNAL_FILES_URL:-} LANG: ${LANG:-en_US.UTF-8} LC_ALL: ${LC_ALL:-en_US.UTF-8} From 1a7898dff1a69791854a57d773520cb170e18bc6 Mon Sep 17 00:00:00 2001 From: Wu Tianwei <30284043+WTW0313@users.noreply.github.com> Date: Mon, 29 Sep 2025 14:58:28 +0800 Subject: [PATCH 080/126] fix: Fix retrieval configuration handling in dataset components (#26361) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .../configuration/dataset-config/index.tsx | 31 +++- .../params-config/config-content.tsx | 27 ++-- .../components/app/configuration/index.tsx | 32 +++-- .../common/retrieval-method-config/index.tsx | 4 +- .../common/retrieval-param-config/index.tsx | 4 +- .../components/retrieval-config.tsx | 100 ++++++------- .../nodes/knowledge-retrieval/default.ts | 4 +- .../nodes/knowledge-retrieval/panel.tsx | 10 +- .../nodes/knowledge-retrieval/use-config.ts | 6 +- .../nodes/knowledge-retrieval/utils.ts | 132 ++++++++++++------ 10 files changed, 216 insertions(+), 134 deletions(-) diff --git a/web/app/components/app/configuration/dataset-config/index.tsx b/web/app/components/app/configuration/dataset-config/index.tsx index 6165cfdeec..65ef74bc27 100644 --- a/web/app/components/app/configuration/dataset-config/index.tsx +++ b/web/app/components/app/configuration/dataset-config/index.tsx @@ -65,13 +65,40 @@ const DatasetConfig: FC = () => { const onRemove = (id: string) => { const filteredDataSets = dataSet.filter(item => item.id !== id) setDataSet(filteredDataSets) - const retrievalConfig = getMultipleRetrievalConfig(datasetConfigs as any, filteredDataSets, dataSet, { + const { datasets, retrieval_model, score_threshold_enabled, ...restConfigs } = datasetConfigs + const { + top_k, + score_threshold, + reranking_model, + reranking_mode, + weights, + reranking_enable, + } = restConfigs + const oldRetrievalConfig = { + top_k, + score_threshold, + reranking_model: (reranking_model.reranking_provider_name && reranking_model.reranking_model_name) ? 
{ + provider: reranking_model.reranking_provider_name, + model: reranking_model.reranking_model_name, + } : undefined, + reranking_mode, + weights, + reranking_enable, + } + const retrievalConfig = getMultipleRetrievalConfig(oldRetrievalConfig, filteredDataSets, dataSet, { provider: currentRerankProvider?.provider, model: currentRerankModel?.model, }) setDatasetConfigs({ - ...(datasetConfigs as any), + ...datasetConfigsRef.current, ...retrievalConfig, + reranking_model: { + reranking_provider_name: retrievalConfig?.reranking_model?.provider || '', + reranking_model_name: retrievalConfig?.reranking_model?.model || '', + }, + retrieval_model, + score_threshold_enabled, + datasets, }) const { allExternal, diff --git a/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx b/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx index cb61b927bc..1558d32fc6 100644 --- a/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx +++ b/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx @@ -30,11 +30,11 @@ import { noop } from 'lodash-es' type Props = { datasetConfigs: DatasetConfigs onChange: (configs: DatasetConfigs, isRetrievalModeChange?: boolean) => void + selectedDatasets?: DataSet[] isInWorkflow?: boolean singleRetrievalModelConfig?: ModelConfig onSingleRetrievalModelChange?: (config: ModelConfig) => void onSingleRetrievalModelParamsChange?: (config: ModelConfig) => void - selectedDatasets?: DataSet[] } const ConfigContent: FC<Props> = ({ @@ -61,22 +61,28 @@ const ConfigContent: FC<Props> = ({ const { modelList: rerankModelList, + currentModel: validDefaultRerankModel, + currentProvider: validDefaultRerankProvider, } = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank) + /** + * If reranking model is set and is valid, use the reranking model + * Otherwise, check if the default reranking model is valid + */ const { currentModel: currentRerankModel, } = useCurrentProviderAndModel( rerankModelList, { - provider: datasetConfigs.reranking_model?.reranking_provider_name, - model: datasetConfigs.reranking_model?.reranking_model_name, + provider: datasetConfigs.reranking_model?.reranking_provider_name || validDefaultRerankProvider?.provider || '', + model: datasetConfigs.reranking_model?.reranking_model_name || validDefaultRerankModel?.model || '', }, ) const rerankModel = useMemo(() => { return { - provider_name: datasetConfigs?.reranking_model?.reranking_provider_name ?? '', - model_name: datasetConfigs?.reranking_model?.reranking_model_name ?? '', + provider_name: datasetConfigs.reranking_model?.reranking_provider_name ?? '', + model_name: datasetConfigs.reranking_model?.reranking_model_name ?? 
'', } }, [datasetConfigs.reranking_model]) @@ -135,7 +141,7 @@ const ConfigContent: FC<Props> = ({ }) } - const model = singleRetrievalConfig + const model = singleRetrievalConfig // Legacy code, for compatibility, have to keep it const rerankingModeOptions = [ { @@ -158,7 +164,7 @@ const ConfigContent: FC<Props> = ({ const canManuallyToggleRerank = useMemo(() => { return (selectedDatasetsMode.allInternal && selectedDatasetsMode.allEconomic) - || selectedDatasetsMode.allExternal + || selectedDatasetsMode.allExternal }, [selectedDatasetsMode.allEconomic, selectedDatasetsMode.allExternal, selectedDatasetsMode.allInternal]) const showRerankModel = useMemo(() => { @@ -168,7 +174,7 @@ const ConfigContent: FC<Props> = ({ return datasetConfigs.reranking_enable }, [datasetConfigs.reranking_enable, canManuallyToggleRerank]) - const handleDisabledSwitchClick = useCallback((enable: boolean) => { + const handleManuallyToggleRerank = useCallback((enable: boolean) => { if (!currentRerankModel && enable) Toast.notify({ type: 'error', message: t('workflow.errorMsg.rerankModelRequired') }) onChange({ @@ -255,12 +261,11 @@ const ConfigContent: FC<Props> = ({ <div className='mt-2'> <div className='flex items-center'> { - selectedDatasetsMode.allEconomic && !selectedDatasetsMode.mixtureInternalAndExternal && ( + canManuallyToggleRerank && ( <Switch size='md' defaultValue={showRerankModel} - disabled={!canManuallyToggleRerank} - onChange={handleDisabledSwitchClick} + onChange={handleManuallyToggleRerank} /> ) } diff --git a/web/app/components/app/configuration/index.tsx b/web/app/components/app/configuration/index.tsx index 091900642a..f1f81ebf97 100644 --- a/web/app/components/app/configuration/index.tsx +++ b/web/app/components/app/configuration/index.tsx @@ -284,18 +284,28 @@ const Configuration: FC = () => { setRerankSettingModalOpen(true) const { datasets, retrieval_model, score_threshold_enabled, ...restConfigs } = datasetConfigs + const { + top_k, + score_threshold, + reranking_model, + reranking_mode, + weights, + reranking_enable, + } = restConfigs - const retrievalConfig = getMultipleRetrievalConfig({ - top_k: restConfigs.top_k, - score_threshold: restConfigs.score_threshold, - reranking_model: restConfigs.reranking_model && { - provider: restConfigs.reranking_model.reranking_provider_name, - model: restConfigs.reranking_model.reranking_model_name, - }, - reranking_mode: restConfigs.reranking_mode, - weights: restConfigs.weights, - reranking_enable: restConfigs.reranking_enable, - }, newDatasets, dataSets, { + const oldRetrievalConfig = { + top_k, + score_threshold, + reranking_model: (reranking_model.reranking_provider_name && reranking_model.reranking_model_name) ? 
{ + provider: reranking_model.reranking_provider_name, + model: reranking_model.reranking_model_name, + } : undefined, + reranking_mode, + weights, + reranking_enable, + } + + const retrievalConfig = getMultipleRetrievalConfig(oldRetrievalConfig, newDatasets, dataSets, { provider: currentRerankProvider?.provider, model: currentRerankModel?.model, }) diff --git a/web/app/components/datasets/common/retrieval-method-config/index.tsx b/web/app/components/datasets/common/retrieval-method-config/index.tsx index 57d357442f..ed230c52ce 100644 --- a/web/app/components/datasets/common/retrieval-method-config/index.tsx +++ b/web/app/components/datasets/common/retrieval-method-config/index.tsx @@ -40,7 +40,7 @@ const RetrievalMethodConfig: FC<Props> = ({ onChange({ ...value, search_method: retrieveMethod, - ...(!value.reranking_model.reranking_model_name + ...((!value.reranking_model.reranking_model_name || !value.reranking_model.reranking_provider_name) ? { reranking_model: { reranking_provider_name: isRerankDefaultModelValid ? rerankDefaultModel?.provider?.provider ?? '' : '', @@ -57,7 +57,7 @@ const RetrievalMethodConfig: FC<Props> = ({ onChange({ ...value, search_method: retrieveMethod, - ...(!value.reranking_model.reranking_model_name + ...((!value.reranking_model.reranking_model_name || !value.reranking_model.reranking_provider_name) ? { reranking_model: { reranking_provider_name: isRerankDefaultModelValid ? rerankDefaultModel?.provider?.provider ?? '' : '', diff --git a/web/app/components/datasets/common/retrieval-param-config/index.tsx b/web/app/components/datasets/common/retrieval-param-config/index.tsx index 216a56ab16..0c28149d56 100644 --- a/web/app/components/datasets/common/retrieval-param-config/index.tsx +++ b/web/app/components/datasets/common/retrieval-param-config/index.tsx @@ -54,7 +54,7 @@ const RetrievalParamConfig: FC<Props> = ({ }, ) - const handleDisabledSwitchClick = useCallback((enable: boolean) => { + const handleToggleRerankEnable = useCallback((enable: boolean) => { if (enable && !currentModel) Toast.notify({ type: 'error', message: t('workflow.errorMsg.rerankModelRequired') }) onChange({ @@ -119,7 +119,7 @@ const RetrievalParamConfig: FC<Props> = ({ <Switch size='md' defaultValue={value.reranking_enable} - onChange={handleDisabledSwitchClick} + onChange={handleToggleRerankEnable} /> )} <div className='flex items-center'> diff --git a/web/app/components/workflow/nodes/knowledge-retrieval/components/retrieval-config.tsx b/web/app/components/workflow/nodes/knowledge-retrieval/components/retrieval-config.tsx index 8a3dc1efba..619216d672 100644 --- a/web/app/components/workflow/nodes/knowledge-retrieval/components/retrieval-config.tsx +++ b/web/app/components/workflow/nodes/knowledge-retrieval/components/retrieval-config.tsx @@ -1,6 +1,6 @@ 'use client' import type { FC } from 'react' -import React, { useCallback, useState } from 'react' +import React, { useCallback, useMemo } from 'react' import { RiEqualizer2Line } from '@remixicon/react' import { useTranslation } from 'react-i18next' import type { MultipleRetrievalConfig, SingleRetrievalConfig } from '../types' @@ -14,8 +14,6 @@ import { import ConfigRetrievalContent from '@/app/components/app/configuration/dataset-config/params-config/config-content' import { RETRIEVE_TYPE } from '@/types/app' import { DATASET_DEFAULT } from '@/config' -import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks' -import { ModelTypeEnum } from 
'@/app/components/header/account-setting/model-provider-page/declarations' import Button from '@/app/components/base/button' import type { DatasetConfigs } from '@/models/debug' import type { DataSet } from '@/models/datasets' @@ -32,8 +30,8 @@ type Props = { onSingleRetrievalModelChange?: (config: ModelConfig) => void onSingleRetrievalModelParamsChange?: (config: ModelConfig) => void readonly?: boolean - openFromProps?: boolean - onOpenFromPropsChange?: (openFromProps: boolean) => void + rerankModalOpen: boolean + onRerankModelOpenChange: (open: boolean) => void selectedDatasets: DataSet[] } @@ -45,26 +43,52 @@ const RetrievalConfig: FC<Props> = ({ onSingleRetrievalModelChange, onSingleRetrievalModelParamsChange, readonly, - openFromProps, - onOpenFromPropsChange, + rerankModalOpen, + onRerankModelOpenChange, selectedDatasets, }) => { const { t } = useTranslation() - const [open, setOpen] = useState(false) - const mergedOpen = openFromProps !== undefined ? openFromProps : open + const { retrieval_mode, multiple_retrieval_config } = payload const handleOpen = useCallback((newOpen: boolean) => { - setOpen(newOpen) - onOpenFromPropsChange?.(newOpen) - }, [onOpenFromPropsChange]) + onRerankModelOpenChange(newOpen) + }, [onRerankModelOpenChange]) - const { - currentProvider: validRerankDefaultProvider, - currentModel: validRerankDefaultModel, - } = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank) + const datasetConfigs = useMemo(() => { + const { + reranking_model, + top_k, + score_threshold, + reranking_mode, + weights, + reranking_enable, + } = multiple_retrieval_config || {} + + return { + retrieval_model: retrieval_mode, + reranking_model: (reranking_model?.provider && reranking_model?.model) + ? { + reranking_provider_name: reranking_model?.provider, + reranking_model_name: reranking_model?.model, + } + : { + reranking_provider_name: '', + reranking_model_name: '', + }, + top_k: top_k || DATASET_DEFAULT.top_k, + score_threshold_enabled: !(score_threshold === undefined || score_threshold === null), + score_threshold, + datasets: { + datasets: [], + }, + reranking_mode, + weights, + reranking_enable, + } + }, [retrieval_mode, multiple_retrieval_config]) - const { multiple_retrieval_config } = payload const handleChange = useCallback((configs: DatasetConfigs, isRetrievalModeChange?: boolean) => { + // Legacy code, for compatibility, have to keep it if (isRetrievalModeChange) { onRetrievalModeChange(configs.retrieval_model) return @@ -72,13 +96,11 @@ const RetrievalConfig: FC<Props> = ({ onMultipleRetrievalConfigChange({ top_k: configs.top_k, score_threshold: configs.score_threshold_enabled ? (configs.score_threshold ?? DATASET_DEFAULT.score_threshold) : null, - reranking_model: payload.retrieval_mode === RETRIEVE_TYPE.oneWay + reranking_model: retrieval_mode === RETRIEVE_TYPE.oneWay ? undefined + // eslint-disable-next-line sonarjs/no-nested-conditional : (!configs.reranking_model?.reranking_provider_name - ? { - provider: validRerankDefaultProvider?.provider || '', - model: validRerankDefaultModel?.model || '', - } + ? 
undefined : { provider: configs.reranking_model?.reranking_provider_name, model: configs.reranking_model?.reranking_model_name, @@ -87,11 +109,11 @@ const RetrievalConfig: FC<Props> = ({ weights: configs.weights, reranking_enable: configs.reranking_enable, }) - }, [onMultipleRetrievalConfigChange, payload.retrieval_mode, validRerankDefaultProvider, validRerankDefaultModel, onRetrievalModeChange]) + }, [onMultipleRetrievalConfigChange, retrieval_mode, onRetrievalModeChange]) return ( <PortalToFollowElem - open={mergedOpen} + open={rerankModalOpen} onOpenChange={handleOpen} placement='bottom-end' offset={{ @@ -102,14 +124,14 @@ const RetrievalConfig: FC<Props> = ({ onClick={() => { if (readonly) return - handleOpen(!mergedOpen) + handleOpen(!rerankModalOpen) }} > <Button variant='ghost' size='small' disabled={readonly} - className={cn(open && 'bg-components-button-ghost-bg-hover')} + className={cn(rerankModalOpen && 'bg-components-button-ghost-bg-hover')} > <RiEqualizer2Line className='mr-1 h-3.5 w-3.5' /> {t('dataset.retrievalSettings')} @@ -118,35 +140,13 @@ const RetrievalConfig: FC<Props> = ({ <PortalToFollowElemContent style={{ zIndex: 1001 }}> <div className='w-[404px] rounded-2xl border border-components-panel-border bg-components-panel-bg px-4 pb-4 pt-3 shadow-xl'> <ConfigRetrievalContent - datasetConfigs={ - { - retrieval_model: payload.retrieval_mode, - reranking_model: multiple_retrieval_config?.reranking_model?.provider - ? { - reranking_provider_name: multiple_retrieval_config.reranking_model?.provider, - reranking_model_name: multiple_retrieval_config.reranking_model?.model, - } - : { - reranking_provider_name: '', - reranking_model_name: '', - }, - top_k: multiple_retrieval_config?.top_k || DATASET_DEFAULT.top_k, - score_threshold_enabled: !(multiple_retrieval_config?.score_threshold === undefined || multiple_retrieval_config.score_threshold === null), - score_threshold: multiple_retrieval_config?.score_threshold, - datasets: { - datasets: [], - }, - reranking_mode: multiple_retrieval_config?.reranking_mode, - weights: multiple_retrieval_config?.weights, - reranking_enable: multiple_retrieval_config?.reranking_enable, - } - } + datasetConfigs={datasetConfigs} onChange={handleChange} + selectedDatasets={selectedDatasets} isInWorkflow singleRetrievalModelConfig={singleRetrievalModelConfig} onSingleRetrievalModelChange={onSingleRetrievalModelChange} onSingleRetrievalModelParamsChange={onSingleRetrievalModelParamsChange} - selectedDatasets={selectedDatasets} /> </div> </PortalToFollowElemContent> diff --git a/web/app/components/workflow/nodes/knowledge-retrieval/default.ts b/web/app/components/workflow/nodes/knowledge-retrieval/default.ts index 5b2cd737ed..44d26cf5cc 100644 --- a/web/app/components/workflow/nodes/knowledge-retrieval/default.ts +++ b/web/app/components/workflow/nodes/knowledge-retrieval/default.ts @@ -1,6 +1,6 @@ import type { NodeDefault } from '../../types' import type { KnowledgeRetrievalNodeType } from './types' -import { checkoutRerankModelConfigedInRetrievalSettings } from './utils' +import { checkoutRerankModelConfiguredInRetrievalSettings } from './utils' import { DATASET_DEFAULT } from '@/config' import { RETRIEVE_TYPE } from '@/types/app' import { genNodeMetaData } from '@/app/components/workflow/utils' @@ -36,7 +36,7 @@ const nodeDefault: NodeDefault<KnowledgeRetrievalNodeType> = { const { _datasets, multiple_retrieval_config, retrieval_mode } = payload if (retrieval_mode === RETRIEVE_TYPE.multiWay) { - const checked = 
checkoutRerankModelConfigedInRetrievalSettings(_datasets || [], multiple_retrieval_config) + const checked = checkoutRerankModelConfiguredInRetrievalSettings(_datasets || [], multiple_retrieval_config) if (!errorMessages && !checked) errorMessages = t(`${i18nPrefix}.errorMsg.fieldRequired`, { field: t(`${i18nPrefix}.errorMsg.fields.rerankModel`) }) diff --git a/web/app/components/workflow/nodes/knowledge-retrieval/panel.tsx b/web/app/components/workflow/nodes/knowledge-retrieval/panel.tsx index 267a0ef797..88f7cc1418 100644 --- a/web/app/components/workflow/nodes/knowledge-retrieval/panel.tsx +++ b/web/app/components/workflow/nodes/knowledge-retrieval/panel.tsx @@ -1,7 +1,6 @@ import type { FC } from 'react' import { memo, - useCallback, useMemo, } from 'react' import { intersectionBy } from 'lodash-es' @@ -53,10 +52,6 @@ const Panel: FC<NodePanelProps<KnowledgeRetrievalNodeType>> = ({ availableNumberNodesWithParent, } = useConfig(id, data) - const handleOpenFromPropsChange = useCallback((openFromProps: boolean) => { - setRerankModelOpen(openFromProps) - }, [setRerankModelOpen]) - const metadataList = useMemo(() => { return intersectionBy(...selectedDatasets.filter((dataset) => { return !!dataset.doc_metadata @@ -68,7 +63,6 @@ const Panel: FC<NodePanelProps<KnowledgeRetrievalNodeType>> = ({ return ( <div className='pt-2'> <div className='space-y-4 px-4 pb-2'> - {/* {JSON.stringify(inputs, null, 2)} */} <Field title={t(`${i18nPrefix}.queryVariable`)} required @@ -100,8 +94,8 @@ const Panel: FC<NodePanelProps<KnowledgeRetrievalNodeType>> = ({ onSingleRetrievalModelChange={handleModelChanged as any} onSingleRetrievalModelParamsChange={handleCompletionParamsChange} readonly={readOnly || !selectedDatasets.length} - openFromProps={rerankModelOpen} - onOpenFromPropsChange={handleOpenFromPropsChange} + rerankModalOpen={rerankModelOpen} + onRerankModelOpenChange={setRerankModelOpen} selectedDatasets={selectedDatasets} /> {!readOnly && (<div className='h-3 w-px bg-divider-regular'></div>)} diff --git a/web/app/components/workflow/nodes/knowledge-retrieval/use-config.ts b/web/app/components/workflow/nodes/knowledge-retrieval/use-config.ts index 47e376cdc6..8a1f2d8455 100644 --- a/web/app/components/workflow/nodes/knowledge-retrieval/use-config.ts +++ b/web/app/components/workflow/nodes/knowledge-retrieval/use-config.ts @@ -204,10 +204,11 @@ const useConfig = (id: string, payload: KnowledgeRetrievalNodeType) => { const handleMultipleRetrievalConfigChange = useCallback((newConfig: MultipleRetrievalConfig) => { const newInputs = produce(inputs, (draft) => { - draft.multiple_retrieval_config = getMultipleRetrievalConfig(newConfig!, selectedDatasets, selectedDatasets, { + const newMultipleRetrievalConfig = getMultipleRetrievalConfig(newConfig!, selectedDatasets, selectedDatasets, { provider: currentRerankProvider?.provider, model: currentRerankModel?.model, }) + draft.multiple_retrieval_config = newMultipleRetrievalConfig }) setInputs(newInputs) }, [inputs, setInputs, selectedDatasets, currentRerankModel, currentRerankProvider]) @@ -254,10 +255,11 @@ const useConfig = (id: string, payload: KnowledgeRetrievalNodeType) => { if (payload.retrieval_mode === RETRIEVE_TYPE.multiWay && newDatasets.length > 0) { const multipleRetrievalConfig = draft.multiple_retrieval_config - draft.multiple_retrieval_config = getMultipleRetrievalConfig(multipleRetrievalConfig!, newDatasets, selectedDatasets, { + const newMultipleRetrievalConfig = getMultipleRetrievalConfig(multipleRetrievalConfig!, newDatasets, selectedDatasets, 
{ provider: currentRerankProvider?.provider, model: currentRerankModel?.model, }) + draft.multiple_retrieval_config = newMultipleRetrievalConfig } }) updateDatasetsDetail(newDatasets) diff --git a/web/app/components/workflow/nodes/knowledge-retrieval/utils.ts b/web/app/components/workflow/nodes/knowledge-retrieval/utils.ts index 3821a7a052..719aa57f2f 100644 --- a/web/app/components/workflow/nodes/knowledge-retrieval/utils.ts +++ b/web/app/components/workflow/nodes/knowledge-retrieval/utils.ts @@ -10,6 +10,7 @@ import type { import { DEFAULT_WEIGHTED_SCORE, RerankingModeEnum, + WeightedScoreEnum, } from '@/models/datasets' import { RETRIEVE_METHOD } from '@/types/app' import { DATASET_DEFAULT } from '@/config' @@ -93,10 +94,12 @@ export const getMultipleRetrievalConfig = ( multipleRetrievalConfig: MultipleRetrievalConfig, selectedDatasets: DataSet[], originalDatasets: DataSet[], - validRerankModel?: { provider?: string; model?: string }, + fallbackRerankModel?: { provider?: string; model?: string }, // fallback rerank model ) => { - const shouldSetWeightDefaultValue = xorBy(selectedDatasets, originalDatasets, 'id').length > 0 - const rerankModelIsValid = validRerankModel?.provider && validRerankModel?.model + // Check if the selected datasets are different from the original datasets + const isDatasetsChanged = xorBy(selectedDatasets, originalDatasets, 'id').length > 0 + // Check if the rerank model is valid + const isFallbackRerankModelValid = !!(fallbackRerankModel?.provider && fallbackRerankModel?.model) const { allHighQuality, @@ -125,14 +128,16 @@ export const getMultipleRetrievalConfig = ( reranking_mode, reranking_model, weights, - reranking_enable: ((allInternal && allEconomic) || allExternal) ? reranking_enable : shouldSetWeightDefaultValue, + reranking_enable, } const setDefaultWeights = () => { result.weights = { + weight_type: WeightedScoreEnum.Customized, vector_setting: { vector_weight: allHighQualityVectorSearch ? DEFAULT_WEIGHTED_SCORE.allHighQualityVectorSearch.semantic + // eslint-disable-next-line sonarjs/no-nested-conditional : allHighQualityFullTextSearch ? DEFAULT_WEIGHTED_SCORE.allHighQualityFullTextSearch.semantic : DEFAULT_WEIGHTED_SCORE.other.semantic, @@ -142,6 +147,7 @@ export const getMultipleRetrievalConfig = ( keyword_setting: { keyword_weight: allHighQualityVectorSearch ? DEFAULT_WEIGHTED_SCORE.allHighQualityVectorSearch.keyword + // eslint-disable-next-line sonarjs/no-nested-conditional : allHighQualityFullTextSearch ? 
DEFAULT_WEIGHTED_SCORE.allHighQualityFullTextSearch.keyword : DEFAULT_WEIGHTED_SCORE.other.keyword, @@ -149,65 +155,106 @@ export const getMultipleRetrievalConfig = ( } } - if (allEconomic || mixtureHighQualityAndEconomic || inconsistentEmbeddingModel || allExternal || mixtureInternalAndExternal) { + /** + * In this case, user can manually toggle reranking + * So should keep the reranking_enable value + * But the default reranking_model should be set + */ + if ((allEconomic && allInternal) || allExternal) { result.reranking_mode = RerankingModeEnum.RerankingModel - if (!result.reranking_model?.provider || !result.reranking_model?.model) { - if (rerankModelIsValid) { - result.reranking_enable = reranking_enable !== false - - result.reranking_model = { - provider: validRerankModel?.provider || '', - model: validRerankModel?.model || '', - } - } - else { - result.reranking_model = { - provider: '', - model: '', - } + // Need to check if the reranking model should be set to default when first time initialized + if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) { + result.reranking_model = { + provider: fallbackRerankModel.provider || '', + model: fallbackRerankModel.model || '', } } - else { - result.reranking_enable = reranking_enable !== false - } + result.reranking_enable = reranking_enable } + /** + * In this case, reranking_enable must be true + * And if rerank model is not set, should set the default rerank model + */ + if (mixtureHighQualityAndEconomic || inconsistentEmbeddingModel || mixtureInternalAndExternal) { + result.reranking_mode = RerankingModeEnum.RerankingModel + // Need to check if the reranking model should be set to default when first time initialized + if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) { + result.reranking_model = { + provider: fallbackRerankModel.provider || '', + model: fallbackRerankModel.model || '', + } + } + result.reranking_enable = true + } + + /** + * In this case, user can choose to use weighted score or rerank model + * But if the reranking_mode is not initialized, should set the default rerank model and reranking_enable to true + * and set reranking_mode to reranking_model + */ if (allHighQuality && !inconsistentEmbeddingModel && allInternal) { + // If not initialized, check if the default rerank model is valid if (!reranking_mode) { - if (validRerankModel?.provider && validRerankModel?.model) { + if (isFallbackRerankModelValid) { result.reranking_mode = RerankingModeEnum.RerankingModel - result.reranking_enable = reranking_enable !== false + result.reranking_enable = true result.reranking_model = { - provider: validRerankModel.provider, - model: validRerankModel.model, + provider: fallbackRerankModel.provider || '', + model: fallbackRerankModel.model || '', } } else { result.reranking_mode = RerankingModeEnum.WeightedScore + result.reranking_enable = false setDefaultWeights() } } - if (reranking_mode === RerankingModeEnum.WeightedScore && !weights) - setDefaultWeights() - - if (reranking_mode === RerankingModeEnum.WeightedScore && weights && shouldSetWeightDefaultValue) { - if (rerankModelIsValid) { - result.reranking_mode = RerankingModeEnum.RerankingModel - result.reranking_enable = reranking_enable !== false - + // After initialization, if datasets has no change, make sure the config has correct value + if (reranking_mode === RerankingModeEnum.WeightedScore) { + result.reranking_enable = false + if (!weights) + setDefaultWeights() + 
} + if (reranking_mode === RerankingModeEnum.RerankingModel) { + if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) { result.reranking_model = { - provider: validRerankModel.provider || '', - model: validRerankModel.model || '', + provider: fallbackRerankModel.provider || '', + model: fallbackRerankModel.model || '', + } + } + result.reranking_enable = true + } + + // Need to check if reranking_mode should be set to reranking_model when datasets changed + if (reranking_mode === RerankingModeEnum.WeightedScore && weights && isDatasetsChanged) { + if ((result.reranking_model?.provider && result.reranking_model?.model) || isFallbackRerankModelValid) { + result.reranking_mode = RerankingModeEnum.RerankingModel + result.reranking_enable = true + + // eslint-disable-next-line sonarjs/nested-control-flow + if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) { + result.reranking_model = { + provider: fallbackRerankModel.provider || '', + model: fallbackRerankModel.model || '', + } } } else { setDefaultWeights() } } - if (reranking_mode === RerankingModeEnum.RerankingModel && !rerankModelIsValid && shouldSetWeightDefaultValue) { + // Need to switch to weighted score when reranking model is not valid and datasets changed + if ( + reranking_mode === RerankingModeEnum.RerankingModel + && (!result.reranking_model?.provider || !result.reranking_model?.model) + && !isFallbackRerankModelValid + && isDatasetsChanged + ) { result.reranking_mode = RerankingModeEnum.WeightedScore + result.reranking_enable = false setDefaultWeights() } } @@ -215,7 +262,7 @@ export const getMultipleRetrievalConfig = ( return result } -export const checkoutRerankModelConfigedInRetrievalSettings = ( +export const checkoutRerankModelConfiguredInRetrievalSettings = ( datasets: DataSet[], multipleRetrievalConfig?: MultipleRetrievalConfig, ) => { @@ -225,6 +272,7 @@ export const checkoutRerankModelConfigedInRetrievalSettings = ( const { allEconomic, allExternal, + allInternal, } = getSelectedDatasetsMode(datasets) const { @@ -233,12 +281,8 @@ export const checkoutRerankModelConfigedInRetrievalSettings = ( reranking_model, } = multipleRetrievalConfig - if (reranking_mode === RerankingModeEnum.RerankingModel && (!reranking_model?.provider || !reranking_model?.model)) { - if ((allEconomic || allExternal) && !reranking_enable) - return true - - return false - } + if (reranking_mode === RerankingModeEnum.RerankingModel && (!reranking_model?.provider || !reranking_model?.model)) + return ((allEconomic && allInternal) || allExternal) && !reranking_enable return true } From d77c2e4d1728a966690a754835810223c5e953f0 Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Mon, 29 Sep 2025 16:21:07 +0900 Subject: [PATCH 081/126] Fix typing errors in dataset API (#26424) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../service_api/dataset/dataset.py | 66 +++++++++++-------- .../service_api/dataset/document.py | 31 +++++---- .../service_api/dataset/metadata.py | 2 +- api/pyrightconfig.json | 1 - 4 files changed, 56 insertions(+), 44 deletions(-) diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index 6a70345f7c..72ab05cec0 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ 
b/api/controllers/service_api/dataset/dataset.py @@ -1,10 +1,10 @@ -from typing import Literal +from typing import Any, Literal, cast from flask import request from flask_restx import marshal, reqparse from werkzeug.exceptions import Forbidden, NotFound -import services.dataset_service +import services from controllers.service_api import service_api_ns from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError, InvalidActionError from controllers.service_api.wraps import ( @@ -254,19 +254,21 @@ class DatasetListApi(DatasetApiResource): """Resource for creating datasets.""" args = dataset_create_parser.parse_args() - if args.get("embedding_model_provider"): - DatasetService.check_embedding_model_setting( - tenant_id, args.get("embedding_model_provider"), args.get("embedding_model") - ) + embedding_model_provider = args.get("embedding_model_provider") + embedding_model = args.get("embedding_model") + if embedding_model_provider and embedding_model: + DatasetService.check_embedding_model_setting(tenant_id, embedding_model_provider, embedding_model) + + retrieval_model = args.get("retrieval_model") if ( - args.get("retrieval_model") - and args.get("retrieval_model").get("reranking_model") - and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + retrieval_model + and retrieval_model.get("reranking_model") + and retrieval_model.get("reranking_model").get("reranking_provider_name") ): DatasetService.check_reranking_model_setting( tenant_id, - args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), - args.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + retrieval_model.get("reranking_model").get("reranking_provider_name"), + retrieval_model.get("reranking_model").get("reranking_model_name"), ) try: @@ -317,7 +319,7 @@ class DatasetApi(DatasetApiResource): DatasetService.check_dataset_permission(dataset, current_user) except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) - data = marshal(dataset, dataset_detail_fields) + data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields)) # check embedding setting provider_manager = ProviderManager() assert isinstance(current_user, Account) @@ -331,8 +333,8 @@ class DatasetApi(DatasetApiResource): for embedding_model in embedding_models: model_names.append(f"{embedding_model.model}:{embedding_model.provider.provider}") - if data["indexing_technique"] == "high_quality": - item_model = f"{data['embedding_model']}:{data['embedding_model_provider']}" + if data.get("indexing_technique") == "high_quality": + item_model = f"{data.get('embedding_model')}:{data.get('embedding_model_provider')}" if item_model in model_names: data["embedding_available"] = True else: @@ -341,7 +343,9 @@ class DatasetApi(DatasetApiResource): data["embedding_available"] = True # force update search method to keyword_search if indexing_technique is economic - data["retrieval_model_dict"]["search_method"] = "keyword_search" + retrieval_model_dict = data.get("retrieval_model_dict") + if retrieval_model_dict: + retrieval_model_dict["search_method"] = "keyword_search" if data.get("permission") == "partial_members": part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str) @@ -372,19 +376,24 @@ class DatasetApi(DatasetApiResource): data = request.get_json() # check embedding model setting - if data.get("indexing_technique") == "high_quality" or data.get("embedding_model_provider"): - 
DatasetService.check_embedding_model_setting( - dataset.tenant_id, data.get("embedding_model_provider"), data.get("embedding_model") - ) + embedding_model_provider = data.get("embedding_model_provider") + embedding_model = data.get("embedding_model") + if data.get("indexing_technique") == "high_quality" or embedding_model_provider: + if embedding_model_provider and embedding_model: + DatasetService.check_embedding_model_setting( + dataset.tenant_id, embedding_model_provider, embedding_model + ) + + retrieval_model = data.get("retrieval_model") if ( - data.get("retrieval_model") - and data.get("retrieval_model").get("reranking_model") - and data.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + retrieval_model + and retrieval_model.get("reranking_model") + and retrieval_model.get("reranking_model").get("reranking_provider_name") ): DatasetService.check_reranking_model_setting( dataset.tenant_id, - data.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), - data.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + retrieval_model.get("reranking_model").get("reranking_provider_name"), + retrieval_model.get("reranking_model").get("reranking_model_name"), ) # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator @@ -397,7 +406,7 @@ class DatasetApi(DatasetApiResource): if dataset is None: raise NotFound("Dataset not found.") - result_data = marshal(dataset, dataset_detail_fields) + result_data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields)) assert isinstance(current_user, Account) tenant_id = current_user.current_tenant_id @@ -591,9 +600,10 @@ class DatasetTagsApi(DatasetApiResource): args = tag_update_parser.parse_args() args["type"] = "knowledge" - tag = TagService.update_tags(args, args.get("tag_id")) + tag_id = args["tag_id"] + tag = TagService.update_tags(args, tag_id) - binding_count = TagService.get_tag_binding_count(args.get("tag_id")) + binding_count = TagService.get_tag_binding_count(tag_id) response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": binding_count} @@ -616,7 +626,7 @@ class DatasetTagsApi(DatasetApiResource): if not current_user.has_edit_permission: raise Forbidden() args = tag_delete_parser.parse_args() - TagService.delete_tag(args.get("tag_id")) + TagService.delete_tag(args["tag_id"]) return 204 diff --git a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py index e01bc8940c..c1122acd7b 100644 --- a/api/controllers/service_api/dataset/document.py +++ b/api/controllers/service_api/dataset/document.py @@ -108,19 +108,21 @@ class DocumentAddByTextApi(DatasetApiResource): if text is None or name is None: raise ValueError("Both 'text' and 'name' must be non-null values.") - if args.get("embedding_model_provider"): - DatasetService.check_embedding_model_setting( - tenant_id, args.get("embedding_model_provider"), args.get("embedding_model") - ) + embedding_model_provider = args.get("embedding_model_provider") + embedding_model = args.get("embedding_model") + if embedding_model_provider and embedding_model: + DatasetService.check_embedding_model_setting(tenant_id, embedding_model_provider, embedding_model) + + retrieval_model = args.get("retrieval_model") if ( - args.get("retrieval_model") - and args.get("retrieval_model").get("reranking_model") - and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + retrieval_model + and 
retrieval_model.get("reranking_model") + and retrieval_model.get("reranking_model").get("reranking_provider_name") ): DatasetService.check_reranking_model_setting( tenant_id, - args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), - args.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + retrieval_model.get("reranking_model").get("reranking_provider_name"), + retrieval_model.get("reranking_model").get("reranking_model_name"), ) if not current_user: @@ -187,15 +189,16 @@ class DocumentUpdateByTextApi(DatasetApiResource): if not dataset: raise ValueError("Dataset does not exist.") + retrieval_model = args.get("retrieval_model") if ( - args.get("retrieval_model") - and args.get("retrieval_model").get("reranking_model") - and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + retrieval_model + and retrieval_model.get("reranking_model") + and retrieval_model.get("reranking_model").get("reranking_provider_name") ): DatasetService.check_reranking_model_setting( tenant_id, - args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), - args.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + retrieval_model.get("reranking_model").get("reranking_provider_name"), + retrieval_model.get("reranking_model").get("reranking_model_name"), ) # indexing_technique is already set in dataset since this is an update diff --git a/api/controllers/service_api/dataset/metadata.py b/api/controllers/service_api/dataset/metadata.py index c6032048e6..e01659dc68 100644 --- a/api/controllers/service_api/dataset/metadata.py +++ b/api/controllers/service_api/dataset/metadata.py @@ -106,7 +106,7 @@ class DatasetMetadataServiceApi(DatasetApiResource): raise NotFound("Dataset not found.") DatasetService.check_dataset_permission(dataset, current_user) - metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name")) + metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args["name"]) return marshal(metadata, dataset_metadata_fields), 200 @service_api_ns.doc("delete_dataset_metadata") diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json index 61ed3ac3b4..f3d56cfbee 100644 --- a/api/pyrightconfig.json +++ b/api/pyrightconfig.json @@ -8,7 +8,6 @@ "extensions", "libs", "controllers/console/datasets", - "controllers/service_api/dataset", "core/ops", "core/tools", "core/model_runtime", From c43c72c1a318ca79626573dde1fe2c9b1c2dfa83 Mon Sep 17 00:00:00 2001 From: Wu Tianwei <30284043+WTW0313@users.noreply.github.com> Date: Mon, 29 Sep 2025 16:12:26 +0800 Subject: [PATCH 082/126] fix: Fix vector_setting not found error (#26380) --- .../install-from-marketplace.tsx | 2 +- .../knowledge-base/components/option-card.tsx | 5 +- .../workflow/nodes/knowledge-base/default.ts | 33 ++++++- .../nodes/knowledge-base/hooks/use-config.ts | 85 +++++++++++++++---- web/i18n/en-US/common.ts | 1 + web/i18n/en-US/workflow.ts | 3 + web/i18n/ja-JP/common.ts | 1 + web/i18n/zh-Hans/common.ts | 1 + web/i18n/zh-Hans/workflow.ts | 3 + 9 files changed, 116 insertions(+), 18 deletions(-) diff --git a/web/app/components/header/account-setting/data-source-page-new/install-from-marketplace.tsx b/web/app/components/header/account-setting/data-source-page-new/install-from-marketplace.tsx index 4c0de924d1..f4f7749f7f 100644 --- a/web/app/components/header/account-setting/data-source-page-new/install-from-marketplace.tsx +++ 
b/web/app/components/header/account-setting/data-source-page-new/install-from-marketplace.tsx @@ -52,7 +52,7 @@ const InstallFromMarketplace = ({ <div className='flex items-center justify-between'> <div className='system-md-semibold flex cursor-pointer items-center gap-1 text-text-primary' onClick={() => setCollapse(!collapse)}> <RiArrowDownSLine className={cn('h-4 w-4', collapse && '-rotate-90')} /> - {t('common.modelProvider.installProvider')} + {t('common.modelProvider.installDataSourceProvider')} </div> <div className='mb-2 flex items-center pt-2'> <span className='system-sm-regular pr-1 text-text-tertiary'>{t('common.modelProvider.discoverMore')}</span> diff --git a/web/app/components/workflow/nodes/knowledge-base/components/option-card.tsx b/web/app/components/workflow/nodes/knowledge-base/components/option-card.tsx index c15157fc5c..789e24835f 100644 --- a/web/app/components/workflow/nodes/knowledge-base/components/option-card.tsx +++ b/web/app/components/workflow/nodes/knowledge-base/components/option-card.tsx @@ -86,7 +86,10 @@ const OptionCard = memo(({ readonly && 'cursor-not-allowed', wrapperClassName && (typeof wrapperClassName === 'function' ? wrapperClassName(isActive) : wrapperClassName), )} - onClick={() => !readonly && enableSelect && id && onClick?.(id)} + onClick={(e) => { + e.stopPropagation() + !readonly && enableSelect && id && onClick?.(id) + }} > <div className={cn( 'relative flex rounded-t-xl p-2', diff --git a/web/app/components/workflow/nodes/knowledge-base/default.ts b/web/app/components/workflow/nodes/knowledge-base/default.ts index 8175e2ac9e..190addde4d 100644 --- a/web/app/components/workflow/nodes/knowledge-base/default.ts +++ b/web/app/components/workflow/nodes/knowledge-base/default.ts @@ -2,6 +2,7 @@ import type { NodeDefault } from '../../types' import type { KnowledgeBaseNodeType } from './types' import { genNodeMetaData } from '@/app/components/workflow/utils' import { BlockEnum } from '@/app/components/workflow/types' +import { IndexingType } from '@/app/components/datasets/create/step-two' const metaData = genNodeMetaData({ sort: 3.1, @@ -27,8 +28,17 @@ const nodeDefault: NodeDefault<KnowledgeBaseNodeType> = { chunk_structure, indexing_technique, retrieval_model, + embedding_model, + embedding_model_provider, + index_chunk_variable_selector, } = payload + const { + search_method, + reranking_enable, + reranking_model, + } = retrieval_model || {} + if (!chunk_structure) { return { isValid: false, @@ -36,6 +46,13 @@ const nodeDefault: NodeDefault<KnowledgeBaseNodeType> = { } } + if (index_chunk_variable_selector.length === 0) { + return { + isValid: false, + errorMessage: t('workflow.nodes.knowledgeBase.chunksVariableIsRequired'), + } + } + if (!indexing_technique) { return { isValid: false, @@ -43,13 +60,27 @@ const nodeDefault: NodeDefault<KnowledgeBaseNodeType> = { } } - if (!retrieval_model || !retrieval_model.search_method) { + if (indexing_technique === IndexingType.QUALIFIED && (!embedding_model || !embedding_model_provider)) { + return { + isValid: false, + errorMessage: t('workflow.nodes.knowledgeBase.embeddingModelIsRequired'), + } + } + + if (!retrieval_model || !search_method) { return { isValid: false, errorMessage: t('workflow.nodes.knowledgeBase.retrievalSettingIsRequired'), } } + if (reranking_enable && (!reranking_model || !reranking_model.reranking_provider_name || !reranking_model.reranking_model_name)) { + return { + isValid: false, + errorMessage: t('workflow.nodes.knowledgeBase.rerankingModelIsRequired'), + } + } + return { 
isValid: true, errorMessage: '', diff --git a/web/app/components/workflow/nodes/knowledge-base/hooks/use-config.ts b/web/app/components/workflow/nodes/knowledge-base/hooks/use-config.ts index 365722feba..8b22704c5a 100644 --- a/web/app/components/workflow/nodes/knowledge-base/hooks/use-config.ts +++ b/web/app/components/workflow/nodes/knowledge-base/hooks/use-config.ts @@ -9,13 +9,17 @@ import { ChunkStructureEnum, IndexMethodEnum, RetrievalSearchMethodEnum, + WeightedScoreEnum, } from '../types' import type { - HybridSearchModeEnum, KnowledgeBaseNodeType, RerankingModel, } from '../types' +import { + HybridSearchModeEnum, +} from '../types' import { isHighQualitySearchMethod } from '../utils' +import { DEFAULT_WEIGHTED_SCORE, RerankingModeEnum } from '@/models/datasets' export const useConfig = (id: string) => { const store = useStoreApi() @@ -35,6 +39,25 @@ export const useConfig = (id: string) => { }) }, [id, handleNodeDataUpdateWithSyncDraft]) + const getDefaultWeights = useCallback(({ + embeddingModel, + embeddingModelProvider, + }: { + embeddingModel: string + embeddingModelProvider: string + }) => { + return { + vector_setting: { + vector_weight: DEFAULT_WEIGHTED_SCORE.other.semantic, + embedding_provider_name: embeddingModelProvider || '', + embedding_model_name: embeddingModel, + }, + keyword_setting: { + keyword_weight: DEFAULT_WEIGHTED_SCORE.other.keyword, + }, + } + }, []) + const handleChunkStructureChange = useCallback((chunkStructure: ChunkStructureEnum) => { const nodeData = getNodeData() const { @@ -80,39 +103,72 @@ export const useConfig = (id: string) => { embeddingModelProvider: string }) => { const nodeData = getNodeData() - handleNodeDataUpdate({ + const defaultWeights = getDefaultWeights({ + embeddingModel, + embeddingModelProvider, + }) + const changeData = { embedding_model: embeddingModel, embedding_model_provider: embeddingModelProvider, retrieval_model: { ...nodeData?.data.retrieval_model, - vector_setting: { - ...nodeData?.data.retrieval_model.vector_setting, - embedding_provider_name: embeddingModelProvider, - embedding_model_name: embeddingModel, - }, }, - }) - }, [getNodeData, handleNodeDataUpdate]) + } + if (changeData.retrieval_model.weights) { + changeData.retrieval_model = { + ...changeData.retrieval_model, + weights: { + ...changeData.retrieval_model.weights, + vector_setting: { + ...changeData.retrieval_model.weights.vector_setting, + embedding_provider_name: embeddingModelProvider, + embedding_model_name: embeddingModel, + }, + }, + } + } + else { + changeData.retrieval_model = { + ...changeData.retrieval_model, + weights: defaultWeights, + } + } + handleNodeDataUpdate(changeData) + }, [getNodeData, getDefaultWeights, handleNodeDataUpdate]) const handleRetrievalSearchMethodChange = useCallback((searchMethod: RetrievalSearchMethodEnum) => { const nodeData = getNodeData() - handleNodeDataUpdate({ + const changeData = { retrieval_model: { ...nodeData?.data.retrieval_model, search_method: searchMethod, + reranking_mode: nodeData?.data.retrieval_model.reranking_mode || RerankingModeEnum.RerankingModel, }, - }) + } + if (searchMethod === RetrievalSearchMethodEnum.hybrid) { + changeData.retrieval_model = { + ...changeData.retrieval_model, + reranking_enable: changeData.retrieval_model.reranking_mode === RerankingModeEnum.RerankingModel, + } + } + handleNodeDataUpdate(changeData) }, [getNodeData, handleNodeDataUpdate]) const handleHybridSearchModeChange = useCallback((hybridSearchMode: HybridSearchModeEnum) => { const nodeData = getNodeData() + const 
defaultWeights = getDefaultWeights({ + embeddingModel: nodeData?.data.embedding_model || '', + embeddingModelProvider: nodeData?.data.embedding_model_provider || '', + }) handleNodeDataUpdate({ retrieval_model: { ...nodeData?.data.retrieval_model, reranking_mode: hybridSearchMode, + reranking_enable: hybridSearchMode === HybridSearchModeEnum.RerankingModel, + weights: nodeData?.data.retrieval_model.weights || defaultWeights, }, }) - }, [getNodeData, handleNodeDataUpdate]) + }, [getNodeData, getDefaultWeights, handleNodeDataUpdate]) const handleRerankingModelEnabledChange = useCallback((rerankingModelEnabled: boolean) => { const nodeData = getNodeData() @@ -130,11 +186,10 @@ export const useConfig = (id: string) => { retrieval_model: { ...nodeData?.data.retrieval_model, weights: { - weight_type: 'weighted_score', + weight_type: WeightedScoreEnum.Customized, vector_setting: { + ...nodeData?.data.retrieval_model.weights?.vector_setting, vector_weight: weightedScore.value[0], - embedding_provider_name: '', - embedding_model_name: '', }, keyword_setting: { keyword_weight: weightedScore.value[1], diff --git a/web/i18n/en-US/common.ts b/web/i18n/en-US/common.ts index ef6bf73ab5..b9d315388f 100644 --- a/web/i18n/en-US/common.ts +++ b/web/i18n/en-US/common.ts @@ -493,6 +493,7 @@ const translation = { toBeConfigured: 'To be configured', configureTip: 'Set up api-key or add model to use', installProvider: 'Install model providers', + installDataSourceProvider: 'Install data source providers', discoverMore: 'Discover more in ', emptyProviderTitle: 'Model provider not set up', emptyProviderTip: 'Please install a model provider first.', diff --git a/web/i18n/en-US/workflow.ts b/web/i18n/en-US/workflow.ts index 6b6a25e7af..3f1654b2e7 100644 --- a/web/i18n/en-US/workflow.ts +++ b/web/i18n/en-US/workflow.ts @@ -955,7 +955,10 @@ const translation = { aboutRetrieval: 'about retrieval method.', chunkIsRequired: 'Chunk structure is required', indexMethodIsRequired: 'Index method is required', + chunksVariableIsRequired: 'Chunks variable is required', + embeddingModelIsRequired: 'Embedding model is required', retrievalSettingIsRequired: 'Retrieval setting is required', + rerankingModelIsRequired: 'Reranking model is required', }, }, tracing: { diff --git a/web/i18n/ja-JP/common.ts b/web/i18n/ja-JP/common.ts index 7052b6f4ab..5526ac0441 100644 --- a/web/i18n/ja-JP/common.ts +++ b/web/i18n/ja-JP/common.ts @@ -484,6 +484,7 @@ const translation = { emptyProviderTitle: 'モデルプロバイダーが設定されていません', discoverMore: 'もっと発見する', installProvider: 'モデルプロバイダーをインストールする', + installDataSourceProvider: 'データソースプロバイダーをインストールする', configureTip: 'API キーを設定するか、使用するモデルを追加してください', toBeConfigured: '設定中', emptyProviderTip: '最初にモデルプロバイダーをインストールしてください。', diff --git a/web/i18n/zh-Hans/common.ts b/web/i18n/zh-Hans/common.ts index bcefb09e3e..0ecdb20d5e 100644 --- a/web/i18n/zh-Hans/common.ts +++ b/web/i18n/zh-Hans/common.ts @@ -487,6 +487,7 @@ const translation = { toBeConfigured: '待配置', configureTip: '请配置 API 密钥,添加模型。', installProvider: '安装模型供应商', + installDataSourceProvider: '安装数据源供应商', discoverMore: '发现更多就在', emptyProviderTitle: '尚未安装模型供应商', emptyProviderTip: '请安装模型供应商。', diff --git a/web/i18n/zh-Hans/workflow.ts b/web/i18n/zh-Hans/workflow.ts index 0bf078c085..77b7fe3597 100644 --- a/web/i18n/zh-Hans/workflow.ts +++ b/web/i18n/zh-Hans/workflow.ts @@ -955,7 +955,10 @@ const translation = { aboutRetrieval: '关于知识检索。', chunkIsRequired: '分段结构是必需的', indexMethodIsRequired: '索引方法是必需的', + chunksVariableIsRequired: 'Chunks 变量是必需的', + 
embeddingModelIsRequired: 'Embedding 模型是必需的', retrievalSettingIsRequired: '检索设置是必需的', + rerankingModelIsRequired: 'Reranking 模型是必需的', }, }, tracing: { From fd86cadf679bd9b48d151b41af436d1dc2a015dc Mon Sep 17 00:00:00 2001 From: crazywoola <100913391+crazywoola@users.noreply.github.com> Date: Mon, 29 Sep 2025 16:13:38 +0800 Subject: [PATCH 083/126] fix: debounce ref (#26433) --- web/app/components/base/chat/chat/index.tsx | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/web/app/components/base/chat/chat/index.tsx b/web/app/components/base/chat/chat/index.tsx index bee37cf2cd..a362f4dc99 100644 --- a/web/app/components/base/chat/chat/index.tsx +++ b/web/app/components/base/chat/chat/index.tsx @@ -160,8 +160,13 @@ const Chat: FC<ChatProps> = ({ }) useEffect(() => { - window.addEventListener('resize', debounce(handleWindowResize)) - return () => window.removeEventListener('resize', handleWindowResize) + const debouncedHandler = debounce(handleWindowResize, 200) + window.addEventListener('resize', debouncedHandler) + + return () => { + window.removeEventListener('resize', debouncedHandler) + debouncedHandler.cancel() + } }, [handleWindowResize]) useEffect(() => { From 6462328620f79aa0b7ca70e404932ac06b59d09c Mon Sep 17 00:00:00 2001 From: sqewad <lx19910131@gmail.com> Date: Mon, 29 Sep 2025 18:15:05 +0800 Subject: [PATCH 084/126] fix: variable not found #26144 (#26155) Signed-off-by: -LAN- <laipz8200@outlook.com> Co-authored-by: lix43 <lix43@chinatelecom.cn> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: -LAN- <laipz8200@outlook.com> --- api/core/workflow/entities/variable_pool.py | 19 ++- api/factories/variable_factory.py | 2 + .../workflow/entities/test_variable_pool.py | 113 ++++++++++++++++++ 3 files changed, 130 insertions(+), 4 deletions(-) create mode 100644 api/tests/unit_tests/core/workflow/entities/test_variable_pool.py diff --git a/api/core/workflow/entities/variable_pool.py b/api/core/workflow/entities/variable_pool.py index 8ceabde7e6..2dc00fd70b 100644 --- a/api/core/workflow/entities/variable_pool.py +++ b/api/core/workflow/entities/variable_pool.py @@ -184,11 +184,22 @@ class VariablePool(BaseModel): """Extract the actual value from an ObjectSegment.""" return obj.value if isinstance(obj, ObjectSegment) else obj - def _get_nested_attribute(self, obj: Mapping[str, Any], attr: str): - """Get a nested attribute from a dictionary-like object.""" - if not isinstance(obj, dict): + def _get_nested_attribute(self, obj: Mapping[str, Any], attr: str) -> Segment | None: + """ + Get a nested attribute from a dictionary-like object. + + Args: + obj: The dictionary-like object to search. + attr: The key to look up. + + Returns: + Segment | None: + The corresponding Segment built from the attribute value if the key exists, + otherwise None. 
+ """ + if not isinstance(obj, dict) or attr not in obj: return None - return obj.get(attr) + return variable_factory.build_segment(obj.get(attr)) def remove(self, selector: Sequence[str], /): """ diff --git a/api/factories/variable_factory.py b/api/factories/variable_factory.py index 2104e66254..494194369a 100644 --- a/api/factories/variable_factory.py +++ b/api/factories/variable_factory.py @@ -142,6 +142,8 @@ def build_segment(value: Any, /) -> Segment: # below if value is None: return NoneSegment() + if isinstance(value, Segment): + return value if isinstance(value, str): return StringSegment(value=value) if isinstance(value, bool): diff --git a/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py b/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py new file mode 100644 index 0000000000..68fe82d05e --- /dev/null +++ b/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py @@ -0,0 +1,113 @@ +from core.variables.segments import ( + BooleanSegment, + IntegerSegment, + NoneSegment, + StringSegment, +) +from core.workflow.entities.variable_pool import VariablePool + + +class TestVariablePoolGetAndNestedAttribute: + # + # _get_nested_attribute tests + # + def test__get_nested_attribute_existing_key(self): + pool = VariablePool.empty() + obj = {"a": 123} + segment = pool._get_nested_attribute(obj, "a") + assert segment is not None + assert segment.value == 123 + + def test__get_nested_attribute_missing_key(self): + pool = VariablePool.empty() + obj = {"a": 123} + segment = pool._get_nested_attribute(obj, "b") + assert segment is None + + def test__get_nested_attribute_non_dict(self): + pool = VariablePool.empty() + obj = ["not", "a", "dict"] + segment = pool._get_nested_attribute(obj, "a") + assert segment is None + + def test__get_nested_attribute_with_none_value(self): + pool = VariablePool.empty() + obj = {"a": None} + segment = pool._get_nested_attribute(obj, "a") + assert segment is not None + assert isinstance(segment, NoneSegment) + + def test__get_nested_attribute_with_empty_string(self): + pool = VariablePool.empty() + obj = {"a": ""} + segment = pool._get_nested_attribute(obj, "a") + assert segment is not None + assert isinstance(segment, StringSegment) + assert segment.value == "" + + # + # get tests + # + def test_get_simple_variable(self): + pool = VariablePool.empty() + pool.add(("node1", "var1"), "value1") + segment = pool.get(("node1", "var1")) + assert segment is not None + assert segment.value == "value1" + + def test_get_missing_variable(self): + pool = VariablePool.empty() + result = pool.get(("node1", "unknown")) + assert result is None + + def test_get_with_too_short_selector(self): + pool = VariablePool.empty() + result = pool.get(("only_node",)) + assert result is None + + def test_get_nested_object_attribute(self): + pool = VariablePool.empty() + obj_value = {"inner": "hello"} + pool.add(("node1", "obj"), obj_value) + + # simulate selector with nested attr + segment = pool.get(("node1", "obj", "inner")) + assert segment is not None + assert segment.value == "hello" + + def test_get_nested_object_missing_attribute(self): + pool = VariablePool.empty() + obj_value = {"inner": "hello"} + pool.add(("node1", "obj"), obj_value) + + result = pool.get(("node1", "obj", "not_exist")) + assert result is None + + def test_get_nested_object_attribute_with_falsy_values(self): + pool = VariablePool.empty() + obj_value = { + "inner_none": None, + "inner_empty": "", + "inner_zero": 0, + "inner_false": False, + } + pool.add(("node1", "obj"), 
obj_value) + + segment_none = pool.get(("node1", "obj", "inner_none")) + assert segment_none is not None + assert isinstance(segment_none, NoneSegment) + + segment_empty = pool.get(("node1", "obj", "inner_empty")) + assert segment_empty is not None + assert isinstance(segment_empty, StringSegment) + assert segment_empty.value == "" + + segment_zero = pool.get(("node1", "obj", "inner_zero")) + assert segment_zero is not None + assert isinstance(segment_zero, IntegerSegment) + assert segment_zero.value == 0 + + segment_false = pool.get(("node1", "obj", "inner_false")) + assert segment_false is not None + assert isinstance(segment_false, BooleanSegment) + assert segment_false.value is False From b6cea710232943c96d18a078c903a0ff0567393c Mon Sep 17 00:00:00 2001 From: -LAN- <laipz8200@outlook.com> Date: Mon, 29 Sep 2025 18:15:22 +0800 Subject: [PATCH 085/126] fix(workflow): sync iteration conversation variables (#26368) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- .../nodes/iteration/iteration_node.py | 49 ++- ...ate-conversation-variable-in-iteration.yml | 316 ++++++++++++++++++ ..._update_conversation_variable_iteration.py | 41 +++ 3 files changed, 402 insertions(+), 4 deletions(-) create mode 100644 api/tests/fixtures/workflow/update-conversation-variable-in-iteration.yml create mode 100644 api/tests/unit_tests/core/workflow/graph_engine/test_update_conversation_variable_iteration.py diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index 1a417b5739..a05a6b1b96 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -10,6 +10,8 @@ from typing_extensions import TypeIs from core.variables import IntegerVariable, NoneSegment from core.variables.segments import ArrayAnySegment, ArraySegment +from core.variables.variables import VariableUnion +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID from core.workflow.entities import VariablePool from core.workflow.enums import ( ErrorStrategy, @@ -217,6 +219,13 @@ class IterationNode(Node): graph_engine=graph_engine, ) + # Sync conversation variables after each iteration completes + self._sync_conversation_variables_from_snapshot( + self._extract_conversation_variable_snapshot( + variable_pool=graph_engine.graph_runtime_state.variable_pool + ) + ) + # Update the total tokens from this iteration self.graph_runtime_state.total_tokens += graph_engine.graph_runtime_state.total_tokens iter_run_map[str(index)] = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds() @@ -235,7 +244,10 @@ class IterationNode(Node): with ThreadPoolExecutor(max_workers=max_workers) as executor: # Submit all iteration tasks - future_to_index: dict[Future[tuple[datetime, list[GraphNodeEventBase], object | None, int]], int] = {} + future_to_index: dict[ + Future[tuple[datetime, list[GraphNodeEventBase], object | None, int, dict[str, VariableUnion]]], + int, + ] = {} for index, item in enumerate(iterator_list_value): yield IterationNextEvent(index=index) future = executor.submit( @@ -252,7 +264,7 @@ class IterationNode(Node): index = future_to_index[future] try: result = future.result() - iter_start_at, events, output_value, tokens_used = result + iter_start_at, events, output_value, tokens_used, conversation_snapshot = result # Update outputs at the correct index 
outputs[index] = output_value
@@ -264,6 +276,9 @@
                         self.graph_runtime_state.total_tokens += tokens_used
                         iter_run_map[str(index)] = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds()
 
+                        # Sync conversation variables after iteration completion
+                        self._sync_conversation_variables_from_snapshot(conversation_snapshot)
+
                     except Exception as e:
                         # Handle errors based on error_handle_mode
                         match self._node_data.error_handle_mode:
@@ -288,7 +303,7 @@
         item: object,
         flask_app: Flask,
         context_vars: contextvars.Context,
-    ) -> tuple[datetime, list[GraphNodeEventBase], object | None, int]:
+    ) -> tuple[datetime, list[GraphNodeEventBase], object | None, int, dict[str, VariableUnion]]:
         """Execute a single iteration in parallel mode and return results."""
         with preserve_flask_contexts(flask_app=flask_app, context_vars=context_vars):
             iter_start_at = datetime.now(UTC).replace(tzinfo=None)
@@ -307,8 +322,17 @@
             # Get the output value from the temporary outputs list
             output_value = outputs_temp[0] if outputs_temp else None
+            conversation_snapshot = self._extract_conversation_variable_snapshot(
+                variable_pool=graph_engine.graph_runtime_state.variable_pool
+            )
 
-            return iter_start_at, events, output_value, graph_engine.graph_runtime_state.total_tokens
+            return (
+                iter_start_at,
+                events,
+                output_value,
+                graph_engine.graph_runtime_state.total_tokens,
+                conversation_snapshot,
+            )
 
     def _handle_iteration_success(
         self,
@@ -430,6 +454,23 @@
 
         return variable_mapping
 
+    def _extract_conversation_variable_snapshot(self, *, variable_pool: VariablePool) -> dict[str, VariableUnion]:
+        conversation_variables = variable_pool.variable_dictionary.get(CONVERSATION_VARIABLE_NODE_ID, {})
+        return {name: variable.model_copy(deep=True) for name, variable in conversation_variables.items()}
+
+    def _sync_conversation_variables_from_snapshot(self, snapshot: dict[str, VariableUnion]) -> None:
+        parent_pool = self.graph_runtime_state.variable_pool
+        parent_conversations = parent_pool.variable_dictionary.get(CONVERSATION_VARIABLE_NODE_ID, {})
+
+        current_keys = set(parent_conversations.keys())
+        snapshot_keys = set(snapshot.keys())
+
+        for removed_key in current_keys - snapshot_keys:
+            parent_pool.remove((CONVERSATION_VARIABLE_NODE_ID, removed_key))
+
+        for name, variable in snapshot.items():
+            parent_pool.add((CONVERSATION_VARIABLE_NODE_ID, name), variable)
+
     def _append_iteration_info_to_event(
         self,
         event: GraphNodeEventBase,
diff --git a/api/tests/fixtures/workflow/update-conversation-variable-in-iteration.yml b/api/tests/fixtures/workflow/update-conversation-variable-in-iteration.yml
new file mode 100644
index 0000000000..ffc6eb9120
--- /dev/null
+++ b/api/tests/fixtures/workflow/update-conversation-variable-in-iteration.yml
@@ -0,0 +1,316 @@
+app:
+  description: 'This chatflow receives a sys.query, writes it into the `answer` variable,
+    and then outputs the `answer` variable.
+
+
+    `answer` is a conversation variable with a blank default value; it will be updated
+    in an iteration node.
+
+
+    If this chatflow works correctly, it will output the `sys.query` unchanged.'
+ icon: 🤖 + icon_background: '#FFEAD5' + mode: advanced-chat + name: update-conversation-variable-in-iteration + use_icon_as_answer_icon: false +dependencies: [] +kind: app +version: 0.4.0 +workflow: + conversation_variables: + - description: '' + id: c30af82d-b2ec-417d-a861-4dd78584faa4 + name: answer + selector: + - conversation + - answer + value: '' + value_type: string + environment_variables: [] + features: + file_upload: + allowed_file_extensions: + - .JPG + - .JPEG + - .PNG + - .GIF + - .WEBP + - .SVG + allowed_file_types: + - image + allowed_file_upload_methods: + - local_file + - remote_url + enabled: false + fileUploadConfig: + audio_file_size_limit: 50 + batch_count_limit: 5 + file_size_limit: 15 + image_file_size_limit: 10 + video_file_size_limit: 100 + workflow_file_upload_limit: 10 + image: + enabled: false + number_limits: 3 + transfer_methods: + - local_file + - remote_url + number_limits: 3 + opening_statement: '' + retriever_resource: + enabled: true + sensitive_word_avoidance: + enabled: false + speech_to_text: + enabled: false + suggested_questions: [] + suggested_questions_after_answer: + enabled: false + text_to_speech: + enabled: false + language: '' + voice: '' + graph: + edges: + - data: + isInIteration: false + isInLoop: false + sourceType: start + targetType: code + id: 1759032354471-source-1759032363865-target + source: '1759032354471' + sourceHandle: source + target: '1759032363865' + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: false + isInLoop: false + sourceType: code + targetType: iteration + id: 1759032363865-source-1759032379989-target + source: '1759032363865' + sourceHandle: source + target: '1759032379989' + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: true + isInLoop: false + iteration_id: '1759032379989' + sourceType: iteration-start + targetType: assigner + id: 1759032379989start-source-1759032394460-target + source: 1759032379989start + sourceHandle: source + target: '1759032394460' + targetHandle: target + type: custom + zIndex: 1002 + - data: + isInIteration: false + isInLoop: false + sourceType: iteration + targetType: answer + id: 1759032379989-source-1759032410331-target + source: '1759032379989' + sourceHandle: source + target: '1759032410331' + targetHandle: target + type: custom + zIndex: 0 + - data: + isInIteration: true + isInLoop: false + iteration_id: '1759032379989' + sourceType: assigner + targetType: code + id: 1759032394460-source-1759032476318-target + source: '1759032394460' + sourceHandle: source + target: '1759032476318' + targetHandle: target + type: custom + zIndex: 1002 + nodes: + - data: + selected: false + title: Start + type: start + variables: [] + height: 52 + id: '1759032354471' + position: + x: 30 + y: 302 + positionAbsolute: + x: 30 + y: 302 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + code: "\ndef main():\n return {\n \"result\": [1],\n }\n" + code_language: python3 + outputs: + result: + children: null + type: array[number] + selected: false + title: Code + type: code + variables: [] + height: 52 + id: '1759032363865' + position: + x: 332 + y: 302 + positionAbsolute: + x: 332 + y: 302 + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + error_handle_mode: terminated + height: 204 + is_parallel: false + iterator_input_type: array[number] + iterator_selector: + - '1759032363865' + - result + output_selector: + - '1759032476318' + - result + output_type: 
array[string] + parallel_nums: 10 + selected: false + start_node_id: 1759032379989start + title: Iteration + type: iteration + width: 808 + height: 204 + id: '1759032379989' + position: + x: 634 + y: 302 + positionAbsolute: + x: 634 + y: 302 + selected: true + sourcePosition: right + targetPosition: left + type: custom + width: 808 + zIndex: 1 + - data: + desc: '' + isInIteration: true + selected: false + title: '' + type: iteration-start + draggable: false + height: 48 + id: 1759032379989start + parentId: '1759032379989' + position: + x: 60 + y: 78 + positionAbsolute: + x: 694 + y: 380 + selectable: false + sourcePosition: right + targetPosition: left + type: custom-iteration-start + width: 44 + zIndex: 1002 + - data: + isInIteration: true + isInLoop: false + items: + - input_type: variable + operation: over-write + value: + - sys + - query + variable_selector: + - conversation + - answer + write_mode: over-write + iteration_id: '1759032379989' + selected: false + title: Variable Assigner + type: assigner + version: '2' + height: 84 + id: '1759032394460' + parentId: '1759032379989' + position: + x: 204 + y: 60 + positionAbsolute: + x: 838 + y: 362 + sourcePosition: right + targetPosition: left + type: custom + width: 242 + zIndex: 1002 + - data: + answer: '{{#conversation.answer#}}' + selected: false + title: Answer + type: answer + variables: [] + height: 104 + id: '1759032410331' + position: + x: 1502 + y: 302 + positionAbsolute: + x: 1502 + y: 302 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 242 + - data: + code: "\ndef main():\n return {\n \"result\": '',\n }\n" + code_language: python3 + isInIteration: true + isInLoop: false + iteration_id: '1759032379989' + outputs: + result: + children: null + type: string + selected: false + title: Code 2 + type: code + variables: [] + height: 52 + id: '1759032476318' + parentId: '1759032379989' + position: + x: 506 + y: 76 + positionAbsolute: + x: 1140 + y: 378 + sourcePosition: right + targetPosition: left + type: custom + width: 242 + zIndex: 1002 + viewport: + x: 120.39999999999998 + y: 85.20000000000005 + zoom: 0.7 + rag_pipeline_variables: [] diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_update_conversation_variable_iteration.py b/api/tests/unit_tests/core/workflow/graph_engine/test_update_conversation_variable_iteration.py new file mode 100644 index 0000000000..a7309f64de --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_update_conversation_variable_iteration.py @@ -0,0 +1,41 @@ +"""Validate conversation variable updates inside an iteration workflow. + +This test uses the ``update-conversation-variable-in-iteration`` fixture, which +routes ``sys.query`` into the conversation variable ``answer`` from within an +iteration container. The workflow should surface that updated conversation +variable in the final answer output. + +Code nodes in the fixture are mocked because their concrete outputs are not +relevant to verifying variable propagation semantics. 
+""" + +from .test_mock_config import MockConfigBuilder +from .test_table_runner import TableTestRunner, WorkflowTestCase + + +def test_update_conversation_variable_in_iteration(): + fixture_name = "update-conversation-variable-in-iteration" + user_query = "ensure conversation variable syncs" + + mock_config = ( + MockConfigBuilder() + .with_node_output("1759032363865", {"result": [1]}) + .with_node_output("1759032476318", {"result": ""}) + .build() + ) + + case = WorkflowTestCase( + fixture_path=fixture_name, + use_auto_mock=True, + mock_config=mock_config, + query=user_query, + expected_outputs={"answer": user_query}, + description="Conversation variable updated within iteration should flow to answer output.", + ) + + runner = TableTestRunner() + result = runner.run_test_case(case) + + assert result.success, f"Workflow execution failed: {result.error}" + assert result.actual_outputs is not None + assert result.actual_outputs.get("answer") == user_query From b2bcb6d21a893b9ac725e866b3542d893c3456f9 Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Mon, 29 Sep 2025 20:22:38 +0900 Subject: [PATCH 086/126] Fix: Remove core/tools from pyrightconfig.json and fix type errors (#26413) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> --- .../dataset_retriever_base_tool.py | 4 ++ .../tools/utils/dataset_retriever_tool.py | 2 +- api/core/tools/utils/parser.py | 72 ++++++++++--------- api/pyrightconfig.json | 1 - 4 files changed, 43 insertions(+), 36 deletions(-) diff --git a/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py index ac2967d0c1..dd0b4bedcf 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py @@ -18,6 +18,10 @@ class DatasetRetrieverBaseTool(BaseModel, ABC): retriever_from: str model_config = ConfigDict(arbitrary_types_allowed=True) + def run(self, query: str) -> str: + """Use the tool.""" + return self._run(query) + @abstractmethod def _run(self, query: str) -> str: """Use the tool. 
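The hunk above wraps the protected `_run()` hook in a public `run()` method so that callers (and pyright) no longer reach into a private member. A minimal, self-contained sketch of this template-method pattern follows; the class and function names are illustrative stand-ins, not the actual Dify types.

from abc import ABC, abstractmethod


class RetrieverBase(ABC):
    # Public entry point; subclasses customize behaviour by overriding _run() only.
    def run(self, query: str) -> str:
        return self._run(query)

    @abstractmethod
    def _run(self, query: str) -> str:
        """Protected hook that performs the actual retrieval."""


class EchoRetriever(RetrieverBase):
    def _run(self, query: str) -> str:
        return f"retrieved: {query}"


if __name__ == "__main__":
    tool = EchoRetriever()
    print(tool.run("hello"))  # prints "retrieved: hello"; callers never touch _run() directly

Exposing `run()` keeps the subclass contract (`_run`) intact while giving external callers, such as the DatasetRetrieverTool updated below, a stable public surface.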
diff --git a/api/core/tools/utils/dataset_retriever_tool.py b/api/core/tools/utils/dataset_retriever_tool.py index a62d419243..fca6e6f1c7 100644 --- a/api/core/tools/utils/dataset_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever_tool.py @@ -124,7 +124,7 @@ class DatasetRetrieverTool(Tool): yield self.create_text_message(text="please input query") else: # invoke dataset retriever tool - result = self.retrieval_tool._run(query=query) + result = self.retrieval_tool.run(query=query) yield self.create_text_message(text=result) def validate_credentials( diff --git a/api/core/tools/utils/parser.py b/api/core/tools/utils/parser.py index 2e306db6c7..fcb1d325af 100644 --- a/api/core/tools/utils/parser.py +++ b/api/core/tools/utils/parser.py @@ -2,6 +2,7 @@ import re from json import dumps as json_dumps from json import loads as json_loads from json.decoder import JSONDecodeError +from typing import Any from flask import request from requests import get @@ -127,34 +128,34 @@ class ApiBasedToolSchemaParser: if "allOf" in prop_dict: del prop_dict["allOf"] - # parse body parameters - if "schema" in interface["operation"]["requestBody"]["content"][content_type]: - body_schema = interface["operation"]["requestBody"]["content"][content_type]["schema"] - required = body_schema.get("required", []) - properties = body_schema.get("properties", {}) - for name, property in properties.items(): - tool = ToolParameter( - name=name, - label=I18nObject(en_US=name, zh_Hans=name), - human_description=I18nObject( - en_US=property.get("description", ""), zh_Hans=property.get("description", "") - ), - type=ToolParameter.ToolParameterType.STRING, - required=name in required, - form=ToolParameter.ToolParameterForm.LLM, - llm_description=property.get("description", ""), - default=property.get("default", None), - placeholder=I18nObject( - en_US=property.get("description", ""), zh_Hans=property.get("description", "") - ), - ) + # parse body parameters + if "schema" in interface["operation"]["requestBody"]["content"][content_type]: + body_schema = interface["operation"]["requestBody"]["content"][content_type]["schema"] + required = body_schema.get("required", []) + properties = body_schema.get("properties", {}) + for name, property in properties.items(): + tool = ToolParameter( + name=name, + label=I18nObject(en_US=name, zh_Hans=name), + human_description=I18nObject( + en_US=property.get("description", ""), zh_Hans=property.get("description", "") + ), + type=ToolParameter.ToolParameterType.STRING, + required=name in required, + form=ToolParameter.ToolParameterForm.LLM, + llm_description=property.get("description", ""), + default=property.get("default", None), + placeholder=I18nObject( + en_US=property.get("description", ""), zh_Hans=property.get("description", "") + ), + ) - # check if there is a type - typ = ApiBasedToolSchemaParser._get_tool_parameter_type(property) - if typ: - tool.type = typ + # check if there is a type + typ = ApiBasedToolSchemaParser._get_tool_parameter_type(property) + if typ: + tool.type = typ - parameters.append(tool) + parameters.append(tool) # check if parameters is duplicated parameters_count = {} @@ -241,7 +242,9 @@ class ApiBasedToolSchemaParser: return ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle(openapi, extra_info=extra_info, warning=warning) @staticmethod - def parse_swagger_to_openapi(swagger: dict, extra_info: dict | None = None, warning: dict | None = None): + def parse_swagger_to_openapi( + swagger: dict, extra_info: dict | None = None, warning: dict | None = None + 
) -> dict[str, Any]: warning = warning or {} """ parse swagger to openapi @@ -257,7 +260,7 @@ class ApiBasedToolSchemaParser: if len(servers) == 0: raise ToolApiSchemaError("No server found in the swagger yaml.") - openapi = { + converted_openapi: dict[str, Any] = { "openapi": "3.0.0", "info": { "title": info.get("title", "Swagger"), @@ -275,7 +278,7 @@ class ApiBasedToolSchemaParser: # convert paths for path, path_item in swagger["paths"].items(): - openapi["paths"][path] = {} + converted_openapi["paths"][path] = {} for method, operation in path_item.items(): if "operationId" not in operation: raise ToolApiSchemaError(f"No operationId found in operation {method} {path}.") @@ -286,7 +289,7 @@ class ApiBasedToolSchemaParser: if warning is not None: warning["missing_summary"] = f"No summary or description found in operation {method} {path}." - openapi["paths"][path][method] = { + converted_openapi["paths"][path][method] = { "operationId": operation["operationId"], "summary": operation.get("summary", ""), "description": operation.get("description", ""), @@ -295,13 +298,14 @@ class ApiBasedToolSchemaParser: } if "requestBody" in operation: - openapi["paths"][path][method]["requestBody"] = operation["requestBody"] + converted_openapi["paths"][path][method]["requestBody"] = operation["requestBody"] # convert definitions - for name, definition in swagger["definitions"].items(): - openapi["components"]["schemas"][name] = definition + if "definitions" in swagger: + for name, definition in swagger["definitions"].items(): + converted_openapi["components"]["schemas"][name] = definition - return openapi + return converted_openapi @staticmethod def parse_openai_plugin_json_to_tool_bundle( diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json index f3d56cfbee..1b60212c2b 100644 --- a/api/pyrightconfig.json +++ b/api/pyrightconfig.json @@ -9,7 +9,6 @@ "libs", "controllers/console/datasets", "core/ops", - "core/tools", "core/model_runtime", "core/workflow/nodes", "core/app/app_config/easy_ui_based_app/dataset" From f60aa36fa03f69ebaa374fdca6eb01710ac2c917 Mon Sep 17 00:00:00 2001 From: lyzno1 <92089059+lyzno1@users.noreply.github.com> Date: Mon, 29 Sep 2025 19:22:58 +0800 Subject: [PATCH 087/126] feat(sdk): enhance Python SDK with 27 new Service API endpoints (#26401) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- sdks/python-client/dify_client/__init__.py | 2 + sdks/python-client/dify_client/client.py | 223 ++++++++++- sdks/python-client/tests/test_new_apis.py | 416 +++++++++++++++++++++ 3 files changed, 640 insertions(+), 1 deletion(-) create mode 100644 sdks/python-client/tests/test_new_apis.py diff --git a/sdks/python-client/dify_client/__init__.py b/sdks/python-client/dify_client/__init__.py index e866472f45..e252bc0472 100644 --- a/sdks/python-client/dify_client/__init__.py +++ b/sdks/python-client/dify_client/__init__.py @@ -4,6 +4,7 @@ from dify_client.client import ( DifyClient, KnowledgeBaseClient, WorkflowClient, + WorkspaceClient, ) __all__ = [ @@ -12,4 +13,5 @@ __all__ = [ "DifyClient", "KnowledgeBaseClient", "WorkflowClient", + "WorkspaceClient", ] diff --git a/sdks/python-client/dify_client/client.py b/sdks/python-client/dify_client/client.py index 2154741e91..fb42e3773d 100644 --- a/sdks/python-client/dify_client/client.py +++ b/sdks/python-client/dify_client/client.py @@ -1,5 +1,6 @@ import json -from typing import IO, Literal +from typing import Literal, Union, Dict, List, Any, Optional, IO + import requests @@ -49,6 +50,18 @@ class 
DifyClient: params = {"user": user} return self._send_request("GET", "/meta", params=params) + def get_app_info(self): + """Get basic application information including name, description, tags, and mode.""" + return self._send_request("GET", "/info") + + def get_app_site_info(self): + """Get application site information.""" + return self._send_request("GET", "/site") + + def get_file_preview(self, file_id: str): + """Get file preview by file ID.""" + return self._send_request("GET", f"/files/{file_id}/preview") + class CompletionClient(DifyClient): def create_completion_message( @@ -144,6 +157,51 @@ class ChatClient(DifyClient): files = {"file": audio_file} return self._send_request_with_files("POST", "/audio-to-text", data, files) + # Annotation APIs + def annotation_reply_action( + self, + action: Literal["enable", "disable"], + score_threshold: float, + embedding_provider_name: str, + embedding_model_name: str, + ): + """Enable or disable annotation reply feature.""" + # Backend API requires these fields to be non-None values + if score_threshold is None or embedding_provider_name is None or embedding_model_name is None: + raise ValueError("score_threshold, embedding_provider_name, and embedding_model_name cannot be None") + + data = { + "score_threshold": score_threshold, + "embedding_provider_name": embedding_provider_name, + "embedding_model_name": embedding_model_name, + } + return self._send_request("POST", f"/apps/annotation-reply/{action}", json=data) + + def get_annotation_reply_status(self, action: Literal["enable", "disable"], job_id: str): + """Get the status of an annotation reply action job.""" + return self._send_request("GET", f"/apps/annotation-reply/{action}/status/{job_id}") + + def list_annotations(self, page: int = 1, limit: int = 20, keyword: str = ""): + """List annotations for the application.""" + params = {"page": page, "limit": limit} + if keyword: + params["keyword"] = keyword + return self._send_request("GET", "/apps/annotations", params=params) + + def create_annotation(self, question: str, answer: str): + """Create a new annotation.""" + data = {"question": question, "answer": answer} + return self._send_request("POST", "/apps/annotations", json=data) + + def update_annotation(self, annotation_id: str, question: str, answer: str): + """Update an existing annotation.""" + data = {"question": question, "answer": answer} + return self._send_request("PUT", f"/apps/annotations/{annotation_id}", json=data) + + def delete_annotation(self, annotation_id: str): + """Delete an annotation.""" + return self._send_request("DELETE", f"/apps/annotations/{annotation_id}") + class WorkflowClient(DifyClient): def run(self, inputs: dict, response_mode: Literal["blocking", "streaming"] = "streaming", user: str = "abc-123"): @@ -157,6 +215,55 @@ class WorkflowClient(DifyClient): def get_result(self, workflow_run_id): return self._send_request("GET", f"/workflows/run/{workflow_run_id}") + def get_workflow_logs( + self, + keyword: str = None, + status: Literal["succeeded", "failed", "stopped"] | None = None, + page: int = 1, + limit: int = 20, + created_at__before: str = None, + created_at__after: str = None, + created_by_end_user_session_id: str = None, + created_by_account: str = None, + ): + """Get workflow execution logs with optional filtering.""" + params = {"page": page, "limit": limit} + if keyword: + params["keyword"] = keyword + if status: + params["status"] = status + if created_at__before: + params["created_at__before"] = created_at__before + if created_at__after: + 
params["created_at__after"] = created_at__after + if created_by_end_user_session_id: + params["created_by_end_user_session_id"] = created_by_end_user_session_id + if created_by_account: + params["created_by_account"] = created_by_account + return self._send_request("GET", "/workflows/logs", params=params) + + def run_specific_workflow( + self, + workflow_id: str, + inputs: dict, + response_mode: Literal["blocking", "streaming"] = "streaming", + user: str = "abc-123", + ): + """Run a specific workflow by workflow ID.""" + data = {"inputs": inputs, "response_mode": response_mode, "user": user} + return self._send_request( + "POST", f"/workflows/{workflow_id}/run", data, stream=True if response_mode == "streaming" else False + ) + + +class WorkspaceClient(DifyClient): + """Client for workspace-related operations.""" + + def get_available_models(self, model_type: str): + """Get available models by model type.""" + url = f"/workspaces/current/models/model-types/{model_type}" + return self._send_request("GET", url) + class KnowledgeBaseClient(DifyClient): def __init__( @@ -443,3 +550,117 @@ class KnowledgeBaseClient(DifyClient): data = {"segment": segment_data} url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments/{segment_id}" return self._send_request("POST", url, json=data, **kwargs) + + # Advanced Knowledge Base APIs + def hit_testing( + self, query: str, retrieval_model: Dict[str, Any] = None, external_retrieval_model: Dict[str, Any] = None + ): + """Perform hit testing on the dataset.""" + data = {"query": query} + if retrieval_model: + data["retrieval_model"] = retrieval_model + if external_retrieval_model: + data["external_retrieval_model"] = external_retrieval_model + url = f"/datasets/{self._get_dataset_id()}/hit-testing" + return self._send_request("POST", url, json=data) + + def get_dataset_metadata(self): + """Get dataset metadata.""" + url = f"/datasets/{self._get_dataset_id()}/metadata" + return self._send_request("GET", url) + + def create_dataset_metadata(self, metadata_data: Dict[str, Any]): + """Create dataset metadata.""" + url = f"/datasets/{self._get_dataset_id()}/metadata" + return self._send_request("POST", url, json=metadata_data) + + def update_dataset_metadata(self, metadata_id: str, metadata_data: Dict[str, Any]): + """Update dataset metadata.""" + url = f"/datasets/{self._get_dataset_id()}/metadata/{metadata_id}" + return self._send_request("PATCH", url, json=metadata_data) + + def get_built_in_metadata(self): + """Get built-in metadata.""" + url = f"/datasets/{self._get_dataset_id()}/metadata/built-in" + return self._send_request("GET", url) + + def manage_built_in_metadata(self, action: str, metadata_data: Dict[str, Any] = None): + """Manage built-in metadata with specified action.""" + data = metadata_data or {} + url = f"/datasets/{self._get_dataset_id()}/metadata/built-in/{action}" + return self._send_request("POST", url, json=data) + + def update_documents_metadata(self, operation_data: List[Dict[str, Any]]): + """Update metadata for multiple documents.""" + url = f"/datasets/{self._get_dataset_id()}/documents/metadata" + data = {"operation_data": operation_data} + return self._send_request("POST", url, json=data) + + # Dataset Tags APIs + def list_dataset_tags(self): + """List all dataset tags.""" + return self._send_request("GET", "/datasets/tags") + + def bind_dataset_tags(self, tag_ids: List[str]): + """Bind tags to dataset.""" + data = {"tag_ids": tag_ids, "target_id": self._get_dataset_id()} + return self._send_request("POST", 
"/datasets/tags/binding", json=data) + + def unbind_dataset_tag(self, tag_id: str): + """Unbind a single tag from dataset.""" + data = {"tag_id": tag_id, "target_id": self._get_dataset_id()} + return self._send_request("POST", "/datasets/tags/unbinding", json=data) + + def get_dataset_tags(self): + """Get tags for current dataset.""" + url = f"/datasets/{self._get_dataset_id()}/tags" + return self._send_request("GET", url) + + # RAG Pipeline APIs + def get_datasource_plugins(self, is_published: bool = True): + """Get datasource plugins for RAG pipeline.""" + params = {"is_published": is_published} + url = f"/datasets/{self._get_dataset_id()}/pipeline/datasource-plugins" + return self._send_request("GET", url, params=params) + + def run_datasource_node( + self, + node_id: str, + inputs: Dict[str, Any], + datasource_type: str, + is_published: bool = True, + credential_id: str = None, + ): + """Run a datasource node in RAG pipeline.""" + data = {"inputs": inputs, "datasource_type": datasource_type, "is_published": is_published} + if credential_id: + data["credential_id"] = credential_id + url = f"/datasets/{self._get_dataset_id()}/pipeline/datasource/nodes/{node_id}/run" + return self._send_request("POST", url, json=data, stream=True) + + def run_rag_pipeline( + self, + inputs: Dict[str, Any], + datasource_type: str, + datasource_info_list: List[Dict[str, Any]], + start_node_id: str, + is_published: bool = True, + response_mode: Literal["streaming", "blocking"] = "blocking", + ): + """Run RAG pipeline.""" + data = { + "inputs": inputs, + "datasource_type": datasource_type, + "datasource_info_list": datasource_info_list, + "start_node_id": start_node_id, + "is_published": is_published, + "response_mode": response_mode, + } + url = f"/datasets/{self._get_dataset_id()}/pipeline/run" + return self._send_request("POST", url, json=data, stream=response_mode == "streaming") + + def upload_pipeline_file(self, file_path: str): + """Upload file for RAG pipeline.""" + with open(file_path, "rb") as f: + files = {"file": f} + return self._send_request_with_files("POST", "/datasets/pipeline/file-upload", {}, files) diff --git a/sdks/python-client/tests/test_new_apis.py b/sdks/python-client/tests/test_new_apis.py new file mode 100644 index 0000000000..09c62dfda7 --- /dev/null +++ b/sdks/python-client/tests/test_new_apis.py @@ -0,0 +1,416 @@ +#!/usr/bin/env python3 +""" +Test suite for the new Service API functionality in the Python SDK. + +This test validates the implementation of the missing Service API endpoints +that were added to the Python SDK to achieve complete coverage. 
+""" + +import unittest +from unittest.mock import Mock, patch, MagicMock +import json + +from dify_client import ( + DifyClient, + ChatClient, + WorkflowClient, + KnowledgeBaseClient, + WorkspaceClient, +) + + +class TestNewServiceAPIs(unittest.TestCase): + """Test cases for new Service API implementations.""" + + def setUp(self): + """Set up test fixtures.""" + self.api_key = "test-api-key" + self.base_url = "https://api.dify.ai/v1" + + @patch("dify_client.client.requests.request") + def test_app_info_apis(self, mock_request): + """Test application info APIs.""" + mock_response = Mock() + mock_response.json.return_value = { + "name": "Test App", + "description": "Test Description", + "tags": ["test", "api"], + "mode": "chat", + "author_name": "Test Author", + } + mock_request.return_value = mock_response + + client = DifyClient(self.api_key, self.base_url) + + # Test get_app_info + result = client.get_app_info() + mock_request.assert_called_with( + "GET", + f"{self.base_url}/info", + json=None, + params=None, + headers={ + "Authorization": f"Bearer {self.api_key}", + "Content-Type": "application/json", + }, + stream=False, + ) + + # Test get_app_site_info + client.get_app_site_info() + mock_request.assert_called_with( + "GET", + f"{self.base_url}/site", + json=None, + params=None, + headers={ + "Authorization": f"Bearer {self.api_key}", + "Content-Type": "application/json", + }, + stream=False, + ) + + # Test get_file_preview + file_id = "test-file-id" + client.get_file_preview(file_id) + mock_request.assert_called_with( + "GET", + f"{self.base_url}/files/{file_id}/preview", + json=None, + params=None, + headers={ + "Authorization": f"Bearer {self.api_key}", + "Content-Type": "application/json", + }, + stream=False, + ) + + @patch("dify_client.client.requests.request") + def test_annotation_apis(self, mock_request): + """Test annotation APIs.""" + mock_response = Mock() + mock_response.json.return_value = {"result": "success"} + mock_request.return_value = mock_response + + client = ChatClient(self.api_key, self.base_url) + + # Test annotation_reply_action - enable + client.annotation_reply_action( + action="enable", + score_threshold=0.8, + embedding_provider_name="openai", + embedding_model_name="text-embedding-ada-002", + ) + mock_request.assert_called_with( + "POST", + f"{self.base_url}/apps/annotation-reply/enable", + json={ + "score_threshold": 0.8, + "embedding_provider_name": "openai", + "embedding_model_name": "text-embedding-ada-002", + }, + params=None, + headers={ + "Authorization": f"Bearer {self.api_key}", + "Content-Type": "application/json", + }, + stream=False, + ) + + # Test annotation_reply_action - disable (now requires same fields as enable) + client.annotation_reply_action( + action="disable", + score_threshold=0.5, + embedding_provider_name="openai", + embedding_model_name="text-embedding-ada-002", + ) + + # Test annotation_reply_action with score_threshold=0 (edge case) + client.annotation_reply_action( + action="enable", + score_threshold=0.0, # This should work and not raise ValueError + embedding_provider_name="openai", + embedding_model_name="text-embedding-ada-002", + ) + + # Test get_annotation_reply_status + client.get_annotation_reply_status("enable", "job-123") + + # Test list_annotations + client.list_annotations(page=1, limit=20, keyword="test") + + # Test create_annotation + client.create_annotation("Test question?", "Test answer.") + + # Test update_annotation + client.update_annotation("annotation-123", "Updated question?", "Updated answer.") + + # 
Test delete_annotation + client.delete_annotation("annotation-123") + + # Verify all calls were made (8 calls: enable + disable + enable with 0.0 + 5 other operations) + self.assertEqual(mock_request.call_count, 8) + + @patch("dify_client.client.requests.request") + def test_knowledge_base_advanced_apis(self, mock_request): + """Test advanced knowledge base APIs.""" + mock_response = Mock() + mock_response.json.return_value = {"result": "success"} + mock_request.return_value = mock_response + + dataset_id = "test-dataset-id" + client = KnowledgeBaseClient(self.api_key, self.base_url, dataset_id) + + # Test hit_testing + client.hit_testing("test query", {"type": "vector"}) + mock_request.assert_called_with( + "POST", + f"{self.base_url}/datasets/{dataset_id}/hit-testing", + json={"query": "test query", "retrieval_model": {"type": "vector"}}, + params=None, + headers={ + "Authorization": f"Bearer {self.api_key}", + "Content-Type": "application/json", + }, + stream=False, + ) + + # Test metadata operations + client.get_dataset_metadata() + client.create_dataset_metadata({"key": "value"}) + client.update_dataset_metadata("meta-123", {"key": "new_value"}) + client.get_built_in_metadata() + client.manage_built_in_metadata("enable", {"type": "built_in"}) + client.update_documents_metadata([{"document_id": "doc1", "metadata": {"key": "value"}}]) + + # Test tag operations + client.list_dataset_tags() + client.bind_dataset_tags(["tag1", "tag2"]) + client.unbind_dataset_tag("tag1") + client.get_dataset_tags() + + # Verify multiple calls were made + self.assertGreater(mock_request.call_count, 5) + + @patch("dify_client.client.requests.request") + def test_rag_pipeline_apis(self, mock_request): + """Test RAG pipeline APIs.""" + mock_response = Mock() + mock_response.json.return_value = {"result": "success"} + mock_request.return_value = mock_response + + dataset_id = "test-dataset-id" + client = KnowledgeBaseClient(self.api_key, self.base_url, dataset_id) + + # Test get_datasource_plugins + client.get_datasource_plugins(is_published=True) + mock_request.assert_called_with( + "GET", + f"{self.base_url}/datasets/{dataset_id}/pipeline/datasource-plugins", + json=None, + params={"is_published": True}, + headers={ + "Authorization": f"Bearer {self.api_key}", + "Content-Type": "application/json", + }, + stream=False, + ) + + # Test run_datasource_node + client.run_datasource_node( + node_id="node-123", + inputs={"param": "value"}, + datasource_type="online_document", + is_published=True, + credential_id="cred-123", + ) + + # Test run_rag_pipeline with blocking mode + client.run_rag_pipeline( + inputs={"query": "test"}, + datasource_type="online_document", + datasource_info_list=[{"id": "ds1"}], + start_node_id="start-node", + is_published=True, + response_mode="blocking", + ) + + # Test run_rag_pipeline with streaming mode + client.run_rag_pipeline( + inputs={"query": "test"}, + datasource_type="online_document", + datasource_info_list=[{"id": "ds1"}], + start_node_id="start-node", + is_published=True, + response_mode="streaming", + ) + + self.assertEqual(mock_request.call_count, 4) + + @patch("dify_client.client.requests.request") + def test_workspace_apis(self, mock_request): + """Test workspace APIs.""" + mock_response = Mock() + mock_response.json.return_value = { + "data": [{"name": "gpt-3.5-turbo", "type": "llm"}, {"name": "gpt-4", "type": "llm"}] + } + mock_request.return_value = mock_response + + client = WorkspaceClient(self.api_key, self.base_url) + + # Test get_available_models + result = 
client.get_available_models("llm") + mock_request.assert_called_with( + "GET", + f"{self.base_url}/workspaces/current/models/model-types/llm", + json=None, + params=None, + headers={ + "Authorization": f"Bearer {self.api_key}", + "Content-Type": "application/json", + }, + stream=False, + ) + + @patch("dify_client.client.requests.request") + def test_workflow_advanced_apis(self, mock_request): + """Test advanced workflow APIs.""" + mock_response = Mock() + mock_response.json.return_value = {"result": "success"} + mock_request.return_value = mock_response + + client = WorkflowClient(self.api_key, self.base_url) + + # Test get_workflow_logs + client.get_workflow_logs(keyword="test", status="succeeded", page=1, limit=20) + mock_request.assert_called_with( + "GET", + f"{self.base_url}/workflows/logs", + json=None, + params={"page": 1, "limit": 20, "keyword": "test", "status": "succeeded"}, + headers={ + "Authorization": f"Bearer {self.api_key}", + "Content-Type": "application/json", + }, + stream=False, + ) + + # Test get_workflow_logs with additional filters + client.get_workflow_logs( + keyword="test", + status="succeeded", + page=1, + limit=20, + created_at__before="2024-01-01", + created_at__after="2023-01-01", + created_by_account="user123", + ) + + # Test run_specific_workflow + client.run_specific_workflow( + workflow_id="workflow-123", inputs={"param": "value"}, response_mode="streaming", user="user-123" + ) + + self.assertEqual(mock_request.call_count, 3) + + def test_error_handling(self): + """Test error handling for required parameters.""" + client = ChatClient(self.api_key, self.base_url) + + # Test annotation_reply_action with missing required parameters would be a TypeError now + # since parameters are required in method signature + with self.assertRaises(TypeError): + client.annotation_reply_action("enable") + + # Test annotation_reply_action with explicit None values should raise ValueError + with self.assertRaises(ValueError) as context: + client.annotation_reply_action("enable", None, "provider", "model") + + self.assertIn("cannot be None", str(context.exception)) + + # Test KnowledgeBaseClient without dataset_id + kb_client = KnowledgeBaseClient(self.api_key, self.base_url) + with self.assertRaises(ValueError) as context: + kb_client.hit_testing("test query") + + self.assertIn("dataset_id is not set", str(context.exception)) + + @patch("dify_client.client.open") + @patch("dify_client.client.requests.request") + def test_file_upload_apis(self, mock_request, mock_open): + """Test file upload APIs.""" + mock_response = Mock() + mock_response.json.return_value = {"result": "success"} + mock_request.return_value = mock_response + + mock_file = MagicMock() + mock_open.return_value.__enter__.return_value = mock_file + + dataset_id = "test-dataset-id" + client = KnowledgeBaseClient(self.api_key, self.base_url, dataset_id) + + # Test upload_pipeline_file + client.upload_pipeline_file("/path/to/test.pdf") + + mock_open.assert_called_with("/path/to/test.pdf", "rb") + mock_request.assert_called_once() + + def test_comprehensive_coverage(self): + """Test that all previously missing APIs are now implemented.""" + + # Test DifyClient methods + dify_methods = ["get_app_info", "get_app_site_info", "get_file_preview"] + client = DifyClient(self.api_key) + for method in dify_methods: + self.assertTrue(hasattr(client, method), f"DifyClient missing method: {method}") + + # Test ChatClient annotation methods + chat_methods = [ + "annotation_reply_action", + "get_annotation_reply_status", + 
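# listing plus per-annotation create/update/delete helpers
+            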
"list_annotations", + "create_annotation", + "update_annotation", + "delete_annotation", + ] + chat_client = ChatClient(self.api_key) + for method in chat_methods: + self.assertTrue(hasattr(chat_client, method), f"ChatClient missing method: {method}") + + # Test WorkflowClient advanced methods + workflow_methods = ["get_workflow_logs", "run_specific_workflow"] + workflow_client = WorkflowClient(self.api_key) + for method in workflow_methods: + self.assertTrue(hasattr(workflow_client, method), f"WorkflowClient missing method: {method}") + + # Test KnowledgeBaseClient advanced methods + kb_methods = [ + "hit_testing", + "get_dataset_metadata", + "create_dataset_metadata", + "update_dataset_metadata", + "get_built_in_metadata", + "manage_built_in_metadata", + "update_documents_metadata", + "list_dataset_tags", + "bind_dataset_tags", + "unbind_dataset_tag", + "get_dataset_tags", + "get_datasource_plugins", + "run_datasource_node", + "run_rag_pipeline", + "upload_pipeline_file", + ] + kb_client = KnowledgeBaseClient(self.api_key) + for method in kb_methods: + self.assertTrue(hasattr(kb_client, method), f"KnowledgeBaseClient missing method: {method}") + + # Test WorkspaceClient methods + workspace_methods = ["get_available_models"] + workspace_client = WorkspaceClient(self.api_key) + for method in workspace_methods: + self.assertTrue(hasattr(workspace_client, method), f"WorkspaceClient missing method: {method}") + + +if __name__ == "__main__": + unittest.main() From e5d4235f1b9a3be891cab9ef905f4baba3ea6f8e Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Mon, 29 Sep 2025 20:23:22 +0900 Subject: [PATCH 088/126] feat(typing): Remove "libs" from pyright exclude and fix typing errors (#26423) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> --- api/libs/external_api.py | 2 +- api/libs/gmpy2_pkcs10aep_cipher.py | 6 +++--- api/libs/sendgrid.py | 4 ++-- api/pyrightconfig.json | 1 - 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/api/libs/external_api.py b/api/libs/external_api.py index cf91b0117f..25a82f8a96 100644 --- a/api/libs/external_api.py +++ b/api/libs/external_api.py @@ -94,7 +94,7 @@ def register_external_error_handlers(api: Api): got_request_exception.send(current_app, exception=e) status_code = 500 - data = getattr(e, "data", {"message": http_status_message(status_code)}) + data: dict[str, Any] = getattr(e, "data", {"message": http_status_message(status_code)}) # 🔒 Normalize non-mapping data (e.g., if someone set e.data = Response) if not isinstance(data, dict): diff --git a/api/libs/gmpy2_pkcs10aep_cipher.py b/api/libs/gmpy2_pkcs10aep_cipher.py index 9759156c0f..fc38d51005 100644 --- a/api/libs/gmpy2_pkcs10aep_cipher.py +++ b/api/libs/gmpy2_pkcs10aep_cipher.py @@ -27,7 +27,7 @@ import gmpy2 # type: ignore from Crypto import Random from Crypto.Signature.pss import MGF1 from Crypto.Util.number import bytes_to_long, ceil_div, long_to_bytes -from Crypto.Util.py3compat import _copy_bytes, bord +from Crypto.Util.py3compat import bord from Crypto.Util.strxor import strxor @@ -72,7 +72,7 @@ class PKCS1OAepCipher: else: self._mgf = lambda x, y: MGF1(x, y, self._hashObj) - self._label = _copy_bytes(None, None, label) + self._label = bytes(label) self._randfunc = randfunc def can_encrypt(self): @@ -120,7 +120,7 @@ class PKCS1OAepCipher: # Step 2b ps = b"\x00" * ps_len # Step 2c - db = lHash + ps + b"\x01" + _copy_bytes(None, None, message) + db = lHash + ps + b"\x01" + bytes(message) # Step 2d ros = 
self._randfunc(hLen) # Step 2e diff --git a/api/libs/sendgrid.py b/api/libs/sendgrid.py index ecc4b3fb98..a270fa70fa 100644 --- a/api/libs/sendgrid.py +++ b/api/libs/sendgrid.py @@ -14,7 +14,7 @@ class SendGridClient: def send(self, mail: dict): logger.debug("Sending email with SendGrid") - + _to = "" try: _to = mail["to"] @@ -28,7 +28,7 @@ class SendGridClient: content = Content("text/html", mail["html"]) sg_mail = Mail(from_email, to_email, subject, content) mail_json = sg_mail.get() - response = sg.client.mail.send.post(request_body=mail_json) # ty: ignore [call-non-callable] + response = sg.client.mail.send.post(request_body=mail_json) # type: ignore logger.debug(response.status_code) logger.debug(response.body) logger.debug(response.headers) diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json index 1b60212c2b..9cb1ea9bf1 100644 --- a/api/pyrightconfig.json +++ b/api/pyrightconfig.json @@ -6,7 +6,6 @@ "migrations/", "core/rag", "extensions", - "libs", "controllers/console/datasets", "core/ops", "core/model_runtime", From cd47a47c3b4acc4d4457b73777722602c33bc950 Mon Sep 17 00:00:00 2001 From: -LAN- <laipz8200@outlook.com> Date: Mon, 29 Sep 2025 19:34:09 +0800 Subject: [PATCH 089/126] Bump release references to 1.9.1 (#26453) --- api/pyproject.toml | 2 +- api/uv.lock | 2 +- docker/docker-compose-template.yaml | 8 ++++---- docker/docker-compose.yaml | 8 ++++---- web/package.json | 4 ++-- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/api/pyproject.toml b/api/pyproject.toml index 485fb918ed..85fa0beaab 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "dify-api" -version = "1.9.0" +version = "1.9.1" requires-python = ">=3.11,<3.13" dependencies = [ diff --git a/api/uv.lock b/api/uv.lock index 87799977ca..b1e86cd86d 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1276,7 +1276,7 @@ wheels = [ [[package]] name = "dify-api" -version = "1.9.0" +version = "1.9.1" source = { virtual = "." } dependencies = [ { name = "arize-phoenix-otel" }, diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 685fc325d0..5253f750b9 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.1 restart: always environment: # Use the shared environment variables. @@ -31,7 +31,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.1 restart: always environment: # Use the shared environment variables. @@ -58,7 +58,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.1 restart: always environment: # Use the shared environment variables. @@ -76,7 +76,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.9.0 + image: langgenius/dify-web:1.9.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index cc66f69550..b5ecb9db03 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -599,7 +599,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.1 restart: always environment: # Use the shared environment variables. 
@@ -628,7 +628,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.1 restart: always environment: # Use the shared environment variables. @@ -655,7 +655,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.9.0 + image: langgenius/dify-api:1.9.1 restart: always environment: # Use the shared environment variables. @@ -673,7 +673,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.9.0 + image: langgenius/dify-web:1.9.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} diff --git a/web/package.json b/web/package.json index f40c346f82..36eb6d37f1 100644 --- a/web/package.json +++ b/web/package.json @@ -1,6 +1,6 @@ { "name": "dify-web", - "version": "1.9.0", + "version": "1.9.1", "private": true, "packageManager": "pnpm@10.16.0", "engines": { @@ -278,4 +278,4 @@ "which-typed-array": "npm:@nolyfill/which-typed-array@^1" } } -} \ No newline at end of file +} From df43c6ab8a0d2f80d97f69b3a4854eb360b8517b Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Mon, 29 Sep 2025 20:35:55 +0900 Subject: [PATCH 090/126] [Chore/Refactor] Implement lazy initialization for useState calls to prevent re-computation (#26252) Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: asukaminato0721 <30024051+asukaminato0721@users.noreply.github.com> --- web/app/components/app/annotation/index.tsx | 4 ++-- .../config-prompt/prompt-editor-height-resize-wrap.tsx | 2 +- .../app/configuration/config-var/config-modal/index.tsx | 2 +- .../app/configuration/hooks/use-advanced-prompt-config.ts | 4 ++-- .../base/date-and-time-picker/date-picker/index.tsx | 4 ++-- .../base/date-and-time-picker/time-picker/index.tsx | 2 +- web/app/components/base/markdown-blocks/think-block.tsx | 2 +- web/app/components/base/notion-page-selector/base.tsx | 2 +- web/app/components/base/tab-slider/index.tsx | 2 +- web/app/components/custom/custom-web-app-brand/index.tsx | 2 +- web/app/components/header/maintenance-notice.tsx | 2 +- web/app/components/signin/countdown.tsx | 2 +- web/app/components/tools/mcp/modal.tsx | 2 +- .../components/workflow/nodes/_base/hooks/use-resize-panel.ts | 2 +- .../workflow/nodes/http/hooks/use-key-value-list.ts | 2 +- .../json-schema-config-modal/json-schema-config.tsx | 2 +- .../nodes/question-classifier/components/class-item.tsx | 2 +- .../components/workflow/variable-inspect/value-content.tsx | 2 +- web/app/components/workflow/workflow-preview/index.tsx | 4 ++-- web/app/signin/invite-settings/page.tsx | 2 +- 20 files changed, 24 insertions(+), 24 deletions(-) diff --git a/web/app/components/app/annotation/index.tsx b/web/app/components/app/annotation/index.tsx index afa8732701..264b1ac727 100644 --- a/web/app/components/app/annotation/index.tsx +++ b/web/app/components/app/annotation/index.tsx @@ -38,7 +38,7 @@ const Annotation: FC<Props> = (props) => { const [isShowEdit, setIsShowEdit] = useState(false) const [annotationConfig, setAnnotationConfig] = useState<AnnotationReplyConfig | null>(null) const [isChatApp] = useState(appDetail.mode !== 'completion') - const [controlRefreshSwitch, setControlRefreshSwitch] = useState(Date.now()) + const [controlRefreshSwitch, setControlRefreshSwitch] = useState(() => Date.now()) const { plan, enableBilling } = useProviderContext() const isAnnotationFull = enableBilling && 
plan.usage.annotatedResponse >= plan.total.annotatedResponse const [isShowAnnotationFullModal, setIsShowAnnotationFullModal] = useState(false) @@ -48,7 +48,7 @@ const Annotation: FC<Props> = (props) => { const [list, setList] = useState<AnnotationItem[]>([]) const [total, setTotal] = useState(0) const [isLoading, setIsLoading] = useState(false) - const [controlUpdateList, setControlUpdateList] = useState(Date.now()) + const [controlUpdateList, setControlUpdateList] = useState(() => Date.now()) const [currItem, setCurrItem] = useState<AnnotationItem | null>(null) const [isShowViewModal, setIsShowViewModal] = useState(false) const [selectedIds, setSelectedIds] = useState<string[]>([]) diff --git a/web/app/components/app/configuration/config-prompt/prompt-editor-height-resize-wrap.tsx b/web/app/components/app/configuration/config-prompt/prompt-editor-height-resize-wrap.tsx index 1457a298f2..9e10db93ae 100644 --- a/web/app/components/app/configuration/config-prompt/prompt-editor-height-resize-wrap.tsx +++ b/web/app/components/app/configuration/config-prompt/prompt-editor-height-resize-wrap.tsx @@ -25,7 +25,7 @@ const PromptEditorHeightResizeWrap: FC<Props> = ({ }) => { const [clientY, setClientY] = useState(0) const [isResizing, setIsResizing] = useState(false) - const [prevUserSelectStyle, setPrevUserSelectStyle] = useState(getComputedStyle(document.body).userSelect) + const [prevUserSelectStyle, setPrevUserSelectStyle] = useState(() => getComputedStyle(document.body).userSelect) const [oldHeight, setOldHeight] = useState(height) const handleStartResize = useCallback((e: React.MouseEvent<HTMLElement>) => { diff --git a/web/app/components/app/configuration/config-var/config-modal/index.tsx b/web/app/components/app/configuration/config-var/config-modal/index.tsx index cecc076fe7..b0f0ea8779 100644 --- a/web/app/components/app/configuration/config-var/config-modal/index.tsx +++ b/web/app/components/app/configuration/config-var/config-modal/index.tsx @@ -53,7 +53,7 @@ const ConfigModal: FC<IConfigModalProps> = ({ }) => { const { modelConfig } = useContext(ConfigContext) const { t } = useTranslation() - const [tempPayload, setTempPayload] = useState<InputVar>(payload || getNewVarInWorkflow('') as any) + const [tempPayload, setTempPayload] = useState<InputVar>(() => payload || getNewVarInWorkflow('') as any) const { type, label, variable, options, max_length } = tempPayload const modalRef = useRef<HTMLDivElement>(null) const appDetail = useAppStore(state => state.appDetail) diff --git a/web/app/components/app/configuration/hooks/use-advanced-prompt-config.ts b/web/app/components/app/configuration/hooks/use-advanced-prompt-config.ts index 193ac87dd0..92958cc96d 100644 --- a/web/app/components/app/configuration/hooks/use-advanced-prompt-config.ts +++ b/web/app/components/app/configuration/hooks/use-advanced-prompt-config.ts @@ -35,8 +35,8 @@ const useAdvancedPromptConfig = ({ setStop, }: Param) => { const isAdvancedPrompt = promptMode === PromptMode.advanced - const [chatPromptConfig, setChatPromptConfig] = useState<ChatPromptConfig>(clone(DEFAULT_CHAT_PROMPT_CONFIG)) - const [completionPromptConfig, setCompletionPromptConfig] = useState<CompletionPromptConfig>(clone(DEFAULT_COMPLETION_PROMPT_CONFIG)) + const [chatPromptConfig, setChatPromptConfig] = useState<ChatPromptConfig>(() => clone(DEFAULT_CHAT_PROMPT_CONFIG)) + const [completionPromptConfig, setCompletionPromptConfig] = useState<CompletionPromptConfig>(() => clone(DEFAULT_COMPLETION_PROMPT_CONFIG)) const currentAdvancedPrompt = (() => { if 
(!isAdvancedPrompt) diff --git a/web/app/components/base/date-and-time-picker/date-picker/index.tsx b/web/app/components/base/date-and-time-picker/date-picker/index.tsx index f6b7973cb0..8653c6772d 100644 --- a/web/app/components/base/date-and-time-picker/date-picker/index.tsx +++ b/web/app/components/base/date-and-time-picker/date-picker/index.tsx @@ -55,8 +55,8 @@ const DatePicker = ({ const [currentDate, setCurrentDate] = useState(inputValue || defaultValue) const [selectedDate, setSelectedDate] = useState(inputValue) - const [selectedMonth, setSelectedMonth] = useState((inputValue || defaultValue).month()) - const [selectedYear, setSelectedYear] = useState((inputValue || defaultValue).year()) + const [selectedMonth, setSelectedMonth] = useState(() => (inputValue || defaultValue).month()) + const [selectedYear, setSelectedYear] = useState(() => (inputValue || defaultValue).year()) useEffect(() => { const handleClickOutside = (event: MouseEvent) => { diff --git a/web/app/components/base/date-and-time-picker/time-picker/index.tsx b/web/app/components/base/date-and-time-picker/time-picker/index.tsx index 8ef10abc2e..1fb2cfed11 100644 --- a/web/app/components/base/date-and-time-picker/time-picker/index.tsx +++ b/web/app/components/base/date-and-time-picker/time-picker/index.tsx @@ -28,7 +28,7 @@ const TimePicker = ({ const [isOpen, setIsOpen] = useState(false) const containerRef = useRef<HTMLDivElement>(null) const isInitial = useRef(true) - const [selectedTime, setSelectedTime] = useState(value ? getDateWithTimezone({ timezone, date: value }) : undefined) + const [selectedTime, setSelectedTime] = useState(() => value ? getDateWithTimezone({ timezone, date: value }) : undefined) useEffect(() => { const handleClickOutside = (event: MouseEvent) => { diff --git a/web/app/components/base/markdown-blocks/think-block.tsx b/web/app/components/base/markdown-blocks/think-block.tsx index acceecd433..a3b0561677 100644 --- a/web/app/components/base/markdown-blocks/think-block.tsx +++ b/web/app/components/base/markdown-blocks/think-block.tsx @@ -37,7 +37,7 @@ const removeEndThink = (children: any): any => { const useThinkTimer = (children: any) => { const { isResponding } = useChatContext() - const [startTime] = useState(Date.now()) + const [startTime] = useState(() => Date.now()) const [elapsedTime, setElapsedTime] = useState(0) const [isComplete, setIsComplete] = useState(false) const timerRef = useRef<NodeJS.Timeout>() diff --git a/web/app/components/base/notion-page-selector/base.tsx b/web/app/components/base/notion-page-selector/base.tsx index 1c54b57a18..adf044c406 100644 --- a/web/app/components/base/notion-page-selector/base.tsx +++ b/web/app/components/base/notion-page-selector/base.tsx @@ -93,7 +93,7 @@ const NotionPageSelector = ({ const defaultSelectedPagesId = useMemo(() => { return [...Array.from(pagesMapAndSelectedPagesId[1]), ...(value || [])] }, [pagesMapAndSelectedPagesId, value]) - const [selectedPagesId, setSelectedPagesId] = useState<Set<string>>(new Set(defaultSelectedPagesId)) + const [selectedPagesId, setSelectedPagesId] = useState<Set<string>>(() => new Set(defaultSelectedPagesId)) useEffect(() => { setSelectedPagesId(new Set(defaultSelectedPagesId)) diff --git a/web/app/components/base/tab-slider/index.tsx b/web/app/components/base/tab-slider/index.tsx index 56cde52154..55c44d5ea8 100644 --- a/web/app/components/base/tab-slider/index.tsx +++ b/web/app/components/base/tab-slider/index.tsx @@ -21,7 +21,7 @@ const TabSlider: FC<TabSliderProps> = ({ onChange, options, }) => { - 
const [activeIndex, setActiveIndex] = useState(options.findIndex(option => option.value === value)) + const [activeIndex, setActiveIndex] = useState(() => options.findIndex(option => option.value === value)) const [sliderStyle, setSliderStyle] = useState({}) const { data: pluginList } = useInstalledPluginList() diff --git a/web/app/components/custom/custom-web-app-brand/index.tsx b/web/app/components/custom/custom-web-app-brand/index.tsx index ea2f44caea..eb06265042 100644 --- a/web/app/components/custom/custom-web-app-brand/index.tsx +++ b/web/app/components/custom/custom-web-app-brand/index.tsx @@ -38,7 +38,7 @@ const CustomWebAppBrand = () => { isCurrentWorkspaceManager, } = useAppContext() const [fileId, setFileId] = useState('') - const [imgKey, setImgKey] = useState(Date.now()) + const [imgKey, setImgKey] = useState(() => Date.now()) const [uploadProgress, setUploadProgress] = useState(0) const systemFeatures = useGlobalPublicStore(s => s.systemFeatures) const isSandbox = enableBilling && plan.type === Plan.sandbox diff --git a/web/app/components/header/maintenance-notice.tsx b/web/app/components/header/maintenance-notice.tsx index 4bb4ef7f7d..bcbb344b2c 100644 --- a/web/app/components/header/maintenance-notice.tsx +++ b/web/app/components/header/maintenance-notice.tsx @@ -6,7 +6,7 @@ import { useLanguage } from '@/app/components/header/account-setting/model-provi const MaintenanceNotice = () => { const locale = useLanguage() - const [showNotice, setShowNotice] = useState(localStorage.getItem('hide-maintenance-notice') !== '1') + const [showNotice, setShowNotice] = useState(() => localStorage.getItem('hide-maintenance-notice') !== '1') const handleJumpNotice = () => { window.open(NOTICE_I18N.href, '_blank') } diff --git a/web/app/components/signin/countdown.tsx b/web/app/components/signin/countdown.tsx index 5fd6a29712..c16bd46fe4 100644 --- a/web/app/components/signin/countdown.tsx +++ b/web/app/components/signin/countdown.tsx @@ -12,7 +12,7 @@ type CountdownProps = { export default function Countdown({ onResend }: CountdownProps) { const { t } = useTranslation() - const [leftTime, setLeftTime] = useState(Number(localStorage.getItem(COUNT_DOWN_KEY) || COUNT_DOWN_TIME_MS)) + const [leftTime, setLeftTime] = useState(() => Number(localStorage.getItem(COUNT_DOWN_KEY) || COUNT_DOWN_TIME_MS)) const [time] = useCountDown({ leftTime, onEnd: () => { diff --git a/web/app/components/tools/mcp/modal.tsx b/web/app/components/tools/mcp/modal.tsx index 211d594caf..1a12b3b3e9 100644 --- a/web/app/components/tools/mcp/modal.tsx +++ b/web/app/components/tools/mcp/modal.tsx @@ -65,7 +65,7 @@ const MCPModal = ({ const originalServerID = data?.server_identifier const [url, setUrl] = React.useState(data?.server_url || '') const [name, setName] = React.useState(data?.name || '') - const [appIcon, setAppIcon] = useState<AppIconSelection>(getIcon(data)) + const [appIcon, setAppIcon] = useState<AppIconSelection>(() => getIcon(data)) const [showAppIconPicker, setShowAppIconPicker] = useState(false) const [serverIdentifier, setServerIdentifier] = React.useState(data?.server_identifier || '') const [timeout, setMcpTimeout] = React.useState(data?.timeout || 30) diff --git a/web/app/components/workflow/nodes/_base/hooks/use-resize-panel.ts b/web/app/components/workflow/nodes/_base/hooks/use-resize-panel.ts index f2259a02cf..336c440d58 100644 --- a/web/app/components/workflow/nodes/_base/hooks/use-resize-panel.ts +++ b/web/app/components/workflow/nodes/_base/hooks/use-resize-panel.ts @@ -33,7 +33,7 @@ export const 
useResizePanel = (params?: UseResizePanelParams) => { const initContainerWidthRef = useRef(0) const initContainerHeightRef = useRef(0) const isResizingRef = useRef(false) - const [prevUserSelectStyle, setPrevUserSelectStyle] = useState(getComputedStyle(document.body).userSelect) + const [prevUserSelectStyle, setPrevUserSelectStyle] = useState(() => getComputedStyle(document.body).userSelect) const handleStartResize = useCallback((e: MouseEvent) => { initXRef.current = e.clientX diff --git a/web/app/components/workflow/nodes/http/hooks/use-key-value-list.ts b/web/app/components/workflow/nodes/http/hooks/use-key-value-list.ts index a61cad646f..44774074dc 100644 --- a/web/app/components/workflow/nodes/http/hooks/use-key-value-list.ts +++ b/web/app/components/workflow/nodes/http/hooks/use-key-value-list.ts @@ -16,7 +16,7 @@ const strToKeyValueList = (value: string) => { } const useKeyValueList = (value: string, onChange: (value: string) => void, noFilter?: boolean) => { - const [list, doSetList] = useState<KeyValue[]>(value ? strToKeyValueList(value) : []) + const [list, doSetList] = useState<KeyValue[]>(() => value ? strToKeyValueList(value) : []) const setList = (l: KeyValue[]) => { doSetList(l.map((item) => { return { diff --git a/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/json-schema-config.tsx b/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/json-schema-config.tsx index b87dc6e245..7c343d320a 100644 --- a/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/json-schema-config.tsx +++ b/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/json-schema-config.tsx @@ -55,7 +55,7 @@ const JsonSchemaConfig: FC<JsonSchemaConfigProps> = ({ const docLink = useDocLink() const [currentTab, setCurrentTab] = useState(SchemaView.VisualEditor) const [jsonSchema, setJsonSchema] = useState(defaultSchema || DEFAULT_SCHEMA) - const [json, setJson] = useState(JSON.stringify(jsonSchema, null, 2)) + const [json, setJson] = useState(() => JSON.stringify(jsonSchema, null, 2)) const [btnWidth, setBtnWidth] = useState(0) const [parseError, setParseError] = useState<Error | null>(null) const [validationError, setValidationError] = useState<string>('') diff --git a/web/app/components/workflow/nodes/question-classifier/components/class-item.tsx b/web/app/components/workflow/nodes/question-classifier/components/class-item.tsx index 478ac925d6..8e6865f557 100644 --- a/web/app/components/workflow/nodes/question-classifier/components/class-item.tsx +++ b/web/app/components/workflow/nodes/question-classifier/components/class-item.tsx @@ -34,7 +34,7 @@ const ClassItem: FC<Props> = ({ filterVar, }) => { const { t } = useTranslation() - const [instanceId, setInstanceId] = useState(uniqueId()) + const [instanceId, setInstanceId] = useState(() => uniqueId()) useEffect(() => { setInstanceId(`${nodeId}-${uniqueId()}`) diff --git a/web/app/components/workflow/variable-inspect/value-content.tsx b/web/app/components/workflow/variable-inspect/value-content.tsx index 6c727e8699..47546a863e 100644 --- a/web/app/components/workflow/variable-inspect/value-content.tsx +++ b/web/app/components/workflow/variable-inspect/value-content.tsx @@ -69,7 +69,7 @@ const ValueContent = ({ const [json, setJson] = useState('') const [parseError, setParseError] = useState<Error | null>(null) const [validationError, setValidationError] = useState<string>('') - const [fileValue, setFileValue] = useState<any>(formatFileValue(currentVar)) + const [fileValue, 
setFileValue] = useState<any>(() => formatFileValue(currentVar)) const { run: debounceValueChange } = useDebounceFn(handleValueChange, { wait: 500 }) diff --git a/web/app/components/workflow/workflow-preview/index.tsx b/web/app/components/workflow/workflow-preview/index.tsx index 2aeb09cd1e..5fd4b9097c 100644 --- a/web/app/components/workflow/workflow-preview/index.tsx +++ b/web/app/components/workflow/workflow-preview/index.tsx @@ -68,8 +68,8 @@ const WorkflowPreview = ({ viewport, className, }: WorkflowPreviewProps) => { - const [nodesData, setNodesData] = useState(initialNodes(nodes, edges)) - const [edgesData, setEdgesData] = useState(initialEdges(edges, nodes)) + const [nodesData, setNodesData] = useState(() => initialNodes(nodes, edges)) + const [edgesData, setEdgesData] = useState(() => initialEdges(edges, nodes)) const onNodesChange = useCallback( (changes: NodeChange[]) => setNodesData(nds => applyNodeChanges(changes, nds)), diff --git a/web/app/signin/invite-settings/page.tsx b/web/app/signin/invite-settings/page.tsx index 036edfc478..cec51a70ef 100644 --- a/web/app/signin/invite-settings/page.tsx +++ b/web/app/signin/invite-settings/page.tsx @@ -30,7 +30,7 @@ export default function InviteSettingsPage() { const { setLocaleOnClient } = useContext(I18n) const [name, setName] = useState('') const [language, setLanguage] = useState(LanguagesSupported[0]) - const [timezone, setTimezone] = useState(Intl.DateTimeFormat().resolvedOptions().timeZone || 'America/Los_Angeles') + const [timezone, setTimezone] = useState(() => Intl.DateTimeFormat().resolvedOptions().timeZone || 'America/Los_Angeles') const checkParams = { url: '/activate/check', From d552680e72406b549437917a2d01f55ebb7efcea Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Mon, 29 Sep 2025 22:33:29 +0900 Subject: [PATCH 091/126] Refactor: Use @ns.route for tags API (#26357) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: -LAN- <laipz8200@outlook.com> --- api/controllers/console/tag/tags.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/api/controllers/console/tag/tags.py b/api/controllers/console/tag/tags.py index da236ee5af..3d29b3ee61 100644 --- a/api/controllers/console/tag/tags.py +++ b/api/controllers/console/tag/tags.py @@ -3,7 +3,7 @@ from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import Forbidden -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, setup_required from fields.tag_fields import dataset_tag_fields from libs.login import login_required @@ -17,6 +17,7 @@ def _validate_name(name): return name +@console_ns.route("/tags") class TagListApi(Resource): @setup_required @login_required @@ -52,6 +53,7 @@ class TagListApi(Resource): return response, 200 +@console_ns.route("/tags/<uuid:tag_id>") class TagUpdateDeleteApi(Resource): @setup_required @login_required @@ -89,6 +91,7 @@ class TagUpdateDeleteApi(Resource): return 204 +@console_ns.route("/tag-bindings/create") class TagBindingCreateApi(Resource): @setup_required @login_required @@ -114,6 +117,7 @@ class TagBindingCreateApi(Resource): return {"result": "success"}, 200 +@console_ns.route("/tag-bindings/remove") class TagBindingDeleteApi(Resource): @setup_required @login_required @@ -133,9 +137,3 @@ class TagBindingDeleteApi(Resource): TagService.delete_tag_binding(args) 
return {"result": "success"}, 200 - - -api.add_resource(TagListApi, "/tags") -api.add_resource(TagUpdateDeleteApi, "/tags/<uuid:tag_id>") -api.add_resource(TagBindingCreateApi, "/tag-bindings/create") -api.add_resource(TagBindingDeleteApi, "/tag-bindings/remove") From bbdcbac5449993dc30574848cb8a6ff026a2da4f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 29 Sep 2025 21:35:26 +0800 Subject: [PATCH 092/126] chore: translate i18n files and update type definitions (#26440) Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> --- web/i18n/de-DE/common.ts | 1 + web/i18n/de-DE/workflow.ts | 3 +++ web/i18n/es-ES/common.ts | 1 + web/i18n/es-ES/workflow.ts | 3 +++ web/i18n/fa-IR/common.ts | 1 + web/i18n/fa-IR/workflow.ts | 3 +++ web/i18n/fr-FR/common.ts | 1 + web/i18n/fr-FR/workflow.ts | 3 +++ web/i18n/hi-IN/common.ts | 1 + web/i18n/hi-IN/workflow.ts | 3 +++ web/i18n/id-ID/common.ts | 1 + web/i18n/id-ID/workflow.ts | 3 +++ web/i18n/it-IT/common.ts | 1 + web/i18n/it-IT/workflow.ts | 3 +++ web/i18n/ja-JP/workflow.ts | 3 +++ web/i18n/ko-KR/common.ts | 1 + web/i18n/ko-KR/workflow.ts | 3 +++ web/i18n/pl-PL/common.ts | 1 + web/i18n/pl-PL/workflow.ts | 3 +++ web/i18n/pt-BR/common.ts | 1 + web/i18n/pt-BR/workflow.ts | 3 +++ web/i18n/ro-RO/common.ts | 1 + web/i18n/ro-RO/workflow.ts | 3 +++ web/i18n/ru-RU/common.ts | 1 + web/i18n/ru-RU/workflow.ts | 3 +++ web/i18n/sl-SI/common.ts | 1 + web/i18n/sl-SI/workflow.ts | 3 +++ web/i18n/th-TH/common.ts | 1 + web/i18n/th-TH/workflow.ts | 3 +++ web/i18n/tr-TR/common.ts | 1 + web/i18n/tr-TR/workflow.ts | 3 +++ web/i18n/uk-UA/common.ts | 1 + web/i18n/uk-UA/workflow.ts | 3 +++ web/i18n/vi-VN/common.ts | 1 + web/i18n/vi-VN/workflow.ts | 3 +++ web/i18n/zh-Hant/common.ts | 1 + web/i18n/zh-Hant/workflow.ts | 3 +++ 37 files changed, 75 insertions(+) diff --git a/web/i18n/de-DE/common.ts b/web/i18n/de-DE/common.ts index 69572af38d..9431fbbf6a 100644 --- a/web/i18n/de-DE/common.ts +++ b/web/i18n/de-DE/common.ts @@ -501,6 +501,7 @@ const translation = { customModelCredentialsDeleteTip: 'Anmeldeinformationen werden verwendet und können nicht gelöscht werden', }, parametersInvalidRemoved: 'Einige Parameter sind ungültig und wurden entfernt.', + installDataSourceProvider: 'Datenquellenanbieter installieren', }, dataSource: { add: 'Eine Datenquelle hinzufügen', diff --git a/web/i18n/de-DE/workflow.ts b/web/i18n/de-DE/workflow.ts index 9c83e5af20..71000897ca 100644 --- a/web/i18n/de-DE/workflow.ts +++ b/web/i18n/de-DE/workflow.ts @@ -944,6 +944,9 @@ const translation = { chunkIsRequired: 'Chunk-Struktur ist erforderlich', chunksInput: 'Stücke', chunksInputTip: 'Die Eingangsvariable des Wissensbasis-Knotens sind Chunks. 
Der Variablentyp ist ein Objekt mit einem spezifischen JSON-Schema, das konsistent mit der ausgewählten Chunk-Struktur sein muss.', + embeddingModelIsRequired: 'Ein Einbettungsmodell ist erforderlich', + chunksVariableIsRequired: 'Die Variable \'Chunks\' ist erforderlich', + rerankingModelIsRequired: 'Ein Reranking-Modell ist erforderlich', }, }, tracing: { diff --git a/web/i18n/es-ES/common.ts b/web/i18n/es-ES/common.ts index 0728491adf..74af4a03b6 100644 --- a/web/i18n/es-ES/common.ts +++ b/web/i18n/es-ES/common.ts @@ -505,6 +505,7 @@ const translation = { editModelCredential: 'Editar credencial de modelo', }, parametersInvalidRemoved: 'Algunos parámetros son inválidos y han sido eliminados', + installDataSourceProvider: 'Instalar proveedores de fuentes de datos', }, dataSource: { add: 'Agregar una fuente de datos', diff --git a/web/i18n/es-ES/workflow.ts b/web/i18n/es-ES/workflow.ts index 1a7c62eab8..822b226e71 100644 --- a/web/i18n/es-ES/workflow.ts +++ b/web/i18n/es-ES/workflow.ts @@ -944,6 +944,9 @@ const translation = { chunkIsRequired: 'Se requiere una estructura de fragmentos', chunksInput: 'Trozo', chunksInputTip: 'La variable de entrada del nodo de la base de conocimientos es Chunks. El tipo de variable es un objeto con un esquema JSON específico que debe ser consistente con la estructura del fragmento seleccionado.', + embeddingModelIsRequired: 'Se requiere un modelo de incrustación', + rerankingModelIsRequired: 'Se requiere un modelo de reordenamiento', + chunksVariableIsRequired: 'La variable Chunks es obligatoria', }, }, tracing: { diff --git a/web/i18n/fa-IR/common.ts b/web/i18n/fa-IR/common.ts index 613c593570..dc6620ce2e 100644 --- a/web/i18n/fa-IR/common.ts +++ b/web/i18n/fa-IR/common.ts @@ -505,6 +505,7 @@ const translation = { customModelCredentialsDeleteTip: 'اعتبار در حال استفاده است و قابل حذف نیست', }, parametersInvalidRemoved: 'برخی پارامترها نامعتبر هستند و حذف شده‌اند', + installDataSourceProvider: 'نصب ارائه‌دهندگان منبع داده', }, dataSource: { add: 'افزودن منبع داده', diff --git a/web/i18n/fa-IR/workflow.ts b/web/i18n/fa-IR/workflow.ts index 6abbcb5c52..4b8a552889 100644 --- a/web/i18n/fa-IR/workflow.ts +++ b/web/i18n/fa-IR/workflow.ts @@ -944,6 +944,9 @@ const translation = { chooseChunkStructure: 'یک ساختار تکه ای را انتخاب کنید', chunksInput: 'تکه‌ها', chunksInputTip: 'متغیر ورودی گره پایگاه دانش چانک‌ها است. 
نوع متغیر یک شیء با یک طرح JSON خاص است که باید با ساختار چانک انتخاب شده سازگار باشد.', + embeddingModelIsRequired: 'مدل جاسازی مورد نیاز است', + chunksVariableIsRequired: 'متغیر Chunks الزامی است', + rerankingModelIsRequired: 'مدل رتبه‌بندی مجدد مورد نیاز است', }, }, tracing: { diff --git a/web/i18n/fr-FR/common.ts b/web/i18n/fr-FR/common.ts index 053318e7b5..f1e8ad007c 100644 --- a/web/i18n/fr-FR/common.ts +++ b/web/i18n/fr-FR/common.ts @@ -502,6 +502,7 @@ const translation = { editModelCredential: 'Modifier les informations d’identification du modèle', }, parametersInvalidRemoved: 'Certains paramètres sont invalides et ont été supprimés.', + installDataSourceProvider: 'Installer les fournisseurs de sources de données', }, dataSource: { add: 'Ajouter une source de données', diff --git a/web/i18n/fr-FR/workflow.ts b/web/i18n/fr-FR/workflow.ts index e68f254273..270cd1b7e6 100644 --- a/web/i18n/fr-FR/workflow.ts +++ b/web/i18n/fr-FR/workflow.ts @@ -944,6 +944,9 @@ const translation = { retrievalSettingIsRequired: 'Le paramètre de récupération est requis', chunksInput: 'Morceaux', chunksInputTip: 'La variable d\'entrée du nœud de la base de connaissances est Chunks. Le type de variable est un objet avec un schéma JSON spécifique qui doit être cohérent avec la structure de morceau sélectionnée.', + rerankingModelIsRequired: 'Un modèle de rerankage est requis', + embeddingModelIsRequired: 'Un modèle d\'intégration est requis', + chunksVariableIsRequired: 'La variable Chunks est requise', }, }, tracing: { diff --git a/web/i18n/hi-IN/common.ts b/web/i18n/hi-IN/common.ts index 6b5c5a260c..d882b00929 100644 --- a/web/i18n/hi-IN/common.ts +++ b/web/i18n/hi-IN/common.ts @@ -521,6 +521,7 @@ const translation = { editModelCredential: 'मॉडल की क्रेडेंशियल संपादित करें', }, parametersInvalidRemoved: 'कुछ पैरामीटर अमान्य हैं और हटा दिए गए हैं', + installDataSourceProvider: 'डेटा स्रोत प्रदाताओं को स्थापित करें', }, dataSource: { add: 'डेटा स्रोत जोड़ें', diff --git a/web/i18n/hi-IN/workflow.ts b/web/i18n/hi-IN/workflow.ts index 9bbfc05f61..60beb5c215 100644 --- a/web/i18n/hi-IN/workflow.ts +++ b/web/i18n/hi-IN/workflow.ts @@ -964,6 +964,9 @@ const translation = { chooseChunkStructure: 'एक चंक संरचना चुनें', chunksInput: 'टुकड़े', chunksInputTip: 'ज्ञान आधार नोड का इनपुट वेरिएबल चंक्स है। वेरिएबल प्रकार एक ऑब्जेक्ट है जिसमें एक विशेष JSON स्कीमा है जो चयनित चंक संरचना के साथ सुसंगत होना चाहिए।', + chunksVariableIsRequired: 'Chunks चर आवश्यक है', + embeddingModelIsRequired: 'एम्बेडिंग मॉडल आवश्यक है', + rerankingModelIsRequired: 'पुनः क्रमांकन मॉडल की आवश्यकता है', }, }, tracing: { diff --git a/web/i18n/id-ID/common.ts b/web/i18n/id-ID/common.ts index 4fb1f2afb3..b224f153f6 100644 --- a/web/i18n/id-ID/common.ts +++ b/web/i18n/id-ID/common.ts @@ -501,6 +501,7 @@ const translation = { callTimes: 'Waktu panggilan', getFreeTokens: 'Dapatkan Token gratis', parametersInvalidRemoved: 'Beberapa parameter tidak valid dan telah dihapus', + installDataSourceProvider: 'Pasang penyedia sumber data', }, dataSource: { notion: { diff --git a/web/i18n/id-ID/workflow.ts b/web/i18n/id-ID/workflow.ts index 7bc9b631dd..4bfbe934f7 100644 --- a/web/i18n/id-ID/workflow.ts +++ b/web/i18n/id-ID/workflow.ts @@ -919,6 +919,9 @@ const translation = { chunkStructure: 'Struktur Potongan', chunksInput: 'Potongan', chunksInputTip: 'Variabel input dari node basis pengetahuan adalah Chunks. 
Tipe variabel adalah objek dengan Skema JSON tertentu yang harus konsisten dengan struktur chunk yang dipilih.', + chunksVariableIsRequired: 'Variabel Chunks diperlukan', + rerankingModelIsRequired: 'Model reranking diperlukan', + embeddingModelIsRequired: 'Model embedding diperlukan', }, }, tracing: {}, diff --git a/web/i18n/it-IT/common.ts b/web/i18n/it-IT/common.ts index 617a7fe495..4ba4f34240 100644 --- a/web/i18n/it-IT/common.ts +++ b/web/i18n/it-IT/common.ts @@ -527,6 +527,7 @@ const translation = { editModelCredential: 'Modificare le credenziali del modello', }, parametersInvalidRemoved: 'Alcuni parametri non sono validi e sono stati rimossi.', + installDataSourceProvider: 'Installa i fornitori di sorgenti dati', }, dataSource: { add: 'Aggiungi una fonte di dati', diff --git a/web/i18n/it-IT/workflow.ts b/web/i18n/it-IT/workflow.ts index 1df67ba454..7322599abf 100644 --- a/web/i18n/it-IT/workflow.ts +++ b/web/i18n/it-IT/workflow.ts @@ -970,6 +970,9 @@ const translation = { retrievalSettingIsRequired: 'È richiesta l\'impostazione di recupero', chunksInputTip: 'La variabile di input del nodo della base di conoscenza è Chunks. Il tipo di variabile è un oggetto con uno specifico schema JSON che deve essere coerente con la struttura del chunk selezionato.', chunksInput: 'Pezzetti', + chunksVariableIsRequired: 'La variabile Chunks è richiesta', + rerankingModelIsRequired: 'È richiesto un modello di riordinamento', + embeddingModelIsRequired: 'È necessario un modello di embedding', }, }, tracing: { diff --git a/web/i18n/ja-JP/workflow.ts b/web/i18n/ja-JP/workflow.ts index 87ca5782a5..e85dcd305e 100644 --- a/web/i18n/ja-JP/workflow.ts +++ b/web/i18n/ja-JP/workflow.ts @@ -956,6 +956,9 @@ const translation = { indexMethodIsRequired: 'インデックスメソッドが必要です', chunksInput: 'チャンク', chunksInputTip: '知識ベースノードの入力変数はチャンクです。変数のタイプは、選択されたチャンク構造と一貫性のある特定のJSONスキーマを持つオブジェクトです。', + chunksVariableIsRequired: 'Chunks変数は必須です', + embeddingModelIsRequired: '埋め込みモデルが必要です', + rerankingModelIsRequired: '再ランキングモデルが必要です', }, }, tracing: { diff --git a/web/i18n/ko-KR/common.ts b/web/i18n/ko-KR/common.ts index 86209e1fab..9d2948c594 100644 --- a/web/i18n/ko-KR/common.ts +++ b/web/i18n/ko-KR/common.ts @@ -497,6 +497,7 @@ const translation = { customModelCredentialsDeleteTip: '자격 증명이 사용 중이며 삭제할 수 없습니다.', }, parametersInvalidRemoved: '일부 매개변수가 유효하지 않아 제거되었습니다.', + installDataSourceProvider: '데이터 소스 공급자 설치', }, dataSource: { add: '데이터 소스 추가하기', diff --git a/web/i18n/ko-KR/workflow.ts b/web/i18n/ko-KR/workflow.ts index 70fd324f82..7e3775c1f8 100644 --- a/web/i18n/ko-KR/workflow.ts +++ b/web/i18n/ko-KR/workflow.ts @@ -992,6 +992,9 @@ const translation = { retrievalSettingIsRequired: '검색 설정이 필요합니다.', chunksInput: '청크', chunksInputTip: '지식 기반 노드의 입력 변수는 Chunks입니다. 
변수 유형은 선택된 청크 구조와 일치해야 하는 특정 JSON 스키마를 가진 객체입니다.', + chunksVariableIsRequired: 'Chunks 변수는 필수입니다', + embeddingModelIsRequired: '임베딩 모델이 필요합니다', + rerankingModelIsRequired: '재순위 모델이 필요합니다', }, }, tracing: { diff --git a/web/i18n/pl-PL/common.ts b/web/i18n/pl-PL/common.ts index 752bbc1ee1..3f820e14e0 100644 --- a/web/i18n/pl-PL/common.ts +++ b/web/i18n/pl-PL/common.ts @@ -514,6 +514,7 @@ const translation = { editModelCredential: 'Edytowanie poświadczeń modelu', }, parametersInvalidRemoved: 'Niektóre parametry są nieprawidłowe i zostały usunięte.', + installDataSourceProvider: 'Zainstaluj dostawców źródeł danych', }, dataSource: { add: 'Dodaj źródło danych', diff --git a/web/i18n/pl-PL/workflow.ts b/web/i18n/pl-PL/workflow.ts index f4d5b98102..87c96c758f 100644 --- a/web/i18n/pl-PL/workflow.ts +++ b/web/i18n/pl-PL/workflow.ts @@ -944,6 +944,9 @@ const translation = { chunkIsRequired: 'Wymagana jest struktura porcji', chunksInput: 'Kawałki', chunksInputTip: 'Zmienna wejściowa węzła bazy wiedzy to Chunks. Typ zmiennej to obiekt z określonym schematem JSON, który musi być zgodny z wybraną strukturą chunk.', + embeddingModelIsRequired: 'Wymagany jest model osadzania', + chunksVariableIsRequired: 'Wymagana jest zmienna Chunks', + rerankingModelIsRequired: 'Wymagany jest model ponownego rankingu', }, }, tracing: { diff --git a/web/i18n/pt-BR/common.ts b/web/i18n/pt-BR/common.ts index aa831aa58f..3f5f353fb6 100644 --- a/web/i18n/pt-BR/common.ts +++ b/web/i18n/pt-BR/common.ts @@ -501,6 +501,7 @@ const translation = { addNewModelCredential: 'Adicionar nova credencial de modelo', }, parametersInvalidRemoved: 'Alguns parâmetros são inválidos e foram removidos', + installDataSourceProvider: 'Instalar provedores de fontes de dados', }, dataSource: { add: 'Adicionar uma fonte de dados', diff --git a/web/i18n/pt-BR/workflow.ts b/web/i18n/pt-BR/workflow.ts index af9ad3ae40..9657ef8e7f 100644 --- a/web/i18n/pt-BR/workflow.ts +++ b/web/i18n/pt-BR/workflow.ts @@ -944,6 +944,9 @@ const translation = { indexMethodIsRequired: 'O método de índice é necessário', chunksInput: 'Pedaços', chunksInputTip: 'A variável de entrada do nó da base de conhecimento é Chunks. O tipo da variável é um objeto com um esquema JSON específico que deve ser consistente com a estrutura de chunk selecionada.', + chunksVariableIsRequired: 'A variável \'chunks\' é obrigatória', + embeddingModelIsRequired: 'Modelo de incorporação é necessário', + rerankingModelIsRequired: 'Um modelo de reclassificação é necessário', }, }, tracing: { diff --git a/web/i18n/ro-RO/common.ts b/web/i18n/ro-RO/common.ts index 9ab998a7ee..2e36e487fb 100644 --- a/web/i18n/ro-RO/common.ts +++ b/web/i18n/ro-RO/common.ts @@ -501,6 +501,7 @@ const translation = { customModelCredentialsDeleteTip: 'Acreditarea este în uz și nu poate fi ștearsă', }, parametersInvalidRemoved: 'Unele parametrii sunt invalizi și au fost eliminați.', + installDataSourceProvider: 'Instalați furnizorii de surse de date', }, dataSource: { add: 'Adăugați o sursă de date', diff --git a/web/i18n/ro-RO/workflow.ts b/web/i18n/ro-RO/workflow.ts index 04f899a460..94d01ec1ba 100644 --- a/web/i18n/ro-RO/workflow.ts +++ b/web/i18n/ro-RO/workflow.ts @@ -944,6 +944,9 @@ const translation = { changeChunkStructure: 'Modificați structura bucății', chunksInput: 'Bucăți', chunksInputTip: 'Variabila de intrare a nodului bazei de cunoștințe este Chunks. 
Tipul variabilei este un obiect cu un Șchema JSON specific care trebuie să fie coerent cu structura de chunk selectată.', + chunksVariableIsRequired: 'Variabila Chunks este obligatorie', + embeddingModelIsRequired: 'Este necesar un model de încorporare', + rerankingModelIsRequired: 'Este necesar un model de reordonare', }, }, tracing: { diff --git a/web/i18n/ru-RU/common.ts b/web/i18n/ru-RU/common.ts index cfbe58140b..8f1fb3a51b 100644 --- a/web/i18n/ru-RU/common.ts +++ b/web/i18n/ru-RU/common.ts @@ -505,6 +505,7 @@ const translation = { customModelCredentialsDeleteTip: 'Учетные данные используются и не могут быть удалены', }, parametersInvalidRemoved: 'Некоторые параметры недействительны и были удалены', + installDataSourceProvider: 'Установить поставщиков источников данных', }, dataSource: { add: 'Добавить источник данных', diff --git a/web/i18n/ru-RU/workflow.ts b/web/i18n/ru-RU/workflow.ts index 531352c54d..1e0ecf1276 100644 --- a/web/i18n/ru-RU/workflow.ts +++ b/web/i18n/ru-RU/workflow.ts @@ -944,6 +944,9 @@ const translation = { retrievalSettingIsRequired: 'Настройка извлечения обязательна', chunksInput: 'Куски', chunksInputTip: 'Входная переменная узла базы знаний - это Чанки. Тип переменной является объектом с определенной схемой JSON, которая должна соответствовать выбранной структуре чанка.', + chunksVariableIsRequired: 'Переменная chunks обязательна', + embeddingModelIsRequired: 'Требуется модель встраивания', + rerankingModelIsRequired: 'Требуется модель перераспределения рангов', }, }, tracing: { diff --git a/web/i18n/sl-SI/common.ts b/web/i18n/sl-SI/common.ts index a6fc939af7..2efd6f8de6 100644 --- a/web/i18n/sl-SI/common.ts +++ b/web/i18n/sl-SI/common.ts @@ -586,6 +586,7 @@ const translation = { customModelCredentials: 'Poverilnice modela po meri', }, parametersInvalidRemoved: 'Nekateri parametri so neveljavni in so bili odstranjeni.', + installDataSourceProvider: 'Namestite ponudnike podatkovnih virov', }, dataSource: { notion: { diff --git a/web/i18n/sl-SI/workflow.ts b/web/i18n/sl-SI/workflow.ts index 2aa192a7ad..80ad3c89fb 100644 --- a/web/i18n/sl-SI/workflow.ts +++ b/web/i18n/sl-SI/workflow.ts @@ -951,6 +951,9 @@ const translation = { aboutRetrieval: 'o metodi iskanja.', chunksInput: 'Kosi', chunksInputTip: 'Vhodna spremenljivka vozlišča podatkovne baze je Chunks. 
Tip spremenljivke je objekt s specifično JSON shemo, ki mora biti skladna z izbrano strukturo kosov.', + chunksVariableIsRequired: 'Spremenljivka Chunks je obvezna', + embeddingModelIsRequired: 'Zahtuje se vgrajevalni model', + rerankingModelIsRequired: 'Potreben je model za ponovno razvrščanje', }, }, tracing: { diff --git a/web/i18n/th-TH/common.ts b/web/i18n/th-TH/common.ts index 10eb409b92..a673629d3e 100644 --- a/web/i18n/th-TH/common.ts +++ b/web/i18n/th-TH/common.ts @@ -500,6 +500,7 @@ const translation = { addNewModelCredential: 'เพิ่มข้อมูลประจําตัวของโมเดลใหม่', }, parametersInvalidRemoved: 'บางพารามิเตอร์ไม่ถูกต้องและถูกนำออก', + installDataSourceProvider: 'ติดตั้งผู้ให้บริการแหล่งข้อมูล', }, dataSource: { add: 'เพิ่มแหล่งข้อมูล', diff --git a/web/i18n/th-TH/workflow.ts b/web/i18n/th-TH/workflow.ts index d735a82ded..e2db4ceb4a 100644 --- a/web/i18n/th-TH/workflow.ts +++ b/web/i18n/th-TH/workflow.ts @@ -944,6 +944,9 @@ const translation = { chunkIsRequired: 'จําเป็นต้องมีโครงสร้างก้อน', chunksInput: 'ชิ้นส่วน', chunksInputTip: 'ตัวแปรนำเข้าของโหนดฐานความรู้คือ Chunks ตัวแปรประเภทเป็นอ็อบเจ็กต์ที่มี JSON Schema เฉพาะซึ่งต้องสอดคล้องกับโครงสร้างชิ้นส่วนที่เลือกไว้.', + chunksVariableIsRequired: 'ตัวแปร Chunks เป็นสิ่งจำเป็น', + embeddingModelIsRequired: 'จำเป็นต้องใช้โมเดลฝัง', + rerankingModelIsRequired: 'จำเป็นต้องมีโมเดลการจัดอันดับใหม่', }, }, tracing: { diff --git a/web/i18n/tr-TR/common.ts b/web/i18n/tr-TR/common.ts index 243de6d38d..b198bd5d63 100644 --- a/web/i18n/tr-TR/common.ts +++ b/web/i18n/tr-TR/common.ts @@ -505,6 +505,7 @@ const translation = { customModelCredentialsDeleteTip: 'Kimlik bilgisi kullanımda ve silinemiyor', }, parametersInvalidRemoved: 'Bazı parametreler geçersizdir ve kaldırılmıştır.', + installDataSourceProvider: 'Veri kaynağı sağlayıcılarını yükle', }, dataSource: { add: 'Bir veri kaynağı ekle', diff --git a/web/i18n/tr-TR/workflow.ts b/web/i18n/tr-TR/workflow.ts index f4964e87fb..68f3d5c0c2 100644 --- a/web/i18n/tr-TR/workflow.ts +++ b/web/i18n/tr-TR/workflow.ts @@ -945,6 +945,9 @@ const translation = { changeChunkStructure: 'Yığın Yapısını Değiştir', chunksInput: 'Parçalar', chunksInputTip: 'Bilgi tabanı düğümünün girdi değişkeni \'Chunks\'tır. Değişkenin tipi, seçilen parça yapısıyla tutarlı olması gereken belirli bir JSON Şemasına sahip bir nesnedir.', + embeddingModelIsRequired: 'Gömme modeli gereklidir', + chunksVariableIsRequired: 'Chunks değişkeni gereklidir', + rerankingModelIsRequired: 'Yeniden sıralama modeli gereklidir', }, }, tracing: { diff --git a/web/i18n/uk-UA/common.ts b/web/i18n/uk-UA/common.ts index 84f7e0cbb7..69af3cc2db 100644 --- a/web/i18n/uk-UA/common.ts +++ b/web/i18n/uk-UA/common.ts @@ -502,6 +502,7 @@ const translation = { customModelCredentialsDeleteTip: 'Облікові дані використовуються і не можуть бути видалені', }, parametersInvalidRemoved: 'Деякі параметри є недійсними і були видалені', + installDataSourceProvider: 'Встановіть постачальників джерел даних', }, dataSource: { add: 'Додати джерело даних', diff --git a/web/i18n/uk-UA/workflow.ts b/web/i18n/uk-UA/workflow.ts index 40004b4ea8..f4e95be60e 100644 --- a/web/i18n/uk-UA/workflow.ts +++ b/web/i18n/uk-UA/workflow.ts @@ -944,6 +944,9 @@ const translation = { retrievalSettingIsRequired: 'Потрібне налаштування для отримання', chunksInput: 'Частини', chunksInputTip: 'Вхідна змінна вузла бази знань - це Частини. 
Тип змінної - об\'єкт з певною JSON-схемою, яка повинна відповідати вибраній структурі частин.', + chunksVariableIsRequired: 'Змінна chunks є обов\'язковою', + embeddingModelIsRequired: 'Потрібна модель вбудовування', + rerankingModelIsRequired: 'Потрібна модель перенавчання', }, }, tracing: { diff --git a/web/i18n/vi-VN/common.ts b/web/i18n/vi-VN/common.ts index 145b79ea24..216a2e2ed2 100644 --- a/web/i18n/vi-VN/common.ts +++ b/web/i18n/vi-VN/common.ts @@ -501,6 +501,7 @@ const translation = { selectModelCredential: 'Chọn thông tin xác thực mô hình', }, parametersInvalidRemoved: 'Một số tham số không hợp lệ và đã được loại bỏ', + installDataSourceProvider: 'Cài đặt các nhà cung cấp nguồn dữ liệu', }, dataSource: { add: 'Thêm nguồn dữ liệu', diff --git a/web/i18n/vi-VN/workflow.ts b/web/i18n/vi-VN/workflow.ts index e85e2e8fc3..3016d79a23 100644 --- a/web/i18n/vi-VN/workflow.ts +++ b/web/i18n/vi-VN/workflow.ts @@ -944,6 +944,9 @@ const translation = { indexMethodIsRequired: 'Phương pháp chỉ mục là bắt buộc', chunksInput: 'Mảnh', chunksInputTip: 'Biến đầu vào của nút cơ sở tri thức là Chunks. Loại biến là một đối tượng với một JSON Schema cụ thể mà phải nhất quán với cấu trúc chunk đã chọn.', + chunksVariableIsRequired: 'Biến Chunks là bắt buộc', + embeddingModelIsRequired: 'Cần có mô hình nhúng', + rerankingModelIsRequired: 'Cần có mô hình sắp xếp lại', }, }, tracing: { diff --git a/web/i18n/zh-Hant/common.ts b/web/i18n/zh-Hant/common.ts index 8923df1553..a5747ba300 100644 --- a/web/i18n/zh-Hant/common.ts +++ b/web/i18n/zh-Hant/common.ts @@ -501,6 +501,7 @@ const translation = { selectModelCredential: '選取模型認證', }, parametersInvalidRemoved: '一些參數無效,已被移除', + installDataSourceProvider: '安裝資料來源提供者', }, dataSource: { add: '新增資料來源', diff --git a/web/i18n/zh-Hant/workflow.ts b/web/i18n/zh-Hant/workflow.ts index ee10c976ed..809051c2be 100644 --- a/web/i18n/zh-Hant/workflow.ts +++ b/web/i18n/zh-Hant/workflow.ts @@ -944,6 +944,9 @@ const translation = { retrievalSettingIsRequired: '需要檢索設定', chunksInput: '區塊', chunksInputTip: '知識庫節點的輸入變數是 Chunks。該變數類型是一個物件,具有特定的 JSON Schema,必須與所選的塊結構一致。', + rerankingModelIsRequired: '需要重新排序模型', + chunksVariableIsRequired: 'Chunks 變數是必需的', + embeddingModelIsRequired: '需要嵌入模型', }, }, tracing: { From f79d8baf63778691d9c6d784305245ce8d6e650e Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Tue, 30 Sep 2025 00:38:59 +0900 Subject: [PATCH 093/126] Fix: Enable Pyright and Fix Typing Errors in Datasets Controller (#26425) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- api/controllers/console/datasets/datasets.py | 18 +++++----- .../console/datasets/datasets_document.py | 15 +++++--- .../console/datasets/datasets_segments.py | 18 ++++++---- api/controllers/console/datasets/external.py | 7 ++-- .../console/datasets/hit_testing_base.py | 6 ++-- api/controllers/console/datasets/metadata.py | 3 +- .../datasets/rag_pipeline/rag_pipeline.py | 8 ++--- .../rag_pipeline/rag_pipeline_datasets.py | 16 ++------- .../rag_pipeline_draft_variable.py | 36 +++---------------- api/pyrightconfig.json | 1 - 10 files changed, 53 insertions(+), 75 deletions(-) diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 2affbd6a42..60eedd2197 100644 --- 
a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -1,4 +1,5 @@ -import flask_restx +from typing import Any, cast + from flask import request from flask_login import current_user from flask_restx import Resource, fields, marshal, marshal_with, reqparse @@ -31,12 +32,13 @@ from fields.dataset_fields import dataset_detail_fields, dataset_query_detail_fi from fields.document_fields import document_status_fields from libs.login import login_required from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile +from models.account import Account from models.dataset import DatasetPermissionEnum from models.provider_ids import ModelProviderID from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService -def _validate_name(name): +def _validate_name(name: str) -> str: if not name or len(name) < 1 or len(name) > 40: raise ValueError("Name must be between 1 to 40 characters.") return name @@ -92,7 +94,7 @@ class DatasetListApi(Resource): for embedding_model in embedding_models: model_names.append(f"{embedding_model.model}:{embedding_model.provider.provider}") - data = marshal(datasets, dataset_detail_fields) + data = cast(list[dict[str, Any]], marshal(datasets, dataset_detail_fields)) for item in data: # convert embedding_model_provider to plugin standard format if item["indexing_technique"] == "high_quality" and item["embedding_model_provider"]: @@ -192,7 +194,7 @@ class DatasetListApi(Resource): name=args["name"], description=args["description"], indexing_technique=args["indexing_technique"], - account=current_user, + account=cast(Account, current_user), permission=DatasetPermissionEnum.ONLY_ME, provider=args["provider"], external_knowledge_api_id=args["external_knowledge_api_id"], @@ -224,7 +226,7 @@ class DatasetApi(Resource): DatasetService.check_dataset_permission(dataset, current_user) except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) - data = marshal(dataset, dataset_detail_fields) + data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields)) if dataset.indexing_technique == "high_quality": if dataset.embedding_model_provider: provider_id = ModelProviderID(dataset.embedding_model_provider) @@ -369,7 +371,7 @@ class DatasetApi(Resource): if dataset is None: raise NotFound("Dataset not found.") - result_data = marshal(dataset, dataset_detail_fields) + result_data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields)) tenant_id = current_user.current_tenant_id if data.get("partial_member_list") and data.get("permission") == "partial_members": @@ -688,7 +690,7 @@ class DatasetApiKeyApi(Resource): ) if current_key_count >= self.max_keys: - flask_restx.abort( + api.abort( 400, message=f"Cannot create more than {self.max_keys} API keys for this resource type.", code="max_keys_exceeded", @@ -733,7 +735,7 @@ class DatasetApiDeleteApi(Resource): ) if key is None: - flask_restx.abort(404, message="API key not found") + api.abort(404, message="API key not found") db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete() db.session.commit() diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 6aaede0fb3..c5fa2061bf 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -55,6 +55,7 @@ from fields.document_fields import ( from libs.datetime_utils import naive_utc_now from libs.login import login_required 
from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile +from models.account import Account from models.dataset import DocumentPipelineExecutionLog from services.dataset_service import DatasetService, DocumentService from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig @@ -418,7 +419,9 @@ class DatasetInitApi(Resource): try: dataset, documents, batch = DocumentService.save_document_without_dataset_id( - tenant_id=current_user.current_tenant_id, knowledge_config=knowledge_config, account=current_user + tenant_id=current_user.current_tenant_id, + knowledge_config=knowledge_config, + account=cast(Account, current_user), ) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -452,7 +455,7 @@ class DocumentIndexingEstimateApi(DocumentResource): raise DocumentAlreadyFinishedError() data_process_rule = document.dataset_process_rule - data_process_rule_dict = data_process_rule.to_dict() + data_process_rule_dict = data_process_rule.to_dict() if data_process_rule else {} response = {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []} @@ -514,7 +517,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): if not documents: return {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}, 200 data_process_rule = documents[0].dataset_process_rule - data_process_rule_dict = data_process_rule.to_dict() + data_process_rule_dict = data_process_rule.to_dict() if data_process_rule else {} extract_settings = [] for document in documents: if document.indexing_status in {"completed", "error"}: @@ -753,7 +756,7 @@ class DocumentApi(DocumentResource): } else: dataset_process_rules = DatasetService.get_process_rules(dataset_id) - document_process_rules = document.dataset_process_rule.to_dict() + document_process_rules = document.dataset_process_rule.to_dict() if document.dataset_process_rule else {} data_source_info = document.data_source_detail_dict response = { "id": document.id, @@ -1073,7 +1076,9 @@ class DocumentRenameApi(DocumentResource): if not current_user.is_dataset_editor: raise Forbidden() dataset = DatasetService.get_dataset(dataset_id) - DatasetService.check_dataset_operator_permission(current_user, dataset) + if not dataset: + raise NotFound("Dataset not found.") + DatasetService.check_dataset_operator_permission(cast(Account, current_user), dataset) parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, nullable=False, location="json") args = parser.parse_args() diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py index ba552821d2..9f2805e2c6 100644 --- a/api/controllers/console/datasets/datasets_segments.py +++ b/api/controllers/console/datasets/datasets_segments.py @@ -392,7 +392,12 @@ class DatasetDocumentSegmentBatchImportApi(Resource): # send batch add segments task redis_client.setnx(indexing_cache_key, "waiting") batch_create_segment_to_index_task.delay( - str(job_id), upload_file_id, dataset_id, document_id, current_user.current_tenant_id, current_user.id + str(job_id), + upload_file_id, + dataset_id, + document_id, + current_user.current_tenant_id, + current_user.id, ) except Exception as e: return {"error": str(e)}, 500 @@ -468,7 +473,8 @@ class ChildChunkAddApi(Resource): parser.add_argument("content", type=str, required=True, nullable=False, location="json") args = parser.parse_args() try: - child_chunk = 
SegmentService.create_child_chunk(args.get("content"), segment, document, dataset) + content = args["content"] + child_chunk = SegmentService.create_child_chunk(content, segment, document, dataset) except ChildChunkIndexingServiceError as e: raise ChildChunkIndexingError(str(e)) return {"data": marshal(child_chunk, child_chunk_fields)}, 200 @@ -557,7 +563,8 @@ class ChildChunkAddApi(Resource): parser.add_argument("chunks", type=list, required=True, nullable=False, location="json") args = parser.parse_args() try: - chunks = [ChildChunkUpdateArgs(**chunk) for chunk in args.get("chunks")] + chunks_data = args["chunks"] + chunks = [ChildChunkUpdateArgs(**chunk) for chunk in chunks_data] child_chunks = SegmentService.update_child_chunks(chunks, segment, document, dataset) except ChildChunkIndexingServiceError as e: raise ChildChunkIndexingError(str(e)) @@ -674,9 +681,8 @@ class ChildChunkUpdateApi(Resource): parser.add_argument("content", type=str, required=True, nullable=False, location="json") args = parser.parse_args() try: - child_chunk = SegmentService.update_child_chunk( - args.get("content"), child_chunk, segment, document, dataset - ) + content = args["content"] + child_chunk = SegmentService.update_child_chunk(content, child_chunk, segment, document, dataset) except ChildChunkIndexingServiceError as e: raise ChildChunkIndexingError(str(e)) return {"data": marshal(child_chunk, child_chunk_fields)}, 200 diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py index e8f5a11b41..adf9f53523 100644 --- a/api/controllers/console/datasets/external.py +++ b/api/controllers/console/datasets/external.py @@ -1,3 +1,5 @@ +from typing import cast + from flask import request from flask_login import current_user from flask_restx import Resource, fields, marshal, reqparse @@ -9,13 +11,14 @@ from controllers.console.datasets.error import DatasetNameDuplicateError from controllers.console.wraps import account_initialization_required, setup_required from fields.dataset_fields import dataset_detail_fields from libs.login import login_required +from models.account import Account from services.dataset_service import DatasetService from services.external_knowledge_service import ExternalDatasetService from services.hit_testing_service import HitTestingService from services.knowledge_service import ExternalDatasetTestService -def _validate_name(name): +def _validate_name(name: str) -> str: if not name or len(name) < 1 or len(name) > 100: raise ValueError("Name must be between 1 to 100 characters.") return name @@ -274,7 +277,7 @@ class ExternalKnowledgeHitTestingApi(Resource): response = HitTestingService.external_retrieve( dataset=dataset, query=args["query"], - account=current_user, + account=cast(Account, current_user), external_retrieval_model=args["external_retrieval_model"], metadata_filtering_conditions=args["metadata_filtering_conditions"], ) diff --git a/api/controllers/console/datasets/hit_testing_base.py b/api/controllers/console/datasets/hit_testing_base.py index cfbfc50873..a68e337135 100644 --- a/api/controllers/console/datasets/hit_testing_base.py +++ b/api/controllers/console/datasets/hit_testing_base.py @@ -1,10 +1,11 @@ import logging +from typing import cast from flask_login import current_user from flask_restx import marshal, reqparse from werkzeug.exceptions import Forbidden, InternalServerError, NotFound -import services.dataset_service +import services from controllers.console.app.error import ( CompletionRequestError, 
ProviderModelCurrentlyNotSupportError, @@ -20,6 +21,7 @@ from core.errors.error import ( ) from core.model_runtime.errors.invoke import InvokeError from fields.hit_testing_fields import hit_testing_record_fields +from models.account import Account from services.dataset_service import DatasetService from services.hit_testing_service import HitTestingService @@ -59,7 +61,7 @@ class DatasetsHitTestingBase: response = HitTestingService.retrieve( dataset=dataset, query=args["query"], - account=current_user, + account=cast(Account, current_user), retrieval_model=args["retrieval_model"], external_retrieval_model=args["external_retrieval_model"], limit=10, diff --git a/api/controllers/console/datasets/metadata.py b/api/controllers/console/datasets/metadata.py index 53dc80eaa5..dc3cd3fce9 100644 --- a/api/controllers/console/datasets/metadata.py +++ b/api/controllers/console/datasets/metadata.py @@ -62,6 +62,7 @@ class DatasetMetadataApi(Resource): parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, nullable=False, location="json") args = parser.parse_args() + name = args["name"] dataset_id_str = str(dataset_id) metadata_id_str = str(metadata_id) @@ -70,7 +71,7 @@ class DatasetMetadataApi(Resource): raise NotFound("Dataset not found.") DatasetService.check_dataset_permission(dataset, current_user) - metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name")) + metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, name) return metadata, 200 @setup_required diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py index 6641911243..3af590afc8 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py @@ -20,13 +20,13 @@ from services.rag_pipeline.rag_pipeline import RagPipelineService logger = logging.getLogger(__name__) -def _validate_name(name): +def _validate_name(name: str) -> str: if not name or len(name) < 1 or len(name) > 40: raise ValueError("Name must be between 1 to 40 characters.") return name -def _validate_description_length(description): +def _validate_description_length(description: str) -> str: if len(description) > 400: raise ValueError("Description cannot exceed 400 characters.") return description @@ -76,7 +76,7 @@ class CustomizedPipelineTemplateApi(Resource): ) parser.add_argument( "description", - type=str, + type=_validate_description_length, nullable=True, required=False, default="", @@ -133,7 +133,7 @@ class PublishCustomizedPipelineTemplateApi(Resource): ) parser.add_argument( "description", - type=str, + type=_validate_description_length, nullable=True, required=False, default="", diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py index c741bfbf82..404aa42073 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py @@ -1,5 +1,5 @@ -from flask_login import current_user # type: ignore # type: ignore -from flask_restx import Resource, marshal, reqparse # type: ignore +from flask_login import current_user +from flask_restx import Resource, marshal, reqparse from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden @@ -20,18 +20,6 @@ from 
services.entities.knowledge_entities.rag_pipeline_entities import IconInfo, from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelineDslService -def _validate_name(name): - if not name or len(name) < 1 or len(name) > 40: - raise ValueError("Name must be between 1 to 40 characters.") - return name - - -def _validate_description_length(description): - if len(description) > 400: - raise ValueError("Description cannot exceed 400 characters.") - return description - - @console_ns.route("/rag/pipeline/dataset") class CreateRagPipelineDatasetApi(Resource): @setup_required diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py index 38f75402a8..bef6bfd13e 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py @@ -1,5 +1,5 @@ import logging -from typing import Any, NoReturn +from typing import NoReturn from flask import Response from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse @@ -11,14 +11,12 @@ from controllers.console.app.error import ( DraftWorkflowNotExist, ) from controllers.console.app.workflow_draft_variable import ( - _WORKFLOW_DRAFT_VARIABLE_FIELDS, - _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS, + _WORKFLOW_DRAFT_VARIABLE_FIELDS, # type: ignore[private-usage] + _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS, # type: ignore[private-usage] ) from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import account_initialization_required, setup_required from controllers.web.error import InvalidArgumentError, NotFoundError -from core.variables.segment_group import SegmentGroup -from core.variables.segments import ArrayFileSegment, FileSegment, Segment from core.variables.types import SegmentType from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID from extensions.ext_database import db @@ -34,32 +32,6 @@ from services.workflow_draft_variable_service import WorkflowDraftVariableList, logger = logging.getLogger(__name__) -def _convert_values_to_json_serializable_object(value: Segment) -> Any: - if isinstance(value, FileSegment): - return value.value.model_dump() - elif isinstance(value, ArrayFileSegment): - return [i.model_dump() for i in value.value] - elif isinstance(value, SegmentGroup): - return [_convert_values_to_json_serializable_object(i) for i in value.value] - else: - return value.value - - -def _serialize_var_value(variable: WorkflowDraftVariable) -> Any: - value = variable.get_value() - # create a copy of the value to avoid affecting the model cache. - value = value.model_copy(deep=True) - # Refresh the url signature before returning it to client. 
-    if isinstance(value, FileSegment):
-        file = value.value
-        file.remote_url = file.generate_url()
-    elif isinstance(value, ArrayFileSegment):
-        files = value.value
-        for file in files:
-            file.remote_url = file.generate_url()
-    return _convert_values_to_json_serializable_object(value)
-
-
 def _create_pagination_parser():
     parser = reqparse.RequestParser()
     parser.add_argument(
@@ -104,7 +76,7 @@ def _api_prerequisite(f):
     @account_initialization_required
     @get_rag_pipeline
     def wrapper(*args, **kwargs):
-        if not isinstance(current_user, Account) or not current_user.is_editor:
+        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
             raise Forbidden()
         return f(*args, **kwargs)
 
diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json
index 9cb1ea9bf1..1e6cd501ad 100644
--- a/api/pyrightconfig.json
+++ b/api/pyrightconfig.json
@@ -6,7 +6,6 @@
     "migrations/",
     "core/rag",
     "extensions",
-    "controllers/console/datasets",
     "core/ops",
     "core/model_runtime",
     "core/workflow/nodes",

From 2619c7553a6f94057e8c2ff9056851a831e77e48 Mon Sep 17 00:00:00 2001
From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
Date: Tue, 30 Sep 2025 10:17:13 +0800
Subject: [PATCH 094/126] minor fix: fix some translations: chunk should use native, and some translation typos (#26469)

Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
---
 web/i18n/fa-IR/workflow.ts | 4 ++--
 web/i18n/hi-IN/workflow.ts | 4 ++--
 web/i18n/sl-SI/workflow.ts | 2 +-
 web/i18n/uk-UA/workflow.ts | 2 +-
 4 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/web/i18n/fa-IR/workflow.ts b/web/i18n/fa-IR/workflow.ts
index 4b8a552889..d91e4498fe 100644
--- a/web/i18n/fa-IR/workflow.ts
+++ b/web/i18n/fa-IR/workflow.ts
@@ -943,9 +943,9 @@ const translation = {
       chunkIsRequired: 'ساختار تکه ای مورد نیاز است',
       chooseChunkStructure: 'یک ساختار تکه ای را انتخاب کنید',
       chunksInput: 'تکه‌ها',
-      chunksInputTip: 'متغیر ورودی گره پایگاه دانش چانک‌ها است. نوع متغیر یک شیء با یک طرح JSON خاص است که باید با ساختار چانک انتخاب شده سازگار باشد.',
+      chunksInputTip: 'متغیر ورودی گره پایگاه دانش تکه‌ها است. 
نوع متغیر یک شیء با یک طرح JSON خاص است که باید با ساختار تکه انتخاب شده سازگار باشد.', embeddingModelIsRequired: 'مدل جاسازی مورد نیاز است', - chunksVariableIsRequired: 'متغیر Chunks الزامی است', + chunksVariableIsRequired: 'متغیر تکه‌ها الزامی است', rerankingModelIsRequired: 'مدل رتبه‌بندی مجدد مورد نیاز است', }, }, diff --git a/web/i18n/hi-IN/workflow.ts b/web/i18n/hi-IN/workflow.ts index 60beb5c215..d94c9f102e 100644 --- a/web/i18n/hi-IN/workflow.ts +++ b/web/i18n/hi-IN/workflow.ts @@ -963,8 +963,8 @@ const translation = { aboutRetrieval: 'पुनर्प्राप्ति विधि के बारे में।', chooseChunkStructure: 'एक चंक संरचना चुनें', chunksInput: 'टुकड़े', - chunksInputTip: 'ज्ञान आधार नोड का इनपुट वेरिएबल चंक्स है। वेरिएबल प्रकार एक ऑब्जेक्ट है जिसमें एक विशेष JSON स्कीमा है जो चयनित चंक संरचना के साथ सुसंगत होना चाहिए।', - chunksVariableIsRequired: 'Chunks चर आवश्यक है', + chunksInputTip: 'ज्ञान आधार नोड का इनपुट वेरिएबल टुकड़े है। वेरिएबल प्रकार एक ऑब्जेक्ट है जिसमें एक विशेष JSON स्कीमा है जो चयनित चंक संरचना के साथ सुसंगत होना चाहिए।', + chunksVariableIsRequired: 'टुकड़े चर आवश्यक है', embeddingModelIsRequired: 'एम्बेडिंग मॉडल आवश्यक है', rerankingModelIsRequired: 'पुनः क्रमांकन मॉडल की आवश्यकता है', }, diff --git a/web/i18n/sl-SI/workflow.ts b/web/i18n/sl-SI/workflow.ts index 80ad3c89fb..baeff90ee6 100644 --- a/web/i18n/sl-SI/workflow.ts +++ b/web/i18n/sl-SI/workflow.ts @@ -952,7 +952,7 @@ const translation = { chunksInput: 'Kosi', chunksInputTip: 'Vhodna spremenljivka vozlišča podatkovne baze je Chunks. Tip spremenljivke je objekt s specifično JSON shemo, ki mora biti skladna z izbrano strukturo kosov.', chunksVariableIsRequired: 'Spremenljivka Chunks je obvezna', - embeddingModelIsRequired: 'Zahtuje se vgrajevalni model', + embeddingModelIsRequired: 'Zahteva se vgrajevalni model', rerankingModelIsRequired: 'Potreben je model za ponovno razvrščanje', }, }, diff --git a/web/i18n/uk-UA/workflow.ts b/web/i18n/uk-UA/workflow.ts index f4e95be60e..56715c5e37 100644 --- a/web/i18n/uk-UA/workflow.ts +++ b/web/i18n/uk-UA/workflow.ts @@ -946,7 +946,7 @@ const translation = { chunksInputTip: 'Вхідна змінна вузла бази знань - це Частини. 
Тип змінної - об\'єкт з певною JSON-схемою, яка повинна відповідати вибраній структурі частин.', chunksVariableIsRequired: 'Змінна chunks є обов\'язковою', embeddingModelIsRequired: 'Потрібна модель вбудовування', - rerankingModelIsRequired: 'Потрібна модель перенавчання', + rerankingModelIsRequired: 'Потрібна модель повторного ранжування', }, }, tracing: { From f7b13486230dc3fb8e2a9216583c9fbab1c8a87e Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Tue, 30 Sep 2025 11:24:05 +0900 Subject: [PATCH 095/126] Fix typing errors in core/model_runtime (#26462) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- api/core/model_runtime/entities/message_entities.py | 10 +++++----- .../__base/tokenizers/gpt2_tokenizer.py | 2 +- api/core/model_runtime/utils/encoders.py | 6 +++--- api/pyrightconfig.json | 3 +-- 4 files changed, 10 insertions(+), 11 deletions(-) diff --git a/api/core/model_runtime/entities/message_entities.py b/api/core/model_runtime/entities/message_entities.py index 9235c881e0..89dae2dbff 100644 --- a/api/core/model_runtime/entities/message_entities.py +++ b/api/core/model_runtime/entities/message_entities.py @@ -74,7 +74,7 @@ class TextPromptMessageContent(PromptMessageContent): Model class for text prompt message content. """ - type: Literal[PromptMessageContentType.TEXT] = PromptMessageContentType.TEXT + type: Literal[PromptMessageContentType.TEXT] = PromptMessageContentType.TEXT # type: ignore data: str @@ -95,11 +95,11 @@ class MultiModalPromptMessageContent(PromptMessageContent): class VideoPromptMessageContent(MultiModalPromptMessageContent): - type: Literal[PromptMessageContentType.VIDEO] = PromptMessageContentType.VIDEO + type: Literal[PromptMessageContentType.VIDEO] = PromptMessageContentType.VIDEO # type: ignore class AudioPromptMessageContent(MultiModalPromptMessageContent): - type: Literal[PromptMessageContentType.AUDIO] = PromptMessageContentType.AUDIO + type: Literal[PromptMessageContentType.AUDIO] = PromptMessageContentType.AUDIO # type: ignore class ImagePromptMessageContent(MultiModalPromptMessageContent): @@ -111,12 +111,12 @@ class ImagePromptMessageContent(MultiModalPromptMessageContent): LOW = auto() HIGH = auto() - type: Literal[PromptMessageContentType.IMAGE] = PromptMessageContentType.IMAGE + type: Literal[PromptMessageContentType.IMAGE] = PromptMessageContentType.IMAGE # type: ignore detail: DETAIL = DETAIL.LOW class DocumentPromptMessageContent(MultiModalPromptMessageContent): - type: Literal[PromptMessageContentType.DOCUMENT] = PromptMessageContentType.DOCUMENT + type: Literal[PromptMessageContentType.DOCUMENT] = PromptMessageContentType.DOCUMENT # type: ignore PromptMessageContentUnionTypes = Annotated[ diff --git a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py index 23d36c03af..3967acf07b 100644 --- a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py +++ b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py @@ -15,7 +15,7 @@ class GPT2Tokenizer: use gpt2 tokenizer to get num tokens """ _tokenizer = GPT2Tokenizer.get_encoder() - tokens = _tokenizer.encode(text) + tokens = _tokenizer.encode(text) # type: ignore return len(tokens) @staticmethod diff 
--git a/api/core/model_runtime/utils/encoders.py b/api/core/model_runtime/utils/encoders.py
index c758eaf49f..c85152463e 100644
--- a/api/core/model_runtime/utils/encoders.py
+++ b/api/core/model_runtime/utils/encoders.py
@@ -196,15 +196,15 @@ def jsonable_encoder(
             return encoder(obj)
 
     try:
-        data = dict(obj)
+        data = dict(obj)  # type: ignore
     except Exception as e:
         errors: list[Exception] = []
         errors.append(e)
         try:
-            data = vars(obj)
+            data = vars(obj)  # type: ignore
         except Exception as e:
             errors.append(e)
-            raise ValueError(errors) from e
+            raise ValueError(str(errors)) from e
     return jsonable_encoder(
         data,
         by_alias=by_alias,
diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json
index 1e6cd501ad..23db443a39 100644
--- a/api/pyrightconfig.json
+++ b/api/pyrightconfig.json
@@ -7,7 +7,6 @@
     "core/rag",
     "extensions",
     "core/ops",
-    "core/model_runtime",
     "core/workflow/nodes",
     "core/app/app_config/easy_ui_based_app/dataset"
   ],
@@ -37,4 +36,4 @@
   "reportAttributeAccessIssue": "hint",
   "pythonVersion": "3.11",
   "pythonPlatform": "All"
-}
+}
\ No newline at end of file

From b4d435120396b3156bac40fd0f44f3b670e0f76c Mon Sep 17 00:00:00 2001
From: goofy <38034027+goofy-z@users.noreply.github.com>
Date: Tue, 30 Sep 2025 10:38:55 +0800
Subject: [PATCH 096/126] fix: single-step runs support user input as structured_output variable values (#26430)

---
 api/core/workflow/workflow_entry.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py
index 49645ff120..3801dfe15d 100644
--- a/api/core/workflow/workflow_entry.py
+++ b/api/core/workflow/workflow_entry.py
@@ -416,4 +416,8 @@ class WorkflowEntry:
 
         # append variable and value to variable pool
         if variable_node_id != ENVIRONMENT_VARIABLE_NODE_ID:
+            # In a single-step run, the input_value is set as the LLM's structured output value within the variable_pool. 
+ if len(variable_key_list) == 2 and variable_key_list[0] == "structured_output": + input_value = {variable_key_list[1]: input_value} + variable_key_list = variable_key_list[0:1] variable_pool.add([variable_node_id] + variable_key_list, input_value) From e1691fddaaa2248e04650db0c9366989b4356b3d Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Tue, 30 Sep 2025 11:39:37 +0900 Subject: [PATCH 097/126] Refactor: Enable type checking for core/ops and fix type errors (#26414) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- .../entities/aliyun_trace_entity.py | 3 ++- api/core/ops/ops_trace_manager.py | 5 +++- api/core/ops/weave_trace/weave_trace.py | 24 ++++++++++++++++--- api/pyrightconfig.json | 1 - 4 files changed, 27 insertions(+), 6 deletions(-) diff --git a/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py b/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py index 0ee71fc23f..20ff2d0875 100644 --- a/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py +++ b/api/core/ops/aliyun_trace/entities/aliyun_trace_entity.py @@ -3,7 +3,8 @@ from dataclasses import dataclass from typing import Any from opentelemetry import trace as trace_api -from opentelemetry.sdk.trace import Event, Status, StatusCode +from opentelemetry.sdk.trace import Event +from opentelemetry.trace import Status, StatusCode from pydantic import BaseModel, Field diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index 0679b27271..e181373bd0 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -155,7 +155,10 @@ class OpsTraceManager: if key in tracing_config: if "*" in tracing_config[key]: # If the key contains '*', retain the original value from the current config - new_config[key] = current_trace_config.get(key, tracing_config[key]) + if current_trace_config: + new_config[key] = current_trace_config.get(key, tracing_config[key]) + else: + new_config[key] = tracing_config[key] else: # Otherwise, encrypt the key new_config[key] = encrypt_token(tenant_id, tracing_config[key]) diff --git a/api/core/ops/weave_trace/weave_trace.py b/api/core/ops/weave_trace/weave_trace.py index 339694cf07..185bdd8179 100644 --- a/api/core/ops/weave_trace/weave_trace.py +++ b/api/core/ops/weave_trace/weave_trace.py @@ -62,7 +62,8 @@ class WeaveDataTrace(BaseTraceInstance): self, ): try: - project_url = f"https://wandb.ai/{self.weave_client._project_id()}" + project_identifier = f"{self.entity}/{self.project_name}" if self.entity else self.project_name + project_url = f"https://wandb.ai/{project_identifier}" return project_url except Exception as e: logger.debug("Weave get run url failed: %s", str(e)) @@ -424,7 +425,23 @@ class WeaveDataTrace(BaseTraceInstance): raise ValueError(f"Weave API check failed: {str(e)}") def start_call(self, run_data: WeaveTraceModel, parent_run_id: str | None = None): - call = self.weave_client.create_call(op=run_data.op, inputs=run_data.inputs, attributes=run_data.attributes) + inputs = run_data.inputs + if inputs is None: + inputs = {} + elif not isinstance(inputs, dict): + inputs = {"inputs": str(inputs)} + + attributes = run_data.attributes + if attributes is None: + attributes = {} + elif not isinstance(attributes, dict): + attributes = {"attributes": str(attributes)} + + call = self.weave_client.create_call( + op=run_data.op, + inputs=inputs, + 
attributes=attributes, + ) self.calls[run_data.id] = call if parent_run_id: self.calls[run_data.id].parent_id = parent_run_id @@ -432,6 +449,7 @@ class WeaveDataTrace(BaseTraceInstance): def finish_call(self, run_data: WeaveTraceModel): call = self.calls.get(run_data.id) if call: - self.weave_client.finish_call(call=call, output=run_data.outputs, exception=run_data.exception) + exception = Exception(run_data.exception) if run_data.exception else None + self.weave_client.finish_call(call=call, output=run_data.outputs, exception=exception) else: raise ValueError(f"Call with id {run_data.id} not found") diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json index 23db443a39..caa194c906 100644 --- a/api/pyrightconfig.json +++ b/api/pyrightconfig.json @@ -6,7 +6,6 @@ "migrations/", "core/rag", "extensions", - "core/ops", "core/workflow/nodes", "core/app/app_config/easy_ui_based_app/dataset" ], From 591c463e4b1ef16b2ec230a02f35b6fc9c40826e Mon Sep 17 00:00:00 2001 From: Blackoutta <37723456+Blackoutta@users.noreply.github.com> Date: Tue, 30 Sep 2025 10:41:42 +0800 Subject: [PATCH 098/126] improve: Explicitly delete task Redis key on completion in AppQueueManager (#26406) Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/core/app/apps/base_app_queue_manager.py | 23 +++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/api/core/app/apps/base_app_queue_manager.py b/api/core/app/apps/base_app_queue_manager.py index fdba952eeb..4b246a53d3 100644 --- a/api/core/app/apps/base_app_queue_manager.py +++ b/api/core/app/apps/base_app_queue_manager.py @@ -1,9 +1,11 @@ +import logging import queue import time from abc import abstractmethod from enum import IntEnum, auto from typing import Any +from redis.exceptions import RedisError from sqlalchemy.orm import DeclarativeMeta from configs import dify_config @@ -18,6 +20,8 @@ from core.app.entities.queue_entities import ( ) from extensions.ext_redis import redis_client +logger = logging.getLogger(__name__) + class PublishFrom(IntEnum): APPLICATION_MANAGER = auto() @@ -35,9 +39,8 @@ class AppQueueManager: self.invoke_from = invoke_from # Public accessor for invoke_from user_prefix = "account" if self._invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} else "end-user" - redis_client.setex( - AppQueueManager._generate_task_belong_cache_key(self._task_id), 1800, f"{user_prefix}-{self._user_id}" - ) + self._task_belong_cache_key = AppQueueManager._generate_task_belong_cache_key(self._task_id) + redis_client.setex(self._task_belong_cache_key, 1800, f"{user_prefix}-{self._user_id}") q: queue.Queue[WorkflowQueueMessage | MessageQueueMessage | None] = queue.Queue() @@ -79,9 +82,21 @@ class AppQueueManager: Stop listen to queue :return: """ + self._clear_task_belong_cache() self._q.put(None) - def publish_error(self, e, pub_from: PublishFrom): + def _clear_task_belong_cache(self) -> None: + """ + Remove the task belong cache key once listening is finished. 
+ """ + try: + redis_client.delete(self._task_belong_cache_key) + except RedisError: + logger.exception( + "Failed to clear task belong cache for task %s (key: %s)", self._task_id, self._task_belong_cache_key + ) + + def publish_error(self, e, pub_from: PublishFrom) -> None: """ Publish error :param e: error From 97c924fe297c22ea440477f9b2c61ec1d8581f1a Mon Sep 17 00:00:00 2001 From: lyzno1 <92089059+lyzno1@users.noreply.github.com> Date: Tue, 30 Sep 2025 10:48:06 +0800 Subject: [PATCH 099/126] chore: bump pnpm version (#26010) Co-authored-by: 17hz <0x149527@gmail.com> --- web/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/package.json b/web/package.json index 36eb6d37f1..75f2200e50 100644 --- a/web/package.json +++ b/web/package.json @@ -2,7 +2,7 @@ "name": "dify-web", "version": "1.9.1", "private": true, - "packageManager": "pnpm@10.16.0", + "packageManager": "pnpm@10.17.1", "engines": { "node": ">=v22.11.0" }, From aa3129c2a972299f3a6f6e055bd172a10770ff3b Mon Sep 17 00:00:00 2001 From: casio12r <32792244+casio12r@users.noreply.github.com> Date: Tue, 30 Sep 2025 10:50:43 +0800 Subject: [PATCH 100/126] Fix a typo in prompt (#25583) Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> --- api/core/workflow/nodes/parameter_extractor/prompts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/core/workflow/nodes/parameter_extractor/prompts.py b/api/core/workflow/nodes/parameter_extractor/prompts.py index ab7ddcc32a..b74be8f206 100644 --- a/api/core/workflow/nodes/parameter_extractor/prompts.py +++ b/api/core/workflow/nodes/parameter_extractor/prompts.py @@ -179,6 +179,6 @@ CHAT_EXAMPLE = [ "required": ["food"], }, }, - "assistant": {"text": "I need to output a valid JSON object.", "json": {"result": "apple pie"}}, + "assistant": {"text": "I need to output a valid JSON object.", "json": {"food": "apple pie"}}, }, ] From 8d803a26eb7ca9f9a5c04504f7b75b594b9560ab Mon Sep 17 00:00:00 2001 From: kenwoodjw <blackxin55+@gmail.com> Date: Tue, 30 Sep 2025 10:53:55 +0800 Subject: [PATCH 101/126] fix: duplicate chunks (#26360) Signed-off-by: kenwoodjw <blackxin55+@gmail.com> --- api/core/rag/datasource/retrieval_service.py | 36 ++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py index 429744c0de..63a1d911ca 100644 --- a/api/core/rag/datasource/retrieval_service.py +++ b/api/core/rag/datasource/retrieval_service.py @@ -106,7 +106,9 @@ class RetrievalService: if exceptions: raise ValueError(";\n".join(exceptions)) + # Deduplicate documents for hybrid search to avoid duplicate chunks if retrieval_method == RetrievalMethod.HYBRID_SEARCH.value: + all_documents = cls._deduplicate_documents(all_documents) data_post_processor = DataPostProcessor( str(dataset.tenant_id), reranking_mode, reranking_model, weights, False ) @@ -143,6 +145,40 @@ class RetrievalService: ) return all_documents + @classmethod + def _deduplicate_documents(cls, documents: list[Document]) -> list[Document]: + """Deduplicate documents based on doc_id to avoid duplicate chunks in hybrid search.""" + if not documents: + return documents + + unique_documents = [] + seen_doc_ids = set() + + for document in documents: + # For dify provider documents, use doc_id for deduplication + if document.provider == "dify" and document.metadata is not None and "doc_id" in document.metadata: + doc_id = document.metadata["doc_id"] + if doc_id not in seen_doc_ids: + 
seen_doc_ids.add(doc_id)
+                        unique_documents.append(document)
+                    # If duplicate, keep the one with higher score
+                    elif "score" in document.metadata:
+                        # Find existing document with same doc_id and compare scores
+                        for i, existing_doc in enumerate(unique_documents):
+                            if (
+                                existing_doc.metadata
+                                and existing_doc.metadata.get("doc_id") == doc_id
+                                and existing_doc.metadata.get("score", 0) < document.metadata.get("score", 0)
+                            ):
+                                unique_documents[i] = document
+                                break
+            else:
+                # For non-dify documents, use content-based deduplication
+                if document not in unique_documents:
+                    unique_documents.append(document)
+
+        return unique_documents
+
     @classmethod
     def _get_dataset(cls, dataset_id: str) -> Dataset | None:
         with Session(db.engine) as session:

From 86c3c58e64b2e9fc3dd45e08a82ff82467716745 Mon Sep 17 00:00:00 2001
From: goofy <38034027+goofy-z@users.noreply.github.com>
Date: Tue, 30 Sep 2025 10:58:35 +0800
Subject: [PATCH 102/126] support returning structured output when using LLM API non-streaming invocation (#26451)

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
---
 api/core/workflow/node_events/node.py |  1 +
 api/core/workflow/nodes/llm/node.py   | 12 +++++++++++-
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/api/core/workflow/node_events/node.py b/api/core/workflow/node_events/node.py
index c1aeb9fe27..93dfefb679 100644
--- a/api/core/workflow/node_events/node.py
+++ b/api/core/workflow/node_events/node.py
@@ -20,6 +20,7 @@ class ModelInvokeCompletedEvent(NodeEventBase):
     usage: LLMUsage
     finish_reason: str | None = None
     reasoning_content: str | None = None
+    structured_output: dict | None = None
 
 
 class RunRetryEvent(NodeEventBase):
diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py
index 7767440be6..36183bf8db 100644
--- a/api/core/workflow/nodes/llm/node.py
+++ b/api/core/workflow/nodes/llm/node.py
@@ -23,6 +23,7 @@ from core.model_runtime.entities.llm_entities import (
     LLMResult,
     LLMResultChunk,
     LLMResultChunkWithStructuredOutput,
+    LLMResultWithStructuredOutput,
     LLMStructuredOutput,
     LLMUsage,
 )
@@ -278,6 +279,13 @@ class LLMNode(Node):
             # Extract clean text from <think> tags
             clean_text, _ = LLMNode._split_reasoning(result_text, self._node_data.reasoning_format)
 
+            # Process structured output if available from the event. 
+ structured_output = ( + LLMStructuredOutput(structured_output=event.structured_output) + if event.structured_output + else None + ) + # deduct quota llm_utils.deduct_llm_quota(tenant_id=self.tenant_id, model_instance=model_instance, usage=usage) break @@ -1048,7 +1056,7 @@ class LLMNode(Node): @staticmethod def handle_blocking_result( *, - invoke_result: LLMResult, + invoke_result: LLMResult | LLMResultWithStructuredOutput, saver: LLMFileSaver, file_outputs: list["File"], reasoning_format: Literal["separated", "tagged"] = "tagged", @@ -1079,6 +1087,8 @@ class LLMNode(Node): finish_reason=None, # Reasoning content for workflow variables and downstream nodes reasoning_content=reasoning_content, + # Pass structured output if enabled + structured_output=getattr(invoke_result, "structured_output", None), ) @staticmethod From 578b1b45eaa1664cdb490040182b0fd284fd5311 Mon Sep 17 00:00:00 2001 From: zlyszx <74173496+zlyszx@users.noreply.github.com> Date: Tue, 30 Sep 2025 11:00:10 +0800 Subject: [PATCH 103/126] fix: duplicate data in datasets pagination list (#25783) --- api/services/dataset_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index c9dd78ddd1..87861ada87 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -93,7 +93,7 @@ logger = logging.getLogger(__name__) class DatasetService: @staticmethod def get_datasets(page, per_page, tenant_id=None, user=None, search=None, tag_ids=None, include_all=False): - query = select(Dataset).where(Dataset.tenant_id == tenant_id).order_by(Dataset.created_at.desc()) + query = select(Dataset).where(Dataset.tenant_id == tenant_id).order_by(Dataset.created_at.desc(), Dataset.id) if user: # get permitted dataset ids From 809f48f733b527b5602bb1be6b410c238acabe4e Mon Sep 17 00:00:00 2001 From: quicksand <quicksandzn@gmail.com> Date: Tue, 30 Sep 2025 14:24:40 +0800 Subject: [PATCH 104/126] fix: document is not bound to a session (#26480) --- .../knowledge_index/knowledge_index_node.py | 25 +++++++++++-------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py b/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py index 4b6bad1aa3..8d685fa82e 100644 --- a/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py +++ b/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py @@ -136,6 +136,11 @@ class KnowledgeIndexNode(Node): document = db.session.query(Document).filter_by(id=document_id.value).first() if not document: raise KnowledgeIndexNodeError(f"Document {document_id.value} not found.") + doc_id_value = document.id + ds_id_value = dataset.id + dataset_name_value = dataset.name + document_name_value = document.name + created_at_value = document.created_at # chunk nodes by chunk size indexing_start_at = time.perf_counter() index_processor = IndexProcessorFactory(dataset.chunk_structure).init_index_processor() @@ -161,16 +166,16 @@ class KnowledgeIndexNode(Node): document.word_count = ( db.session.query(func.sum(DocumentSegment.word_count)) .where( - DocumentSegment.document_id == document.id, - DocumentSegment.dataset_id == dataset.id, + DocumentSegment.document_id == doc_id_value, + DocumentSegment.dataset_id == ds_id_value, ) .scalar() ) db.session.add(document) # update document segment status db.session.query(DocumentSegment).where( - DocumentSegment.document_id == document.id, - DocumentSegment.dataset_id == dataset.id, + 
DocumentSegment.document_id == doc_id_value, + DocumentSegment.dataset_id == ds_id_value, ).update( { DocumentSegment.status: "completed", @@ -182,13 +187,13 @@ class KnowledgeIndexNode(Node): db.session.commit() return { - "dataset_id": dataset.id, - "dataset_name": dataset.name, + "dataset_id": ds_id_value, + "dataset_name": dataset_name_value, "batch": batch.value, - "document_id": document.id, - "document_name": document.name, - "created_at": document.created_at.timestamp(), - "display_status": document.indexing_status, + "document_id": doc_id_value, + "document_name": document_name_value, + "created_at": created_at_value.timestamp(), + "display_status": "completed", } def _get_preview_output(self, chunk_structure: str, chunks: Any) -> Mapping[str, Any]: From 633e68a2f787956d7a8437e7f7f19b3589ba410c Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Tue, 30 Sep 2025 15:39:34 +0900 Subject: [PATCH 105/126] feat(web): remove dead dependencies (#26484) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> --- web/package.json | 30 +- web/pnpm-lock.yaml | 1654 ++++++++++++++------------------------------ 2 files changed, 539 insertions(+), 1145 deletions(-) diff --git a/web/package.json b/web/package.json index 75f2200e50..2107aae6bc 100644 --- a/web/package.json +++ b/web/package.json @@ -39,13 +39,12 @@ "storybook": "storybook dev -p 6006", "build-storybook": "storybook build", "preinstall": "npx only-allow pnpm", - "analyze": "ANALYZE=true pnpm build" + "analyze": "ANALYZE=true pnpm build", + "knip": "knip" }, "dependencies": { - "@babel/runtime": "^7.22.3", "@dagrejs/dagre": "^1.1.4", "@emoji-mart/data": "^1.2.1", - "@eslint/compat": "^1.2.4", "@floating-ui/react": "^0.26.25", "@formatjs/intl-localematcher": "^0.5.6", "@headlessui/react": "2.2.1", @@ -63,7 +62,6 @@ "@octokit/request-error": "^6.1.5", "@remixicon/react": "^4.5.0", "@sentry/react": "^8.54.0", - "@sentry/utils": "^8.54.0", "@svgdotjs/svg.js": "^3.2.4", "@tailwindcss/typography": "^0.5.15", "@tanstack/react-form": "^1.3.3", @@ -75,7 +73,6 @@ "classnames": "^2.5.1", "cmdk": "^1.1.1", "copy-to-clipboard": "^3.3.3", - "crypto-js": "^4.2.0", "dayjs": "^1.11.13", "decimal.js": "^10.4.3", "dompurify": "^3.2.4", @@ -91,7 +88,6 @@ "js-audio-recorder": "^1.0.7", "js-cookie": "^3.0.5", "jsonschema": "^1.5.0", - "jwt-decode": "^4.0.0", "katex": "^0.16.21", "ky": "^1.7.2", "lamejs": "^1.2.1", @@ -112,12 +108,9 @@ "react-18-input-autosize": "^3.0.0", "react-dom": "19.1.1", "react-easy-crop": "^5.1.0", - "react-error-boundary": "^4.1.2", - "react-headless-pagination": "^1.1.6", "react-hook-form": "^7.53.1", "react-hotkeys-hook": "^4.6.1", "react-i18next": "^15.1.0", - "react-infinite-scroll-component": "^6.1.0", "react-markdown": "^9.0.1", "react-multi-email": "^1.0.25", "react-papaparse": "^4.4.0", @@ -126,11 +119,8 @@ "react-sortablejs": "^6.1.4", "react-syntax-highlighter": "^15.6.1", "react-textarea-autosize": "^8.5.8", - "react-tooltip": "5.8.3", "react-window": "^1.8.10", - "react-window-infinite-loader": "^1.0.9", "reactflow": "^11.11.3", - "recordrtc": "^5.6.2", "rehype-katex": "^7.0.1", "rehype-raw": "^7.0.0", "remark-breaks": "^4.0.0", @@ -138,9 +128,7 @@ "remark-math": "^6.0.0", "scheduler": "^0.26.0", "semver": "^7.6.3", - "server-only": "^0.0.1", "sharp": "^0.33.2", - "shave": "^5.0.4", "sortablejs": "^1.15.0", "swr": "^2.3.0", "tailwind-merge": "^2.5.4", @@ -153,13 +141,8 @@ }, "devDependencies": { "@antfu/eslint-config": "^5.0.0", - "@babel/core": 
"^7.28.3", - "@babel/preset-env": "^7.28.3", "@chromatic-com/storybook": "^3.1.0", "@eslint-react/eslint-plugin": "^1.15.0", - "@eslint/eslintrc": "^3.1.0", - "@eslint/js": "^9.36.0", - "@faker-js/faker": "^9.0.3", "@happy-dom/jest-environment": "^17.4.4", "@mdx-js/loader": "^3.1.0", "@mdx-js/react": "^3.1.0", @@ -172,14 +155,12 @@ "@storybook/addon-links": "8.5.0", "@storybook/addon-onboarding": "8.5.0", "@storybook/addon-themes": "8.5.0", - "@storybook/blocks": "8.5.0", "@storybook/nextjs": "8.5.0", "@storybook/react": "8.5.0", "@storybook/test": "8.5.0", "@testing-library/dom": "^10.4.0", "@testing-library/jest-dom": "^6.8.0", "@testing-library/react": "^16.0.1", - "@types/crypto-js": "^4.2.2", "@types/dagre": "^0.7.52", "@types/jest": "^29.5.13", "@types/js-cookie": "^3.0.6", @@ -192,18 +173,14 @@ "@types/react-slider": "^1.3.6", "@types/react-syntax-highlighter": "^15.5.13", "@types/react-window": "^1.8.8", - "@types/react-window-infinite-loader": "^1.0.9", - "@types/recordrtc": "^5.6.14", "@types/semver": "^7.5.8", "@types/sortablejs": "^1.15.1", "@types/uuid": "^10.0.0", "autoprefixer": "^10.4.20", - "babel-loader": "^10.0.0", "bing-translate-api": "^4.0.2", "code-inspector-plugin": "1.2.9", "cross-env": "^7.0.3", "eslint": "^9.35.0", - "eslint-config-next": "15.5.0", "eslint-plugin-oxlint": "^1.6.0", "eslint-plugin-react-hooks": "^5.1.0", "eslint-plugin-react-refresh": "^0.4.19", @@ -213,6 +190,7 @@ "globals": "^15.11.0", "husky": "^9.1.6", "jest": "^29.7.0", + "knip": "^5.64.1", "lint-staged": "^15.2.10", "lodash": "^4.17.21", "magicast": "^0.3.4", @@ -220,9 +198,7 @@ "sass": "^1.92.1", "storybook": "8.5.0", "tailwindcss": "^3.4.14", - "ts-node": "^10.9.2", "typescript": "^5.8.3", - "typescript-eslint": "^8.38.0", "uglify-js": "^3.19.3" }, "resolutions": { diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index 764202490e..9112473adf 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -49,18 +49,12 @@ importers: .: dependencies: - '@babel/runtime': - specifier: ^7.22.3 - version: 7.27.6 '@dagrejs/dagre': specifier: ^1.1.4 version: 1.1.5 '@emoji-mart/data': specifier: ^1.2.1 version: 1.2.1 - '@eslint/compat': - specifier: ^1.2.4 - version: 1.3.1(eslint@9.35.0(jiti@1.21.7)) '@floating-ui/react': specifier: ^0.26.25 version: 0.26.28(react-dom@19.1.1(react@19.1.1))(react@19.1.1) @@ -112,9 +106,6 @@ importers: '@sentry/react': specifier: ^8.54.0 version: 8.55.0(react@19.1.1) - '@sentry/utils': - specifier: ^8.54.0 - version: 8.55.0 '@svgdotjs/svg.js': specifier: ^3.2.4 version: 3.2.4 @@ -148,9 +139,6 @@ importers: copy-to-clipboard: specifier: ^3.3.3 version: 3.3.3 - crypto-js: - specifier: ^4.2.0 - version: 4.2.0 dayjs: specifier: ^1.11.13 version: 1.11.13 @@ -196,9 +184,6 @@ importers: jsonschema: specifier: ^1.5.0 version: 1.5.0 - jwt-decode: - specifier: ^4.0.0 - version: 4.0.0 katex: specifier: ^0.16.21 version: 0.16.22 @@ -259,12 +244,6 @@ importers: react-easy-crop: specifier: ^5.1.0 version: 5.5.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react-error-boundary: - specifier: ^4.1.2 - version: 4.1.2(react@19.1.1) - react-headless-pagination: - specifier: ^1.1.6 - version: 1.1.6(react@19.1.1) react-hook-form: specifier: ^7.53.1 version: 7.60.0(react@19.1.1) @@ -274,9 +253,6 @@ importers: react-i18next: specifier: ^15.1.0 version: 15.6.0(i18next@23.16.8)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(typescript@5.8.3) - react-infinite-scroll-component: - specifier: ^6.1.0 - version: 6.1.0(react@19.1.1) react-markdown: specifier: ^9.0.1 version: 
9.1.0(@types/react@19.1.11)(react@19.1.1) @@ -301,21 +277,12 @@ importers: react-textarea-autosize: specifier: ^8.5.8 version: 8.5.9(@types/react@19.1.11)(react@19.1.1) - react-tooltip: - specifier: 5.8.3 - version: 5.8.3(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react-window: specifier: ^1.8.10 version: 1.8.11(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react-window-infinite-loader: - specifier: ^1.0.9 - version: 1.0.10(react-dom@19.1.1(react@19.1.1))(react@19.1.1) reactflow: specifier: ^11.11.3 version: 11.11.4(@types/react@19.1.11)(immer@9.0.21)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - recordrtc: - specifier: ^5.6.2 - version: 5.6.2 rehype-katex: specifier: ^7.0.1 version: 7.0.1 @@ -337,15 +304,9 @@ importers: semver: specifier: ^7.6.3 version: 7.7.2 - server-only: - specifier: ^0.0.1 - version: 0.0.1 sharp: specifier: ^0.33.2 version: 0.33.5 - shave: - specifier: ^5.0.4 - version: 5.0.4 sortablejs: specifier: ^1.15.0 version: 1.15.6 @@ -376,28 +337,13 @@ importers: devDependencies: '@antfu/eslint-config': specifier: ^5.0.0 - version: 5.0.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.35.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@next/eslint-plugin-next@15.5.0)(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.35.0(jiti@1.21.7)))(eslint-plugin-react-refresh@0.4.20(eslint@9.35.0(jiti@1.21.7)))(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@babel/core': - specifier: ^7.28.3 - version: 7.28.3 - '@babel/preset-env': - specifier: ^7.28.3 - version: 7.28.3(@babel/core@7.28.3) + version: 5.0.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@next/eslint-plugin-next@15.5.0)(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.35.0(jiti@2.6.0)))(eslint-plugin-react-refresh@0.4.20(eslint@9.35.0(jiti@2.6.0)))(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@chromatic-com/storybook': specifier: ^3.1.0 version: 3.2.7(react@19.1.1)(storybook@8.5.0) '@eslint-react/eslint-plugin': specifier: ^1.15.0 - version: 1.52.3(eslint@9.35.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3) - '@eslint/eslintrc': - specifier: ^3.1.0 - version: 3.3.1 - '@eslint/js': - specifier: ^9.36.0 - version: 9.36.0 - '@faker-js/faker': - specifier: ^9.0.3 - version: 9.9.0 + version: 1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3) '@happy-dom/jest-environment': specifier: ^17.4.4 version: 17.6.3 @@ -434,9 +380,6 @@ importers: '@storybook/addon-themes': specifier: 8.5.0 version: 8.5.0(storybook@8.5.0) - '@storybook/blocks': - specifier: 8.5.0 - version: 8.5.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@8.5.0) '@storybook/nextjs': specifier: 8.5.0 version: 8.5.0(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1)(storybook@8.5.0)(type-fest@2.19.0)(typescript@5.8.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) @@ -455,9 +398,6 @@ importers: '@testing-library/react': specifier: ^16.0.1 version: 16.3.0(@testing-library/dom@10.4.0)(@types/react-dom@19.1.7(@types/react@19.1.11))(@types/react@19.1.11)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@types/crypto-js': - specifier: ^4.2.2 - version: 4.2.2 '@types/dagre': specifier: ^0.7.52 version: 0.7.53 @@ -494,12 +434,6 @@ importers: '@types/react-window': specifier: ^1.8.8 version: 1.8.8 - 
'@types/react-window-infinite-loader': - specifier: ^1.0.9 - version: 1.0.9 - '@types/recordrtc': - specifier: ^5.6.14 - version: 5.6.14 '@types/semver': specifier: ^7.5.8 version: 7.7.0 @@ -512,9 +446,6 @@ importers: autoprefixer: specifier: ^10.4.20 version: 10.4.21(postcss@8.5.6) - babel-loader: - specifier: ^10.0.0 - version: 10.0.0(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) bing-translate-api: specifier: ^4.0.2 version: 4.1.0 @@ -526,25 +457,22 @@ importers: version: 7.0.3 eslint: specifier: ^9.35.0 - version: 9.35.0(jiti@1.21.7) - eslint-config-next: - specifier: 15.5.0 - version: 15.5.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + version: 9.35.0(jiti@2.6.0) eslint-plugin-oxlint: specifier: ^1.6.0 version: 1.6.0 eslint-plugin-react-hooks: specifier: ^5.1.0 - version: 5.2.0(eslint@9.35.0(jiti@1.21.7)) + version: 5.2.0(eslint@9.35.0(jiti@2.6.0)) eslint-plugin-react-refresh: specifier: ^0.4.19 - version: 0.4.20(eslint@9.35.0(jiti@1.21.7)) + version: 0.4.20(eslint@9.35.0(jiti@2.6.0)) eslint-plugin-sonarjs: specifier: ^3.0.2 - version: 3.0.4(eslint@9.35.0(jiti@1.21.7)) + version: 3.0.4(eslint@9.35.0(jiti@2.6.0)) eslint-plugin-storybook: specifier: ^9.0.7 - version: 9.0.7(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + version: 9.0.7(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) eslint-plugin-tailwindcss: specifier: ^3.18.0 version: 3.18.2(tailwindcss@3.4.17(ts-node@10.9.2(@types/node@18.15.0)(typescript@5.8.3))) @@ -557,6 +485,9 @@ importers: jest: specifier: ^29.7.0 version: 29.7.0(@types/node@18.15.0)(ts-node@10.9.2(@types/node@18.15.0)(typescript@5.8.3)) + knip: + specifier: ^5.64.1 + version: 5.64.1(@types/node@18.15.0)(typescript@5.8.3) lint-staged: specifier: ^15.2.10 version: 15.5.2 @@ -578,15 +509,9 @@ importers: tailwindcss: specifier: ^3.4.14 version: 3.4.17(ts-node@10.9.2(@types/node@18.15.0)(typescript@5.8.3)) - ts-node: - specifier: ^10.9.2 - version: 10.9.2(@types/node@18.15.0)(typescript@5.8.3) typescript: specifier: ^5.8.3 version: 5.8.3 - typescript-eslint: - specifier: ^8.38.0 - version: 8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) uglify-js: specifier: ^3.19.3 version: 3.19.3 @@ -1405,14 +1330,17 @@ packages: resolution: {integrity: sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==} engines: {node: '>=10.0.0'} - '@emnapi/core@1.4.4': - resolution: {integrity: sha512-A9CnAbC6ARNMKcIcrQwq6HeHCjpcBZ5wSx4U01WXCqEKlrzB9F9315WDNHkrs2xbx7YjjSxbUYxuN6EQzpcY2g==} + '@emnapi/core@1.5.0': + resolution: {integrity: sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==} '@emnapi/runtime@1.4.4': resolution: {integrity: sha512-hHyapA4A3gPaDCNfiqyZUStTMqIkKRshqPIuDOXv1hcBnD4U3l8cP0T1HMCfGRxQ6V64TGCcoswChANyOAwbQg==} - '@emnapi/wasi-threads@1.0.3': - resolution: {integrity: sha512-8K5IFFsQqF9wQNJptGbS6FNKgUTsSRYnTqNCG1vPP8jFdjSv18n2mQfJpkt2Oibo9iBEzcDnDxNwKTzC7svlJw==} + '@emnapi/runtime@1.5.0': + resolution: {integrity: sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==} + + '@emnapi/wasi-threads@1.1.0': + resolution: {integrity: sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==} '@emoji-mart/data@1.2.1': resolution: {integrity: sha512-no2pQMWiBy6gpBEiqGeU77/bFejDqUTRY7KX+0+iur13op3bqUsXdnwoZs6Xb1zbv0gAj5VvS1PWoUUckSr5Dw==} @@ -1664,10 +1592,6 @@ packages: resolution: {integrity: 
sha512-30iXE9whjlILfWobBkNerJo+TXYsgVM5ERQwMcMKCHckHflCmf7wXDAHlARoWnh0s1U72WqlbeyE7iAcCzuCPw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/js@9.36.0': - resolution: {integrity: sha512-uhCbYtYynH30iZErszX78U+nR3pJU3RHGQ57NXy5QupD4SBVwDeU8TNBy+MjMngc1UyIW9noKqsRqfjQTBU2dw==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/markdown@7.1.0': resolution: {integrity: sha512-Y+X1B1j+/zupKDVJfkKc8uYMjQkGzfnd8lt7vK3y8x9Br6H5dBuhAfFrQ6ff7HAMm/1BwgecyEiRFkYCWPRxmA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -1684,16 +1608,9 @@ packages: resolution: {integrity: sha512-Z5kJ+wU3oA7MMIqVR9tyZRtjYPr4OC004Q4Rw7pgOKUOKkJfZ3O24nz3WYfGRpMDNmcOi3TwQOmgm7B7Tpii0w==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@faker-js/faker@9.9.0': - resolution: {integrity: sha512-OEl393iCOoo/z8bMezRlJu+GlRGlsKbUAN7jKB6LhnKoqKve5DXRpalbItIIcwnCjs1k/FOPjFzcA6Qn+H+YbA==} - engines: {node: '>=18.0.0', npm: '>=9.0.0'} - '@floating-ui/core@1.7.2': resolution: {integrity: sha512-wNB5ooIKHQc+Kui96jE/n69rHFWAVoxn5CAzL1Xdd8FG03cgY3MLO+GF9U3W737fYDSgPWA6MReKhBQBop6Pcw==} - '@floating-ui/dom@1.1.1': - resolution: {integrity: sha512-TpIO93+DIujg3g7SykEAGZMDtbJRrmnYRCNYSjJlvIbGhBjRSNTLVbNeDQBrzy9qDgUbiWdc7KA0uZHZ2tJmiw==} - '@floating-ui/dom@1.7.2': resolution: {integrity: sha512-7cfaOQuCS27HD7DX+6ib2OrnW+b4ZBwDNnCcT0uTyidcmyWb03FnQqJybDBoCnpdxwBSfA94UAYlRCt7mV+TbA==} @@ -2205,8 +2122,8 @@ packages: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 - '@napi-rs/wasm-runtime@0.2.12': - resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} + '@napi-rs/wasm-runtime@1.0.5': + resolution: {integrity: sha512-TBr9Cf9onSAS2LQ2+QHx6XcC6h9+RIzJgbqG3++9TUZSH204AwEy5jg3BTQ0VATsyoGj4ee49tN/y6rvaOOtcg==} '@next/bundle-analyzer@15.5.3': resolution: {integrity: sha512-l2NxnWHP2gWHbomAlz/wFnN2jNCx/dpr7P/XWeOLhULiyKkXSac8O8SjxRO/8FNhr2l4JNtWVKk82Uya4cZYTw==} @@ -2292,38 +2209,14 @@ packages: resolution: {integrity: sha512-IVEqpEgFbLaU0hUoMwJYXNSdi6lq+FxHdxd8xTKDLxh8k6u5YNGz4Bo6bT46l7p0x8PbJmHViBtngqhvE528fA==} engines: {node: '>=12.4.0'} - '@nolyfill/array.prototype.findlast@1.0.44': - resolution: {integrity: sha512-vtrf2HM9BoxlYt2s3vTngfhUKef9c2lIw9ALvOCKS1pwXSIxWfSlf8UvQzG5vRImgflqbaXw+Pj6Y77SomHMaA==} - engines: {node: '>=12.4.0'} - - '@nolyfill/array.prototype.findlastindex@1.0.44': - resolution: {integrity: sha512-BLeHS3SulsR3iFxxETL9q21lArV2KS7lh2wcUnhue1ppx19xah1W7MdFxepyeGbM3Umk9S90snfboXAds5HkTg==} - engines: {node: '>=12.4.0'} - '@nolyfill/array.prototype.flat@1.0.44': resolution: {integrity: sha512-HnOqOT4te0l+XU9UKhy3ry+pc+ZRNsUJFR7omMEtjXf4+dq6oXmIBk7vR35+hSTk4ldjwm/27jwV3ZIGp3l4IQ==} engines: {node: '>=12.4.0'} - '@nolyfill/array.prototype.flatmap@1.0.44': - resolution: {integrity: sha512-P6OsaEUrpBJ9NdNekFDQVM9LOFHPDKSJzwOWRBaC6LqREX+4lkZT2Q+to78R6aG6atuOQsxBVqPjMGCKjWdvyQ==} - engines: {node: '>=12.4.0'} - - '@nolyfill/array.prototype.tosorted@1.0.44': - resolution: {integrity: sha512-orF3SWnIhoinCPrMW7XwpoDBccRfF6tXKzcMKlG3AQQmVzRanOYBj7/s1yy6KAQPWker4H1Ih281/GT7y/QXSA==} - engines: {node: '>=12.4.0'} - '@nolyfill/assert@1.0.26': resolution: {integrity: sha512-xYXWX/30t7LmvXry+FF2nJKwFxNHZeprLy4KvfqK0ViAozp3+oXI3X4ANe8RQqZ7KaRc4OsEd5nzcvLKO+60Ng==} engines: {node: '>=12.4.0'} - '@nolyfill/es-iterator-helpers@1.0.21': - resolution: {integrity: sha512-i326KeE0nhW4STobcUhkxpXzZUddedCmfh7b/IyXR9kW0CFHiNNT80C3JSEy33mUlhZtk/ezX47nymcFxyBigg==} - engines: {node: '>=12.4.0'} - - 
'@nolyfill/hasown@1.0.44': - resolution: {integrity: sha512-GA/21lkTr2PAQuT6jGnhLuBD5IFd/AEhBXJ/tf33+/bVxPxg+5ejKx9jGQGnyV/P0eSmdup5E+s8b2HL6lOrwQ==} - engines: {node: '>=12.4.0'} - '@nolyfill/is-arguments@1.0.44': resolution: {integrity: sha512-I/knhoEt8pfYZj20gOmlFSNtRdDvmtJPPeS9MaDvBeRlJEd+vNBAqeVswo48Hp4uF1Fqit5HO78cgpcrqZiw0A==} engines: {node: '>=12.4.0'} @@ -2360,18 +2253,6 @@ packages: resolution: {integrity: sha512-cZoXq09YZXDgkxRMAP/TTb3kAsWm7p5OyBugWDe4fOfxf0XRI55mgDSkuyq41sV1qW1zVC5aSsKEh1hQo1KOvA==} engines: {node: '>=12.4.0'} - '@nolyfill/object.entries@1.0.44': - resolution: {integrity: sha512-RCxO6EH9YbvxQWGYLKOd7MjNi7vKzPkXv1VDWNsy1C8BksQxXNPQrddlu3INi1O2fexk82WXpCCeaCtpU/y21w==} - engines: {node: '>=12.4.0'} - - '@nolyfill/object.fromentries@1.0.44': - resolution: {integrity: sha512-/LrsCtpLmByZ6GwP/NeXULSgMyNsVr5d6FlgQy1HZatAiBc8c+WZ1VmFkK19ZLXCNNXBedXDultrp0x4Nz+QQw==} - engines: {node: '>=12.4.0'} - - '@nolyfill/object.groupby@1.0.44': - resolution: {integrity: sha512-jCt/8pN+10mlbeg0ZESpVVaqn5qqpv6kpjM+GDfEP7cXGDSPlIjtvfYWRZK4k4Gftkhhgqkzvcrr8z1wuNO1TQ==} - engines: {node: '>=12.4.0'} - '@nolyfill/object.values@1.0.44': resolution: {integrity: sha512-bwIpVzFMudUC0ofnvdSDB/OyGUizcU+r32ZZ0QTMbN03gUttMtdCFDekuSYT0XGFgufTQyZ4ONBnAeb3DFCPGQ==} engines: {node: '>=12.4.0'} @@ -2380,17 +2261,10 @@ packages: resolution: {integrity: sha512-SqlKXtlhNTDMeZKey9jnnuPhi8YTl1lJuEcY9zbm5i4Pqe79UJJ8IJ9oiD6DhgI8KjYc+HtLzpQJNRdNYqb/hw==} engines: {node: '>=12.4.0'} - '@nolyfill/safe-regex-test@1.0.44': - resolution: {integrity: sha512-Q6veatd1NebtD8Sre6zjvO35QzG21IskMVOOEbePFcNO9noanNJgsqHeOCr0c5yZz6Z0DAizLg2gIZWokJSkXw==} - engines: {node: '>=12.4.0'} - '@nolyfill/safer-buffer@1.0.44': resolution: {integrity: sha512-Ouw1fMwjAy1V4MpnDASfu1DCPgkP0nNFteiiWbFoEGSqa7Vnmkb6if2c522N2WcMk+RuaaabQbC1F1D4/kTXcg==} engines: {node: '>=12.4.0'} - '@nolyfill/shared@1.0.21': - resolution: {integrity: sha512-qDc/NoaFU23E0hhiDPeUrvWzTXIPE+RbvRQtRWSeHHNmCIgYI9HS1jKzNYNJxv4jvZ/1VmM3L6rNVxbj+LBMNA==} - '@nolyfill/shared@1.0.24': resolution: {integrity: sha512-TGCpg3k5N7jj9AgU/1xFw9K1g4AC1vEE5ZFkW77oPNNLzprxT17PvFaNr/lr3BkkT5fJ5LNMntaTIq+pyWaeEA==} @@ -2401,22 +2275,10 @@ packages: resolution: {integrity: sha512-y3SvzjuY1ygnzWA4Krwx/WaJAsTMP11DN+e21A8Fa8PW1oDtVB5NSRW7LWurAiS2oKRkuCgcjTYMkBuBkcPCRg==} engines: {node: '>=12.4.0'} - '@nolyfill/string.prototype.includes@1.0.44': - resolution: {integrity: sha512-d1t7rnoAYyoap0X3a/gCnusCvxzK6v7uMFzW8k0mI2WtAK8HiKuzaQUwAriyVPh63GsvQCqvXx8Y5gtdh4LjSA==} - engines: {node: '>=12.4.0'} - '@nolyfill/string.prototype.matchall@1.0.44': resolution: {integrity: sha512-/lwVUaDPCeopUL6XPz2B2ZwaQeIbctP8YxNIyCxunxVKWhCAhii+w0ourNK7JedyGIcM+DaXZTeRlcbgEWaZig==} engines: {node: '>=12.4.0'} - '@nolyfill/string.prototype.repeat@1.0.44': - resolution: {integrity: sha512-CvHQRuEi1t/jpAlodKuW32BMQ5FL/n2/AbYD7ppKZnz/4CxSwsML2302sTwm9MqNUK6O5P3vyO2B+uDweuvZdw==} - engines: {node: '>=12.4.0'} - - '@nolyfill/string.prototype.trimend@1.0.44': - resolution: {integrity: sha512-3dsKlf4Ma7o+uxLIg5OI1Tgwfet2pE8WTbPjEGWvOe6CSjMtK0skJnnSVHaEVX4N4mYU81To0qDeZOPqjaUotg==} - engines: {node: '>=12.4.0'} - '@nolyfill/typed-array-buffer@1.0.44': resolution: {integrity: sha512-QDtsud32BpViorcc6KOgFaRYUI2hyQewOaRD9NF1fs7g+cv6d3MbIJCYWpkOwAXATKlCeELtSbuTYDXAaw7S+Q==} engines: {node: '>=12.4.0'} @@ -2455,6 +2317,101 @@ packages: '@octokit/types@14.1.0': resolution: {integrity: sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==} + 
'@oxc-resolver/binding-android-arm-eabi@11.8.4': + resolution: {integrity: sha512-6BjMji0TcvQfJ4EoSunOSyu/SiyHKficBD0V3Y0NxF0beaNnnZ7GYEi2lHmRNnRCuIPK8IuVqQ6XizYau+CkKw==} + cpu: [arm] + os: [android] + + '@oxc-resolver/binding-android-arm64@11.8.4': + resolution: {integrity: sha512-SxF4X6rzCBS9XNPXKZGoIHIABjfGmtQpEgRBDzpDHx5VTuLAUmwLTHXnVBAZoX5bmnhF79RiMElavzFdJ2cA1A==} + cpu: [arm64] + os: [android] + + '@oxc-resolver/binding-darwin-arm64@11.8.4': + resolution: {integrity: sha512-8zWeERrzgscAniE6kh1TQ4E7GJyglYsvdoKrHYLBCbHWD+0/soffiwAYxZuckKEQSc2RXMSPjcu+JTCALaY0Dw==} + cpu: [arm64] + os: [darwin] + + '@oxc-resolver/binding-darwin-x64@11.8.4': + resolution: {integrity: sha512-BUwggKz8Hi5uEQ0AeVTSun1+sp4lzNcItn+L7fDsHu5Cx0Zueuo10BtVm+dIwmYVVPL5oGYOeD0fS7MKAazKiw==} + cpu: [x64] + os: [darwin] + + '@oxc-resolver/binding-freebsd-x64@11.8.4': + resolution: {integrity: sha512-fPO5TQhnn8gA6yP4o49lc4Gn8KeDwAp9uYd4PlE3Q00JVqU6cY9WecDhYHrWtiFcyoZ8UVBlIxuhRqT/DP4Z4A==} + cpu: [x64] + os: [freebsd] + + '@oxc-resolver/binding-linux-arm-gnueabihf@11.8.4': + resolution: {integrity: sha512-QuNbdUaVGiP0W0GrXsvCDZjqeL4lZGU7aXlx/S2tCvyTk3wh6skoiLJgqUf/eeqXfUPnzTfntYqyfolzCAyBYA==} + cpu: [arm] + os: [linux] + + '@oxc-resolver/binding-linux-arm-musleabihf@11.8.4': + resolution: {integrity: sha512-p/zLMfza8OsC4BDKxqeZ9Qel+4eA/oiMSyKLRkMrTgt6OWQq1d5nHntjfG35Abcw4ev6Q9lRU3NOW5hj0xlUbw==} + cpu: [arm] + os: [linux] + + '@oxc-resolver/binding-linux-arm64-gnu@11.8.4': + resolution: {integrity: sha512-bvJF9wWxF1+a5YZATlS5JojpOMC7OsnTatA6sXVHoOb7MIigjledYB5ZMAeRrnWWexRMiEX3YSaA46oSfOzmOg==} + cpu: [arm64] + os: [linux] + + '@oxc-resolver/binding-linux-arm64-musl@11.8.4': + resolution: {integrity: sha512-gf4nwGBfu+EFwOn5p7/T7VF4jmIdfodwJS9MRkOBHvuAm3LQgCX7O6d3Y80mm0TV7ZMRD/trfW628rHfd5++vQ==} + cpu: [arm64] + os: [linux] + + '@oxc-resolver/binding-linux-ppc64-gnu@11.8.4': + resolution: {integrity: sha512-T120R5GIzRd41rYWWKCI6cSYrZjmRQzf3X4xeE1WX396Uabz5DX8KU7RnVHihSK+KDxccCVOFBxcH3ITd+IEpw==} + cpu: [ppc64] + os: [linux] + + '@oxc-resolver/binding-linux-riscv64-gnu@11.8.4': + resolution: {integrity: sha512-PVG7SxBFFjAaQ76p9O/0Xt5mTBlziRwpck+6cRNhy/hbWY/hSt8BFfPqw0EDSfnl40Uuh+NPsHFMnaWWyxbQEg==} + cpu: [riscv64] + os: [linux] + + '@oxc-resolver/binding-linux-riscv64-musl@11.8.4': + resolution: {integrity: sha512-L0OklUhM2qLGaKvPSyKmwWpoijfc++VJtPyVgz031ShOXyo0WjD0ZGzusyJMsA1a/gdulAmN6CQ/0Sf4LGXEcw==} + cpu: [riscv64] + os: [linux] + + '@oxc-resolver/binding-linux-s390x-gnu@11.8.4': + resolution: {integrity: sha512-18Ajz5hqO4cRGuoHzLFUsIPod9GIaIRDiXFg2m6CS3NgVdHx7iCZscplYH7KtjdE42M8nGWYMyyq5BOk7QVgPw==} + cpu: [s390x] + os: [linux] + + '@oxc-resolver/binding-linux-x64-gnu@11.8.4': + resolution: {integrity: sha512-uHvH4RyYBdQ/lFGV9H+R1ScHg6EBnAhE3mnX+u+mO/btnalvg7j80okuHf8Qw0tLQiP5P1sEBoVeE6zviXY9IA==} + cpu: [x64] + os: [linux] + + '@oxc-resolver/binding-linux-x64-musl@11.8.4': + resolution: {integrity: sha512-X5z44qh5DdJfVhcqXAQFTDFUpcxdpf6DT/lHL5CFcdQGIZxatjc7gFUy05IXPI9xwfq39RValjJBvFovUk9XBw==} + cpu: [x64] + os: [linux] + + '@oxc-resolver/binding-wasm32-wasi@11.8.4': + resolution: {integrity: sha512-z3906y+cd8RRhBGNwHRrRAFxnKjXsBeL3+rdQjZpBrUyrhhsaV5iKD/ROx64FNJ9GjL/9mfon8A5xx/McYIqHA==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + + '@oxc-resolver/binding-win32-arm64-msvc@11.8.4': + resolution: {integrity: sha512-70vXFs74uA3X5iYOkpclbkWlQEF+MI325uAQ+Or2n8HJip2T0SEmuBlyw/sRL2E8zLC4oocb+1g25fmzlDVkmg==} + cpu: [arm64] + os: [win32] + + '@oxc-resolver/binding-win32-ia32-msvc@11.8.4': + resolution: {integrity: 
sha512-SEOUAzTvr+nyMia3nx1dMtD7YUxZwuhQ3QAPnxy21261Lj0yT3JY4EIfwWH54lAWWfMdRSRRMFuGeF/dq7XjEw==} + cpu: [ia32] + os: [win32] + + '@oxc-resolver/binding-win32-x64-msvc@11.8.4': + resolution: {integrity: sha512-1gARIQsOPOU7LJ7jvMyPmZEVMapL/PymeG3J7naOdLZDrIZKX6CTvgawJmETYKt+8icP8M6KbBinrVkKVqFd+A==} + cpu: [x64] + os: [win32] + '@parcel/watcher-android-arm64@2.5.1': resolution: {integrity: sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==} engines: {node: '>= 10.0.0'} @@ -2855,12 +2812,6 @@ packages: peerDependencies: rollup: ^1.20.0||^2.0.0 - '@rtsao/scc@1.1.0': - resolution: {integrity: sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==} - - '@rushstack/eslint-patch@1.12.0': - resolution: {integrity: sha512-5EwMtOqvJMMa3HbmxLlF74e+3/HhwBTMcvt3nqVJgGCozO6hzIPOBlwm8mGVNR9SN2IJpxSnlxczyDjcn7qIyw==} - '@sentry-internal/browser-utils@8.55.0': resolution: {integrity: sha512-ROgqtQfpH/82AQIpESPqPQe0UyWywKJsmVIqi3c5Fh+zkds5LUxnssTj3yNd1x+kxaPDVB023jAP+3ibNgeNDw==} engines: {node: '>=14.18'} @@ -2891,10 +2842,6 @@ packages: peerDependencies: react: ^16.14.0 || 17.x || 18.x || 19.x - '@sentry/utils@8.55.0': - resolution: {integrity: sha512-cYcl39+xcOivBpN9d8ZKbALl+DxZKo/8H0nueJZ0PO4JA+MJGhSm6oHakXxLPaiMoNLTX7yor8ndnQIuFg+vmQ==} - engines: {node: '>=14.18'} - '@sinclair/typebox@0.27.8': resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} @@ -3245,8 +3192,8 @@ packages: '@tsconfig/node16@1.0.4': resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} - '@tybys/wasm-util@0.10.0': - resolution: {integrity: sha512-VyyPYFlOMNylG45GoAe0xDoLwWuowvf92F9kySqzYh8vmYm7D2u4iUJKa1tOUpS70Ku13ASrOkS4ScXFsTaCNQ==} + '@tybys/wasm-util@0.10.1': + resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} '@types/aria-query@5.0.4': resolution: {integrity: sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==} @@ -3266,9 +3213,6 @@ packages: '@types/cacheable-request@6.0.3': resolution: {integrity: sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==} - '@types/crypto-js@4.2.2': - resolution: {integrity: sha512-sDOLlVbHhXpAUAL0YHDUUwDZf3iN4Bwi4W6a0W0b+QcAezUbRtH4FVb+9J4h+XFPW7l/gQ9F8qC7P+Ec4k8QVQ==} - '@types/d3-array@3.2.1': resolution: {integrity: sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==} @@ -3425,9 +3369,6 @@ packages: '@types/json-schema@7.0.15': resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} - '@types/json5@0.0.29': - resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} - '@types/katex@0.16.7': resolution: {integrity: sha512-HMwFiRujE5PjrgwHQ25+bsLJgowjGjm5Z8FVSf0N6PwgJrwxH0QxzHYDcKsTfV3wva0vzrpqMTJS2jXPr5BMEQ==} @@ -3479,18 +3420,12 @@ packages: '@types/react-syntax-highlighter@15.5.13': resolution: {integrity: sha512-uLGJ87j6Sz8UaBAooU0T6lWJ0dBmjZgN1PZTrj05TNql2/XpC6+4HhMT5syIdFUUt+FASfCeLLv4kBygNU+8qA==} - '@types/react-window-infinite-loader@1.0.9': - resolution: {integrity: sha512-gEInTjQwURCnDOFyIEK2+fWB5gTjqwx30O62QfxA9stE5aiB6EWkGj4UMhc0axq7/FV++Gs/TGW8FtgEx0S6Tw==} - '@types/react-window@1.8.8': resolution: {integrity: 
sha512-8Ls660bHR1AUA2kuRvVG9D/4XpRC6wjAaPT9dil7Ckc76eP9TKWZwwmgfq8Q1LANX3QNDnoU4Zp48A3w+zK69Q==} '@types/react@19.1.11': resolution: {integrity: sha512-lr3jdBw/BGj49Eps7EvqlUaoeA0xpj3pc0RoJkHpYaCHkVK7i28dKyImLQb3JVlqs3aYSXf7qYuWOW/fgZnTXQ==} - '@types/recordrtc@5.6.14': - resolution: {integrity: sha512-Reiy1sl11xP0r6w8DW3iQjc1BgXFyNC7aDuutysIjpFoqyftbQps9xPA2FoBkfVXpJM61betgYPNt+v65zvMhA==} - '@types/resolve@1.17.1': resolution: {integrity: sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw==} @@ -3673,101 +3608,6 @@ packages: '@ungap/structured-clone@1.3.0': resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} - '@unrs/resolver-binding-android-arm-eabi@1.11.1': - resolution: {integrity: sha512-ppLRUgHVaGRWUx0R0Ut06Mjo9gBaBkg3v/8AxusGLhsIotbBLuRk51rAzqLC8gq6NyyAojEXglNjzf6R948DNw==} - cpu: [arm] - os: [android] - - '@unrs/resolver-binding-android-arm64@1.11.1': - resolution: {integrity: sha512-lCxkVtb4wp1v+EoN+HjIG9cIIzPkX5OtM03pQYkG+U5O/wL53LC4QbIeazgiKqluGeVEeBlZahHalCaBvU1a2g==} - cpu: [arm64] - os: [android] - - '@unrs/resolver-binding-darwin-arm64@1.11.1': - resolution: {integrity: sha512-gPVA1UjRu1Y/IsB/dQEsp2V1pm44Of6+LWvbLc9SDk1c2KhhDRDBUkQCYVWe6f26uJb3fOK8saWMgtX8IrMk3g==} - cpu: [arm64] - os: [darwin] - - '@unrs/resolver-binding-darwin-x64@1.11.1': - resolution: {integrity: sha512-cFzP7rWKd3lZaCsDze07QX1SC24lO8mPty9vdP+YVa3MGdVgPmFc59317b2ioXtgCMKGiCLxJ4HQs62oz6GfRQ==} - cpu: [x64] - os: [darwin] - - '@unrs/resolver-binding-freebsd-x64@1.11.1': - resolution: {integrity: sha512-fqtGgak3zX4DCB6PFpsH5+Kmt/8CIi4Bry4rb1ho6Av2QHTREM+47y282Uqiu3ZRF5IQioJQ5qWRV6jduA+iGw==} - cpu: [x64] - os: [freebsd] - - '@unrs/resolver-binding-linux-arm-gnueabihf@1.11.1': - resolution: {integrity: sha512-u92mvlcYtp9MRKmP+ZvMmtPN34+/3lMHlyMj7wXJDeXxuM0Vgzz0+PPJNsro1m3IZPYChIkn944wW8TYgGKFHw==} - cpu: [arm] - os: [linux] - - '@unrs/resolver-binding-linux-arm-musleabihf@1.11.1': - resolution: {integrity: sha512-cINaoY2z7LVCrfHkIcmvj7osTOtm6VVT16b5oQdS4beibX2SYBwgYLmqhBjA1t51CarSaBuX5YNsWLjsqfW5Cw==} - cpu: [arm] - os: [linux] - - '@unrs/resolver-binding-linux-arm64-gnu@1.11.1': - resolution: {integrity: sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==} - cpu: [arm64] - os: [linux] - - '@unrs/resolver-binding-linux-arm64-musl@1.11.1': - resolution: {integrity: sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==} - cpu: [arm64] - os: [linux] - - '@unrs/resolver-binding-linux-ppc64-gnu@1.11.1': - resolution: {integrity: sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==} - cpu: [ppc64] - os: [linux] - - '@unrs/resolver-binding-linux-riscv64-gnu@1.11.1': - resolution: {integrity: sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==} - cpu: [riscv64] - os: [linux] - - '@unrs/resolver-binding-linux-riscv64-musl@1.11.1': - resolution: {integrity: sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==} - cpu: [riscv64] - os: [linux] - - '@unrs/resolver-binding-linux-s390x-gnu@1.11.1': - resolution: {integrity: sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==} - cpu: [s390x] - os: [linux] - - '@unrs/resolver-binding-linux-x64-gnu@1.11.1': - resolution: {integrity: 
sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==} - cpu: [x64] - os: [linux] - - '@unrs/resolver-binding-linux-x64-musl@1.11.1': - resolution: {integrity: sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==} - cpu: [x64] - os: [linux] - - '@unrs/resolver-binding-wasm32-wasi@1.11.1': - resolution: {integrity: sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==} - engines: {node: '>=14.0.0'} - cpu: [wasm32] - - '@unrs/resolver-binding-win32-arm64-msvc@1.11.1': - resolution: {integrity: sha512-nRcz5Il4ln0kMhfL8S3hLkxI85BXs3o8EYoattsJNdsX4YUU89iOkVn7g0VHSRxFuVMdM4Q1jEpIId1Ihim/Uw==} - cpu: [arm64] - os: [win32] - - '@unrs/resolver-binding-win32-ia32-msvc@1.11.1': - resolution: {integrity: sha512-DCEI6t5i1NmAZp6pFonpD5m7i6aFrpofcp4LA2i8IIq60Jyo28hamKBxNrZcyOwVOZkgsRp9O2sXWBWP8MnvIQ==} - cpu: [ia32] - os: [win32] - - '@unrs/resolver-binding-win32-x64-msvc@1.11.1': - resolution: {integrity: sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g==} - cpu: [x64] - os: [win32] - '@vitest/eslint-plugin@1.3.4': resolution: {integrity: sha512-EOg8d0jn3BAiKnR55WkFxmxfWA3nmzrbIIuOXyTe6A72duryNgyU+bdBEauA97Aab3ho9kLmAwgPX63Ckj4QEg==} peerDependencies: @@ -4036,9 +3876,6 @@ packages: resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} engines: {node: '>=12'} - ast-types-flow@0.0.8: - resolution: {integrity: sha512-OH/2E5Fg20h2aPrbe+QL8JZQFko0YZaF+j4mnQ7BGhfavO7OpSLa8a0y9sBwomHdSbkhTS8TQNayBfnW5DwbvQ==} - ast-types@0.16.1: resolution: {integrity: sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg==} engines: {node: '>=4'} @@ -4061,27 +3898,12 @@ packages: peerDependencies: postcss: ^8.1.0 - axe-core@4.10.3: - resolution: {integrity: sha512-Xm7bpRXnDSX2YE2YFfBk2FnF0ep6tmG7xPh8iHee8MIcrgq762Nkce856dYtJYLkuIoYZvGfTs/PbZhideTcEg==} - engines: {node: '>=4'} - - axobject-query@4.1.0: - resolution: {integrity: sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==} - engines: {node: '>= 0.4'} - babel-jest@29.7.0: resolution: {integrity: sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} peerDependencies: '@babel/core': ^7.8.0 - babel-loader@10.0.0: - resolution: {integrity: sha512-z8jt+EdS61AMw22nSfoNJAZ0vrtmhPRVi6ghL3rCeRZI8cdNYFiV5xeV3HbE7rlZZNmGH8BVccwWt8/ED0QOHA==} - engines: {node: ^18.20.0 || ^20.10.0 || >=22.0.0} - peerDependencies: - '@babel/core': ^7.12.0 - webpack: '>=5.61.0' - babel-loader@8.4.1: resolution: {integrity: sha512-nXzRChX+Z1GoE6yWavBQg6jDslyFF3SDjl2paADuoQtQW10JqShJt62R6eJQ5m/pjJFDT8xgKIWSP85OY8eXeA==} engines: {node: '>= 8.9'} @@ -4600,9 +4422,6 @@ packages: resolution: {integrity: sha512-r4ESw/IlusD17lgQi1O20Fa3qNnsckR126TdUuBgAu7GBYSIPvdNyONd3Zrxh0xCwA4+6w/TDArBPsMvhur+KQ==} engines: {node: '>= 0.10'} - crypto-js@4.2.0: - resolution: {integrity: sha512-KALDyEYgpY+Rlob/iriUtjV6d5Eq+Y191A5g4UqLAi8CyGP9N1+FdVbkc1SxKc2r4YAYqG8JzO2KGL+AizD70Q==} - crypto-random-string@2.0.0: resolution: {integrity: sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==} engines: {node: '>=8'} @@ -4793,23 +4612,12 @@ packages: dagre-d3-es@7.0.11: resolution: {integrity: 
sha512-tvlJLyQf834SylNKax8Wkzco/1ias1OPw8DcUMDE7oUIoSEW25riQVuiu/0OWEFqT0cxHT3Pa9/D82Jr47IONw==} - damerau-levenshtein@1.0.8: - resolution: {integrity: sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==} - dayjs@1.11.13: resolution: {integrity: sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==} debounce@1.2.1: resolution: {integrity: sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug==} - debug@3.2.7: - resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - debug@4.4.1: resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} engines: {node: '>=6.0'} @@ -4897,10 +4705,6 @@ packages: engines: {node: '>=0.10'} hasBin: true - detect-libc@2.0.4: - resolution: {integrity: sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==} - engines: {node: '>=8'} - detect-libc@2.1.0: resolution: {integrity: sha512-vEtk+OcP7VBRtQZ1EJ3bdgzSfBjgnEalLTp5zjJrS+2Z1w2KZly4SBdac/WDU3hhsNAZ9E8SC96ME4Ey8MZ7cg==} engines: {node: '>=8'} @@ -4939,10 +4743,6 @@ packages: dlv@1.1.3: resolution: {integrity: sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==} - doctrine@2.1.0: - resolution: {integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==} - engines: {node: '>=0.10.0'} - doctrine@3.0.0: resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} engines: {node: '>=6.0.0'} @@ -5019,9 +4819,6 @@ packages: emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} - emoji-regex@9.2.2: - resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} - emojis-list@3.0.0: resolution: {integrity: sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==} engines: {node: '>= 4'} @@ -5117,34 +4914,9 @@ packages: peerDependencies: eslint: ^9.5.0 - eslint-config-next@15.5.0: - resolution: {integrity: sha512-Yl4hlOdBqstAuHnlBfx2RimBzWQwysM2SJNu5EzYVa2qS2ItPs7lgxL0sJJDudEx5ZZHfWPZ/6U8+FtDFWs7/w==} - peerDependencies: - eslint: ^7.23.0 || ^8.0.0 || ^9.0.0 - typescript: '>=3.3.1' - peerDependenciesMeta: - typescript: - optional: true - eslint-flat-config-utils@2.1.0: resolution: {integrity: sha512-6fjOJ9tS0k28ketkUcQ+kKptB4dBZY2VijMZ9rGn8Cwnn1SH0cZBoPXT8AHBFHxmHcLFQK9zbELDinZ2Mr1rng==} - eslint-import-resolver-node@0.3.9: - resolution: {integrity: sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==} - - eslint-import-resolver-typescript@3.10.1: - resolution: {integrity: sha512-A1rHYb06zjMGAxdLSkN2fXPBwuSaQ0iO5M/hdyS0Ajj1VBaRp0sPD3dn1FhME3c/JluGFbwSxyCfqdSbtQLAHQ==} - engines: {node: ^14.18.0 || >=16.0.0} - peerDependencies: - eslint: '*' - eslint-plugin-import: '*' - eslint-plugin-import-x: '*' - peerDependenciesMeta: - eslint-plugin-import: - optional: true - eslint-plugin-import-x: - optional: true - eslint-json-compat-utils@0.2.1: resolution: {integrity: sha512-YzEodbDyW8DX8bImKhAcCeu/L31Dd/70Bidx2Qex9OFUtgzXLqtfWL4Hr5fM/aCCB8QUZLuJur0S9k6UfgFkfg==} 
engines: {node: '>=12'} @@ -5161,27 +4933,6 @@ packages: peerDependencies: eslint: '*' - eslint-module-utils@2.12.1: - resolution: {integrity: sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==} - engines: {node: '>=4'} - peerDependencies: - '@typescript-eslint/parser': '*' - eslint: '*' - eslint-import-resolver-node: '*' - eslint-import-resolver-typescript: '*' - eslint-import-resolver-webpack: '*' - peerDependenciesMeta: - '@typescript-eslint/parser': - optional: true - eslint: - optional: true - eslint-import-resolver-node: - optional: true - eslint-import-resolver-typescript: - optional: true - eslint-import-resolver-webpack: - optional: true - eslint-plugin-antfu@3.1.1: resolution: {integrity: sha512-7Q+NhwLfHJFvopI2HBZbSxWXngTwBLKxW1AGXLr2lEGxcEIK/AsDs8pn8fvIizl5aZjBbVbVK5ujmMpBe4Tvdg==} peerDependencies: @@ -5208,16 +4959,6 @@ packages: typescript: optional: true - eslint-plugin-import@2.32.0: - resolution: {integrity: sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==} - engines: {node: '>=4'} - peerDependencies: - '@typescript-eslint/parser': '*' - eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9 - peerDependenciesMeta: - '@typescript-eslint/parser': - optional: true - eslint-plugin-jsdoc@51.4.1: resolution: {integrity: sha512-y4CA9OkachG8v5nAtrwvcvjIbdcKgSyS6U//IfQr4FZFFyeBFwZFf/tfSsMr46mWDJgidZjBTqoCRlXywfFBMg==} engines: {node: '>=20.11.0'} @@ -5230,12 +4971,6 @@ packages: peerDependencies: eslint: '>=6.0.0' - eslint-plugin-jsx-a11y@6.10.2: - resolution: {integrity: sha512-scB3nz4WmG75pV8+3eRUQOHZlNSUhFNq37xnpgRkCCELU3XMvXAxLk1eqWWyE22Ki4Q01Fnsw9BA3cJHDPgn2Q==} - engines: {node: '>=4.0'} - peerDependencies: - eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9 - eslint-plugin-n@17.21.0: resolution: {integrity: sha512-1+iZ8We4ZlwVMtb/DcHG3y5/bZOdazIpa/4TySo22MLKdwrLcfrX0hbadnCvykSQCCmkAnWmIP8jZVb2AAq29A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -5334,12 +5069,6 @@ packages: typescript: optional: true - eslint-plugin-react@7.37.5: - resolution: {integrity: sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA==} - engines: {node: '>=4'} - peerDependencies: - eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7 - eslint-plugin-regexp@2.9.0: resolution: {integrity: sha512-9WqJMnOq8VlE/cK+YAo9C9YHhkOtcEtEk9d12a+H7OSZFwlpI6stiHmYPGa2VE0QhTzodJyhlyprUaXDZLgHBw==} engines: {node: ^18 || >=20} @@ -5567,6 +5296,9 @@ packages: fb-watchman@2.0.2: resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} + fd-package-json@2.0.0: + resolution: {integrity: sha512-jKmm9YtsNXN789RS/0mSzOC1NUq9mkVd65vbSSVsKdjGvYXBuE4oWe2QOEoFeRmJg+lPuZxpmrfFclNhoRMneQ==} + fdir@6.4.6: resolution: {integrity: sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==} peerDependencies: @@ -5644,6 +5376,11 @@ packages: resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==} engines: {node: '>=0.4.x'} + formatly@0.3.0: + resolution: {integrity: sha512-9XNj/o4wrRFyhSMJOvsuyMwy8aUfBaZ1VrqHVfohyXf0Sw0e+yfKG+xZaY3arGCOMdwFsqObtzVOc1gU9KiT9w==} + engines: {node: '>=18.3.0'} + hasBin: true + fraction.js@4.3.7: resolution: {integrity: sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==} @@ -6030,9 +5767,6 @@ packages: resolution: {integrity: 
sha512-f4RqJKBUe5rQkJ2eJEJBXSticB3hGbN9j0yxxMQFqIW89Jp9WYFtzfTcRlstDKVUTRzSOTLKRfO9vIztenwtxA==} engines: {node: '>=18.20'} - is-bun-module@2.0.0: - resolution: {integrity: sha512-gNCGbnnnnFAUGKeZ9PdbyeGYJqewpmc2aKHUEMO5nQPWU9lOmv7jcmQIv+qHD8fXW6W7qfuCwX4rY9LNRjXrkQ==} - is-decimal@1.0.4: resolution: {integrity: sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==} @@ -6306,6 +6040,10 @@ packages: resolution: {integrity: sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==} hasBin: true + jiti@2.6.0: + resolution: {integrity: sha512-VXe6RjJkBPj0ohtqaO8vSWP3ZhAKo66fKrFNCll4BTcwljPLz03pCbaNKfzGP5MbrCYcbJ7v0nOYYwUzTEIdXQ==} + hasBin: true + js-audio-recorder@1.0.7: resolution: {integrity: sha512-JiDODCElVHGrFyjGYwYyNi7zCbKk9va9C77w+zCPMmi4C6ix7zsX2h3ddHugmo4dOTOTCym9++b/wVW9nC0IaA==} @@ -6356,10 +6094,6 @@ packages: json-stable-stringify-without-jsonify@1.0.1: resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} - json5@1.0.2: - resolution: {integrity: sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==} - hasBin: true - json5@2.2.3: resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} engines: {node: '>=6'} @@ -6386,10 +6120,6 @@ packages: resolution: {integrity: sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==} engines: {node: '>=4.0'} - jwt-decode@4.0.0: - resolution: {integrity: sha512-+KJGIyHgkGuIq3IEBNftfhW/LfWhXUIY6OmyVWjliu5KH1y0fw7VQ8YndE2O4qZdMSd9SqbnC8GOcZEy0Om7sA==} - engines: {node: '>=18'} - katex@0.16.22: resolution: {integrity: sha512-XCHRdUw4lf3SKBaJe4EvgqIuWwkPSo9XoeO8GjQW94Bp7TWv9hNhzZjZ+OH9yf1UmLygb7DIT5GSFQiyt16zYg==} hasBin: true @@ -6404,6 +6134,14 @@ packages: resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} engines: {node: '>=6'} + knip@5.64.1: + resolution: {integrity: sha512-80XnLsyeXuyxj1F4+NBtQFHxaRH0xWRw8EKwfQ6EkVZZ0bSz/kqqan08k/Qg8ajWsFPhFq+0S2RbLCBGIQtuOg==} + engines: {node: '>=18.18.0'} + hasBin: true + peerDependencies: + '@types/node': '>=18' + typescript: '>=5.0.4 <7' + kolorist@1.8.0: resolution: {integrity: sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ==} @@ -6418,13 +6156,6 @@ packages: resolution: {integrity: sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==} engines: {node: '>=16.0.0'} - language-subtag-registry@0.3.23: - resolution: {integrity: sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ==} - - language-tags@1.0.9: - resolution: {integrity: sha512-MbjN408fEndfiQXbFQ1vnd+1NoLDsnQW41410oQBXiyXDMYH5z505juWa4KUE1LqxRC7DgOgZDbKLxHIwm27hA==} - engines: {node: '>=0.10'} - launch-ide@1.2.0: resolution: {integrity: sha512-7nXSPQOt3b2JT52Ge8jp4miFcY+nrUEZxNLWBzrEfjmByDTb9b5ytqMSwGhsNwY6Cntwop+6n7rWIFN0+S8PTw==} @@ -7072,6 +6803,9 @@ packages: os-browserify@0.3.0: resolution: {integrity: sha512-gjcpUc3clBf9+210TRaDWbf+rZZZEshZ+DlXMRCeAjp0xhTrnQsKHypIy1J3d5hKdUzj69t708EHtU8P6bUn0A==} + oxc-resolver@11.8.4: + resolution: {integrity: sha512-qpimS3tHHEf+kgESMAme+q+rj7aCzMya00u9YdKOKyX2o7q4lozjPo6d7ZTTi979KHEcVOPWdNTueAKdeNq72w==} + p-cancelable@2.1.1: resolution: {integrity: 
sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==} engines: {node: '>=8'} @@ -7542,20 +7276,9 @@ packages: peerDependencies: react: '>=16.13.1' - react-error-boundary@4.1.2: - resolution: {integrity: sha512-GQDxZ5Jd+Aq/qUxbCm1UtzmL/s++V7zKgE8yMktJiCQXCCFZnMZh9ng+6/Ne6PjNSXH0L9CjeOEREfRnq6Duag==} - peerDependencies: - react: '>=16.13.1' - react-fast-compare@3.2.2: resolution: {integrity: sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ==} - react-headless-pagination@1.1.6: - resolution: {integrity: sha512-t7L/Q4xpyZszw8iC8ALERs/G2644JESmssahUkRp65WFWvw2k9HXVmfI6VbXvTXrqy+a8fbKT6BQ6SgS2ULNOA==} - engines: {node: '>=18.13'} - peerDependencies: - react: '>=16' - react-hook-form@7.60.0: resolution: {integrity: sha512-SBrYOvMbDB7cV8ZfNpaiLcgjH/a1c7aK0lK+aNigpf4xWLO8q+o4tcvVurv3c4EOyzn/3dCsYt4GKD42VvJ/+A==} engines: {node: '>=18.0.0'} @@ -7584,11 +7307,6 @@ packages: typescript: optional: true - react-infinite-scroll-component@6.1.0: - resolution: {integrity: sha512-SQu5nCqy8DxQWpnUVLx7V7b7LcA37aM7tvoWjTLZp1dk6EJibM5/4EJKzOnl07/BsM1Y40sKLuqjCwwH/xV0TQ==} - peerDependencies: - react: '>=16.0.0' - react-is@16.13.1: resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} @@ -7684,19 +7402,6 @@ packages: peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 - react-tooltip@5.8.3: - resolution: {integrity: sha512-h7maAlm2Xeymc14gWKhhrzsENeB83N65EzZ+AcQIGrOpNE0yefVRJIHhNcWHEJ0FEtf7VZXxtsj5glVXKxEtvA==} - peerDependencies: - react: '>=16.14.0' - react-dom: '>=16.14.0' - - react-window-infinite-loader@1.0.10: - resolution: {integrity: sha512-NO/csdHlxjWqA2RJZfzQgagAjGHspbO2ik9GtWZb0BY1Nnapq0auG8ErI+OhGCzpjYJsCYerqUlK6hkq9dfAAA==} - engines: {node: '>8.0.0'} - peerDependencies: - react: ^15.3.0 || ^16.0.0-alpha || ^17.0.0 || ^18.0.0 || ^19.0.0 - react-dom: ^15.3.0 || ^16.0.0-alpha || ^17.0.0 || ^18.0.0 || ^19.0.0 - react-window@1.8.11: resolution: {integrity: sha512-+SRbUVT2scadgFSWx+R1P754xHPEqvcfSfVX10QYg6POOz+WNgkN48pS+BtZNIMGiL1HYrSEiCkwsMS15QogEQ==} engines: {node: '>8.0.0'} @@ -7756,9 +7461,6 @@ packages: recma-stringify@1.0.0: resolution: {integrity: sha512-cjwII1MdIIVloKvC9ErQ+OgAtwHBmcZ0Bg4ciz78FtbT8In39aAYbaA7zvxQ61xVMSPE8WxhLwLbhif4Js2C+g==} - recordrtc@5.6.2: - resolution: {integrity: sha512-1QNKKNtl7+KcwD1lyOgP3ZlbiJ1d0HtXnypUy7yq49xEERxk31PHvE9RCciDrulPCY7WJ+oz0R9hpNxgsIurGQ==} - redent@3.0.0: resolution: {integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==} engines: {node: '>=8'} @@ -7878,10 +7580,6 @@ packages: engines: {node: '>= 0.4'} hasBin: true - resolve@2.0.0-next.5: - resolution: {integrity: sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==} - hasBin: true - responselike@2.0.1: resolution: {integrity: sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==} @@ -8002,9 +7700,6 @@ packages: serialize-javascript@6.0.2: resolution: {integrity: sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==} - server-only@0.0.1: - resolution: {integrity: sha512-qepMx2JxAa5jjfzxG79yPPq+8BuFToHd1hm7kI+Z4zAq1ftQiP7HcxMhDDItrbtwVeLg/cY2JnKnrcFkmiswNA==} - set-blocking@2.0.0: resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==} @@ -8024,9 +7719,6 @@ packages: resolution: {integrity: 
sha512-eX2IQ6nFohW4DbvHIOLRB3MHFpYqaqvXd3Tp5e/T/dSH83fxaNJQRvDMhASmkNTsNTVF2/OOopzRCt7xokgPfg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - shave@5.0.4: - resolution: {integrity: sha512-AnvEI1wM2rQmrwCl364LVLLhzCzSHJ7DQmdd+fHJTnNzbD2mjsUAOcxWLLYKam7Q63skwyQf2CB2TCdJ2O5c8w==} - shebang-command@2.0.0: resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} engines: {node: '>=8'} @@ -8073,6 +7765,10 @@ packages: resolution: {integrity: sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==} engines: {node: '>=18'} + smol-toml@1.4.2: + resolution: {integrity: sha512-rInDH6lCNiEyn3+hH8KVGFdbjc099j47+OSgbMrfDYX1CmXLfdKd7qi6IfcWj2wFxvSVkuI46M+wPGYfEOEj6g==} + engines: {node: '>= 18'} + sortablejs@1.15.6: resolution: {integrity: sha512-aNfiuwMEpfBM/CN6LY0ibyhxPfPbyFeBTYJKCvzkJ2GkUpazIt3H+QIPAMHwqQ7tMKaHz1Qj+rJJCqljnf4p3A==} @@ -8124,9 +7820,6 @@ packages: sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} - stable-hash@0.0.5: - resolution: {integrity: sha512-+L3ccpzibovGXFK+Ap/f8LOS0ahMrHTf3xu7mMLSpEGU0EO9ucaysSylKo9eRDFNhWve/y275iPmIZ4z39a9iA==} - stack-utils@2.0.6: resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} engines: {node: '>=10'} @@ -8224,6 +7917,10 @@ packages: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} + strip-json-comments@5.0.2: + resolution: {integrity: sha512-4X2FR3UwhNUE9G49aIsJW5hRRR3GXGTBTZRMfv568O60ojM8HcWjV/VxAxCDW3SUND33O6ZY66ZuRcdkj73q2g==} + engines: {node: '>=14.16'} + style-loader@3.3.4: resolution: {integrity: sha512-0WqXzrsMTyb8yjZJHDqwmnwRJvhALK9LfRtRc6B4UTWe8AijYLZYZ9thuJTZc2VfQWINADW/j+LiJnfy2RoC1w==} engines: {node: '>= 12.13.0'} @@ -8350,10 +8047,6 @@ packages: thenify@3.3.1: resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==} - throttle-debounce@2.3.0: - resolution: {integrity: sha512-H7oLPV0P7+jgvrk+6mwwwBDmxTaxnu9HMXmloNLXwnNO0ZxZ31Orah2n8lU1eMPvsaowP2CX+USCgyovXfdOFQ==} - engines: {node: '>=8'} - timers-browserify@2.0.12: resolution: {integrity: sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ==} engines: {node: '>=0.6.0'} @@ -8471,9 +8164,6 @@ packages: resolution: {integrity: sha512-zbem3rfRS8BgeNK50Zz5SIQgXzLafiHjOwUAvk/38/o1jHn/V5QAgVUcz884or7WYcPaH3N2CIfUc2u0ul7UcA==} engines: {node: '>=10.13.0'} - tsconfig-paths@3.15.0: - resolution: {integrity: sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==} - tsconfig-paths@4.2.0: resolution: {integrity: sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg==} engines: {node: '>=6'} @@ -8513,13 +8203,6 @@ packages: resolution: {integrity: sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==} engines: {node: '>=12.20'} - typescript-eslint@8.38.0: - resolution: {integrity: sha512-FsZlrYK6bPDGoLeZRuvx2v6qrM03I0U0SnfCLPs/XCCPCFD80xU9Pg09H/K+XFa68uJuZo7l/Xhs+eDRg2l3hg==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - eslint: ^8.57.0 || ^9.0.0 - typescript: '>=4.8.4 <5.9.0' - typescript@5.8.3: resolution: {integrity: 
sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==} engines: {node: '>=14.17'} @@ -8591,9 +8274,6 @@ packages: resolution: {integrity: sha512-4/u/j4FrCKdi17jaxuJA0jClGxB1AvU2hw/IuayPc4ay1XGaJs/rbb4v5WKwAjNifjmXK9PIFyuPiaK8azyR9w==} engines: {node: '>=14.0.0'} - unrs-resolver@1.11.1: - resolution: {integrity: sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==} - upath@1.2.0: resolution: {integrity: sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==} engines: {node: '>=4'} @@ -8742,6 +8422,10 @@ packages: peerDependencies: eslint: ^8.57.0 || ^9.0.0 + walk-up-path@4.0.0: + resolution: {integrity: sha512-3hu+tD8YzSLGuFYtPRb48vdhKMi0KQV5sn+uWr8+7dMEq/2G/dtLrdDinkLjqq5TIbIBjYJ4Ax/n3YiaW7QM8A==} + engines: {node: 20 || >=22} + walker@1.0.8: resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} @@ -8982,6 +8666,9 @@ packages: zod@4.0.5: resolution: {integrity: sha512-/5UuuRPStvHXu7RS+gmvRf4NXrNxpSllGwDnCBcJZtQsKrviYXm54yDGV2KYNLT5kq0lHGcl7lqWJLgSaG+tgA==} + zod@4.1.11: + resolution: {integrity: sha512-WPsqwxITS2tzx1bzhIKsEs19ABD5vmCVa4xBo2tq/SrV4RNZtfws1EnCWQXM6yh8bD08a1idvkB5MZSBiZsjwg==} + zrender@5.6.1: resolution: {integrity: sha512-OFXkDJKcrlx5su2XbzJvj/34Q3m6PvyCZkVPHGYpcCJ52ek4U/ymZyfuV1nKE23AyBJ51E/6Yr0mhZ7xGTO4ag==} @@ -9019,50 +8706,50 @@ snapshots: '@jridgewell/gen-mapping': 0.3.12 '@jridgewell/trace-mapping': 0.3.29 - '@antfu/eslint-config@5.0.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.35.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@next/eslint-plugin-next@15.5.0)(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.35.0(jiti@1.21.7)))(eslint-plugin-react-refresh@0.4.20(eslint@9.35.0(jiti@1.21.7)))(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@antfu/eslint-config@5.0.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@next/eslint-plugin-next@15.5.0)(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.35.0(jiti@2.6.0)))(eslint-plugin-react-refresh@0.4.20(eslint@9.35.0(jiti@2.6.0)))(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: '@antfu/install-pkg': 1.1.0 '@clack/prompts': 0.11.0 - '@eslint-community/eslint-plugin-eslint-comments': 4.5.0(eslint@9.35.0(jiti@1.21.7)) + '@eslint-community/eslint-plugin-eslint-comments': 4.5.0(eslint@9.35.0(jiti@2.6.0)) '@eslint/markdown': 7.1.0 - '@stylistic/eslint-plugin': 5.2.2(eslint@9.35.0(jiti@1.21.7)) - '@typescript-eslint/eslint-plugin': 8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/parser': 8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@vitest/eslint-plugin': 1.3.4(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@stylistic/eslint-plugin': 5.2.2(eslint@9.35.0(jiti@2.6.0)) + '@typescript-eslint/eslint-plugin': 8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3))(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@typescript-eslint/parser': 8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@vitest/eslint-plugin': 1.3.4(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) ansis: 4.1.0 cac: 6.7.14 - eslint: 9.35.0(jiti@1.21.7) - eslint-config-flat-gitignore: 2.1.0(eslint@9.35.0(jiti@1.21.7)) + eslint: 9.35.0(jiti@2.6.0) + eslint-config-flat-gitignore: 
2.1.0(eslint@9.35.0(jiti@2.6.0)) eslint-flat-config-utils: 2.1.0 - eslint-merge-processors: 2.0.0(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-antfu: 3.1.1(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-command: 3.3.1(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-import-lite: 0.3.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint-plugin-jsdoc: 51.4.1(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-jsonc: 2.20.1(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-n: 17.21.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + eslint-merge-processors: 2.0.0(eslint@9.35.0(jiti@2.6.0)) + eslint-plugin-antfu: 3.1.1(eslint@9.35.0(jiti@2.6.0)) + eslint-plugin-command: 3.3.1(eslint@9.35.0(jiti@2.6.0)) + eslint-plugin-import-lite: 0.3.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint-plugin-jsdoc: 51.4.1(eslint@9.35.0(jiti@2.6.0)) + eslint-plugin-jsonc: 2.20.1(eslint@9.35.0(jiti@2.6.0)) + eslint-plugin-n: 17.21.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) eslint-plugin-no-only-tests: 3.3.0 - eslint-plugin-perfectionist: 4.15.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint-plugin-pnpm: 1.1.0(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-regexp: 2.9.0(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-toml: 0.12.0(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-unicorn: 60.0.0(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-unused-imports: 4.1.4(@typescript-eslint/eslint-plugin@8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-vue: 10.3.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.35.0(jiti@1.21.7))(vue-eslint-parser@10.2.0(eslint@9.35.0(jiti@1.21.7))) - eslint-plugin-yml: 1.18.0(eslint@9.35.0(jiti@1.21.7)) - eslint-processor-vue-blocks: 2.0.0(@vue/compiler-sfc@3.5.17)(eslint@9.35.0(jiti@1.21.7)) + eslint-plugin-perfectionist: 4.15.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint-plugin-pnpm: 1.1.0(eslint@9.35.0(jiti@2.6.0)) + eslint-plugin-regexp: 2.9.0(eslint@9.35.0(jiti@2.6.0)) + eslint-plugin-toml: 0.12.0(eslint@9.35.0(jiti@2.6.0)) + eslint-plugin-unicorn: 60.0.0(eslint@9.35.0(jiti@2.6.0)) + eslint-plugin-unused-imports: 4.1.4(@typescript-eslint/eslint-plugin@8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3))(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3))(eslint@9.35.0(jiti@2.6.0)) + eslint-plugin-vue: 10.3.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3))(eslint@9.35.0(jiti@2.6.0))(vue-eslint-parser@10.2.0(eslint@9.35.0(jiti@2.6.0))) + eslint-plugin-yml: 1.18.0(eslint@9.35.0(jiti@2.6.0)) + eslint-processor-vue-blocks: 2.0.0(@vue/compiler-sfc@3.5.17)(eslint@9.35.0(jiti@2.6.0)) globals: 16.3.0 jsonc-eslint-parser: 2.4.0 local-pkg: 1.1.1 parse-gitignore: 2.0.0 toml-eslint-parser: 0.10.0 - vue-eslint-parser: 10.2.0(eslint@9.35.0(jiti@1.21.7)) + vue-eslint-parser: 10.2.0(eslint@9.35.0(jiti@2.6.0)) yaml-eslint-parser: 1.3.0 optionalDependencies: - '@eslint-react/eslint-plugin': 1.52.3(eslint@9.35.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3) + '@eslint-react/eslint-plugin': 1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3) '@next/eslint-plugin-next': 15.5.0 - eslint-plugin-react-hooks: 5.2.0(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-react-refresh: 0.4.20(eslint@9.35.0(jiti@1.21.7)) + eslint-plugin-react-hooks: 5.2.0(eslint@9.35.0(jiti@2.6.0)) + eslint-plugin-react-refresh: 0.4.20(eslint@9.35.0(jiti@2.6.0)) 
transitivePeerDependencies: - '@eslint/json' - '@vue/compiler-sfc' @@ -10048,6 +9735,7 @@ snapshots: '@cspotcode/source-map-support@0.8.1': dependencies: '@jridgewell/trace-mapping': 0.3.9 + optional: true '@dagrejs/dagre@1.1.5': dependencies: @@ -10057,9 +9745,9 @@ snapshots: '@discoveryjs/json-ext@0.5.7': {} - '@emnapi/core@1.4.4': + '@emnapi/core@1.5.0': dependencies: - '@emnapi/wasi-threads': 1.0.3 + '@emnapi/wasi-threads': 1.1.0 tslib: 2.8.1 optional: true @@ -10068,7 +9756,12 @@ snapshots: tslib: 2.8.1 optional: true - '@emnapi/wasi-threads@1.0.3': + '@emnapi/runtime@1.5.0': + dependencies: + tslib: 2.8.1 + optional: true + + '@emnapi/wasi-threads@1.1.0': dependencies: tslib: 2.8.1 optional: true @@ -10166,30 +9859,30 @@ snapshots: '@esbuild/win32-x64@0.25.0': optional: true - '@eslint-community/eslint-plugin-eslint-comments@4.5.0(eslint@9.35.0(jiti@1.21.7))': + '@eslint-community/eslint-plugin-eslint-comments@4.5.0(eslint@9.35.0(jiti@2.6.0))': dependencies: escape-string-regexp: 4.0.0 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) ignore: 5.3.2 - '@eslint-community/eslint-utils@4.7.0(eslint@9.35.0(jiti@1.21.7))': + '@eslint-community/eslint-utils@4.7.0(eslint@9.35.0(jiti@2.6.0))': dependencies: - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) eslint-visitor-keys: 3.4.3 - '@eslint-community/eslint-utils@4.9.0(eslint@9.35.0(jiti@1.21.7))': + '@eslint-community/eslint-utils@4.9.0(eslint@9.35.0(jiti@2.6.0))': dependencies: - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) eslint-visitor-keys: 3.4.3 '@eslint-community/regexpp@4.12.1': {} - '@eslint-react/ast@1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@eslint-react/ast@1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: '@eslint-react/eff': 1.52.3 '@typescript-eslint/types': 8.37.0 '@typescript-eslint/typescript-estree': 8.44.0(typescript@5.8.3) - '@typescript-eslint/utils': 8.44.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.44.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) string-ts: 2.2.1 ts-pattern: 5.7.1 transitivePeerDependencies: @@ -10197,17 +9890,17 @@ snapshots: - supports-color - typescript - '@eslint-react/core@1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@eslint-react/core@1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: - '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@eslint-react/eff': 1.52.3 - '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/scope-manager': 8.37.0 - '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/types': 8.37.0 - '@typescript-eslint/utils': 8.44.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.44.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) birecord: 0.1.1 ts-pattern: 5.7.1 transitivePeerDependencies: @@ -10217,32 +9910,32 @@ snapshots: '@eslint-react/eff@1.52.3': {} - 
'@eslint-react/eslint-plugin@1.52.3(eslint@9.35.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3)': + '@eslint-react/eslint-plugin@1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3)': dependencies: '@eslint-react/eff': 1.52.3 - '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/scope-manager': 8.37.0 - '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/types': 8.37.0 - '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) - eslint-plugin-react-debug: 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint-plugin-react-dom: 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint-plugin-react-hooks-extra: 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint-plugin-react-naming-convention: 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint-plugin-react-web-api: 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint-plugin-react-x: 1.52.3(eslint@9.35.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3) + '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint: 9.35.0(jiti@2.6.0) + eslint-plugin-react-debug: 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint-plugin-react-dom: 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint-plugin-react-hooks-extra: 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint-plugin-react-naming-convention: 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint-plugin-react-web-api: 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint-plugin-react-x: 1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3) optionalDependencies: typescript: 5.8.3 transitivePeerDependencies: - supports-color - ts-api-utils - '@eslint-react/kit@1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@eslint-react/kit@1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: '@eslint-react/eff': 1.52.3 - '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) ts-pattern: 5.7.1 zod: 4.0.5 transitivePeerDependencies: @@ -10250,11 +9943,11 @@ snapshots: - supports-color - typescript - '@eslint-react/shared@1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@eslint-react/shared@1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: '@eslint-react/eff': 1.52.3 - '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) ts-pattern: 5.7.1 zod: 4.0.5 transitivePeerDependencies: @@ -10262,13 +9955,13 @@ snapshots: - supports-color - typescript - '@eslint-react/var@1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@eslint-react/var@1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: - 
'@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@eslint-react/eff': 1.52.3 '@typescript-eslint/scope-manager': 8.37.0 '@typescript-eslint/types': 8.37.0 - '@typescript-eslint/utils': 8.44.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.44.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) string-ts: 2.2.1 ts-pattern: 5.7.1 transitivePeerDependencies: @@ -10276,9 +9969,9 @@ snapshots: - supports-color - typescript - '@eslint/compat@1.3.1(eslint@9.35.0(jiti@1.21.7))': + '@eslint/compat@1.3.1(eslint@9.35.0(jiti@2.6.0))': optionalDependencies: - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) '@eslint/config-array@0.21.0': dependencies: @@ -10314,8 +10007,6 @@ snapshots: '@eslint/js@9.35.0': {} - '@eslint/js@9.36.0': {} - '@eslint/markdown@7.1.0': dependencies: '@eslint/core': 0.15.1 @@ -10341,16 +10032,10 @@ snapshots: '@eslint/core': 0.15.2 levn: 0.4.1 - '@faker-js/faker@9.9.0': {} - '@floating-ui/core@1.7.2': dependencies: '@floating-ui/utils': 0.2.10 - '@floating-ui/dom@1.1.1': - dependencies: - '@floating-ui/core': 1.7.2 - '@floating-ui/dom@1.7.2': dependencies: '@floating-ui/core': 1.7.2 @@ -10568,7 +10253,7 @@ snapshots: '@img/sharp-wasm32@0.33.5': dependencies: - '@emnapi/runtime': 1.4.4 + '@emnapi/runtime': 1.5.0 optional: true '@img/sharp-wasm32@0.34.3': @@ -10802,7 +10487,8 @@ snapshots: '@jridgewell/trace-mapping@0.3.9': dependencies: '@jridgewell/resolve-uri': 3.1.2 - '@jridgewell/sourcemap-codec': 1.5.4 + '@jridgewell/sourcemap-codec': 1.5.5 + optional: true '@lexical/clipboard@0.30.0': dependencies: @@ -11036,11 +10722,11 @@ snapshots: react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - '@napi-rs/wasm-runtime@0.2.12': + '@napi-rs/wasm-runtime@1.0.5': dependencies: - '@emnapi/core': 1.4.4 - '@emnapi/runtime': 1.4.4 - '@tybys/wasm-util': 0.10.0 + '@emnapi/core': 1.5.0 + '@emnapi/runtime': 1.5.0 + '@tybys/wasm-util': 0.10.1 optional: true '@next/bundle-analyzer@15.5.3': @@ -11103,38 +10789,16 @@ snapshots: dependencies: '@nolyfill/shared': 1.0.44 - '@nolyfill/array.prototype.findlast@1.0.44': - dependencies: - '@nolyfill/shared': 1.0.44 - - '@nolyfill/array.prototype.findlastindex@1.0.44': - dependencies: - '@nolyfill/shared': 1.0.44 - '@nolyfill/array.prototype.flat@1.0.44': dependencies: '@nolyfill/shared': 1.0.44 - '@nolyfill/array.prototype.flatmap@1.0.44': - dependencies: - '@nolyfill/shared': 1.0.44 - - '@nolyfill/array.prototype.tosorted@1.0.44': - dependencies: - '@nolyfill/shared': 1.0.44 - '@nolyfill/assert@1.0.26': dependencies: '@nolyfill/is-nan': 1.0.24 '@nolyfill/object-is': 1.0.24 '@nolyfill/object.assign': 1.0.24 - '@nolyfill/es-iterator-helpers@1.0.21': - dependencies: - '@nolyfill/shared': 1.0.21 - - '@nolyfill/hasown@1.0.44': {} - '@nolyfill/is-arguments@1.0.44': {} '@nolyfill/is-core-module@1.0.39': {} @@ -11163,52 +10827,24 @@ snapshots: dependencies: '@nolyfill/shared': 1.0.44 - '@nolyfill/object.entries@1.0.44': - dependencies: - '@nolyfill/shared': 1.0.44 - - '@nolyfill/object.fromentries@1.0.44': - dependencies: - '@nolyfill/shared': 1.0.44 - - '@nolyfill/object.groupby@1.0.44': - dependencies: - '@nolyfill/shared': 1.0.44 - '@nolyfill/object.values@1.0.44': dependencies: '@nolyfill/shared': 1.0.44 '@nolyfill/safe-buffer@1.0.44': {} - '@nolyfill/safe-regex-test@1.0.44': {} - '@nolyfill/safer-buffer@1.0.44': {} - '@nolyfill/shared@1.0.21': {} - '@nolyfill/shared@1.0.24': {} '@nolyfill/shared@1.0.44': {} 
'@nolyfill/side-channel@1.0.44': {} - '@nolyfill/string.prototype.includes@1.0.44': - dependencies: - '@nolyfill/shared': 1.0.44 - '@nolyfill/string.prototype.matchall@1.0.44': dependencies: '@nolyfill/shared': 1.0.44 - '@nolyfill/string.prototype.repeat@1.0.44': - dependencies: - '@nolyfill/shared': 1.0.44 - - '@nolyfill/string.prototype.trimend@1.0.44': - dependencies: - '@nolyfill/shared': 1.0.44 - '@nolyfill/typed-array-buffer@1.0.44': dependencies: '@nolyfill/shared': 1.0.44 @@ -11258,6 +10894,65 @@ snapshots: dependencies: '@octokit/openapi-types': 25.1.0 + '@oxc-resolver/binding-android-arm-eabi@11.8.4': + optional: true + + '@oxc-resolver/binding-android-arm64@11.8.4': + optional: true + + '@oxc-resolver/binding-darwin-arm64@11.8.4': + optional: true + + '@oxc-resolver/binding-darwin-x64@11.8.4': + optional: true + + '@oxc-resolver/binding-freebsd-x64@11.8.4': + optional: true + + '@oxc-resolver/binding-linux-arm-gnueabihf@11.8.4': + optional: true + + '@oxc-resolver/binding-linux-arm-musleabihf@11.8.4': + optional: true + + '@oxc-resolver/binding-linux-arm64-gnu@11.8.4': + optional: true + + '@oxc-resolver/binding-linux-arm64-musl@11.8.4': + optional: true + + '@oxc-resolver/binding-linux-ppc64-gnu@11.8.4': + optional: true + + '@oxc-resolver/binding-linux-riscv64-gnu@11.8.4': + optional: true + + '@oxc-resolver/binding-linux-riscv64-musl@11.8.4': + optional: true + + '@oxc-resolver/binding-linux-s390x-gnu@11.8.4': + optional: true + + '@oxc-resolver/binding-linux-x64-gnu@11.8.4': + optional: true + + '@oxc-resolver/binding-linux-x64-musl@11.8.4': + optional: true + + '@oxc-resolver/binding-wasm32-wasi@11.8.4': + dependencies: + '@napi-rs/wasm-runtime': 1.0.5 + optional: true + + '@oxc-resolver/binding-win32-arm64-msvc@11.8.4': + optional: true + + '@oxc-resolver/binding-win32-ia32-msvc@11.8.4': + optional: true + + '@oxc-resolver/binding-win32-x64-msvc@11.8.4': + optional: true + '@parcel/watcher-android-arm64@2.5.1': optional: true @@ -11651,10 +11346,6 @@ snapshots: picomatch: 2.3.1 rollup: 2.79.2 - '@rtsao/scc@1.1.0': {} - - '@rushstack/eslint-patch@1.12.0': {} - '@sentry-internal/browser-utils@8.55.0': dependencies: '@sentry/core': 8.55.0 @@ -11690,10 +11381,6 @@ snapshots: hoist-non-react-statics: 3.3.2 react: 19.1.1 - '@sentry/utils@8.55.0': - dependencies: - '@sentry/core': 8.55.0 - '@sinclair/typebox@0.27.8': {} '@sindresorhus/is@4.6.0': {} @@ -11821,16 +11508,6 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@storybook/blocks@8.5.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@8.5.0)': - dependencies: - '@storybook/csf': 0.1.12 - '@storybook/icons': 1.4.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - storybook: 8.5.0 - ts-dedent: 2.2.0 - optionalDependencies: - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - '@storybook/builder-webpack5@8.5.0(esbuild@0.25.0)(storybook@8.5.0)(typescript@5.8.3)(uglify-js@3.19.3)': dependencies: '@storybook/core-webpack': 8.5.0(storybook@8.5.0) @@ -11914,11 +11591,6 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@storybook/icons@1.4.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - '@storybook/instrumenter@8.5.0(storybook@8.5.0)': dependencies: '@storybook/global': 5.0.0 @@ -12081,11 +11753,11 @@ snapshots: dependencies: storybook: 8.5.0 - '@stylistic/eslint-plugin@5.2.2(eslint@9.35.0(jiti@1.21.7))': + '@stylistic/eslint-plugin@5.2.2(eslint@9.35.0(jiti@2.6.0))': dependencies: - '@eslint-community/eslint-utils': 
4.7.0(eslint@9.35.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@2.6.0)) '@typescript-eslint/types': 8.38.0 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) eslint-visitor-keys: 4.2.1 espree: 10.4.0 estraverse: 5.3.0 @@ -12210,15 +11882,19 @@ snapshots: dependencies: '@testing-library/dom': 10.4.0 - '@tsconfig/node10@1.0.11': {} + '@tsconfig/node10@1.0.11': + optional: true - '@tsconfig/node12@1.0.11': {} + '@tsconfig/node12@1.0.11': + optional: true - '@tsconfig/node14@1.0.3': {} + '@tsconfig/node14@1.0.3': + optional: true - '@tsconfig/node16@1.0.4': {} + '@tsconfig/node16@1.0.4': + optional: true - '@tybys/wasm-util@0.10.0': + '@tybys/wasm-util@0.10.1': dependencies: tslib: 2.8.1 optional: true @@ -12253,8 +11929,6 @@ snapshots: '@types/node': 18.15.0 '@types/responselike': 1.0.3 - '@types/crypto-js@4.2.2': {} - '@types/d3-array@3.2.1': {} '@types/d3-axis@3.0.6': @@ -12440,8 +12114,6 @@ snapshots: '@types/json-schema@7.0.15': {} - '@types/json5@0.0.29': {} - '@types/katex@0.16.7': {} '@types/keyv@3.1.4': @@ -12490,11 +12162,6 @@ snapshots: dependencies: '@types/react': 19.1.11 - '@types/react-window-infinite-loader@1.0.9': - dependencies: - '@types/react': 19.1.11 - '@types/react-window': 1.8.8 - '@types/react-window@1.8.8': dependencies: '@types/react': 19.1.11 @@ -12503,8 +12170,6 @@ snapshots: dependencies: csstype: 3.1.3 - '@types/recordrtc@5.6.14': {} - '@types/resolve@1.17.1': dependencies: '@types/node': 18.15.0 @@ -12537,15 +12202,15 @@ snapshots: dependencies: '@types/yargs-parser': 21.0.3 - '@typescript-eslint/eslint-plugin@8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@typescript-eslint/eslint-plugin@8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3))(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: '@eslint-community/regexpp': 4.12.1 - '@typescript-eslint/parser': 8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/parser': 8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/scope-manager': 8.38.0 - '@typescript-eslint/type-utils': 8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/utils': 8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/type-utils': 8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@typescript-eslint/utils': 8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/visitor-keys': 8.38.0 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) graphemer: 1.4.0 ignore: 7.0.5 natural-compare: 1.4.0 @@ -12554,14 +12219,14 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: '@typescript-eslint/scope-manager': 8.38.0 '@typescript-eslint/types': 8.38.0 '@typescript-eslint/typescript-estree': 8.38.0(typescript@5.8.3) '@typescript-eslint/visitor-keys': 8.38.0 debug: 4.4.1 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) typescript: 5.8.3 transitivePeerDependencies: - supports-color @@ -12620,25 +12285,25 @@ snapshots: dependencies: typescript: 5.8.3 - '@typescript-eslint/type-utils@8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@typescript-eslint/type-utils@8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: '@typescript-eslint/types': 8.37.0 
'@typescript-eslint/typescript-estree': 8.37.0(typescript@5.8.3) - '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) debug: 4.4.1 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) ts-api-utils: 2.1.0(typescript@5.8.3) typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/type-utils@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@typescript-eslint/type-utils@8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: '@typescript-eslint/types': 8.38.0 '@typescript-eslint/typescript-estree': 8.38.0(typescript@5.8.3) - '@typescript-eslint/utils': 8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) debug: 4.4.1 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) ts-api-utils: 2.1.0(typescript@5.8.3) typescript: 5.8.3 transitivePeerDependencies: @@ -12698,35 +12363,35 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@typescript-eslint/utils@8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: - '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@2.6.0)) '@typescript-eslint/scope-manager': 8.37.0 '@typescript-eslint/types': 8.37.0 '@typescript-eslint/typescript-estree': 8.37.0(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@typescript-eslint/utils@8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: - '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@2.6.0)) '@typescript-eslint/scope-manager': 8.38.0 '@typescript-eslint/types': 8.38.0 '@typescript-eslint/typescript-estree': 8.38.0(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.44.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': + '@typescript-eslint/utils@8.44.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.35.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.9.0(eslint@9.35.0(jiti@2.6.0)) '@typescript-eslint/scope-manager': 8.44.0 '@typescript-eslint/types': 8.44.0 '@typescript-eslint/typescript-estree': 8.44.0(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) typescript: 5.8.3 transitivePeerDependencies: - supports-color @@ -12748,69 +12413,10 @@ snapshots: '@ungap/structured-clone@1.3.0': {} - '@unrs/resolver-binding-android-arm-eabi@1.11.1': - optional: true - - '@unrs/resolver-binding-android-arm64@1.11.1': - optional: true - - '@unrs/resolver-binding-darwin-arm64@1.11.1': - optional: true - - '@unrs/resolver-binding-darwin-x64@1.11.1': - optional: true - - '@unrs/resolver-binding-freebsd-x64@1.11.1': - optional: true - - '@unrs/resolver-binding-linux-arm-gnueabihf@1.11.1': - optional: true - - '@unrs/resolver-binding-linux-arm-musleabihf@1.11.1': - optional: true - - '@unrs/resolver-binding-linux-arm64-gnu@1.11.1': - optional: true - - '@unrs/resolver-binding-linux-arm64-musl@1.11.1': - optional: true - - 
'@unrs/resolver-binding-linux-ppc64-gnu@1.11.1': - optional: true - - '@unrs/resolver-binding-linux-riscv64-gnu@1.11.1': - optional: true - - '@unrs/resolver-binding-linux-riscv64-musl@1.11.1': - optional: true - - '@unrs/resolver-binding-linux-s390x-gnu@1.11.1': - optional: true - - '@unrs/resolver-binding-linux-x64-gnu@1.11.1': - optional: true - - '@unrs/resolver-binding-linux-x64-musl@1.11.1': - optional: true - - '@unrs/resolver-binding-wasm32-wasi@1.11.1': + '@vitest/eslint-plugin@1.3.4(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: - '@napi-rs/wasm-runtime': 0.2.12 - optional: true - - '@unrs/resolver-binding-win32-arm64-msvc@1.11.1': - optional: true - - '@unrs/resolver-binding-win32-ia32-msvc@1.11.1': - optional: true - - '@unrs/resolver-binding-win32-x64-msvc@1.11.1': - optional: true - - '@vitest/eslint-plugin@1.3.4(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3)': - dependencies: - '@typescript-eslint/utils': 8.44.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) + '@typescript-eslint/utils': 8.44.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint: 9.35.0(jiti@2.6.0) optionalDependencies: typescript: 5.8.3 transitivePeerDependencies: @@ -13080,7 +12686,8 @@ snapshots: readable-stream: 3.6.2 optional: true - arg@4.1.3: {} + arg@4.1.3: + optional: true arg@5.0.2: {} @@ -13116,8 +12723,6 @@ snapshots: assertion-error@2.0.1: {} - ast-types-flow@0.0.8: {} - ast-types@0.16.1: dependencies: tslib: 2.8.1 @@ -13138,10 +12743,6 @@ snapshots: postcss: 8.5.6 postcss-value-parser: 4.2.0 - axe-core@4.10.3: {} - - axobject-query@4.1.0: {} - babel-jest@29.7.0(@babel/core@7.28.3): dependencies: '@babel/core': 7.28.3 @@ -13155,12 +12756,6 @@ snapshots: transitivePeerDependencies: - supports-color - babel-loader@10.0.0(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)): - dependencies: - '@babel/core': 7.28.3 - find-up: 5.0.0 - webpack: 5.100.2(esbuild@0.25.0)(uglify-js@3.19.3) - babel-loader@8.4.1(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)): dependencies: '@babel/core': 7.28.3 @@ -13711,7 +13306,8 @@ snapshots: - supports-color - ts-node - create-require@1.1.1: {} + create-require@1.1.1: + optional: true cross-env@7.0.3: dependencies: @@ -13738,8 +13334,6 @@ snapshots: randombytes: 2.1.0 randomfill: 1.0.4 - crypto-js@4.2.0: {} - crypto-random-string@2.0.0: {} css-loader@6.11.0(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)): @@ -13955,16 +13549,10 @@ snapshots: d3: 7.9.0 lodash-es: 4.17.21 - damerau-levenshtein@1.0.8: {} - dayjs@1.11.13: {} debounce@1.2.1: {} - debug@3.2.7: - dependencies: - ms: 2.1.3 - debug@4.4.1: dependencies: ms: 2.1.3 @@ -14031,10 +13619,7 @@ snapshots: detect-libc@1.0.3: optional: true - detect-libc@2.0.4: {} - - detect-libc@2.1.0: - optional: true + detect-libc@2.1.0: {} detect-newline@3.1.0: {} @@ -14050,7 +13635,8 @@ snapshots: diff-sequences@29.6.3: {} - diff@4.0.2: {} + diff@4.0.2: + optional: true diffie-hellman@5.0.3: dependencies: @@ -14064,10 +13650,6 @@ snapshots: dlv@1.1.3: {} - doctrine@2.1.0: - dependencies: - esutils: 2.0.3 - doctrine@3.0.0: dependencies: esutils: 2.0.3 @@ -14149,8 +13731,6 @@ snapshots: emoji-regex@8.0.0: {} - emoji-regex@9.2.2: {} - emojis-list@3.0.0: {} end-of-stream@1.4.5: @@ -14247,150 +13827,67 @@ snapshots: escape-string-regexp@5.0.0: {} - eslint-compat-utils@0.5.1(eslint@9.35.0(jiti@1.21.7)): + eslint-compat-utils@0.5.1(eslint@9.35.0(jiti@2.6.0)): dependencies: - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) semver: 7.7.2 - 
eslint-compat-utils@0.6.5(eslint@9.35.0(jiti@1.21.7)): + eslint-compat-utils@0.6.5(eslint@9.35.0(jiti@2.6.0)): dependencies: - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) semver: 7.7.2 - eslint-config-flat-gitignore@2.1.0(eslint@9.35.0(jiti@1.21.7)): + eslint-config-flat-gitignore@2.1.0(eslint@9.35.0(jiti@2.6.0)): dependencies: - '@eslint/compat': 1.3.1(eslint@9.35.0(jiti@1.21.7)) - eslint: 9.35.0(jiti@1.21.7) - - eslint-config-next@15.5.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3): - dependencies: - '@next/eslint-plugin-next': 15.5.0 - '@rushstack/eslint-patch': 1.12.0 - '@typescript-eslint/eslint-plugin': 8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/parser': 8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) - eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-jsx-a11y: 6.10.2(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-react: 7.37.5(eslint@9.35.0(jiti@1.21.7)) - eslint-plugin-react-hooks: 5.2.0(eslint@9.35.0(jiti@1.21.7)) - optionalDependencies: - typescript: 5.8.3 - transitivePeerDependencies: - - eslint-import-resolver-webpack - - eslint-plugin-import-x - - supports-color + '@eslint/compat': 1.3.1(eslint@9.35.0(jiti@2.6.0)) + eslint: 9.35.0(jiti@2.6.0) eslint-flat-config-utils@2.1.0: dependencies: pathe: 2.0.3 - eslint-import-resolver-node@0.3.9: + eslint-json-compat-utils@0.2.1(eslint@9.35.0(jiti@2.6.0))(jsonc-eslint-parser@2.4.0): dependencies: - debug: 3.2.7 - is-core-module: '@nolyfill/is-core-module@1.0.39' - resolve: 1.22.10 - transitivePeerDependencies: - - supports-color - - eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0)(eslint@9.35.0(jiti@1.21.7)): - dependencies: - '@nolyfill/is-core-module': 1.0.39 - debug: 4.4.1 - eslint: 9.35.0(jiti@1.21.7) - get-tsconfig: 4.10.1 - is-bun-module: 2.0.0 - stable-hash: 0.0.5 - tinyglobby: 0.2.14 - unrs-resolver: 1.11.1 - optionalDependencies: - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.35.0(jiti@1.21.7)) - transitivePeerDependencies: - - supports-color - - eslint-json-compat-utils@0.2.1(eslint@9.35.0(jiti@1.21.7))(jsonc-eslint-parser@2.4.0): - dependencies: - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) esquery: 1.6.0 jsonc-eslint-parser: 2.4.0 - eslint-merge-processors@2.0.0(eslint@9.35.0(jiti@1.21.7)): + eslint-merge-processors@2.0.0(eslint@9.35.0(jiti@2.6.0)): dependencies: - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) - eslint-module-utils@2.12.1(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-antfu@3.1.1(eslint@9.35.0(jiti@2.6.0)): dependencies: - debug: 3.2.7 - optionalDependencies: - '@typescript-eslint/parser': 8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) - eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@9.35.0(jiti@1.21.7)) - transitivePeerDependencies: - - supports-color + eslint: 
9.35.0(jiti@2.6.0) - eslint-plugin-antfu@3.1.1(eslint@9.35.0(jiti@1.21.7)): - dependencies: - eslint: 9.35.0(jiti@1.21.7) - - eslint-plugin-command@3.3.1(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-command@3.3.1(eslint@9.35.0(jiti@2.6.0)): dependencies: '@es-joy/jsdoccomment': 0.50.2 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) - eslint-plugin-es-x@7.8.0(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-es-x@7.8.0(eslint@9.35.0(jiti@2.6.0)): dependencies: - '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@2.6.0)) '@eslint-community/regexpp': 4.12.1 - eslint: 9.35.0(jiti@1.21.7) - eslint-compat-utils: 0.5.1(eslint@9.35.0(jiti@1.21.7)) + eslint: 9.35.0(jiti@2.6.0) + eslint-compat-utils: 0.5.1(eslint@9.35.0(jiti@2.6.0)) - eslint-plugin-import-lite@0.3.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3): + eslint-plugin-import-lite@0.3.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3): dependencies: - '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@2.6.0)) '@typescript-eslint/types': 8.38.0 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) optionalDependencies: typescript: 5.8.3 - eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.35.0(jiti@1.21.7)): - dependencies: - '@rtsao/scc': 1.1.0 - array-includes: '@nolyfill/array-includes@1.0.44' - array.prototype.findlastindex: '@nolyfill/array.prototype.findlastindex@1.0.44' - array.prototype.flat: '@nolyfill/array.prototype.flat@1.0.44' - array.prototype.flatmap: '@nolyfill/array.prototype.flatmap@1.0.44' - debug: 3.2.7 - doctrine: 2.1.0 - eslint: 9.35.0(jiti@1.21.7) - eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.35.0(jiti@1.21.7)) - hasown: '@nolyfill/hasown@1.0.44' - is-core-module: '@nolyfill/is-core-module@1.0.39' - is-glob: 4.0.3 - minimatch: 3.1.2 - object.fromentries: '@nolyfill/object.fromentries@1.0.44' - object.groupby: '@nolyfill/object.groupby@1.0.44' - object.values: '@nolyfill/object.values@1.0.44' - semver: 6.3.1 - string.prototype.trimend: '@nolyfill/string.prototype.trimend@1.0.44' - tsconfig-paths: 3.15.0 - optionalDependencies: - '@typescript-eslint/parser': 8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - transitivePeerDependencies: - - eslint-import-resolver-typescript - - eslint-import-resolver-webpack - - supports-color - - eslint-plugin-jsdoc@51.4.1(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-jsdoc@51.4.1(eslint@9.35.0(jiti@2.6.0)): dependencies: '@es-joy/jsdoccomment': 0.52.0 are-docs-informative: 0.0.2 comment-parser: 1.4.1 debug: 4.4.1 escape-string-regexp: 4.0.0 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) espree: 10.4.0 esquery: 1.6.0 parse-imports-exports: 0.2.4 @@ -14399,12 +13896,12 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-jsonc@2.20.1(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-jsonc@2.20.1(eslint@9.35.0(jiti@2.6.0)): dependencies: - '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@1.21.7)) - eslint: 9.35.0(jiti@1.21.7) - eslint-compat-utils: 0.6.5(eslint@9.35.0(jiti@1.21.7)) - eslint-json-compat-utils: 0.2.1(eslint@9.35.0(jiti@1.21.7))(jsonc-eslint-parser@2.4.0) + 
'@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@2.6.0)) + eslint: 9.35.0(jiti@2.6.0) + eslint-compat-utils: 0.6.5(eslint@9.35.0(jiti@2.6.0)) + eslint-json-compat-utils: 0.2.1(eslint@9.35.0(jiti@2.6.0))(jsonc-eslint-parser@2.4.0) espree: 10.4.0 graphemer: 1.4.0 jsonc-eslint-parser: 2.4.0 @@ -14413,31 +13910,12 @@ snapshots: transitivePeerDependencies: - '@eslint/json' - eslint-plugin-jsx-a11y@6.10.2(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-n@17.21.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3): dependencies: - aria-query: 5.3.2 - array-includes: '@nolyfill/array-includes@1.0.44' - array.prototype.flatmap: '@nolyfill/array.prototype.flatmap@1.0.44' - ast-types-flow: 0.0.8 - axe-core: 4.10.3 - axobject-query: 4.1.0 - damerau-levenshtein: 1.0.8 - emoji-regex: 9.2.2 - eslint: 9.35.0(jiti@1.21.7) - hasown: '@nolyfill/hasown@1.0.44' - jsx-ast-utils: 3.3.5 - language-tags: 1.0.9 - minimatch: 3.1.2 - object.fromentries: '@nolyfill/object.fromentries@1.0.44' - safe-regex-test: '@nolyfill/safe-regex-test@1.0.44' - string.prototype.includes: '@nolyfill/string.prototype.includes@1.0.44' - - eslint-plugin-n@17.21.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3): - dependencies: - '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@2.6.0)) enhanced-resolve: 5.18.2 - eslint: 9.35.0(jiti@1.21.7) - eslint-plugin-es-x: 7.8.0(eslint@9.35.0(jiti@1.21.7)) + eslint: 9.35.0(jiti@2.6.0) + eslint-plugin-es-x: 7.8.0(eslint@9.35.0(jiti@2.6.0)) get-tsconfig: 4.10.1 globals: 15.15.0 ignore: 5.3.2 @@ -14453,19 +13931,19 @@ snapshots: dependencies: jsonc-parser: 3.3.1 - eslint-plugin-perfectionist@4.15.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3): + eslint-plugin-perfectionist@4.15.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3): dependencies: '@typescript-eslint/types': 8.38.0 - '@typescript-eslint/utils': 8.44.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) + '@typescript-eslint/utils': 8.44.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint: 9.35.0(jiti@2.6.0) natural-orderby: 5.0.0 transitivePeerDependencies: - supports-color - typescript - eslint-plugin-pnpm@1.1.0(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-pnpm@1.1.0(eslint@9.35.0(jiti@2.6.0)): dependencies: - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) find-up-simple: 1.0.1 jsonc-eslint-parser: 2.4.0 pathe: 2.0.3 @@ -14473,19 +13951,19 @@ snapshots: tinyglobby: 0.2.14 yaml-eslint-parser: 1.3.0 - eslint-plugin-react-debug@1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3): + eslint-plugin-react-debug@1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3): dependencies: - '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/core': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/core': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@eslint-react/eff': 1.52.3 - '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/scope-manager': 8.37.0 - '@typescript-eslint/type-utils': 
8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/types': 8.37.0 - '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) + '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint: 9.35.0(jiti@2.6.0) string-ts: 2.2.1 ts-pattern: 5.7.1 optionalDependencies: @@ -14493,19 +13971,19 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-react-dom@1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3): + eslint-plugin-react-dom@1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3): dependencies: - '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/core': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/core': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@eslint-react/eff': 1.52.3 - '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/scope-manager': 8.37.0 '@typescript-eslint/types': 8.37.0 - '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) compare-versions: 6.1.1 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) string-ts: 2.2.1 ts-pattern: 5.7.1 optionalDependencies: @@ -14513,19 +13991,19 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-react-hooks-extra@1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3): + eslint-plugin-react-hooks-extra@1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3): dependencies: - '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/core': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/core': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@eslint-react/eff': 1.52.3 - '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/scope-manager': 8.37.0 - '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/types': 8.37.0 - '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) + '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint: 9.35.0(jiti@2.6.0) string-ts: 2.2.1 ts-pattern: 5.7.1 optionalDependencies: @@ -14533,23 +14011,23 @@ snapshots: transitivePeerDependencies: - 
supports-color - eslint-plugin-react-hooks@5.2.0(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-react-hooks@5.2.0(eslint@9.35.0(jiti@2.6.0)): dependencies: - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) - eslint-plugin-react-naming-convention@1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3): + eslint-plugin-react-naming-convention@1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3): dependencies: - '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/core': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/core': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@eslint-react/eff': 1.52.3 - '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/scope-manager': 8.37.0 - '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/types': 8.37.0 - '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) + '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint: 9.35.0(jiti@2.6.0) string-ts: 2.2.1 ts-pattern: 5.7.1 optionalDependencies: @@ -14557,22 +14035,22 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-react-refresh@0.4.20(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-react-refresh@0.4.20(eslint@9.35.0(jiti@2.6.0)): dependencies: - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) - eslint-plugin-react-web-api@1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3): + eslint-plugin-react-web-api@1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3): dependencies: - '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/core': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/core': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@eslint-react/eff': 1.52.3 - '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/scope-manager': 8.37.0 '@typescript-eslint/types': 8.37.0 - '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) + '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint: 9.35.0(jiti@2.6.0) string-ts: 2.2.1 ts-pattern: 5.7.1 optionalDependencies: @@ -14580,21 +14058,21 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-react-x@1.52.3(eslint@9.35.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3): 
+ eslint-plugin-react-x@1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3): dependencies: - '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/core': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/ast': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/core': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@eslint-react/eff': 1.52.3 - '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint-react/kit': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/shared': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@eslint-react/var': 1.52.3(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/scope-manager': 8.37.0 - '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@typescript-eslint/types': 8.37.0 - '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) compare-versions: 6.1.1 - eslint: 9.35.0(jiti@1.21.7) - is-immutable-type: 5.0.1(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + eslint: 9.35.0(jiti@2.6.0) + is-immutable-type: 5.0.1(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) string-ts: 2.2.1 ts-pattern: 5.7.1 optionalDependencies: @@ -14603,45 +14081,23 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-react@7.37.5(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-regexp@2.9.0(eslint@9.35.0(jiti@2.6.0)): dependencies: - array-includes: '@nolyfill/array-includes@1.0.44' - array.prototype.findlast: '@nolyfill/array.prototype.findlast@1.0.44' - array.prototype.flatmap: '@nolyfill/array.prototype.flatmap@1.0.44' - array.prototype.tosorted: '@nolyfill/array.prototype.tosorted@1.0.44' - doctrine: 2.1.0 - es-iterator-helpers: '@nolyfill/es-iterator-helpers@1.0.21' - eslint: 9.35.0(jiti@1.21.7) - estraverse: 5.3.0 - hasown: '@nolyfill/hasown@1.0.44' - jsx-ast-utils: 3.3.5 - minimatch: 3.1.2 - object.entries: '@nolyfill/object.entries@1.0.44' - object.fromentries: '@nolyfill/object.fromentries@1.0.44' - object.values: '@nolyfill/object.values@1.0.44' - prop-types: 15.8.1 - resolve: 2.0.0-next.5 - semver: 6.3.1 - string.prototype.matchall: '@nolyfill/string.prototype.matchall@1.0.44' - string.prototype.repeat: '@nolyfill/string.prototype.repeat@1.0.44' - - eslint-plugin-regexp@2.9.0(eslint@9.35.0(jiti@1.21.7)): - dependencies: - '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@2.6.0)) '@eslint-community/regexpp': 4.12.1 comment-parser: 1.4.1 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) jsdoc-type-pratt-parser: 4.1.0 refa: 0.12.1 regexp-ast-analysis: 0.7.1 scslre: 0.3.0 - eslint-plugin-sonarjs@3.0.4(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-sonarjs@3.0.4(eslint@9.35.0(jiti@2.6.0)): dependencies: '@eslint-community/regexpp': 4.12.1 builtin-modules: 3.3.0 bytes: 3.1.2 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) functional-red-black-tree: 1.0.1 jsx-ast-utils: 3.3.5 lodash.merge: 4.6.2 @@ -14650,11 +14106,11 @@ snapshots: semver: 7.7.2 typescript: 5.8.3 - 
eslint-plugin-storybook@9.0.7(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3): + eslint-plugin-storybook@9.0.7(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3): dependencies: '@storybook/csf': 0.1.13 - '@typescript-eslint/utils': 8.44.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) + '@typescript-eslint/utils': 8.44.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint: 9.35.0(jiti@2.6.0) ts-dedent: 2.2.0 transitivePeerDependencies: - supports-color @@ -14666,26 +14122,26 @@ snapshots: postcss: 8.5.6 tailwindcss: 3.4.17(ts-node@10.9.2(@types/node@18.15.0)(typescript@5.8.3)) - eslint-plugin-toml@0.12.0(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-toml@0.12.0(eslint@9.35.0(jiti@2.6.0)): dependencies: debug: 4.4.1 - eslint: 9.35.0(jiti@1.21.7) - eslint-compat-utils: 0.6.5(eslint@9.35.0(jiti@1.21.7)) + eslint: 9.35.0(jiti@2.6.0) + eslint-compat-utils: 0.6.5(eslint@9.35.0(jiti@2.6.0)) lodash: 4.17.21 toml-eslint-parser: 0.10.0 transitivePeerDependencies: - supports-color - eslint-plugin-unicorn@60.0.0(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-unicorn@60.0.0(eslint@9.35.0(jiti@2.6.0)): dependencies: '@babel/helper-validator-identifier': 7.27.1 - '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@2.6.0)) '@eslint/plugin-kit': 0.3.4 change-case: 5.4.4 ci-info: 4.3.0 clean-regexp: 1.0.0 core-js-compat: 3.44.0 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) esquery: 1.6.0 find-up-simple: 1.0.1 globals: 16.3.0 @@ -14698,40 +14154,40 @@ snapshots: semver: 7.7.2 strip-indent: 4.0.0 - eslint-plugin-unused-imports@4.1.4(@typescript-eslint/eslint-plugin@8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-unused-imports@4.1.4(@typescript-eslint/eslint-plugin@8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3))(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3))(eslint@9.35.0(jiti@2.6.0)): dependencies: - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) optionalDependencies: - '@typescript-eslint/eslint-plugin': 8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/eslint-plugin': 8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3))(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) - eslint-plugin-vue@10.3.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.35.0(jiti@1.21.7))(vue-eslint-parser@10.2.0(eslint@9.35.0(jiti@1.21.7))): + eslint-plugin-vue@10.3.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3))(eslint@9.35.0(jiti@2.6.0))(vue-eslint-parser@10.2.0(eslint@9.35.0(jiti@2.6.0))): dependencies: - '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@1.21.7)) - eslint: 9.35.0(jiti@1.21.7) + '@eslint-community/eslint-utils': 4.7.0(eslint@9.35.0(jiti@2.6.0)) + eslint: 9.35.0(jiti@2.6.0) natural-compare: 1.4.0 nth-check: 2.1.1 postcss-selector-parser: 6.1.2 semver: 7.7.2 - vue-eslint-parser: 10.2.0(eslint@9.35.0(jiti@1.21.7)) + vue-eslint-parser: 10.2.0(eslint@9.35.0(jiti@2.6.0)) xml-name-validator: 4.0.0 optionalDependencies: - '@typescript-eslint/parser': 8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/parser': 8.38.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) - 
eslint-plugin-yml@1.18.0(eslint@9.35.0(jiti@1.21.7)): + eslint-plugin-yml@1.18.0(eslint@9.35.0(jiti@2.6.0)): dependencies: debug: 4.4.1 escape-string-regexp: 4.0.0 - eslint: 9.35.0(jiti@1.21.7) - eslint-compat-utils: 0.6.5(eslint@9.35.0(jiti@1.21.7)) + eslint: 9.35.0(jiti@2.6.0) + eslint-compat-utils: 0.6.5(eslint@9.35.0(jiti@2.6.0)) natural-compare: 1.4.0 yaml-eslint-parser: 1.3.0 transitivePeerDependencies: - supports-color - eslint-processor-vue-blocks@2.0.0(@vue/compiler-sfc@3.5.17)(eslint@9.35.0(jiti@1.21.7)): + eslint-processor-vue-blocks@2.0.0(@vue/compiler-sfc@3.5.17)(eslint@9.35.0(jiti@2.6.0)): dependencies: '@vue/compiler-sfc': 3.5.17 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) eslint-scope@5.1.1: dependencies: @@ -14747,9 +14203,9 @@ snapshots: eslint-visitor-keys@4.2.1: {} - eslint@9.35.0(jiti@1.21.7): + eslint@9.35.0(jiti@2.6.0): dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.35.0(jiti@1.21.7)) + '@eslint-community/eslint-utils': 4.9.0(eslint@9.35.0(jiti@2.6.0)) '@eslint-community/regexpp': 4.12.1 '@eslint/config-array': 0.21.0 '@eslint/config-helpers': 0.3.1 @@ -14785,7 +14241,7 @@ snapshots: natural-compare: 1.4.0 optionator: 0.9.4 optionalDependencies: - jiti: 1.21.7 + jiti: 2.6.0 transitivePeerDependencies: - supports-color @@ -14947,6 +14403,10 @@ snapshots: dependencies: bser: 2.1.1 + fd-package-json@2.0.0: + dependencies: + walk-up-path: 4.0.0 + fdir@6.4.6(picomatch@4.0.3): optionalDependencies: picomatch: 4.0.3 @@ -15032,6 +14492,10 @@ snapshots: format@0.2.2: {} + formatly@0.3.0: + dependencies: + fd-package-json: 2.0.0 + fraction.js@4.3.7: {} fs-extra@10.1.0: @@ -15505,10 +14969,6 @@ snapshots: dependencies: builtin-modules: 5.0.0 - is-bun-module@2.0.0: - dependencies: - semver: 7.7.2 - is-decimal@1.0.4: {} is-decimal@2.0.1: {} @@ -15535,10 +14995,10 @@ snapshots: is-hexadecimal@2.0.1: {} - is-immutable-type@5.0.1(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3): + is-immutable-type@5.0.1(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3): dependencies: - '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) + '@typescript-eslint/type-utils': 8.37.0(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + eslint: 9.35.0(jiti@2.6.0) ts-api-utils: 2.1.0(typescript@5.8.3) ts-declaration-location: 1.0.7(typescript@5.8.3) typescript: 5.8.3 @@ -15955,6 +15415,8 @@ snapshots: jiti@1.21.7: {} + jiti@2.6.0: {} + js-audio-recorder@1.0.7: {} js-cookie@3.0.5: {} @@ -15988,10 +15450,6 @@ snapshots: json-stable-stringify-without-jsonify@1.0.1: {} - json5@1.0.2: - dependencies: - minimist: 1.2.8 - json5@2.2.3: {} jsonc-eslint-parser@2.4.0: @@ -16020,8 +15478,6 @@ snapshots: object.assign: '@nolyfill/object.assign@1.0.44' object.values: '@nolyfill/object.values@1.0.44' - jwt-decode@4.0.0: {} - katex@0.16.22: dependencies: commander: 8.3.0 @@ -16034,6 +15490,23 @@ snapshots: kleur@3.0.3: {} + knip@5.64.1(@types/node@18.15.0)(typescript@5.8.3): + dependencies: + '@nodelib/fs.walk': 1.2.8 + '@types/node': 18.15.0 + fast-glob: 3.3.3 + formatly: 0.3.0 + jiti: 2.6.0 + js-yaml: 4.1.0 + minimist: 1.2.8 + oxc-resolver: 11.8.4 + picocolors: 1.1.1 + picomatch: 4.0.3 + smol-toml: 1.4.2 + strip-json-comments: 5.0.2 + typescript: 5.8.3 + zod: 4.1.11 + kolorist@1.8.0: {} ky@1.8.2: {} @@ -16050,12 +15523,6 @@ snapshots: vscode-languageserver-textdocument: 1.0.12 vscode-uri: 3.0.8 - language-subtag-registry@0.3.23: {} - - language-tags@1.0.9: - dependencies: - language-subtag-registry: 0.3.23 - launch-ide@1.2.0: 
dependencies: chalk: 4.1.2 @@ -16219,7 +15686,8 @@ snapshots: dependencies: semver: 7.7.2 - make-error@1.3.6: {} + make-error@1.3.6: + optional: true makeerror@1.0.12: dependencies: @@ -17036,6 +16504,30 @@ snapshots: os-browserify@0.3.0: {} + oxc-resolver@11.8.4: + dependencies: + napi-postinstall: 0.3.0 + optionalDependencies: + '@oxc-resolver/binding-android-arm-eabi': 11.8.4 + '@oxc-resolver/binding-android-arm64': 11.8.4 + '@oxc-resolver/binding-darwin-arm64': 11.8.4 + '@oxc-resolver/binding-darwin-x64': 11.8.4 + '@oxc-resolver/binding-freebsd-x64': 11.8.4 + '@oxc-resolver/binding-linux-arm-gnueabihf': 11.8.4 + '@oxc-resolver/binding-linux-arm-musleabihf': 11.8.4 + '@oxc-resolver/binding-linux-arm64-gnu': 11.8.4 + '@oxc-resolver/binding-linux-arm64-musl': 11.8.4 + '@oxc-resolver/binding-linux-ppc64-gnu': 11.8.4 + '@oxc-resolver/binding-linux-riscv64-gnu': 11.8.4 + '@oxc-resolver/binding-linux-riscv64-musl': 11.8.4 + '@oxc-resolver/binding-linux-s390x-gnu': 11.8.4 + '@oxc-resolver/binding-linux-x64-gnu': 11.8.4 + '@oxc-resolver/binding-linux-x64-musl': 11.8.4 + '@oxc-resolver/binding-wasm32-wasi': 11.8.4 + '@oxc-resolver/binding-win32-arm64-msvc': 11.8.4 + '@oxc-resolver/binding-win32-ia32-msvc': 11.8.4 + '@oxc-resolver/binding-win32-x64-msvc': 11.8.4 + p-cancelable@2.1.1: {} p-limit@2.3.0: @@ -17511,18 +17003,8 @@ snapshots: '@babel/runtime': 7.27.6 react: 19.1.1 - react-error-boundary@4.1.2(react@19.1.1): - dependencies: - '@babel/runtime': 7.27.6 - react: 19.1.1 - react-fast-compare@3.2.2: {} - react-headless-pagination@1.1.6(react@19.1.1): - dependencies: - clsx: 2.1.1 - react: 19.1.1 - react-hook-form@7.60.0(react@19.1.1): dependencies: react: 19.1.1 @@ -17542,11 +17024,6 @@ snapshots: react-dom: 19.1.1(react@19.1.1) typescript: 5.8.3 - react-infinite-scroll-component@6.1.0(react@19.1.1): - dependencies: - react: 19.1.1 - throttle-debounce: 2.3.0 - react-is@16.13.1: {} react-is@17.0.2: {} @@ -17662,18 +17139,6 @@ snapshots: transitivePeerDependencies: - '@types/react' - react-tooltip@5.8.3(react-dom@19.1.1(react@19.1.1))(react@19.1.1): - dependencies: - '@floating-ui/dom': 1.1.1 - classnames: 2.5.1 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - - react-window-infinite-loader@1.0.10(react-dom@19.1.1(react@19.1.1))(react@19.1.1): - dependencies: - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - react-window@1.8.11(react-dom@19.1.1(react@19.1.1))(react@19.1.1): dependencies: '@babel/runtime': 7.27.6 @@ -17773,8 +17238,6 @@ snapshots: unified: 11.0.5 vfile: 6.0.3 - recordrtc@5.6.2: {} - redent@3.0.0: dependencies: indent-string: 4.0.0 @@ -17944,12 +17407,6 @@ snapshots: path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 - resolve@2.0.0-next.5: - dependencies: - is-core-module: '@nolyfill/is-core-module@1.0.39' - path-parse: 1.0.7 - supports-preserve-symlinks-flag: 1.0.0 - responselike@2.0.1: dependencies: lowercase-keys: 2.0.0 @@ -18068,8 +17525,6 @@ snapshots: dependencies: randombytes: 2.1.0 - server-only@0.0.1: {} - set-blocking@2.0.0: optional: true @@ -18084,7 +17539,7 @@ snapshots: sharp@0.33.5: dependencies: color: 4.2.3 - detect-libc: 2.0.4 + detect-libc: 2.1.0 semver: 7.7.2 optionalDependencies: '@img/sharp-darwin-arm64': 0.33.5 @@ -18110,7 +17565,7 @@ snapshots: sharp@0.34.3: dependencies: color: 4.2.3 - detect-libc: 2.0.4 + detect-libc: 2.1.0 semver: 7.7.2 optionalDependencies: '@img/sharp-darwin-arm64': 0.34.3 @@ -18137,8 +17592,6 @@ snapshots: '@img/sharp-win32-x64': 0.34.3 optional: true - shave@5.0.4: {} - shebang-command@2.0.0: dependencies: shebang-regex: 
3.0.0 @@ -18185,6 +17638,8 @@ snapshots: ansi-styles: 6.2.1 is-fullwidth-code-point: 5.0.0 + smol-toml@1.4.2: {} + sortablejs@1.15.6: {} source-list-map@2.0.1: {} @@ -18226,8 +17681,6 @@ snapshots: sprintf-js@1.0.3: {} - stable-hash@0.0.5: {} - stack-utils@2.0.6: dependencies: escape-string-regexp: 2.0.0 @@ -18320,6 +17773,8 @@ snapshots: strip-json-comments@3.1.1: {} + strip-json-comments@5.0.2: {} + style-loader@3.3.4(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)): dependencies: webpack: 5.100.2(esbuild@0.25.0)(uglify-js@3.19.3) @@ -18463,8 +17918,6 @@ snapshots: dependencies: any-promise: 1.3.0 - throttle-debounce@2.3.0: {} - timers-browserify@2.0.12: dependencies: setimmediate: 1.0.5 @@ -18553,6 +18006,7 @@ snapshots: typescript: 5.8.3 v8-compile-cache-lib: 3.0.1 yn: 3.1.1 + optional: true ts-pattern@5.7.1: {} @@ -18567,13 +18021,6 @@ snapshots: tapable: 2.2.2 tsconfig-paths: 4.2.0 - tsconfig-paths@3.15.0: - dependencies: - '@types/json5': 0.0.29 - json5: 1.0.2 - minimist: 1.2.8 - strip-bom: 3.0.0 - tsconfig-paths@4.2.0: dependencies: json5: 2.2.3 @@ -18602,17 +18049,6 @@ snapshots: type-fest@2.19.0: {} - typescript-eslint@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3): - dependencies: - '@typescript-eslint/eslint-plugin': 8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/parser': 8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/typescript-estree': 8.38.0(typescript@5.8.3) - '@typescript-eslint/utils': 8.38.0(eslint@9.35.0(jiti@1.21.7))(typescript@5.8.3) - eslint: 9.35.0(jiti@1.21.7) - typescript: 5.8.3 - transitivePeerDependencies: - - supports-color - typescript@5.8.3: {} ufo@1.6.1: {} @@ -18690,30 +18126,6 @@ snapshots: acorn: 8.15.0 webpack-virtual-modules: 0.6.2 - unrs-resolver@1.11.1: - dependencies: - napi-postinstall: 0.3.0 - optionalDependencies: - '@unrs/resolver-binding-android-arm-eabi': 1.11.1 - '@unrs/resolver-binding-android-arm64': 1.11.1 - '@unrs/resolver-binding-darwin-arm64': 1.11.1 - '@unrs/resolver-binding-darwin-x64': 1.11.1 - '@unrs/resolver-binding-freebsd-x64': 1.11.1 - '@unrs/resolver-binding-linux-arm-gnueabihf': 1.11.1 - '@unrs/resolver-binding-linux-arm-musleabihf': 1.11.1 - '@unrs/resolver-binding-linux-arm64-gnu': 1.11.1 - '@unrs/resolver-binding-linux-arm64-musl': 1.11.1 - '@unrs/resolver-binding-linux-ppc64-gnu': 1.11.1 - '@unrs/resolver-binding-linux-riscv64-gnu': 1.11.1 - '@unrs/resolver-binding-linux-riscv64-musl': 1.11.1 - '@unrs/resolver-binding-linux-s390x-gnu': 1.11.1 - '@unrs/resolver-binding-linux-x64-gnu': 1.11.1 - '@unrs/resolver-binding-linux-x64-musl': 1.11.1 - '@unrs/resolver-binding-wasm32-wasi': 1.11.1 - '@unrs/resolver-binding-win32-arm64-msvc': 1.11.1 - '@unrs/resolver-binding-win32-ia32-msvc': 1.11.1 - '@unrs/resolver-binding-win32-x64-msvc': 1.11.1 - upath@1.2.0: {} update-browserslist-db@1.1.3(browserslist@4.25.1): @@ -18794,7 +18206,8 @@ snapshots: uuid@9.0.1: {} - v8-compile-cache-lib@3.0.1: {} + v8-compile-cache-lib@3.0.1: + optional: true v8-to-istanbul@9.3.0: dependencies: @@ -18838,10 +18251,10 @@ snapshots: vscode-uri@3.0.8: {} - vue-eslint-parser@10.2.0(eslint@9.35.0(jiti@1.21.7)): + vue-eslint-parser@10.2.0(eslint@9.35.0(jiti@2.6.0)): dependencies: debug: 4.4.1 - eslint: 9.35.0(jiti@1.21.7) + eslint: 9.35.0(jiti@2.6.0) eslint-scope: 8.4.0 eslint-visitor-keys: 4.2.1 espree: 10.4.0 @@ -18850,6 +18263,8 @@ snapshots: transitivePeerDependencies: - supports-color + walk-up-path@4.0.0: {} + 
walker@1.0.8: dependencies: makeerror: 1.0.12 @@ -19159,7 +18574,8 @@ snapshots: dependencies: lib0: 0.2.114 - yn@3.1.1: {} + yn@3.1.1: + optional: true yocto-queue@0.1.0: {} @@ -19169,6 +18585,8 @@ snapshots: zod@4.0.5: {} + zod@4.1.11: {} + zrender@5.6.1: dependencies: tslib: 2.3.0 From ee48ca7671e56ccccb61564766a9dae51d562e08 Mon Sep 17 00:00:00 2001 From: hjlarry <hjlarry@163.com> Date: Tue, 30 Sep 2025 15:23:43 +0800 Subject: [PATCH 106/126] fix default comment icon --- .../components/workflow/store/workflow/workflow-slice.ts | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/web/app/components/workflow/store/workflow/workflow-slice.ts b/web/app/components/workflow/store/workflow/workflow-slice.ts index 7204ddd9c3..f617c985a2 100644 --- a/web/app/components/workflow/store/workflow/workflow-slice.ts +++ b/web/app/components/workflow/store/workflow/workflow-slice.ts @@ -48,7 +48,13 @@ export const createWorkflowSlice: StateCreator<WorkflowSliceShape> = set => ({ setSelection: selection => set(() => ({ selection })), bundleNodeSize: null, setBundleNodeSize: bundleNodeSize => set(() => ({ bundleNodeSize })), - controlMode: localStorage.getItem('workflow-operation-mode') === 'pointer' ? 'pointer' : localStorage.getItem('workflow-operation-mode') === 'hand' ? 'hand' : 'comment', + controlMode: (() => { + const storedControlMode = localStorage.getItem('workflow-operation-mode') + if (storedControlMode === 'pointer' || storedControlMode === 'hand' || storedControlMode === 'comment') + return storedControlMode + + return 'pointer' + })(), setControlMode: (controlMode) => { set(() => ({ controlMode })) localStorage.setItem('workflow-operation-mode', controlMode) From d49f3327e4d82ee35ecb9756b732c86a723ae1c7 Mon Sep 17 00:00:00 2001 From: crazywoola <100913391+crazywoola@users.noreply.github.com> Date: Tue, 30 Sep 2025 15:30:27 +0800 Subject: [PATCH 107/126] fix: style with self-start (#26492) --- web/app/components/explore/app-list/index.tsx | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/web/app/components/explore/app-list/index.tsx b/web/app/components/explore/app-list/index.tsx index 79cbff81c8..252a102d80 100644 --- a/web/app/components/explore/app-list/index.tsx +++ b/web/app/components/explore/app-list/index.tsx @@ -152,23 +152,20 @@ const Apps = ({ <div className={cn( 'mt-6 flex items-center justify-between px-12', )}> - <> - <Category - list={categories} - value={currCategory} - onChange={setCurrCategory} - allCategoriesEn={allCategoriesEn} - /> - </> + <Category + list={categories} + value={currCategory} + onChange={setCurrCategory} + allCategoriesEn={allCategoriesEn} + /> <Input showLeftIcon showClearIcon - wrapperClassName='w-[200px]' + wrapperClassName='w-[200px] self-start' value={keywords} onChange={e => handleKeywordsChange(e.target.value)} onClear={() => handleKeywordsChange('')} /> - </div> <div className={cn( From c4884eb669added803303b4e95e408306ff04f23 Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Tue, 30 Sep 2025 16:35:10 +0900 Subject: [PATCH 108/126] add back babel/core (#26489) --- web/package.json | 4 +++- web/pnpm-lock.yaml | 14 ++++++++++---- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/web/package.json b/web/package.json index 2107aae6bc..4433d866d8 100644 --- a/web/package.json +++ b/web/package.json @@ -161,6 +161,7 @@ "@testing-library/dom": "^10.4.0", "@testing-library/jest-dom": "^6.8.0", "@testing-library/react": "^16.0.1", + "@babel/core": "^7.28.3", 
"@types/dagre": "^0.7.52", "@types/jest": "^29.5.13", "@types/js-cookie": "^3.0.6", @@ -199,7 +200,8 @@ "storybook": "8.5.0", "tailwindcss": "^3.4.14", "typescript": "^5.8.3", - "uglify-js": "^3.19.3" + "uglify-js": "^3.19.3", + "babel-loader": "^9.2.1" }, "resolutions": { "@types/react": "19.1.11", diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index 9112473adf..8c2e869c91 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -338,6 +338,9 @@ importers: '@antfu/eslint-config': specifier: ^5.0.0 version: 5.0.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@next/eslint-plugin-next@15.5.0)(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.35.0(jiti@2.6.0)))(eslint-plugin-react-refresh@0.4.20(eslint@9.35.0(jiti@2.6.0)))(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + '@babel/core': + specifier: ^7.28.3 + version: 7.28.3 '@chromatic-com/storybook': specifier: ^3.1.0 version: 3.2.7(react@19.1.1)(storybook@8.5.0) @@ -446,6 +449,9 @@ importers: autoprefixer: specifier: ^10.4.20 version: 10.4.21(postcss@8.5.6) + babel-loader: + specifier: ^9.2.1 + version: 9.2.1(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) bing-translate-api: specifier: ^4.0.2 version: 4.1.0 @@ -8787,12 +8793,12 @@ snapshots: '@babel/helper-compilation-targets': 7.27.2 '@babel/helper-module-transforms': 7.28.3(@babel/core@7.28.3) '@babel/helpers': 7.28.3 - '@babel/parser': 7.28.3 + '@babel/parser': 7.28.4 '@babel/template': 7.27.2 '@babel/traverse': 7.28.3 - '@babel/types': 7.28.2 + '@babel/types': 7.28.4 convert-source-map: 2.0.0 - debug: 4.4.1 + debug: 4.4.3 gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -8953,7 +8959,7 @@ snapshots: '@babel/helpers@7.28.3': dependencies: '@babel/template': 7.27.2 - '@babel/types': 7.28.2 + '@babel/types': 7.28.4 '@babel/parser@7.28.0': dependencies: From 7242a67f843635a1c23189934999fb5a47f0759d Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Tue, 30 Sep 2025 15:36:04 +0800 Subject: [PATCH 109/126] minor fix: improve check_upgradable_plugin_task.py (#26468) Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- api/schedule/check_upgradable_plugin_task.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/api/schedule/check_upgradable_plugin_task.py b/api/schedule/check_upgradable_plugin_task.py index 0712100c01..e91ce07be3 100644 --- a/api/schedule/check_upgradable_plugin_task.py +++ b/api/schedule/check_upgradable_plugin_task.py @@ -52,7 +52,8 @@ def check_upgradable_plugin_task(): strategy.include_plugins, ) - if batch_interval_time > 0.0001: # if lower than 1ms, skip + # Only sleep if batch_interval_time > 0.0001 AND current batch is not the last one + if batch_interval_time > 0.0001 and i + MAX_CONCURRENT_CHECK_TASKS < total_strategies: time.sleep(batch_interval_time) end_at = time.perf_counter() From decf0f3da0e3b3f0abc1abbf8daa5d1b3330f892 Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Tue, 30 Sep 2025 16:39:04 +0900 Subject: [PATCH 110/126] Fix: Remove workflow/nodes from pyright exclusion (#26461) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> --- api/core/workflow/nodes/http_request/executor.py | 5 ++--- 
api/core/workflow/nodes/http_request/node.py | 2 ++ api/core/workflow/nodes/if_else/if_else_node.py | 2 +- .../nodes/knowledge_retrieval/knowledge_retrieval_node.py | 2 +- api/core/workflow/nodes/list_operator/node.py | 2 ++ api/core/workflow/nodes/llm/file_saver.py | 4 ++-- api/core/workflow/nodes/llm/node.py | 3 ++- .../nodes/question_classifier/question_classifier_node.py | 6 +++--- api/pyrightconfig.json | 1 - 9 files changed, 15 insertions(+), 12 deletions(-) diff --git a/api/core/workflow/nodes/http_request/executor.py b/api/core/workflow/nodes/http_request/executor.py index c47ffb5ab0..d3d3571b44 100644 --- a/api/core/workflow/nodes/http_request/executor.py +++ b/api/core/workflow/nodes/http_request/executor.py @@ -87,7 +87,7 @@ class Executor: node_data.authorization.config.api_key ).text - self.url: str = node_data.url + self.url = node_data.url self.method = node_data.method self.auth = node_data.authorization self.timeout = timeout @@ -349,11 +349,10 @@ class Executor: "timeout": (self.timeout.connect, self.timeout.read, self.timeout.write), "ssl_verify": self.ssl_verify, "follow_redirects": True, - "max_retries": self.max_retries, } # request_args = {k: v for k, v in request_args.items() if v is not None} try: - response: httpx.Response = _METHOD_MAP[method_lc](**request_args) + response: httpx.Response = _METHOD_MAP[method_lc](**request_args, max_retries=self.max_retries) except (ssrf_proxy.MaxRetriesExceededError, httpx.RequestError) as e: raise HttpRequestNodeError(str(e)) from e # FIXME: fix type ignore, this maybe httpx type issue diff --git a/api/core/workflow/nodes/http_request/node.py b/api/core/workflow/nodes/http_request/node.py index 826820a8e3..20e1337ea7 100644 --- a/api/core/workflow/nodes/http_request/node.py +++ b/api/core/workflow/nodes/http_request/node.py @@ -165,6 +165,8 @@ class HttpRequestNode(Node): body_type = typed_node_data.body.type data = typed_node_data.body.data match body_type: + case "none": + pass case "binary": if len(data) != 1: raise RequestBodyError("invalid body data, should have only one item") diff --git a/api/core/workflow/nodes/if_else/if_else_node.py b/api/core/workflow/nodes/if_else/if_else_node.py index 075f6f8444..7e3b6ecc1a 100644 --- a/api/core/workflow/nodes/if_else/if_else_node.py +++ b/api/core/workflow/nodes/if_else/if_else_node.py @@ -83,7 +83,7 @@ class IfElseNode(Node): else: # TODO: Update database then remove this # Fallback to old structure if cases are not defined - input_conditions, group_result, final_result = _should_not_use_old_function( # ty: ignore [deprecated] + input_conditions, group_result, final_result = _should_not_use_old_function( # pyright: ignore [reportDeprecated] condition_processor=condition_processor, variable_pool=self.graph_runtime_state.variable_pool, conditions=self._node_data.conditions or [], diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 1afb2e05b9..b6128d3eab 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -107,7 +107,7 @@ class KnowledgeRetrievalNode(Node): graph_runtime_state=graph_runtime_state, ) # LLM file outputs, used for MultiModal outputs. 
- self._file_outputs: list[File] = [] + self._file_outputs = [] if llm_file_saver is None: llm_file_saver = FileSaverImpl( diff --git a/api/core/workflow/nodes/list_operator/node.py b/api/core/workflow/nodes/list_operator/node.py index 7a31d69221..3243b22d44 100644 --- a/api/core/workflow/nodes/list_operator/node.py +++ b/api/core/workflow/nodes/list_operator/node.py @@ -161,6 +161,8 @@ class ListOperatorNode(Node): elif isinstance(variable, ArrayFileSegment): if isinstance(condition.value, str): value = self.graph_runtime_state.variable_pool.convert_template(condition.value).text + elif isinstance(condition.value, bool): + raise ValueError(f"File filter expects a string value, got {type(condition.value)}") else: value = condition.value filter_func = _get_file_filter_func( diff --git a/api/core/workflow/nodes/llm/file_saver.py b/api/core/workflow/nodes/llm/file_saver.py index 81f2df0891..3f32fa894a 100644 --- a/api/core/workflow/nodes/llm/file_saver.py +++ b/api/core/workflow/nodes/llm/file_saver.py @@ -46,7 +46,7 @@ class LLMFileSaver(tp.Protocol): dot (`.`). For example, `.py` and `.tar.gz` are both valid values, while `py` and `tar.gz` are not. """ - pass + raise NotImplementedError() def save_remote_url(self, url: str, file_type: FileType) -> File: """save_remote_url saves the file from a remote url returned by LLM. @@ -56,7 +56,7 @@ class LLMFileSaver(tp.Protocol): :param url: the url of the file. :param file_type: the file type of the file, check `FileType` enum for reference. """ - pass + raise NotImplementedError() EngineFactory: tp.TypeAlias = tp.Callable[[], Engine] diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index 36183bf8db..4742476352 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -128,7 +128,7 @@ class LLMNode(Node): graph_runtime_state=graph_runtime_state, ) # LLM file outputs, used for MultiModal outputs. - self._file_outputs: list[File] = [] + self._file_outputs = [] if llm_file_saver is None: llm_file_saver = FileSaverImpl( @@ -166,6 +166,7 @@ class LLMNode(Node): node_inputs: dict[str, Any] = {} process_data: dict[str, Any] = {} result_text = "" + clean_text = "" usage = LLMUsage.empty_usage() finish_reason = None reasoning_content = None diff --git a/api/core/workflow/nodes/question_classifier/question_classifier_node.py b/api/core/workflow/nodes/question_classifier/question_classifier_node.py index 483cfff574..592a6566fd 100644 --- a/api/core/workflow/nodes/question_classifier/question_classifier_node.py +++ b/api/core/workflow/nodes/question_classifier/question_classifier_node.py @@ -68,7 +68,7 @@ class QuestionClassifierNode(Node): graph_runtime_state=graph_runtime_state, ) # LLM file outputs, used for MultiModal outputs. 
- self._file_outputs: list[File] = [] + self._file_outputs = [] if llm_file_saver is None: llm_file_saver = FileSaverImpl( @@ -111,9 +111,9 @@ class QuestionClassifierNode(Node): query = variable.value if variable else None variables = {"query": query} # fetch model config - model_instance, model_config = LLMNode._fetch_model_config( - node_data_model=node_data.model, + model_instance, model_config = llm_utils.fetch_model_config( tenant_id=self.tenant_id, + node_data_model=node_data.model, ) # fetch memory memory = llm_utils.fetch_memory( diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json index caa194c906..8c35dc7abb 100644 --- a/api/pyrightconfig.json +++ b/api/pyrightconfig.json @@ -6,7 +6,6 @@ "migrations/", "core/rag", "extensions", - "core/workflow/nodes", "core/app/app_config/easy_ui_based_app/dataset" ], "typeCheckingMode": "strict", From 19936d23d1ee7726156cba10e2b46fe0324e0bc6 Mon Sep 17 00:00:00 2001 From: -LAN- <laipz8200@outlook.com> Date: Tue, 30 Sep 2025 21:30:15 +0800 Subject: [PATCH 111/126] chore: remove pnpm build from dev-setup command (#26504) --- Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/Makefile b/Makefile index ea560c7157..19c398ec82 100644 --- a/Makefile +++ b/Makefile @@ -26,7 +26,6 @@ prepare-web: @echo "🌐 Setting up web environment..." @cp -n web/.env.example web/.env 2>/dev/null || echo "Web .env already exists" @cd web && pnpm install - @cd web && pnpm build @echo "✅ Web environment prepared (not started)" # Step 3: Prepare API environment From dd71625f52d2342bb69ea145df9c23231c00199d Mon Sep 17 00:00:00 2001 From: HyaCinth <88471803+HyaCiovo@users.noreply.github.com> Date: Tue, 30 Sep 2025 23:22:43 +0800 Subject: [PATCH 112/126] fix(datasets): Resolve issue where selected list is not updated when deleting a single file (#26502) --- web/app/components/datasets/documents/list.tsx | 2 ++ .../components/datasets/documents/operations.tsx | 15 +++++++++++---- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/web/app/components/datasets/documents/list.tsx b/web/app/components/datasets/documents/list.tsx index f850e1870a..9659925b3a 100644 --- a/web/app/components/datasets/documents/list.tsx +++ b/web/app/components/datasets/documents/list.tsx @@ -418,6 +418,8 @@ const DocumentList: FC<IDocumentListProps> = ({ </td> <td> <Operations + selectedIds={selectedIds} + onSelectedIdChange={onSelectedIdChange} embeddingAvailable={embeddingAvailable} datasetId={datasetId} detail={pick(doc, ['name', 'enabled', 'archived', 'id', 'data_source_type', 'doc_form', 'display_status'])} diff --git a/web/app/components/datasets/documents/operations.tsx b/web/app/components/datasets/documents/operations.tsx index 4c23b700db..74bf0f3179 100644 --- a/web/app/components/datasets/documents/operations.tsx +++ b/web/app/components/datasets/documents/operations.tsx @@ -35,7 +35,7 @@ import { } from '@remixicon/react' import CustomPopover from '../../base/popover' import s from './style.module.css' -import { DataSourceType } from '@/models/datasets' +import { DataSourceType, DocumentActionType } from '@/models/datasets' import Confirm from '../../base/confirm' import RenameModal from './rename-modal' @@ -50,6 +50,8 @@ type OperationsProps = { doc_form: string display_status?: string } + selectedIds?: string[] + onSelectedIdChange?: (ids: string[]) => void datasetId: string onUpdate: (operationName?: string) => void scene?: 'list' | 'detail' @@ -60,6 +62,8 @@ const Operations = ({ embeddingAvailable, datasetId, detail, + selectedIds, + onSelectedIdChange, 
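+  // optional selection state from the list view; pruned on delete in the handler below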
   onUpdate,
   scene = 'list',
   className = '',
 }: OperationsProps) => {
@@ -116,17 +120,20 @@ const Operations = ({
     const [e] = await asyncRunSafe<CommonResponse>(opApi({ datasetId, documentId: id }) as Promise<CommonResponse>)
     if (!e) {
       notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
+      // If it is a delete operation, we need to update the selectedIds state as well
+      if (selectedIds && onSelectedIdChange && operationName === DocumentActionType.delete)
+        onSelectedIdChange(selectedIds.filter(selectedId => selectedId !== id))
       onUpdate(operationName)
     }
     else {
       notify({ type: 'error', message: t('common.actionMsg.modifiedUnsuccessfully') })
     }
-    if (operationName === 'delete')
+    if (operationName === DocumentActionType.delete)
       setDeleting(false)
   }
   const { run: handleSwitch } = useDebounceFn((operationName: OperationName) => {
-    if (operationName === 'enable' && enabled)
+    if (operationName === DocumentActionType.enable && enabled)
       return
-    if (operationName === 'disable' && !enabled)
+    if (operationName === DocumentActionType.disable && !enabled)
       return
     onOperate(operationName)
   }, { wait: 500 })

From b80d55b7649015bd4f4984806583fd23c9372161 Mon Sep 17 00:00:00 2001
From: lyzno1 <92089059+lyzno1@users.noreply.github.com>
Date: Thu, 2 Oct 2025 18:08:57 +0800
Subject: [PATCH 113/126] fix: add missing key prop to TabPanel and initialize
 useRef with null (#26517)

---
 web/app/components/develop/code.tsx | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/web/app/components/develop/code.tsx b/web/app/components/develop/code.tsx
index eadc87a5ca..69d5624966 100644
--- a/web/app/components/develop/code.tsx
+++ b/web/app/components/develop/code.tsx
@@ -193,8 +193,8 @@ function CodeGroupPanels({ children, targetCode, ...props }: ICodeGroupPanelsPro
   if ((targetCode?.length ?? 
0) > 1) { return ( <TabPanels> - {targetCode!.map(code => ( - <TabPanel> + {targetCode!.map((code, index) => ( + <TabPanel key={code.title || code.tag || index}> <CodePanel {...props} targetCode={code} /> </TabPanel> ))} @@ -206,8 +206,8 @@ function CodeGroupPanels({ children, targetCode, ...props }: ICodeGroupPanelsPro } function usePreventLayoutShift() { - const positionRef = useRef<any>() - const rafRef = useRef<any>() + const positionRef = useRef<any>(null) + const rafRef = useRef<any>(null) useEffect(() => { return () => { From 7a5bb1cfac6314a4b2c679f32515ce985678d7b6 Mon Sep 17 00:00:00 2001 From: Radu Luncasu <radu.luncasu@gmail.com> Date: Thu, 2 Oct 2025 13:12:08 +0300 Subject: [PATCH 114/126] chore(devcontainer): update Python base image from bullseye to bookworm in Dockerfile (#26519) --- .devcontainer/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 3dd00ee4db..c03f281858 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,4 +1,4 @@ -FROM mcr.microsoft.com/devcontainers/python:3.12-bullseye +FROM mcr.microsoft.com/devcontainers/python:3.12-bookworm RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ && apt-get -y install libgmp-dev libmpfr-dev libmpc-dev From 196f69186594f716012abae56f96728968da28aa Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Thu, 2 Oct 2025 18:13:43 +0800 Subject: [PATCH 115/126] Feature add test containers enable segments to index task (#26539) Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../test_enable_segments_to_index_task.py | 505 ++++++++++++++++++ 1 file changed, 505 insertions(+) create mode 100644 api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py diff --git a/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py new file mode 100644 index 0000000000..0c03828ec5 --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py @@ -0,0 +1,505 @@ +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from core.rag.index_processor.constant.index_type import IndexType +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models.dataset import Dataset, Document, DocumentSegment +from tasks.enable_segments_to_index_task import enable_segments_to_index_task + + +class TestEnableSegmentsToIndexTask: + """Integration tests for enable_segments_to_index_task using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.enable_segments_to_index_task.IndexProcessorFactory") as mock_index_processor_factory, + ): + # Setup mock index processor + mock_processor = MagicMock() + mock_index_processor_factory.return_value.init_index_processor.return_value = mock_processor + + yield { + "index_processor_factory": mock_index_processor_factory, + "index_processor": mock_processor, + } + + def _create_test_dataset_and_document(self, 
db_session_with_containers, mock_external_service_dependencies): + """ + Helper method to create a test dataset and document for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + tuple: (dataset, document) - Created dataset and document instances + """ + fake = Faker() + + # Create account and tenant + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + db.session.add(account) + db.session.commit() + + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db.session.add(tenant) + db.session.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER.value, + current=True, + ) + db.session.add(join) + db.session.commit() + + # Create dataset + dataset = Dataset( + id=fake.uuid4(), + tenant_id=tenant.id, + name=fake.company(), + description=fake.text(max_nb_chars=100), + data_source_type="upload_file", + indexing_technique="high_quality", + created_by=account.id, + ) + db.session.add(dataset) + db.session.commit() + + # Create document + document = Document( + id=fake.uuid4(), + tenant_id=tenant.id, + dataset_id=dataset.id, + position=1, + data_source_type="upload_file", + batch="test_batch", + name=fake.file_name(), + created_from="upload_file", + created_by=account.id, + indexing_status="completed", + enabled=True, + doc_form=IndexType.PARAGRAPH_INDEX, + ) + db.session.add(document) + db.session.commit() + + # Refresh dataset to ensure doc_form property works correctly + db.session.refresh(dataset) + + return dataset, document + + def _create_test_segments( + self, db_session_with_containers, document, dataset, count=3, enabled=False, status="completed" + ): + """ + Helper method to create test document segments. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + document: Document instance + dataset: Dataset instance + count: Number of segments to create + enabled: Whether segments should be enabled + status: Status of the segments + + Returns: + list: List of created DocumentSegment instances + """ + fake = Faker() + segments = [] + + for i in range(count): + text = fake.text(max_nb_chars=200) + segment = DocumentSegment( + id=fake.uuid4(), + tenant_id=document.tenant_id, + dataset_id=dataset.id, + document_id=document.id, + position=i, + content=text, + word_count=len(text.split()), + tokens=len(text.split()) * 2, + index_node_id=f"node_{i}", + index_node_hash=f"hash_{i}", + enabled=enabled, + status=status, + created_by=document.created_by, + ) + db.session.add(segment) + segments.append(segment) + + db.session.commit() + return segments + + def test_enable_segments_to_index_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful segments indexing with paragraph index type. 
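+        The task is invoked exactly as in production, e.g.::
+
+            enable_segments_to_index_task(segment_ids, dataset.id, document.id)
+
+        Redis cache keys follow the `segment_{segment_id}_indexing` naming
+        convention and are pre-populated here to simulate indexing in progress.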
+ + This test verifies: + - Proper dataset and document retrieval from database + - Correct segment processing and document creation + - Index processor integration + - Database state updates + - Redis cache key deletion + """ + # Arrange: Create test data + dataset, document = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + segments = self._create_test_segments(db_session_with_containers, document, dataset) + + # Set up Redis cache keys to simulate indexing in progress + segment_ids = [segment.id for segment in segments] + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + redis_client.set(indexing_cache_key, "processing", ex=300) # 5 minutes expiry + + # Verify cache keys exist + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + assert redis_client.exists(indexing_cache_key) == 1 + + # Act: Execute the task + enable_segments_to_index_task(segment_ids, dataset.id, document.id) + + # Assert: Verify the expected outcomes + # Verify index processor was called correctly + mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.PARAGRAPH_INDEX) + mock_external_service_dependencies["index_processor"].load.assert_called_once() + + # Verify the load method was called with correct parameters + call_args = mock_external_service_dependencies["index_processor"].load.call_args + assert call_args is not None + documents = call_args[0][1] # Second argument should be documents list + assert len(documents) == 3 + + # Verify document structure + for i, doc in enumerate(documents): + assert doc.page_content == segments[i].content + assert doc.metadata["doc_id"] == segments[i].index_node_id + assert doc.metadata["doc_hash"] == segments[i].index_node_hash + assert doc.metadata["document_id"] == document.id + assert doc.metadata["dataset_id"] == dataset.id + + # Verify Redis cache keys were deleted + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + assert redis_client.exists(indexing_cache_key) == 0 + + def test_enable_segments_to_index_with_different_index_type( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test segments indexing with different index types. 
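+        Uses `IndexType.QA_INDEX` as the alternative type; the dataset is
+        refreshed after changing `document.doc_form` so that its `doc_form`
+        property reflects the update.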
+ + This test verifies: + - Proper handling of different index types + - Index processor factory integration + - Document processing with various configurations + - Redis cache key deletion + """ + # Arrange: Create test data with different index type + dataset, document = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + + # Update document to use different index type + document.doc_form = IndexType.QA_INDEX + db.session.commit() + + # Refresh dataset to ensure doc_form property reflects the updated document + db.session.refresh(dataset) + + # Create segments + segments = self._create_test_segments(db_session_with_containers, document, dataset) + + # Set up Redis cache keys + segment_ids = [segment.id for segment in segments] + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + redis_client.set(indexing_cache_key, "processing", ex=300) + + # Act: Execute the task + enable_segments_to_index_task(segment_ids, dataset.id, document.id) + + # Assert: Verify different index type handling + mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.QA_INDEX) + mock_external_service_dependencies["index_processor"].load.assert_called_once() + + # Verify the load method was called with correct parameters + call_args = mock_external_service_dependencies["index_processor"].load.call_args + assert call_args is not None + documents = call_args[0][1] # Second argument should be documents list + assert len(documents) == 3 + + # Verify Redis cache keys were deleted + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + assert redis_client.exists(indexing_cache_key) == 0 + + def test_enable_segments_to_index_dataset_not_found( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test handling of non-existent dataset. + + This test verifies: + - Proper error handling for missing datasets + - Early return without processing + - Database session cleanup + - No unnecessary index processor calls + """ + # Arrange: Use non-existent dataset ID + fake = Faker() + non_existent_dataset_id = fake.uuid4() + non_existent_document_id = fake.uuid4() + segment_ids = [fake.uuid4()] + + # Act: Execute the task with non-existent dataset + enable_segments_to_index_task(segment_ids, non_existent_dataset_id, non_existent_document_id) + + # Assert: Verify no processing occurred + mock_external_service_dependencies["index_processor_factory"].assert_not_called() + mock_external_service_dependencies["index_processor"].load.assert_not_called() + + def test_enable_segments_to_index_document_not_found( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test handling of non-existent document. 
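+        A valid dataset is created first, so only the document lookup fails.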
+ + This test verifies: + - Proper error handling for missing documents + - Early return without processing + - Database session cleanup + - No unnecessary index processor calls + """ + # Arrange: Create dataset but use non-existent document ID + dataset, _ = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + fake = Faker() + non_existent_document_id = fake.uuid4() + segment_ids = [fake.uuid4()] + + # Act: Execute the task with non-existent document + enable_segments_to_index_task(segment_ids, dataset.id, non_existent_document_id) + + # Assert: Verify no processing occurred + mock_external_service_dependencies["index_processor_factory"].assert_not_called() + mock_external_service_dependencies["index_processor"].load.assert_not_called() + + def test_enable_segments_to_index_invalid_document_status( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test handling of document with invalid status. + + This test verifies: + - Early return when document is disabled, archived, or not completed + - No index processing for documents not ready for indexing + - Proper database session cleanup + - No unnecessary external service calls + """ + # Arrange: Create test data with invalid document status + dataset, document = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + + # Test different invalid statuses + invalid_statuses = [ + ("disabled", {"enabled": False}), + ("archived", {"archived": True}), + ("not_completed", {"indexing_status": "processing"}), + ] + + for _, status_attrs in invalid_statuses: + # Reset document status + document.enabled = True + document.archived = False + document.indexing_status = "completed" + db.session.commit() + + # Set invalid status + for attr, value in status_attrs.items(): + setattr(document, attr, value) + db.session.commit() + + # Create segments + segments = self._create_test_segments(db_session_with_containers, document, dataset) + segment_ids = [segment.id for segment in segments] + + # Act: Execute the task + enable_segments_to_index_task(segment_ids, dataset.id, document.id) + + # Assert: Verify no processing occurred + mock_external_service_dependencies["index_processor_factory"].assert_not_called() + mock_external_service_dependencies["index_processor"].load.assert_not_called() + + # Clean up segments for next iteration + for segment in segments: + db.session.delete(segment) + db.session.commit() + + def test_enable_segments_to_index_segments_not_found( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test handling when no segments are found. 
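+        Unlike the missing-dataset case, the index processor factory is still
+        invoked here; only the `load` call is skipped.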
+ + This test verifies: + - Proper handling when segments don't exist + - Early return without processing + - Database session cleanup + - Index processor is created but load is not called + """ + # Arrange: Create test data + dataset, document = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + + # Use non-existent segment IDs + fake = Faker() + non_existent_segment_ids = [fake.uuid4() for _ in range(3)] + + # Act: Execute the task with non-existent segments + enable_segments_to_index_task(non_existent_segment_ids, dataset.id, document.id) + + # Assert: Verify index processor was created but load was not called + mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.PARAGRAPH_INDEX) + mock_external_service_dependencies["index_processor"].load.assert_not_called() + + def test_enable_segments_to_index_with_parent_child_structure( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test segments indexing with parent-child structure. + + This test verifies: + - Proper handling of PARENT_CHILD_INDEX type + - Child document creation from segments + - Correct document structure for parent-child indexing + - Index processor receives properly structured documents + - Redis cache key deletion + """ + # Arrange: Create test data with parent-child index type + dataset, document = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + + # Update document to use parent-child index type + document.doc_form = IndexType.PARENT_CHILD_INDEX + db.session.commit() + + # Refresh dataset to ensure doc_form property reflects the updated document + db.session.refresh(dataset) + + # Create segments with mock child chunks + segments = self._create_test_segments(db_session_with_containers, document, dataset) + + # Set up Redis cache keys + segment_ids = [segment.id for segment in segments] + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + redis_client.set(indexing_cache_key, "processing", ex=300) + + # Mock the get_child_chunks method for each segment + with patch.object(DocumentSegment, "get_child_chunks") as mock_get_child_chunks: + # Setup mock to return child chunks for each segment + mock_child_chunks = [] + for i in range(2): # Each segment has 2 child chunks + mock_child = MagicMock() + mock_child.content = f"child_content_{i}" + mock_child.index_node_id = f"child_node_{i}" + mock_child.index_node_hash = f"child_hash_{i}" + mock_child_chunks.append(mock_child) + + mock_get_child_chunks.return_value = mock_child_chunks + + # Act: Execute the task + enable_segments_to_index_task(segment_ids, dataset.id, document.id) + + # Assert: Verify parent-child index processing + mock_external_service_dependencies["index_processor_factory"].assert_called_once_with( + IndexType.PARENT_CHILD_INDEX + ) + mock_external_service_dependencies["index_processor"].load.assert_called_once() + + # Verify the load method was called with correct parameters + call_args = mock_external_service_dependencies["index_processor"].load.call_args + assert call_args is not None + documents = call_args[0][1] # Second argument should be documents list + assert len(documents) == 3 # 3 segments + + # Verify each document has children + for doc in documents: + assert hasattr(doc, "children") + assert len(doc.children) == 2 # Each document has 2 children + + # Verify Redis cache keys were deleted + for segment in segments: + 
indexing_cache_key = f"segment_{segment.id}_indexing" + assert redis_client.exists(indexing_cache_key) == 0 + + def test_enable_segments_to_index_general_exception_handling( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test general exception handling during indexing process. + + This test verifies: + - Exceptions are properly caught and handled + - Segment status is set to error + - Segments are disabled + - Error information is recorded + - Redis cache is still cleared + - Database session is properly closed + """ + # Arrange: Create test data + dataset, document = self._create_test_dataset_and_document( + db_session_with_containers, mock_external_service_dependencies + ) + segments = self._create_test_segments(db_session_with_containers, document, dataset) + + # Set up Redis cache keys + segment_ids = [segment.id for segment in segments] + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + redis_client.set(indexing_cache_key, "processing", ex=300) + + # Mock the index processor to raise an exception + mock_external_service_dependencies["index_processor"].load.side_effect = Exception("Index processing failed") + + # Act: Execute the task + enable_segments_to_index_task(segment_ids, dataset.id, document.id) + + # Assert: Verify error handling + for segment in segments: + db.session.refresh(segment) + assert segment.enabled is False + assert segment.status == "error" + assert segment.error is not None + assert "Index processing failed" in segment.error + assert segment.disabled_at is not None + + # Verify Redis cache keys were still cleared despite error + for segment in segments: + indexing_cache_key = f"segment_{segment.id}_indexing" + assert redis_client.exists(indexing_cache_key) == 0 From cfe21f0826a3f1554549a4ec56a59300f9040e39 Mon Sep 17 00:00:00 2001 From: lyzno1 <92089059+lyzno1@users.noreply.github.com> Date: Thu, 2 Oct 2025 18:13:53 +0800 Subject: [PATCH 116/126] chore(web): update Next.js to 15.5.4 and unify Lexical to 0.36.2 (#26541) --- web/package.json | 30 +- web/pnpm-lock.yaml | 779 +++++++++++++++++++++++++-------------------- 2 files changed, 442 insertions(+), 367 deletions(-) diff --git a/web/package.json b/web/package.json index 4433d866d8..7695d04f40 100644 --- a/web/package.json +++ b/web/package.json @@ -50,13 +50,13 @@ "@headlessui/react": "2.2.1", "@heroicons/react": "^2.0.16", "@hookform/resolvers": "^3.9.0", - "@lexical/code": "^0.30.0", - "@lexical/link": "^0.30.0", - "@lexical/list": "^0.30.0", - "@lexical/react": "^0.30.0", - "@lexical/selection": "^0.30.0", - "@lexical/text": "^0.35.0", - "@lexical/utils": "^0.30.0", + "@lexical/code": "^0.36.2", + "@lexical/link": "^0.36.2", + "@lexical/list": "^0.36.2", + "@lexical/react": "^0.36.2", + "@lexical/selection": "^0.36.2", + "@lexical/text": "^0.36.2", + "@lexical/utils": "^0.36.2", "@monaco-editor/react": "^4.6.0", "@octokit/core": "^6.1.2", "@octokit/request-error": "^6.1.5", @@ -91,14 +91,14 @@ "katex": "^0.16.21", "ky": "^1.7.2", "lamejs": "^1.2.1", - "lexical": "^0.30.0", + "lexical": "^0.36.2", "line-clamp": "^1.0.0", "lodash-es": "^4.17.21", "mermaid": "11.10.0", "mime": "^4.0.4", "mitt": "^3.0.1", "negotiator": "^1.0.0", - "next": "15.5.0", + "next": "15.5.4", "next-pwa": "^5.6.0", "next-themes": "^0.4.3", "pinyin-pro": "^3.25.0", @@ -141,14 +141,15 @@ }, "devDependencies": { "@antfu/eslint-config": "^5.0.0", + "@babel/core": "^7.28.3", "@chromatic-com/storybook": "^3.1.0", "@eslint-react/eslint-plugin": "^1.15.0", 
"@happy-dom/jest-environment": "^17.4.4", "@mdx-js/loader": "^3.1.0", "@mdx-js/react": "^3.1.0", - "@next/bundle-analyzer": "15.5.3", - "@next/eslint-plugin-next": "15.5.0", - "@next/mdx": "15.5.0", + "@next/bundle-analyzer": "15.5.4", + "@next/eslint-plugin-next": "15.5.4", + "@next/mdx": "15.5.4", "@rgrove/parse-xml": "^4.1.0", "@storybook/addon-essentials": "8.5.0", "@storybook/addon-interactions": "8.5.0", @@ -161,7 +162,6 @@ "@testing-library/dom": "^10.4.0", "@testing-library/jest-dom": "^6.8.0", "@testing-library/react": "^16.0.1", - "@babel/core": "^7.28.3", "@types/dagre": "^0.7.52", "@types/jest": "^29.5.13", "@types/js-cookie": "^3.0.6", @@ -178,6 +178,7 @@ "@types/sortablejs": "^1.15.1", "@types/uuid": "^10.0.0", "autoprefixer": "^10.4.20", + "babel-loader": "^9.2.1", "bing-translate-api": "^4.0.2", "code-inspector-plugin": "1.2.9", "cross-env": "^7.0.3", @@ -200,8 +201,7 @@ "storybook": "8.5.0", "tailwindcss": "^3.4.14", "typescript": "^5.8.3", - "uglify-js": "^3.19.3", - "babel-loader": "^9.2.1" + "uglify-js": "^3.19.3" }, "resolutions": { "@types/react": "19.1.11", diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index 8c2e869c91..19a6b87cac 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -71,26 +71,26 @@ importers: specifier: ^3.9.0 version: 3.10.0(react-hook-form@7.60.0(react@19.1.1)) '@lexical/code': - specifier: ^0.30.0 - version: 0.30.0 + specifier: ^0.36.2 + version: 0.36.2 '@lexical/link': - specifier: ^0.30.0 - version: 0.30.0 + specifier: ^0.36.2 + version: 0.36.2 '@lexical/list': - specifier: ^0.30.0 - version: 0.30.0 + specifier: ^0.36.2 + version: 0.36.2 '@lexical/react': - specifier: ^0.30.0 - version: 0.30.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(yjs@13.6.27) + specifier: ^0.36.2 + version: 0.36.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(yjs@13.6.27) '@lexical/selection': - specifier: ^0.30.0 - version: 0.30.0 + specifier: ^0.36.2 + version: 0.36.2 '@lexical/text': - specifier: ^0.35.0 - version: 0.35.0 + specifier: ^0.36.2 + version: 0.36.2 '@lexical/utils': - specifier: ^0.30.0 - version: 0.30.0 + specifier: ^0.36.2 + version: 0.36.2 '@monaco-editor/react': specifier: ^4.6.0 version: 4.7.0(monaco-editor@0.52.2)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) @@ -194,8 +194,8 @@ importers: specifier: ^1.2.1 version: 1.2.1 lexical: - specifier: ^0.30.0 - version: 0.30.0 + specifier: ^0.36.2 + version: 0.36.2 line-clamp: specifier: ^1.0.0 version: 1.0.0 @@ -215,11 +215,11 @@ importers: specifier: ^1.0.0 version: 1.0.0 next: - specifier: 15.5.0 - version: 15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1) + specifier: 15.5.4 + version: 15.5.4(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1) next-pwa: specifier: ^5.6.0 - version: 5.6.0(@babel/core@7.28.3)(@types/babel__core@7.20.5)(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(uglify-js@3.19.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) + version: 5.6.0(@babel/core@7.28.3)(@types/babel__core@7.20.5)(esbuild@0.25.0)(next@15.5.4(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(uglify-js@3.19.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) next-themes: specifier: ^0.4.3 version: 0.4.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1) @@ -337,7 +337,7 @@ importers: devDependencies: '@antfu/eslint-config': specifier: ^5.0.0 - version: 
5.0.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@next/eslint-plugin-next@15.5.0)(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.35.0(jiti@2.6.0)))(eslint-plugin-react-refresh@0.4.20(eslint@9.35.0(jiti@2.6.0)))(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) + version: 5.0.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@next/eslint-plugin-next@15.5.4)(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.35.0(jiti@2.6.0)))(eslint-plugin-react-refresh@0.4.20(eslint@9.35.0(jiti@2.6.0)))(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3) '@babel/core': specifier: ^7.28.3 version: 7.28.3 @@ -357,14 +357,14 @@ importers: specifier: ^3.1.0 version: 3.1.0(@types/react@19.1.11)(react@19.1.1) '@next/bundle-analyzer': - specifier: 15.5.3 - version: 15.5.3 + specifier: 15.5.4 + version: 15.5.4 '@next/eslint-plugin-next': - specifier: 15.5.0 - version: 15.5.0 + specifier: 15.5.4 + version: 15.5.4 '@next/mdx': - specifier: 15.5.0 - version: 15.5.0(@mdx-js/loader@3.1.0(acorn@8.15.0)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)))(@mdx-js/react@3.1.0(@types/react@19.1.11)(react@19.1.1)) + specifier: 15.5.4 + version: 15.5.4(@mdx-js/loader@3.1.0(acorn@8.15.0)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)))(@mdx-js/react@3.1.0(@types/react@19.1.11)(react@19.1.1)) '@rgrove/parse-xml': specifier: ^4.1.0 version: 4.2.0 @@ -385,7 +385,7 @@ importers: version: 8.5.0(storybook@8.5.0) '@storybook/nextjs': specifier: 8.5.0 - version: 8.5.0(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1)(storybook@8.5.0)(type-fest@2.19.0)(typescript@5.8.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) + version: 8.5.0(esbuild@0.25.0)(next@15.5.4(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1)(storybook@8.5.0)(type-fest@2.19.0)(typescript@5.8.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) '@storybook/react': specifier: 8.5.0 version: 8.5.0(@storybook/test@8.5.0(storybook@8.5.0))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@8.5.0)(typescript@5.8.3) @@ -1246,6 +1246,10 @@ packages: resolution: {integrity: sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q==} engines: {node: '>=6.9.0'} + '@babel/runtime@7.28.4': + resolution: {integrity: sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==} + engines: {node: '>=6.9.0'} + '@babel/template@7.27.2': resolution: {integrity: sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==} engines: {node: '>=6.9.0'} @@ -1339,9 +1343,6 @@ packages: '@emnapi/core@1.5.0': resolution: {integrity: sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==} - '@emnapi/runtime@1.4.4': - resolution: {integrity: sha512-hHyapA4A3gPaDCNfiqyZUStTMqIkKRshqPIuDOXv1hcBnD4U3l8cP0T1HMCfGRxQ6V64TGCcoswChANyOAwbQg==} - '@emnapi/runtime@1.5.0': resolution: {integrity: sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==} @@ -1617,21 +1618,39 @@ packages: '@floating-ui/core@1.7.2': resolution: {integrity: 
sha512-wNB5ooIKHQc+Kui96jE/n69rHFWAVoxn5CAzL1Xdd8FG03cgY3MLO+GF9U3W737fYDSgPWA6MReKhBQBop6Pcw==} + '@floating-ui/core@1.7.3': + resolution: {integrity: sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==} + '@floating-ui/dom@1.7.2': resolution: {integrity: sha512-7cfaOQuCS27HD7DX+6ib2OrnW+b4ZBwDNnCcT0uTyidcmyWb03FnQqJybDBoCnpdxwBSfA94UAYlRCt7mV+TbA==} + '@floating-ui/dom@1.7.4': + resolution: {integrity: sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA==} + '@floating-ui/react-dom@2.1.4': resolution: {integrity: sha512-JbbpPhp38UmXDDAu60RJmbeme37Jbgsm7NrHGgzYYFKmblzRUh6Pa641dII6LsjwF4XlScDrde2UAzDo/b9KPw==} peerDependencies: react: '>=16.8.0' react-dom: '>=16.8.0' + '@floating-ui/react-dom@2.1.6': + resolution: {integrity: sha512-4JX6rEatQEvlmgU80wZyq9RT96HZJa88q8hp0pBd+LrczeDI4o6uA2M+uvxngVHo4Ihr8uibXxH6+70zhAFrVw==} + peerDependencies: + react: '>=16.8.0' + react-dom: '>=16.8.0' + '@floating-ui/react@0.26.28': resolution: {integrity: sha512-yORQuuAtVpiRjpMhdc0wJj06b9JFjrYF4qp96j++v2NBpbi6SEGF7donUJ3TMieerQ6qVkAv1tgr7L4r5roTqw==} peerDependencies: react: '>=16.8.0' react-dom: '>=16.8.0' + '@floating-ui/react@0.27.16': + resolution: {integrity: sha512-9O8N4SeG2z++TSM8QA/KTeKFBVCNEz/AGS7gWPJf6KFRzmRWixFRnCnkPHRDwSVZW6QPDO6uT0P2SpWNKCc9/g==} + peerDependencies: + react: '>=17.0.0' + react-dom: '>=17.0.0' + '@floating-ui/utils@0.2.10': resolution: {integrity: sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==} @@ -1685,14 +1704,18 @@ packages: '@iconify/utils@2.3.0': resolution: {integrity: sha512-GmQ78prtwYW6EtzXRU1rY+KwOKfz32PD7iJh6Iyqw68GiKuoZ2A6pRtzWONz5VQJbp50mEjXh/7NkumtrAgRKA==} + '@img/colour@1.0.0': + resolution: {integrity: sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw==} + engines: {node: '>=18'} + '@img/sharp-darwin-arm64@0.33.5': resolution: {integrity: sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [darwin] - '@img/sharp-darwin-arm64@0.34.3': - resolution: {integrity: sha512-ryFMfvxxpQRsgZJqBd4wsttYQbCxsJksrv9Lw/v798JcQ8+w84mBWuXwl+TT0WJ/WrYOLaYpwQXi3sA9nTIaIg==} + '@img/sharp-darwin-arm64@0.34.4': + resolution: {integrity: sha512-sitdlPzDVyvmINUdJle3TNHl+AG9QcwiAMsXmccqsCOMZNIdW2/7S26w0LyU8euiLVzFBL3dXPwVCq/ODnf2vA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [darwin] @@ -1703,8 +1726,8 @@ packages: cpu: [x64] os: [darwin] - '@img/sharp-darwin-x64@0.34.3': - resolution: {integrity: sha512-yHpJYynROAj12TA6qil58hmPmAwxKKC7reUqtGLzsOHfP7/rniNGTL8tjWX6L3CTV4+5P4ypcS7Pp+7OB+8ihA==} + '@img/sharp-darwin-x64@0.34.4': + resolution: {integrity: sha512-rZheupWIoa3+SOdF/IcUe1ah4ZDpKBGWcsPX6MT0lYniH9micvIU7HQkYTfrx5Xi8u+YqwLtxC/3vl8TQN6rMg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [x64] os: [darwin] @@ -1714,8 +1737,8 @@ packages: cpu: [arm64] os: [darwin] - '@img/sharp-libvips-darwin-arm64@1.2.0': - resolution: {integrity: sha512-sBZmpwmxqwlqG9ueWFXtockhsxefaV6O84BMOrhtg/YqbTaRdqDE7hxraVE3y6gVM4eExmfzW4a8el9ArLeEiQ==} + '@img/sharp-libvips-darwin-arm64@1.2.3': + resolution: {integrity: sha512-QzWAKo7kpHxbuHqUC28DZ9pIKpSi2ts2OJnoIGI26+HMgq92ZZ4vk8iJd4XsxN+tYfNJxzH6W62X5eTcsBymHw==} cpu: [arm64] os: [darwin] @@ -1724,8 +1747,8 @@ packages: cpu: [x64] os: [darwin] - '@img/sharp-libvips-darwin-x64@1.2.0': - resolution: {integrity: 
sha512-M64XVuL94OgiNHa5/m2YvEQI5q2cl9d/wk0qFTDVXcYzi43lxuiFTftMR1tOnFQovVXNZJ5TURSDK2pNe9Yzqg==} + '@img/sharp-libvips-darwin-x64@1.2.3': + resolution: {integrity: sha512-Ju+g2xn1E2AKO6YBhxjj+ACcsPQRHT0bhpglxcEf+3uyPY+/gL8veniKoo96335ZaPo03bdDXMv0t+BBFAbmRA==} cpu: [x64] os: [darwin] @@ -1734,8 +1757,8 @@ packages: cpu: [arm64] os: [linux] - '@img/sharp-libvips-linux-arm64@1.2.0': - resolution: {integrity: sha512-RXwd0CgG+uPRX5YYrkzKyalt2OJYRiJQ8ED/fi1tq9WQW2jsQIn0tqrlR5l5dr/rjqq6AHAxURhj2DVjyQWSOA==} + '@img/sharp-libvips-linux-arm64@1.2.3': + resolution: {integrity: sha512-I4RxkXU90cpufazhGPyVujYwfIm9Nk1QDEmiIsaPwdnm013F7RIceaCc87kAH+oUB1ezqEvC6ga4m7MSlqsJvQ==} cpu: [arm64] os: [linux] @@ -1744,13 +1767,13 @@ packages: cpu: [arm] os: [linux] - '@img/sharp-libvips-linux-arm@1.2.0': - resolution: {integrity: sha512-mWd2uWvDtL/nvIzThLq3fr2nnGfyr/XMXlq8ZJ9WMR6PXijHlC3ksp0IpuhK6bougvQrchUAfzRLnbsen0Cqvw==} + '@img/sharp-libvips-linux-arm@1.2.3': + resolution: {integrity: sha512-x1uE93lyP6wEwGvgAIV0gP6zmaL/a0tGzJs/BIDDG0zeBhMnuUPm7ptxGhUbcGs4okDJrk4nxgrmxpib9g6HpA==} cpu: [arm] os: [linux] - '@img/sharp-libvips-linux-ppc64@1.2.0': - resolution: {integrity: sha512-Xod/7KaDDHkYu2phxxfeEPXfVXFKx70EAFZ0qyUdOjCcxbjqyJOEUpDe6RIyaunGxT34Anf9ue/wuWOqBW2WcQ==} + '@img/sharp-libvips-linux-ppc64@1.2.3': + resolution: {integrity: sha512-Y2T7IsQvJLMCBM+pmPbM3bKT/yYJvVtLJGfCs4Sp95SjvnFIjynbjzsa7dY1fRJX45FTSfDksbTp6AGWudiyCg==} cpu: [ppc64] os: [linux] @@ -1759,8 +1782,8 @@ packages: cpu: [s390x] os: [linux] - '@img/sharp-libvips-linux-s390x@1.2.0': - resolution: {integrity: sha512-eMKfzDxLGT8mnmPJTNMcjfO33fLiTDsrMlUVcp6b96ETbnJmd4uvZxVJSKPQfS+odwfVaGifhsB07J1LynFehw==} + '@img/sharp-libvips-linux-s390x@1.2.3': + resolution: {integrity: sha512-RgWrs/gVU7f+K7P+KeHFaBAJlNkD1nIZuVXdQv6S+fNA6syCcoboNjsV2Pou7zNlVdNQoQUpQTk8SWDHUA3y/w==} cpu: [s390x] os: [linux] @@ -1769,8 +1792,8 @@ packages: cpu: [x64] os: [linux] - '@img/sharp-libvips-linux-x64@1.2.0': - resolution: {integrity: sha512-ZW3FPWIc7K1sH9E3nxIGB3y3dZkpJlMnkk7z5tu1nSkBoCgw2nSRTFHI5pB/3CQaJM0pdzMF3paf9ckKMSE9Tg==} + '@img/sharp-libvips-linux-x64@1.2.3': + resolution: {integrity: sha512-3JU7LmR85K6bBiRzSUc/Ff9JBVIFVvq6bomKE0e63UXGeRw2HPVEjoJke1Yx+iU4rL7/7kUjES4dZ/81Qjhyxg==} cpu: [x64] os: [linux] @@ -1779,8 +1802,8 @@ packages: cpu: [arm64] os: [linux] - '@img/sharp-libvips-linuxmusl-arm64@1.2.0': - resolution: {integrity: sha512-UG+LqQJbf5VJ8NWJ5Z3tdIe/HXjuIdo4JeVNADXBFuG7z9zjoegpzzGIyV5zQKi4zaJjnAd2+g2nna8TZvuW9Q==} + '@img/sharp-libvips-linuxmusl-arm64@1.2.3': + resolution: {integrity: sha512-F9q83RZ8yaCwENw1GieztSfj5msz7GGykG/BA+MOUefvER69K/ubgFHNeSyUu64amHIYKGDs4sRCMzXVj8sEyw==} cpu: [arm64] os: [linux] @@ -1789,8 +1812,8 @@ packages: cpu: [x64] os: [linux] - '@img/sharp-libvips-linuxmusl-x64@1.2.0': - resolution: {integrity: sha512-SRYOLR7CXPgNze8akZwjoGBoN1ThNZoqpOgfnOxmWsklTGVfJiGJoC/Lod7aNMGA1jSsKWM1+HRX43OP6p9+6Q==} + '@img/sharp-libvips-linuxmusl-x64@1.2.3': + resolution: {integrity: sha512-U5PUY5jbc45ANM6tSJpsgqmBF/VsL6LnxJmIf11kB7J5DctHgqm0SkuXzVWtIY90GnJxKnC/JT251TDnk1fu/g==} cpu: [x64] os: [linux] @@ -1800,8 +1823,8 @@ packages: cpu: [arm64] os: [linux] - '@img/sharp-linux-arm64@0.34.3': - resolution: {integrity: sha512-QdrKe3EvQrqwkDrtuTIjI0bu6YEJHTgEeqdzI3uWJOH6G1O8Nl1iEeVYRGdj1h5I21CqxSvQp1Yv7xeU3ZewbA==} + '@img/sharp-linux-arm64@0.34.4': + resolution: {integrity: sha512-YXU1F/mN/Wu786tl72CyJjP/Ngl8mGHN1hST4BGl+hiW5jhCnV2uRVTNOcaYPs73NeT/H8Upm3y9582JVuZHrQ==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [linux] 
@@ -1812,14 +1835,14 @@ packages: cpu: [arm] os: [linux] - '@img/sharp-linux-arm@0.34.3': - resolution: {integrity: sha512-oBK9l+h6KBN0i3dC8rYntLiVfW8D8wH+NPNT3O/WBHeW0OQWCjfWksLUaPidsrDKpJgXp3G3/hkmhptAW0I3+A==} + '@img/sharp-linux-arm@0.34.4': + resolution: {integrity: sha512-Xyam4mlqM0KkTHYVSuc6wXRmM7LGN0P12li03jAnZ3EJWZqj83+hi8Y9UxZUbxsgsK1qOEwg7O0Bc0LjqQVtxA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm] os: [linux] - '@img/sharp-linux-ppc64@0.34.3': - resolution: {integrity: sha512-GLtbLQMCNC5nxuImPR2+RgrviwKwVql28FWZIW1zWruy6zLgA5/x2ZXk3mxj58X/tszVF69KK0Is83V8YgWhLA==} + '@img/sharp-linux-ppc64@0.34.4': + resolution: {integrity: sha512-F4PDtF4Cy8L8hXA2p3TO6s4aDt93v+LKmpcYFLAVdkkD3hSxZzee0rh6/+94FpAynsuMpLX5h+LRsSG3rIciUQ==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [ppc64] os: [linux] @@ -1830,8 +1853,8 @@ packages: cpu: [s390x] os: [linux] - '@img/sharp-linux-s390x@0.34.3': - resolution: {integrity: sha512-3gahT+A6c4cdc2edhsLHmIOXMb17ltffJlxR0aC2VPZfwKoTGZec6u5GrFgdR7ciJSsHT27BD3TIuGcuRT0KmQ==} + '@img/sharp-linux-s390x@0.34.4': + resolution: {integrity: sha512-qVrZKE9Bsnzy+myf7lFKvng6bQzhNUAYcVORq2P7bDlvmF6u2sCmK2KyEQEBdYk+u3T01pVsPrkj943T1aJAsw==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [s390x] os: [linux] @@ -1842,8 +1865,8 @@ packages: cpu: [x64] os: [linux] - '@img/sharp-linux-x64@0.34.3': - resolution: {integrity: sha512-8kYso8d806ypnSq3/Ly0QEw90V5ZoHh10yH0HnrzOCr6DKAPI6QVHvwleqMkVQ0m+fc7EH8ah0BB0QPuWY6zJQ==} + '@img/sharp-linux-x64@0.34.4': + resolution: {integrity: sha512-ZfGtcp2xS51iG79c6Vhw9CWqQC8l2Ot8dygxoDoIQPTat/Ov3qAa8qpxSrtAEAJW+UjTXc4yxCjNfxm4h6Xm2A==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [x64] os: [linux] @@ -1854,8 +1877,8 @@ packages: cpu: [arm64] os: [linux] - '@img/sharp-linuxmusl-arm64@0.34.3': - resolution: {integrity: sha512-vAjbHDlr4izEiXM1OTggpCcPg9tn4YriK5vAjowJsHwdBIdx0fYRsURkxLG2RLm9gyBq66gwtWI8Gx0/ov+JKQ==} + '@img/sharp-linuxmusl-arm64@0.34.4': + resolution: {integrity: sha512-8hDVvW9eu4yHWnjaOOR8kHVrew1iIX+MUgwxSuH2XyYeNRtLUe4VNioSqbNkB7ZYQJj9rUTT4PyRscyk2PXFKA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [linux] @@ -1866,8 +1889,8 @@ packages: cpu: [x64] os: [linux] - '@img/sharp-linuxmusl-x64@0.34.3': - resolution: {integrity: sha512-gCWUn9547K5bwvOn9l5XGAEjVTTRji4aPTqLzGXHvIr6bIDZKNTA34seMPgM0WmSf+RYBH411VavCejp3PkOeQ==} + '@img/sharp-linuxmusl-x64@0.34.4': + resolution: {integrity: sha512-lU0aA5L8QTlfKjpDCEFOZsTYGn3AEiO6db8W5aQDxj0nQkVrZWmN3ZP9sYKWJdtq3PWPhUNlqehWyXpYDcI9Sg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [x64] os: [linux] @@ -1877,13 +1900,13 @@ packages: engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [wasm32] - '@img/sharp-wasm32@0.34.3': - resolution: {integrity: sha512-+CyRcpagHMGteySaWos8IbnXcHgfDn7pO2fiC2slJxvNq9gDipYBN42/RagzctVRKgxATmfqOSulgZv5e1RdMg==} + '@img/sharp-wasm32@0.34.4': + resolution: {integrity: sha512-33QL6ZO/qpRyG7woB/HUALz28WnTMI2W1jgX3Nu2bypqLIKx/QKMILLJzJjI+SIbvXdG9fUnmrxR7vbi1sTBeA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [wasm32] - '@img/sharp-win32-arm64@0.34.3': - resolution: {integrity: sha512-MjnHPnbqMXNC2UgeLJtX4XqoVHHlZNd+nPt1kRPmj63wURegwBhZlApELdtxM2OIZDRv/DFtLcNhVbd1z8GYXQ==} + '@img/sharp-win32-arm64@0.34.4': + resolution: {integrity: sha512-2Q250do/5WXTwxW3zjsEuMSv5sUU4Tq9VThWKlU2EYLm4MB7ZeMwF+SFJutldYODXF6jzc6YEOC+VfX0SZQPqA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [win32] @@ -1894,8 +1917,8 @@ packages: cpu: [ia32] os: [win32] - 
'@img/sharp-win32-ia32@0.34.3': - resolution: {integrity: sha512-xuCdhH44WxuXgOM714hn4amodJMZl3OEvf0GVTm0BEyMeA2to+8HEdRPShH0SLYptJY1uBw+SCFP9WVQi1Q/cw==} + '@img/sharp-win32-ia32@0.34.4': + resolution: {integrity: sha512-3ZeLue5V82dT92CNL6rsal6I2weKw1cYu+rGKm8fOCCtJTR2gYeUfY3FqUnIJsMUPIH68oS5jmZ0NiJ508YpEw==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [ia32] os: [win32] @@ -1906,8 +1929,8 @@ packages: cpu: [x64] os: [win32] - '@img/sharp-win32-x64@0.34.3': - resolution: {integrity: sha512-OWwz05d++TxzLEv4VnsTz5CmZ6mI6S05sfQGEMrNrQcOEERbX46332IvE7pO/EUiw7jUrrS40z/M7kPyjfl04g==} + '@img/sharp-win32-x64@0.34.4': + resolution: {integrity: sha512-xIyj4wpYs8J18sVN3mSQjwrw7fKUqRw+Z5rnHNCy5fYTxigBz81u5mOMPmFumwjcn8+ld1ppptMBCLic1nz6ig==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [x64] os: [win32] @@ -2020,77 +2043,77 @@ packages: '@jridgewell/trace-mapping@0.3.9': resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} - '@lexical/clipboard@0.30.0': - resolution: {integrity: sha512-taWQURtE6xF4Jy4I8teQw3+nVBVNO1r+9N9voXeivgwxSrAM40rjqQ/aZEKxWbwZtfkABDkCEArbVrqP0SkWcQ==} + '@lexical/clipboard@0.36.2': + resolution: {integrity: sha512-l7z52jltlMz1HmJRmG7ZdxySPjheRRxdV/75QEnzalMtqfLPgh4G5IpycISjbX+95PgEaC6rXbcjPix0CyHDJg==} - '@lexical/code@0.30.0': - resolution: {integrity: sha512-OmA6Bmp3w9SMV25Hae1dLXtPNOdCgnzo1xy84K19U+dPP5iqXagwFq5oY/9PVOOI2wgaQHrz3C+7B4phDb9xaA==} + '@lexical/code@0.36.2': + resolution: {integrity: sha512-dfS62rNo3uKwNAJQ39zC+8gYX0k8UAoW7u+JPIqx+K2VPukZlvpsPLNGft15pdWBkHc7Pv+o9gJlB6gGv+EBfA==} - '@lexical/devtools-core@0.30.0': - resolution: {integrity: sha512-6vKEEIUym8pQ+tWt4VfRMOGE/dtfyPr9e1zPrAAV7Y/EdzK0AJYPPlw2Dt5Uqq9rposcIriqF4MkuFvy4UcZiQ==} + '@lexical/devtools-core@0.36.2': + resolution: {integrity: sha512-G+XW7gR/SCx3YgX4FK9wAIn6AIOkC+j8zRPWrS3GQNZ15CE0QkwQl3IyQ7XW9KzWmdRMs6yTmTVnENFa1JLzXg==} peerDependencies: react: '>=17.x' react-dom: '>=17.x' - '@lexical/dragon@0.30.0': - resolution: {integrity: sha512-eikVYw1pIcFIOojn2mGlps59YcyT9ATd6UMIx/ivuscakrZeU7SZM/F6c75QPJXNOu1b2koOo+4Bb1GT6jixGQ==} + '@lexical/dragon@0.36.2': + resolution: {integrity: sha512-VWNjYaH74uQ8MFKkl80pTofojpEnTYSX2sgHyZmo1Lk1cKLHK25pMnWgAxPAMLQD5/RW/2PtZcK+j0Kfoe5lSQ==} - '@lexical/hashtag@0.30.0': - resolution: {integrity: sha512-gB3DobSdAc0YZUhlTT7ZAUr+6RRREQ3UWVC1twdtFvXXw1vyTUXH2gWTDp/ParwBZ16Lnrg8mxET8Nu/qD1PSw==} + '@lexical/extension@0.36.2': + resolution: {integrity: sha512-NWxtqMFMzScq4Eemqp1ST2KREIfj57fUbn7qHv+mMnYgQZK4iIhrHKo5klonxi1oBURcxUZMIbdtH7MJ4BdisA==} - '@lexical/history@0.30.0': - resolution: {integrity: sha512-dxudthi94vSLQKXVq3LSwcOVkOmb2lvxoy7sCma513yJbrsn3fPLppR2Ynhl6aB9oPw675wSDrfsE6BG3U3+CA==} + '@lexical/hashtag@0.36.2': + resolution: {integrity: sha512-WdmKtzXFcahQT3ShFDeHF6LCR5C8yvFCj3ImI09rZwICrYeonbMrzsBUxS1joBz0HQ+ufF9Tx+RxLvGWx6WxzQ==} - '@lexical/html@0.30.0': - resolution: {integrity: sha512-GdegWO6RjJ7eE+yD3Z0X/OpT88SZjOs3DyQ0rgrZy3z7RPaFCbEEcq0M/NssJbKAB1XOFUsUFrnS7kZs1vJzGg==} + '@lexical/history@0.36.2': + resolution: {integrity: sha512-pnS36gyMWz1yq/3Z2jv0gUxjJfas5j0GZOM4rFTzDAHjRVc5q3Ua4ElwekdcLaPPGpUlcg3jghIGWa2pSeoPvA==} - '@lexical/link@0.30.0': - resolution: {integrity: sha512-isD3PC0ywQIwbtekHYEvh7hDxcPz/cEr/AspYntYs08u5J0czhw3rpqnXWGauWaav5V9ExIkf1ZkGUFUI6bw5w==} + '@lexical/html@0.36.2': + resolution: {integrity: sha512-fgqALzgKnoy93G0yFyYD4C4qJTSMZyUt4JE5kj/POFwWNOnXThIqJhQGwBvH/ibImpIfOeds2TrSr8PbStlrNg==} - '@lexical/list@0.30.0': - resolution: {integrity: 
sha512-WKnwH+Cg+j2I0EbaEyPHo8MPNyrqQV3W1NmH5Mf/iRxCq42z7NJxemhmRUxbqv8vsugACwBkh2RlkhekRXmUQQ==} + '@lexical/link@0.36.2': + resolution: {integrity: sha512-Zb+DeHA1po8VMiOAAXsBmAHhfWmQttsUkI5oiZUmOXJruRuQ2rVr01NoxHpoEpLwHOABVNzD3PMbwov+g3c7lg==} - '@lexical/mark@0.30.0': - resolution: {integrity: sha512-dLFH6tJ2WQUSdo1Y2Jp81vRT8j48FjF75K5YLRsKD/UFxWEy+RFgRXsd0H/BuFkx/jPTXt6xe8CaIrZvek8mLg==} + '@lexical/list@0.36.2': + resolution: {integrity: sha512-JpaIaE0lgNUrAR7iaCaIoETcCKG9EvZjM3G71VxiexTs7PltmEMq36LUlO2goafWurP7knG2rUpVnTcuSbYYeA==} - '@lexical/markdown@0.30.0': - resolution: {integrity: sha512-GGddZs63k0wb3/fdL7JyBjiy8L1AIHuRKT68riWbKAcNL7rfMl3Uy5VnMkgV/5bN/2eUQijkGjxG+VxsR8RWbw==} + '@lexical/mark@0.36.2': + resolution: {integrity: sha512-n0MNXtGH+1i43hglgHjpQV0093HmIiFR7Budg2BJb8ZNzO1KZRqeXAHlA5ZzJ698FkAnS4R5bqG9tZ0JJHgAuA==} - '@lexical/offset@0.30.0': - resolution: {integrity: sha512-sZFbZt5dVdtrdoYk79i13xBDs8/MHXw6CqmZNht85L7UdwiuzVqA3KTyaMe60Vrg6mfsKIVjghbpMOhspcuCrw==} + '@lexical/markdown@0.36.2': + resolution: {integrity: sha512-jI4McaVKUo8ADOYNCB5LnYyxXDyOWBOofM05r42R9QIMyUxGryo43WNPMAYXzCgtHlkQv+FNles9OlQY0IlAag==} - '@lexical/overflow@0.30.0': - resolution: {integrity: sha512-fvjWnhtPZLMS3qJ6HC6tZTOMmcfNmeRUkgXTas9bvWT8Yul+WLJ/fWjzwvBcqpKlvPQjRFOcDcrW8T/Rp7KPrg==} + '@lexical/offset@0.36.2': + resolution: {integrity: sha512-+QQNwzFW/joes3DhNINpGdEX6O5scUTs4n8pYDyM/3pWb+8oCHRaRtEmpUU9HStbdy/pK2kQ9XdztkrNvP/ilA==} - '@lexical/plain-text@0.30.0': - resolution: {integrity: sha512-jvxMMxFO3Yuj7evWsc33IGWfigU5A1KrJaIf6zv6GmYj0a7ZRkR1x6vJyc7AlgUM70sld+dozLdoynguQIlmrQ==} + '@lexical/overflow@0.36.2': + resolution: {integrity: sha512-bLaEe93iZIJH5wDh6e/DTZVNz7xO7lMS5akcJW8CIwopr4I/Qv2uCvc4G1bMMHx2xM1gVxstn5rFgIUP8/Gqlg==} - '@lexical/react@0.30.0': - resolution: {integrity: sha512-fsb6voXzxHyP55lXdmnGhHMfxe6g/f+0NpmfPCkutOXYnY8UqKa86LLYl4Nrsi8HX8BRZfh1H0IjkzDG6EzVPw==} + '@lexical/plain-text@0.36.2': + resolution: {integrity: sha512-c9F/+WHl2QuXVhu+1bBVo6BIrSjCcixLe5ePKxoUpy+B7W72s3VCoAQZp+pmtPIyodDLmZAx78hZBBlzoIOeeg==} + + '@lexical/react@0.36.2': + resolution: {integrity: sha512-mPVm1BmeuMsMpVyUplgc0btOI8+Vm9bZj4AftgfMSkvzkr8i6NkLn8LV5IlEnoRvxXkjOExwlwBwdQte5ZGvNw==} peerDependencies: react: '>=17.x' react-dom: '>=17.x' - '@lexical/rich-text@0.30.0': - resolution: {integrity: sha512-oitOh5u68E5DBZt5VBZIaIeM/iNdt3mIDkGp2C259x81V/9KlSNB9c3rqdTKcs/A+Msw4j60FRhdmZcKQ9uYUA==} + '@lexical/rich-text@0.36.2': + resolution: {integrity: sha512-dZ7zAIv5NBrh1ApxIT9bayn96zfQHHdnT+oaqmR+q100Vo2uROeR/ZF5igeAuwYGM1Z3ZWDBvNxRKd1d6FWiZw==} - '@lexical/selection@0.30.0': - resolution: {integrity: sha512-Ys2XfSmIV/Irg6Xo663YtR4jozIv/7sDemArkEGHT0fxZn2py5qftowPF5IBqFYxKTigAdv5vVPwusBvAnLIEg==} + '@lexical/selection@0.36.2': + resolution: {integrity: sha512-n96joW3HCKBmPeESR172BxVE+m8V9SdidQm4kKb9jOZ1Ota+tnam2386TeI6795TWwgjDQJPK3HZNKcX6Gb+Bg==} - '@lexical/table@0.30.0': - resolution: {integrity: sha512-XPCIMIGnZLKTa5/4cP16bXbmzvMndPR273HNl7ZaF35ky7UjZxdj42HBbE7q9zw2zbRPDiO77EyhYA0p20cbdw==} + '@lexical/table@0.36.2': + resolution: {integrity: sha512-96rNNPiVbC65i+Jn1QzIsehCS7UVUc69ovrh9Bt4+pXDebZSdZai153Q7RUq8q3AQ5ocK4/SA2kLQfMu0grj3Q==} - '@lexical/text@0.30.0': - resolution: {integrity: sha512-P0ptriFwwP/hoDpz/MoBbzHxrFHqh0kCGzASWUdRZ1zrU0yPvJ9vV/UNMhyolH7xx+eAGI1Yl+m74NlpGmXqTg==} + '@lexical/text@0.36.2': + resolution: {integrity: sha512-IbbqgRdMAD6Uk9b2+qSVoy+8RVcczrz6OgXvg39+EYD+XEC7Rbw7kDTWzuNSJJpP7vxSO8YDZSaIlP5gNH3qKA==} - '@lexical/text@0.35.0': - resolution: {integrity: 
sha512-uaMh46BkysV8hK8wQwp5g/ByZW+2hPDt8ahAErxtf8NuzQem1FHG/f5RTchmFqqUDVHO3qLNTv4AehEGmXv8MA==} + '@lexical/utils@0.36.2': + resolution: {integrity: sha512-P9+t2Ob10YNGYT/PWEER+1EqH8SAjCNRn+7SBvKbr0IdleGF2JvzbJwAWaRwZs1c18P11XdQZ779dGvWlfwBIw==} - '@lexical/utils@0.30.0': - resolution: {integrity: sha512-VJlAUhupCZmnbYYX3zMWovd4viu2guR01sAqKGbbOMbP+4rlaymixFbinvNPaRKDBloOARi+fpiveQFxnyr/Ew==} - - '@lexical/yjs@0.30.0': - resolution: {integrity: sha512-mWGFAGpUPz4JoSV+Y0cZOzOZJoMLbVb/enldxEbV0xX71BBVzD0c0vjPxuaIJ9MtNkRZdK3eOubj+B45iOECtw==} + '@lexical/yjs@0.36.2': + resolution: {integrity: sha512-gZ66Mw+uKXTO8KeX/hNKAinXbFg3gnNYraG76lBXCwb/Ka3q34upIY9FUeGOwGVaau3iIDQhE49I+6MugAX2FQ==} peerDependencies: yjs: '>=13.5.22' @@ -2131,17 +2154,17 @@ packages: '@napi-rs/wasm-runtime@1.0.5': resolution: {integrity: sha512-TBr9Cf9onSAS2LQ2+QHx6XcC6h9+RIzJgbqG3++9TUZSH204AwEy5jg3BTQ0VATsyoGj4ee49tN/y6rvaOOtcg==} - '@next/bundle-analyzer@15.5.3': - resolution: {integrity: sha512-l2NxnWHP2gWHbomAlz/wFnN2jNCx/dpr7P/XWeOLhULiyKkXSac8O8SjxRO/8FNhr2l4JNtWVKk82Uya4cZYTw==} + '@next/bundle-analyzer@15.5.4': + resolution: {integrity: sha512-wMtpIjEHi+B/wC34ZbEcacGIPgQTwTFjjp0+F742s9TxC6QwT0MwB/O0QEgalMe8s3SH/K09DO0gmTvUSJrLRA==} - '@next/env@15.5.0': - resolution: {integrity: sha512-sDaprBAfzCQiOgo2pO+LhnV0Wt2wBgartjrr+dpcTORYVnnXD0gwhHhiiyIih9hQbq+JnbqH4odgcFWhqCGidw==} + '@next/env@15.5.4': + resolution: {integrity: sha512-27SQhYp5QryzIT5uO8hq99C69eLQ7qkzkDPsk3N+GuS2XgOgoYEeOav7Pf8Tn4drECOVDsDg8oj+/DVy8qQL2A==} - '@next/eslint-plugin-next@15.5.0': - resolution: {integrity: sha512-+k83U/fST66eQBjTltX2T9qUYd43ntAe+NZ5qeZVTQyTiFiHvTLtkpLKug4AnZAtuI/lwz5tl/4QDJymjVkybg==} + '@next/eslint-plugin-next@15.5.4': + resolution: {integrity: sha512-SR1vhXNNg16T4zffhJ4TS7Xn7eq4NfKfcOsRwea7RIAHrjRpI9ALYbamqIJqkAhowLlERffiwk0FMvTLNdnVtw==} - '@next/mdx@15.5.0': - resolution: {integrity: sha512-TxfWpIDHx9Xy/GgZwegrl+HxjzeQml0bTclxX72SqJLi83IhJaFiglQbfMTotB2hDRbxCGKpPYh0X20+r1Trtw==} + '@next/mdx@15.5.4': + resolution: {integrity: sha512-QUc14KkswCau2/Lul13t13v8QYRiEh3aeyUMUix5mK/Zd8c/J9NQuVvLGhxS7fxGPU+fOcv0GaXqZshkvNaX7A==} peerDependencies: '@mdx-js/loader': '>=0.15.0' '@mdx-js/react': '>=0.15.0' @@ -2151,50 +2174,50 @@ packages: '@mdx-js/react': optional: true - '@next/swc-darwin-arm64@15.5.0': - resolution: {integrity: sha512-v7Jj9iqC6enxIRBIScD/o0lH7QKvSxq2LM8UTyqJi+S2w2QzhMYjven4vgu/RzgsdtdbpkyCxBTzHl/gN5rTRg==} + '@next/swc-darwin-arm64@15.5.4': + resolution: {integrity: sha512-nopqz+Ov6uvorej8ndRX6HlxCYWCO3AHLfKK2TYvxoSB2scETOcfm/HSS3piPqc3A+MUgyHoqE6je4wnkjfrOA==} engines: {node: '>= 10'} cpu: [arm64] os: [darwin] - '@next/swc-darwin-x64@15.5.0': - resolution: {integrity: sha512-s2Nk6ec+pmYmAb/utawuURy7uvyYKDk+TRE5aqLRsdnj3AhwC9IKUBmhfnLmY/+P+DnwqpeXEFIKe9tlG0p6CA==} + '@next/swc-darwin-x64@15.5.4': + resolution: {integrity: sha512-QOTCFq8b09ghfjRJKfb68kU9k2K+2wsC4A67psOiMn849K9ZXgCSRQr0oVHfmKnoqCbEmQWG1f2h1T2vtJJ9mA==} engines: {node: '>= 10'} cpu: [x64] os: [darwin] - '@next/swc-linux-arm64-gnu@15.5.0': - resolution: {integrity: sha512-mGlPJMZReU4yP5fSHjOxiTYvZmwPSWn/eF/dcg21pwfmiUCKS1amFvf1F1RkLHPIMPfocxLViNWFvkvDB14Isg==} + '@next/swc-linux-arm64-gnu@15.5.4': + resolution: {integrity: sha512-eRD5zkts6jS3VfE/J0Kt1VxdFqTnMc3QgO5lFE5GKN3KDI/uUpSyK3CjQHmfEkYR4wCOl0R0XrsjpxfWEA++XA==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@next/swc-linux-arm64-musl@15.5.0': - resolution: {integrity: sha512-biWqIOE17OW/6S34t1X8K/3vb1+svp5ji5QQT/IKR+VfM3B7GvlCwmz5XtlEan2ukOUf9tj2vJJBffaGH4fGRw==} + 
'@next/swc-linux-arm64-musl@15.5.4': + resolution: {integrity: sha512-TOK7iTxmXFc45UrtKqWdZ1shfxuL4tnVAOuuJK4S88rX3oyVV4ZkLjtMT85wQkfBrOOvU55aLty+MV8xmcJR8A==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@next/swc-linux-x64-gnu@15.5.0': - resolution: {integrity: sha512-zPisT+obYypM/l6EZ0yRkK3LEuoZqHaSoYKj+5jiD9ESHwdr6QhnabnNxYkdy34uCigNlWIaCbjFmQ8FY5AlxA==} + '@next/swc-linux-x64-gnu@15.5.4': + resolution: {integrity: sha512-7HKolaj+481FSW/5lL0BcTkA4Ueam9SPYWyN/ib/WGAFZf0DGAN8frNpNZYFHtM4ZstrHZS3LY3vrwlIQfsiMA==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@next/swc-linux-x64-musl@15.5.0': - resolution: {integrity: sha512-+t3+7GoU9IYmk+N+FHKBNFdahaReoAktdOpXHFIPOU1ixxtdge26NgQEEkJkCw2dHT9UwwK5zw4mAsURw4E8jA==} + '@next/swc-linux-x64-musl@15.5.4': + resolution: {integrity: sha512-nlQQ6nfgN0nCO/KuyEUwwOdwQIGjOs4WNMjEUtpIQJPR2NUfmGpW2wkJln1d4nJ7oUzd1g4GivH5GoEPBgfsdw==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@next/swc-win32-arm64-msvc@15.5.0': - resolution: {integrity: sha512-d8MrXKh0A+c9DLiy1BUFwtg3Hu90Lucj3k6iKTUdPOv42Ve2UiIG8HYi3UAb8kFVluXxEfdpCoPPCSODk5fDcw==} + '@next/swc-win32-arm64-msvc@15.5.4': + resolution: {integrity: sha512-PcR2bN7FlM32XM6eumklmyWLLbu2vs+D7nJX8OAIoWy69Kef8mfiN4e8TUv2KohprwifdpFKPzIP1njuCjD0YA==} engines: {node: '>= 10'} cpu: [arm64] os: [win32] - '@next/swc-win32-x64-msvc@15.5.0': - resolution: {integrity: sha512-Fe1tGHxOWEyQjmygWkkXSwhFcTJuimrNu52JEuwItrKJVV4iRjbWp9I7zZjwqtiNnQmxoEvoisn8wueFLrNpvQ==} + '@next/swc-win32-x64-msvc@15.5.4': + resolution: {integrity: sha512-1ur2tSHZj8Px/KMAthmuI9FMp/YFusMMGoRNJaRZMOlSkgvLjzosSdQI0cJAKogdHl3qXUQKL9MGaYvKwA7DXg==} engines: {node: '>= 10'} cpu: [x64] os: [win32] @@ -2537,6 +2560,9 @@ packages: '@polka/url@1.0.0-next.29': resolution: {integrity: sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==} + '@preact/signals-core@1.12.1': + resolution: {integrity: sha512-BwbTXpj+9QutoZLQvbttRg5x3l5468qaV2kufh+51yha1c53ep5dY4kTuZR35+3pAZxpfQerGJiQqg34ZNZ6uA==} + '@radix-ui/primitive@1.1.2': resolution: {integrity: sha512-XnbHrrprsNqZKQhStrSwgRUQzoCI1glLzdw79xiZPoofhGICeZRSQ3dIxAKH1gb3OHfNf4d6f+vAv3kil2eggA==} @@ -4095,6 +4121,9 @@ packages: caniuse-lite@1.0.30001727: resolution: {integrity: sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q==} + caniuse-lite@1.0.30001746: + resolution: {integrity: sha512-eA7Ys/DGw+pnkWWSE/id29f2IcPHVoE8wxtvE5JdvD2V28VTDPy1yEeo11Guz0sJ4ZeGRcm3uaTcAqK1LXaphA==} + canvas@2.11.2: resolution: {integrity: sha512-ItanGBMrmRV7Py2Z+Xhs7cT+FNt5K0vPL4p9EZ/UX/Mu7hFbkxSjKF2KVtPwX7UYWp7dRKnrTvReflgrItJbdw==} engines: {node: '>=6'} @@ -4715,6 +4744,10 @@ packages: resolution: {integrity: sha512-vEtk+OcP7VBRtQZ1EJ3bdgzSfBjgnEalLTp5zjJrS+2Z1w2KZly4SBdac/WDU3hhsNAZ9E8SC96ME4Ey8MZ7cg==} engines: {node: '>=8'} + detect-libc@2.1.1: + resolution: {integrity: sha512-ecqj/sy1jcK1uWrwpR67UhYrIFQ+5WlGxth34WquCbamhFA6hkkwiu37o6J5xCHdo1oixJRfVRw+ywV+Hq/0Aw==} + engines: {node: '>=8'} + detect-newline@3.1.0: resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} engines: {node: '>=8'} @@ -6179,11 +6212,8 @@ packages: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} - lexical@0.30.0: - resolution: {integrity: sha512-6gxYeXaJiAcreJD0whCofvO0MuJmnWoIgIl1w7L5FTigfhnEohuCx2SoI/oywzfzXE9gzZnyr3rVvZrMItPL8A==} - - lexical@0.35.0: - resolution: {integrity: 
sha512-3VuV8xXhh5xJA6tzvfDvE0YBCMkIZUmxtRilJQDDdCgJCc+eut6qAv2qbN+pbqvarqcQqPN1UF+8YvsjmyOZpw==} + lexical@0.36.2: + resolution: {integrity: sha512-gIDJCmSAhtxD7h95WK17Nz19wCZu92Zn0p1/R45X01S/KAsLCwEtVJ2fTvIJNFTyx3QNJTuGcm5mYgRMUwq8rg==} lib0@0.2.114: resolution: {integrity: sha512-gcxmNFzA4hv8UYi8j43uPlQ7CGcyMJ2KQb5kZASw6SnAKAf10hK12i2fjrS3Cl/ugZa5Ui6WwIu1/6MIXiHttQ==} @@ -6682,8 +6712,8 @@ packages: react: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc react-dom: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc - next@15.5.0: - resolution: {integrity: sha512-N1lp9Hatw3a9XLt0307lGB4uTKsXDhyOKQo7uYMzX4i0nF/c27grcGXkLdb7VcT8QPYLBa8ouIyEoUQJ2OyeNQ==} + next@15.5.4: + resolution: {integrity: sha512-xH4Yjhb82sFYQfY3vbkJfgSDgXvBB6a8xPs9i35k6oZJRoQRihZH+4s9Yo2qsWpzBmZ3lPXaJ2KPXLfkvW4LnA==} engines: {node: ^18.18.0 || ^19.8.0 || >= 20.0.0} hasBin: true peerDependencies: @@ -7276,9 +7306,8 @@ packages: react: '>=16.4.0' react-dom: '>=16.4.0' - react-error-boundary@3.1.4: - resolution: {integrity: sha512-uM9uPzZJTF6wRQORmSrvOIgt4lJ9MC1sNgEOj2XGsDTRE4kmpWxg7ENK9EWNKJRMAOY9z0MuF4yIfl6gp4sotA==} - engines: {node: '>=10', npm: '>=6'} + react-error-boundary@6.0.0: + resolution: {integrity: sha512-gdlJjD7NWr0IfkPlaREN2d9uUZUlksrfOx7SX62VRerwXbMY6ftGCIZua1VG1aXFNOimhISsTq+Owp725b9SiA==} peerDependencies: react: '>=16.13.1' @@ -7721,8 +7750,8 @@ packages: resolution: {integrity: sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} - sharp@0.34.3: - resolution: {integrity: sha512-eX2IQ6nFohW4DbvHIOLRB3MHFpYqaqvXd3Tp5e/T/dSH83fxaNJQRvDMhASmkNTsNTVF2/OOopzRCt7xokgPfg==} + sharp@0.34.4: + resolution: {integrity: sha512-FUH39xp3SBPnxWvd5iib1X8XY7J0K0X7d93sie9CJg2PO8/7gmg89Nve6OjItK53/MlAushNNxteBYfM6DEuoA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} shebang-command@2.0.0: @@ -7799,6 +7828,10 @@ packages: resolution: {integrity: sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==} engines: {node: '>= 8'} + source-map@0.7.6: + resolution: {integrity: sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==} + engines: {node: '>= 12'} + source-map@0.8.0-beta.0: resolution: {integrity: sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==} engines: {node: '>= 8'} @@ -8712,7 +8745,7 @@ snapshots: '@jridgewell/gen-mapping': 0.3.12 '@jridgewell/trace-mapping': 0.3.29 - '@antfu/eslint-config@5.0.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@next/eslint-plugin-next@15.5.0)(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.35.0(jiti@2.6.0)))(eslint-plugin-react-refresh@0.4.20(eslint@9.35.0(jiti@2.6.0)))(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': + '@antfu/eslint-config@5.0.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@next/eslint-plugin-next@15.5.4)(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.35.0(jiti@2.6.0)))(eslint-plugin-react-refresh@0.4.20(eslint@9.35.0(jiti@2.6.0)))(eslint@9.35.0(jiti@2.6.0))(typescript@5.8.3)': dependencies: '@antfu/install-pkg': 1.1.0 '@clack/prompts': 0.11.0 @@ -8753,7 +8786,7 @@ snapshots: yaml-eslint-parser: 1.3.0 optionalDependencies: '@eslint-react/eslint-plugin': 1.52.3(eslint@9.35.0(jiti@2.6.0))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3) - '@next/eslint-plugin-next': 15.5.0 + 
'@next/eslint-plugin-next': 15.5.4 eslint-plugin-react-hooks: 5.2.0(eslint@9.35.0(jiti@2.6.0)) eslint-plugin-react-refresh: 0.4.20(eslint@9.35.0(jiti@2.6.0)) transitivePeerDependencies: @@ -9606,6 +9639,8 @@ snapshots: '@babel/runtime@7.27.6': {} + '@babel/runtime@7.28.4': {} + '@babel/template@7.27.2': dependencies: '@babel/code-frame': 7.27.1 @@ -9757,11 +9792,6 @@ snapshots: tslib: 2.8.1 optional: true - '@emnapi/runtime@1.4.4': - dependencies: - tslib: 2.8.1 - optional: true - '@emnapi/runtime@1.5.0': dependencies: tslib: 2.8.1 @@ -10042,17 +10072,32 @@ snapshots: dependencies: '@floating-ui/utils': 0.2.10 + '@floating-ui/core@1.7.3': + dependencies: + '@floating-ui/utils': 0.2.10 + '@floating-ui/dom@1.7.2': dependencies: '@floating-ui/core': 1.7.2 '@floating-ui/utils': 0.2.10 + '@floating-ui/dom@1.7.4': + dependencies: + '@floating-ui/core': 1.7.3 + '@floating-ui/utils': 0.2.10 + '@floating-ui/react-dom@2.1.4(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@floating-ui/dom': 1.7.2 react: 19.1.1 react-dom: 19.1.1(react@19.1.1) + '@floating-ui/react-dom@2.1.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + dependencies: + '@floating-ui/dom': 1.7.4 + react: 19.1.1 + react-dom: 19.1.1(react@19.1.1) + '@floating-ui/react@0.26.28(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: '@floating-ui/react-dom': 2.1.4(react-dom@19.1.1(react@19.1.1))(react@19.1.1) @@ -10061,6 +10106,14 @@ snapshots: react-dom: 19.1.1(react@19.1.1) tabbable: 6.2.0 + '@floating-ui/react@0.27.16(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + dependencies: + '@floating-ui/react-dom': 2.1.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@floating-ui/utils': 0.2.10 + react: 19.1.1 + react-dom: 19.1.1(react@19.1.1) + tabbable: 6.2.0 + '@floating-ui/utils@0.2.10': {} '@formatjs/intl-localematcher@0.5.10': @@ -10121,14 +10174,17 @@ snapshots: transitivePeerDependencies: - supports-color + '@img/colour@1.0.0': + optional: true + '@img/sharp-darwin-arm64@0.33.5': optionalDependencies: '@img/sharp-libvips-darwin-arm64': 1.0.4 optional: true - '@img/sharp-darwin-arm64@0.34.3': + '@img/sharp-darwin-arm64@0.34.4': optionalDependencies: - '@img/sharp-libvips-darwin-arm64': 1.2.0 + '@img/sharp-libvips-darwin-arm64': 1.2.3 optional: true '@img/sharp-darwin-x64@0.33.5': @@ -10136,60 +10192,60 @@ snapshots: '@img/sharp-libvips-darwin-x64': 1.0.4 optional: true - '@img/sharp-darwin-x64@0.34.3': + '@img/sharp-darwin-x64@0.34.4': optionalDependencies: - '@img/sharp-libvips-darwin-x64': 1.2.0 + '@img/sharp-libvips-darwin-x64': 1.2.3 optional: true '@img/sharp-libvips-darwin-arm64@1.0.4': optional: true - '@img/sharp-libvips-darwin-arm64@1.2.0': + '@img/sharp-libvips-darwin-arm64@1.2.3': optional: true '@img/sharp-libvips-darwin-x64@1.0.4': optional: true - '@img/sharp-libvips-darwin-x64@1.2.0': + '@img/sharp-libvips-darwin-x64@1.2.3': optional: true '@img/sharp-libvips-linux-arm64@1.0.4': optional: true - '@img/sharp-libvips-linux-arm64@1.2.0': + '@img/sharp-libvips-linux-arm64@1.2.3': optional: true '@img/sharp-libvips-linux-arm@1.0.5': optional: true - '@img/sharp-libvips-linux-arm@1.2.0': + '@img/sharp-libvips-linux-arm@1.2.3': optional: true - '@img/sharp-libvips-linux-ppc64@1.2.0': + '@img/sharp-libvips-linux-ppc64@1.2.3': optional: true '@img/sharp-libvips-linux-s390x@1.0.4': optional: true - '@img/sharp-libvips-linux-s390x@1.2.0': + '@img/sharp-libvips-linux-s390x@1.2.3': optional: true '@img/sharp-libvips-linux-x64@1.0.4': optional: true - '@img/sharp-libvips-linux-x64@1.2.0': + 
'@img/sharp-libvips-linux-x64@1.2.3': optional: true '@img/sharp-libvips-linuxmusl-arm64@1.0.4': optional: true - '@img/sharp-libvips-linuxmusl-arm64@1.2.0': + '@img/sharp-libvips-linuxmusl-arm64@1.2.3': optional: true '@img/sharp-libvips-linuxmusl-x64@1.0.4': optional: true - '@img/sharp-libvips-linuxmusl-x64@1.2.0': + '@img/sharp-libvips-linuxmusl-x64@1.2.3': optional: true '@img/sharp-linux-arm64@0.33.5': @@ -10197,9 +10253,9 @@ snapshots: '@img/sharp-libvips-linux-arm64': 1.0.4 optional: true - '@img/sharp-linux-arm64@0.34.3': + '@img/sharp-linux-arm64@0.34.4': optionalDependencies: - '@img/sharp-libvips-linux-arm64': 1.2.0 + '@img/sharp-libvips-linux-arm64': 1.2.3 optional: true '@img/sharp-linux-arm@0.33.5': @@ -10207,14 +10263,14 @@ snapshots: '@img/sharp-libvips-linux-arm': 1.0.5 optional: true - '@img/sharp-linux-arm@0.34.3': + '@img/sharp-linux-arm@0.34.4': optionalDependencies: - '@img/sharp-libvips-linux-arm': 1.2.0 + '@img/sharp-libvips-linux-arm': 1.2.3 optional: true - '@img/sharp-linux-ppc64@0.34.3': + '@img/sharp-linux-ppc64@0.34.4': optionalDependencies: - '@img/sharp-libvips-linux-ppc64': 1.2.0 + '@img/sharp-libvips-linux-ppc64': 1.2.3 optional: true '@img/sharp-linux-s390x@0.33.5': @@ -10222,9 +10278,9 @@ snapshots: '@img/sharp-libvips-linux-s390x': 1.0.4 optional: true - '@img/sharp-linux-s390x@0.34.3': + '@img/sharp-linux-s390x@0.34.4': optionalDependencies: - '@img/sharp-libvips-linux-s390x': 1.2.0 + '@img/sharp-libvips-linux-s390x': 1.2.3 optional: true '@img/sharp-linux-x64@0.33.5': @@ -10232,9 +10288,9 @@ snapshots: '@img/sharp-libvips-linux-x64': 1.0.4 optional: true - '@img/sharp-linux-x64@0.34.3': + '@img/sharp-linux-x64@0.34.4': optionalDependencies: - '@img/sharp-libvips-linux-x64': 1.2.0 + '@img/sharp-libvips-linux-x64': 1.2.3 optional: true '@img/sharp-linuxmusl-arm64@0.33.5': @@ -10242,9 +10298,9 @@ snapshots: '@img/sharp-libvips-linuxmusl-arm64': 1.0.4 optional: true - '@img/sharp-linuxmusl-arm64@0.34.3': + '@img/sharp-linuxmusl-arm64@0.34.4': optionalDependencies: - '@img/sharp-libvips-linuxmusl-arm64': 1.2.0 + '@img/sharp-libvips-linuxmusl-arm64': 1.2.3 optional: true '@img/sharp-linuxmusl-x64@0.33.5': @@ -10252,9 +10308,9 @@ snapshots: '@img/sharp-libvips-linuxmusl-x64': 1.0.4 optional: true - '@img/sharp-linuxmusl-x64@0.34.3': + '@img/sharp-linuxmusl-x64@0.34.4': optionalDependencies: - '@img/sharp-libvips-linuxmusl-x64': 1.2.0 + '@img/sharp-libvips-linuxmusl-x64': 1.2.3 optional: true '@img/sharp-wasm32@0.33.5': @@ -10262,24 +10318,24 @@ snapshots: '@emnapi/runtime': 1.5.0 optional: true - '@img/sharp-wasm32@0.34.3': + '@img/sharp-wasm32@0.34.4': dependencies: - '@emnapi/runtime': 1.4.4 + '@emnapi/runtime': 1.5.0 optional: true - '@img/sharp-win32-arm64@0.34.3': + '@img/sharp-win32-arm64@0.34.4': optional: true '@img/sharp-win32-ia32@0.33.5': optional: true - '@img/sharp-win32-ia32@0.34.3': + '@img/sharp-win32-ia32@0.34.4': optional: true '@img/sharp-win32-x64@0.33.5': optional: true - '@img/sharp-win32-x64@0.34.3': + '@img/sharp-win32-x64@0.34.4': optional: true '@isaacs/balanced-match@4.0.1': {} @@ -10496,153 +10552,165 @@ snapshots: '@jridgewell/sourcemap-codec': 1.5.5 optional: true - '@lexical/clipboard@0.30.0': + '@lexical/clipboard@0.36.2': dependencies: - '@lexical/html': 0.30.0 - '@lexical/list': 0.30.0 - '@lexical/selection': 0.30.0 - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + '@lexical/html': 0.36.2 + '@lexical/list': 0.36.2 + '@lexical/selection': 0.36.2 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 - '@lexical/code@0.30.0': + 
'@lexical/code@0.36.2': dependencies: - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 prismjs: 1.30.0 - '@lexical/devtools-core@0.30.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@lexical/devtools-core@0.36.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': dependencies: - '@lexical/html': 0.30.0 - '@lexical/link': 0.30.0 - '@lexical/mark': 0.30.0 - '@lexical/table': 0.30.0 - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + '@lexical/html': 0.36.2 + '@lexical/link': 0.36.2 + '@lexical/mark': 0.36.2 + '@lexical/table': 0.36.2 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - '@lexical/dragon@0.30.0': + '@lexical/dragon@0.36.2': dependencies: - lexical: 0.30.0 + '@lexical/extension': 0.36.2 + lexical: 0.36.2 - '@lexical/hashtag@0.30.0': + '@lexical/extension@0.36.2': dependencies: - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + '@lexical/utils': 0.36.2 + '@preact/signals-core': 1.12.1 + lexical: 0.36.2 - '@lexical/history@0.30.0': + '@lexical/hashtag@0.36.2': dependencies: - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + '@lexical/text': 0.36.2 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 - '@lexical/html@0.30.0': + '@lexical/history@0.36.2': dependencies: - '@lexical/selection': 0.30.0 - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + '@lexical/extension': 0.36.2 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 - '@lexical/link@0.30.0': + '@lexical/html@0.36.2': dependencies: - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + '@lexical/selection': 0.36.2 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 - '@lexical/list@0.30.0': + '@lexical/link@0.36.2': dependencies: - '@lexical/selection': 0.30.0 - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + '@lexical/extension': 0.36.2 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 - '@lexical/mark@0.30.0': + '@lexical/list@0.36.2': dependencies: - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + '@lexical/extension': 0.36.2 + '@lexical/selection': 0.36.2 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 - '@lexical/markdown@0.30.0': + '@lexical/mark@0.36.2': dependencies: - '@lexical/code': 0.30.0 - '@lexical/link': 0.30.0 - '@lexical/list': 0.30.0 - '@lexical/rich-text': 0.30.0 - '@lexical/text': 0.30.0 - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 - '@lexical/offset@0.30.0': + '@lexical/markdown@0.36.2': dependencies: - lexical: 0.30.0 + '@lexical/code': 0.36.2 + '@lexical/link': 0.36.2 + '@lexical/list': 0.36.2 + '@lexical/rich-text': 0.36.2 + '@lexical/text': 0.36.2 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 - '@lexical/overflow@0.30.0': + '@lexical/offset@0.36.2': dependencies: - lexical: 0.30.0 + lexical: 0.36.2 - '@lexical/plain-text@0.30.0': + '@lexical/overflow@0.36.2': dependencies: - '@lexical/clipboard': 0.30.0 - '@lexical/selection': 0.30.0 - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + lexical: 0.36.2 - '@lexical/react@0.30.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(yjs@13.6.27)': + '@lexical/plain-text@0.36.2': dependencies: - '@lexical/devtools-core': 0.30.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@lexical/dragon': 0.30.0 - '@lexical/hashtag': 0.30.0 - '@lexical/history': 0.30.0 - '@lexical/link': 0.30.0 - '@lexical/list': 0.30.0 - '@lexical/mark': 0.30.0 - '@lexical/markdown': 0.30.0 - '@lexical/overflow': 0.30.0 - '@lexical/plain-text': 0.30.0 - '@lexical/rich-text': 0.30.0 - '@lexical/table': 0.30.0 - '@lexical/text': 0.30.0 - '@lexical/utils': 0.30.0 - '@lexical/yjs': 0.30.0(yjs@13.6.27) - lexical: 0.30.0 + 
'@lexical/clipboard': 0.36.2 + '@lexical/dragon': 0.36.2 + '@lexical/selection': 0.36.2 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 + + '@lexical/react@0.36.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(yjs@13.6.27)': + dependencies: + '@floating-ui/react': 0.27.16(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@lexical/devtools-core': 0.36.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@lexical/dragon': 0.36.2 + '@lexical/extension': 0.36.2 + '@lexical/hashtag': 0.36.2 + '@lexical/history': 0.36.2 + '@lexical/link': 0.36.2 + '@lexical/list': 0.36.2 + '@lexical/mark': 0.36.2 + '@lexical/markdown': 0.36.2 + '@lexical/overflow': 0.36.2 + '@lexical/plain-text': 0.36.2 + '@lexical/rich-text': 0.36.2 + '@lexical/table': 0.36.2 + '@lexical/text': 0.36.2 + '@lexical/utils': 0.36.2 + '@lexical/yjs': 0.36.2(yjs@13.6.27) + lexical: 0.36.2 react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - react-error-boundary: 3.1.4(react@19.1.1) + react-error-boundary: 6.0.0(react@19.1.1) transitivePeerDependencies: - yjs - '@lexical/rich-text@0.30.0': + '@lexical/rich-text@0.36.2': dependencies: - '@lexical/clipboard': 0.30.0 - '@lexical/selection': 0.30.0 - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + '@lexical/clipboard': 0.36.2 + '@lexical/dragon': 0.36.2 + '@lexical/selection': 0.36.2 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 - '@lexical/selection@0.30.0': + '@lexical/selection@0.36.2': dependencies: - lexical: 0.30.0 + lexical: 0.36.2 - '@lexical/table@0.30.0': + '@lexical/table@0.36.2': dependencies: - '@lexical/clipboard': 0.30.0 - '@lexical/utils': 0.30.0 - lexical: 0.30.0 + '@lexical/clipboard': 0.36.2 + '@lexical/extension': 0.36.2 + '@lexical/utils': 0.36.2 + lexical: 0.36.2 - '@lexical/text@0.30.0': + '@lexical/text@0.36.2': dependencies: - lexical: 0.30.0 + lexical: 0.36.2 - '@lexical/text@0.35.0': + '@lexical/utils@0.36.2': dependencies: - lexical: 0.35.0 + '@lexical/list': 0.36.2 + '@lexical/selection': 0.36.2 + '@lexical/table': 0.36.2 + lexical: 0.36.2 - '@lexical/utils@0.30.0': + '@lexical/yjs@0.36.2(yjs@13.6.27)': dependencies: - '@lexical/list': 0.30.0 - '@lexical/selection': 0.30.0 - '@lexical/table': 0.30.0 - lexical: 0.30.0 - - '@lexical/yjs@0.30.0(yjs@13.6.27)': - dependencies: - '@lexical/offset': 0.30.0 - '@lexical/selection': 0.30.0 - lexical: 0.30.0 + '@lexical/offset': 0.36.2 + '@lexical/selection': 0.36.2 + lexical: 0.36.2 yjs: 13.6.27 '@mapbox/node-pre-gyp@1.0.11': @@ -10735,48 +10803,48 @@ snapshots: '@tybys/wasm-util': 0.10.1 optional: true - '@next/bundle-analyzer@15.5.3': + '@next/bundle-analyzer@15.5.4': dependencies: webpack-bundle-analyzer: 4.10.1 transitivePeerDependencies: - bufferutil - utf-8-validate - '@next/env@15.5.0': {} + '@next/env@15.5.4': {} - '@next/eslint-plugin-next@15.5.0': + '@next/eslint-plugin-next@15.5.4': dependencies: fast-glob: 3.3.1 - '@next/mdx@15.5.0(@mdx-js/loader@3.1.0(acorn@8.15.0)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)))(@mdx-js/react@3.1.0(@types/react@19.1.11)(react@19.1.1))': + '@next/mdx@15.5.4(@mdx-js/loader@3.1.0(acorn@8.15.0)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)))(@mdx-js/react@3.1.0(@types/react@19.1.11)(react@19.1.1))': dependencies: - source-map: 0.7.4 + source-map: 0.7.6 optionalDependencies: '@mdx-js/loader': 3.1.0(acorn@8.15.0)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) '@mdx-js/react': 3.1.0(@types/react@19.1.11)(react@19.1.1) - '@next/swc-darwin-arm64@15.5.0': + '@next/swc-darwin-arm64@15.5.4': optional: true - '@next/swc-darwin-x64@15.5.0': + '@next/swc-darwin-x64@15.5.4': optional: true - 
'@next/swc-linux-arm64-gnu@15.5.0': + '@next/swc-linux-arm64-gnu@15.5.4': optional: true - '@next/swc-linux-arm64-musl@15.5.0': + '@next/swc-linux-arm64-musl@15.5.4': optional: true - '@next/swc-linux-x64-gnu@15.5.0': + '@next/swc-linux-x64-gnu@15.5.4': optional: true - '@next/swc-linux-x64-musl@15.5.0': + '@next/swc-linux-x64-musl@15.5.4': optional: true - '@next/swc-win32-arm64-msvc@15.5.0': + '@next/swc-win32-arm64-msvc@15.5.4': optional: true - '@next/swc-win32-x64-msvc@15.5.0': + '@next/swc-win32-x64-msvc@15.5.4': optional: true '@nodelib/fs.scandir@2.1.5': @@ -11042,6 +11110,8 @@ snapshots: '@polka/url@1.0.0-next.29': {} + '@preact/signals-core@1.12.1': {} + '@radix-ui/primitive@1.1.2': {} '@radix-ui/react-compose-refs@1.1.2(@types/react@19.1.11)(react@19.1.1)': @@ -11607,7 +11677,7 @@ snapshots: dependencies: storybook: 8.5.0 - '@storybook/nextjs@8.5.0(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1)(storybook@8.5.0)(type-fest@2.19.0)(typescript@5.8.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))': + '@storybook/nextjs@8.5.0(esbuild@0.25.0)(next@15.5.4(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1)(storybook@8.5.0)(type-fest@2.19.0)(typescript@5.8.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))': dependencies: '@babel/core': 7.28.3 '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.28.3) @@ -11633,7 +11703,7 @@ snapshots: find-up: 5.0.0 image-size: 1.2.1 loader-utils: 3.3.1 - next: 15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1) + next: 15.5.4(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1) node-polyfill-webpack-plugin: 2.0.1(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) pnp-webpack-plugin: 1.7.0(typescript@5.8.3) postcss: 8.5.6 @@ -12986,6 +13056,8 @@ snapshots: caniuse-lite@1.0.30001727: {} + caniuse-lite@1.0.30001746: {} + canvas@2.11.2: dependencies: '@mapbox/node-pre-gyp': 1.0.11 @@ -13627,6 +13699,9 @@ snapshots: detect-libc@2.1.0: {} + detect-libc@2.1.1: + optional: true + detect-newline@3.1.0: {} detect-node-es@1.1.0: {} @@ -15545,9 +15620,7 @@ snapshots: prelude-ls: 1.2.1 type-check: 0.4.0 - lexical@0.30.0: {} - - lexical@0.35.0: {} + lexical@0.36.2: {} lib0@0.2.114: dependencies: @@ -16341,12 +16414,12 @@ snapshots: neo-async@2.6.2: {} - next-pwa@5.6.0(@babel/core@7.28.3)(@types/babel__core@7.20.5)(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(uglify-js@3.19.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)): + next-pwa@5.6.0(@babel/core@7.28.3)(@types/babel__core@7.20.5)(esbuild@0.25.0)(next@15.5.4(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(uglify-js@3.19.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)): dependencies: babel-loader: 8.4.1(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) clean-webpack-plugin: 4.0.0(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) globby: 11.1.0 - next: 15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1) + next: 15.5.4(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1) terser-webpack-plugin: 
5.3.14(esbuild@0.25.0)(uglify-js@3.19.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) workbox-webpack-plugin: 6.6.0(@types/babel__core@7.20.5)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) workbox-window: 6.6.0 @@ -16364,26 +16437,26 @@ snapshots: react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1): + next@15.5.4(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1): dependencies: - '@next/env': 15.5.0 + '@next/env': 15.5.4 '@swc/helpers': 0.5.15 - caniuse-lite: 1.0.30001727 + caniuse-lite: 1.0.30001746 postcss: 8.4.31 react: 19.1.1 react-dom: 19.1.1(react@19.1.1) styled-jsx: 5.1.6(@babel/core@7.28.3)(react@19.1.1) optionalDependencies: - '@next/swc-darwin-arm64': 15.5.0 - '@next/swc-darwin-x64': 15.5.0 - '@next/swc-linux-arm64-gnu': 15.5.0 - '@next/swc-linux-arm64-musl': 15.5.0 - '@next/swc-linux-x64-gnu': 15.5.0 - '@next/swc-linux-x64-musl': 15.5.0 - '@next/swc-win32-arm64-msvc': 15.5.0 - '@next/swc-win32-x64-msvc': 15.5.0 + '@next/swc-darwin-arm64': 15.5.4 + '@next/swc-darwin-x64': 15.5.4 + '@next/swc-linux-arm64-gnu': 15.5.4 + '@next/swc-linux-arm64-musl': 15.5.4 + '@next/swc-linux-x64-gnu': 15.5.4 + '@next/swc-linux-x64-musl': 15.5.4 + '@next/swc-win32-arm64-msvc': 15.5.4 + '@next/swc-win32-x64-msvc': 15.5.4 sass: 1.92.1 - sharp: 0.34.3 + sharp: 0.34.4 transitivePeerDependencies: - '@babel/core' - babel-plugin-macros @@ -17004,9 +17077,9 @@ snapshots: react-dom: 19.1.1(react@19.1.1) tslib: 2.8.1 - react-error-boundary@3.1.4(react@19.1.1): + react-error-boundary@6.0.0(react@19.1.1): dependencies: - '@babel/runtime': 7.27.6 + '@babel/runtime': 7.28.4 react: 19.1.1 react-fast-compare@3.2.2: {} @@ -17568,34 +17641,34 @@ snapshots: '@img/sharp-win32-ia32': 0.33.5 '@img/sharp-win32-x64': 0.33.5 - sharp@0.34.3: + sharp@0.34.4: dependencies: - color: 4.2.3 - detect-libc: 2.1.0 + '@img/colour': 1.0.0 + detect-libc: 2.1.1 semver: 7.7.2 optionalDependencies: - '@img/sharp-darwin-arm64': 0.34.3 - '@img/sharp-darwin-x64': 0.34.3 - '@img/sharp-libvips-darwin-arm64': 1.2.0 - '@img/sharp-libvips-darwin-x64': 1.2.0 - '@img/sharp-libvips-linux-arm': 1.2.0 - '@img/sharp-libvips-linux-arm64': 1.2.0 - '@img/sharp-libvips-linux-ppc64': 1.2.0 - '@img/sharp-libvips-linux-s390x': 1.2.0 - '@img/sharp-libvips-linux-x64': 1.2.0 - '@img/sharp-libvips-linuxmusl-arm64': 1.2.0 - '@img/sharp-libvips-linuxmusl-x64': 1.2.0 - '@img/sharp-linux-arm': 0.34.3 - '@img/sharp-linux-arm64': 0.34.3 - '@img/sharp-linux-ppc64': 0.34.3 - '@img/sharp-linux-s390x': 0.34.3 - '@img/sharp-linux-x64': 0.34.3 - '@img/sharp-linuxmusl-arm64': 0.34.3 - '@img/sharp-linuxmusl-x64': 0.34.3 - '@img/sharp-wasm32': 0.34.3 - '@img/sharp-win32-arm64': 0.34.3 - '@img/sharp-win32-ia32': 0.34.3 - '@img/sharp-win32-x64': 0.34.3 + '@img/sharp-darwin-arm64': 0.34.4 + '@img/sharp-darwin-x64': 0.34.4 + '@img/sharp-libvips-darwin-arm64': 1.2.3 + '@img/sharp-libvips-darwin-x64': 1.2.3 + '@img/sharp-libvips-linux-arm': 1.2.3 + '@img/sharp-libvips-linux-arm64': 1.2.3 + '@img/sharp-libvips-linux-ppc64': 1.2.3 + '@img/sharp-libvips-linux-s390x': 1.2.3 + '@img/sharp-libvips-linux-x64': 1.2.3 + '@img/sharp-libvips-linuxmusl-arm64': 1.2.3 + '@img/sharp-libvips-linuxmusl-x64': 1.2.3 + '@img/sharp-linux-arm': 0.34.4 + '@img/sharp-linux-arm64': 0.34.4 + '@img/sharp-linux-ppc64': 0.34.4 + '@img/sharp-linux-s390x': 0.34.4 + '@img/sharp-linux-x64': 0.34.4 + '@img/sharp-linuxmusl-arm64': 0.34.4 + '@img/sharp-linuxmusl-x64': 0.34.4 + 
'@img/sharp-wasm32': 0.34.4 + '@img/sharp-win32-arm64': 0.34.4 + '@img/sharp-win32-ia32': 0.34.4 + '@img/sharp-win32-x64': 0.34.4 optional: true shebang-command@2.0.0: @@ -17666,6 +17739,8 @@ snapshots: source-map@0.7.4: {} + source-map@0.7.6: {} + source-map@0.8.0-beta.0: dependencies: whatwg-url: 7.1.0 From 787251f00eb256c7be24b2a6f4ff03c90cb0bded Mon Sep 17 00:00:00 2001 From: crazywoola <100913391+crazywoola@users.noreply.github.com> Date: Thu, 2 Oct 2025 18:46:33 +0800 Subject: [PATCH 117/126] =?UTF-8?q?Revert=20"fix:=20sync=20FileUploader=20?= =?UTF-8?q?context=20with=20props=20to=20fix=20inconsistent=20file=20param?= =?UTF-8?q?eter=20state=20in=20=E2=80=9CView=20cached=20variables=E2=80=9D?= =?UTF-8?q?."=20(#26548)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../components/base/file-uploader/store.tsx | 31 ++----------------- 1 file changed, 3 insertions(+), 28 deletions(-) diff --git a/web/app/components/base/file-uploader/store.tsx b/web/app/components/base/file-uploader/store.tsx index 7f7cfd5693..cddfdf6f27 100644 --- a/web/app/components/base/file-uploader/store.tsx +++ b/web/app/components/base/file-uploader/store.tsx @@ -1,7 +1,6 @@ import { createContext, useContext, - useEffect, useRef, } from 'react' import { @@ -19,11 +18,13 @@ type Shape = { export const createFileStore = ( value: FileEntity[] = [], + onChange?: (files: FileEntity[]) => void, ) => { return create<Shape>(set => ({ files: value ? [...value] : [], setFiles: (files) => { set({ files }) + onChange?.(files) }, })) } @@ -54,35 +55,9 @@ export const FileContextProvider = ({ onChange, }: FileProviderProps) => { const storeRef = useRef<FileStore | undefined>(undefined) - const onChangeRef = useRef<FileProviderProps['onChange']>(onChange) - const isSyncingRef = useRef(false) if (!storeRef.current) - storeRef.current = createFileStore(value) - - // keep latest onChange - useEffect(() => { - onChangeRef.current = onChange - }, [onChange]) - - // subscribe to store changes and call latest onChange - useEffect(() => { - const store = storeRef.current! - const unsubscribe = store.subscribe((state: Shape) => { - if (isSyncingRef.current) return - onChangeRef.current?.(state.files) - }) - return unsubscribe - }, []) - - // sync external value into internal store when value changes - useEffect(() => { - const store = storeRef.current! - const nextFiles = value ? 
[...value] : [] - isSyncingRef.current = true - store.setState({ files: nextFiles }) - isSyncingRef.current = false - }, [value]) + storeRef.current = createFileStore(value, onChange) return ( <FileContext.Provider value={storeRef.current}> From f5161d9add2f5d995bce7149eae6b907bdb516d7 Mon Sep 17 00:00:00 2001 From: Asuka Minato <i@asukaminato.eu.org> Date: Thu, 2 Oct 2025 19:51:36 +0900 Subject: [PATCH 118/126] Exclude tests directory from pyright type checking (#26496) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/controllers/console/app/app.py | 13 +- api/controllers/console/datasets/datasets.py | 11 +- .../service_api/dataset/dataset.py | 11 +- api/libs/validators.py | 5 + api/pyrightconfig.json | 2 +- .../app/test_chat_message_permissions.py | 23 ++- .../app/test_description_validation.py | 83 +++----- .../app/test_model_config_permissions.py | 23 ++- .../app/test_description_validation.py | 195 +++--------------- 9 files changed, 98 insertions(+), 268 deletions(-) create mode 100644 api/libs/validators.py diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index 2d2e4b448a..23b8e2c5a2 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -19,6 +19,7 @@ from core.ops.ops_trace_manager import OpsTraceManager from extensions.ext_database import db from fields.app_fields import app_detail_fields, app_detail_fields_with_site, app_pagination_fields from libs.login import login_required +from libs.validators import validate_description_length from models import Account, App from services.app_dsl_service import AppDslService, ImportMode from services.app_service import AppService @@ -28,12 +29,6 @@ from services.feature_service import FeatureService ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"] -def _validate_description_length(description): - if description and len(description) > 400: - raise ValueError("Description cannot exceed 400 characters.") - return description - - @console_ns.route("/apps") class AppListApi(Resource): @api.doc("list_apps") @@ -138,7 +133,7 @@ class AppListApi(Resource): """Create app""" parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, location="json") - parser.add_argument("description", type=_validate_description_length, location="json") + parser.add_argument("description", type=validate_description_length, location="json") parser.add_argument("mode", type=str, choices=ALLOW_CREATE_APP_MODES, location="json") parser.add_argument("icon_type", type=str, location="json") parser.add_argument("icon", type=str, location="json") @@ -219,7 +214,7 @@ class AppApi(Resource): parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, nullable=False, location="json") - parser.add_argument("description", type=_validate_description_length, location="json") + parser.add_argument("description", type=validate_description_length, location="json") parser.add_argument("icon_type", type=str, location="json") parser.add_argument("icon", type=str, location="json") parser.add_argument("icon_background", type=str, location="json") @@ -297,7 +292,7 @@ class AppCopyApi(Resource): parser = reqparse.RequestParser() parser.add_argument("name", type=str, location="json") - parser.add_argument("description", type=_validate_description_length, location="json") + 
parser.add_argument("description", type=validate_description_length, location="json") parser.add_argument("icon_type", type=str, location="json") parser.add_argument("icon", type=str, location="json") parser.add_argument("icon_background", type=str, location="json") diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 60eedd2197..ac088b790e 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -31,6 +31,7 @@ from fields.app_fields import related_app_list from fields.dataset_fields import dataset_detail_fields, dataset_query_detail_fields from fields.document_fields import document_status_fields from libs.login import login_required +from libs.validators import validate_description_length from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile from models.account import Account from models.dataset import DatasetPermissionEnum @@ -44,12 +45,6 @@ def _validate_name(name: str) -> str: return name -def _validate_description_length(description): - if description and len(description) > 400: - raise ValueError("Description cannot exceed 400 characters.") - return description - - @console_ns.route("/datasets") class DatasetListApi(Resource): @api.doc("get_datasets") @@ -149,7 +144,7 @@ class DatasetListApi(Resource): ) parser.add_argument( "description", - type=_validate_description_length, + type=validate_description_length, nullable=True, required=False, default="", @@ -290,7 +285,7 @@ class DatasetApi(Resource): help="type is required. Name must be between 1 to 40 characters.", type=_validate_name, ) - parser.add_argument("description", location="json", store_missing=False, type=_validate_description_length) + parser.add_argument("description", location="json", store_missing=False, type=validate_description_length) parser.add_argument( "indexing_technique", type=str, diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index 72ab05cec0..961b96db91 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ b/api/controllers/service_api/dataset/dataset.py @@ -17,6 +17,7 @@ from core.provider_manager import ProviderManager from fields.dataset_fields import dataset_detail_fields from fields.tag_fields import build_dataset_tag_fields from libs.login import current_user +from libs.validators import validate_description_length from models.account import Account from models.dataset import Dataset, DatasetPermissionEnum from models.provider_ids import ModelProviderID @@ -31,12 +32,6 @@ def _validate_name(name): return name -def _validate_description_length(description): - if description and len(description) > 400: - raise ValueError("Description cannot exceed 400 characters.") - return description - - # Define parsers for dataset operations dataset_create_parser = reqparse.RequestParser() dataset_create_parser.add_argument( @@ -48,7 +43,7 @@ dataset_create_parser.add_argument( ) dataset_create_parser.add_argument( "description", - type=_validate_description_length, + type=validate_description_length, nullable=True, required=False, default="", @@ -101,7 +96,7 @@ dataset_update_parser.add_argument( type=_validate_name, ) dataset_update_parser.add_argument( - "description", location="json", store_missing=False, type=_validate_description_length + "description", location="json", store_missing=False, type=validate_description_length ) dataset_update_parser.add_argument( "indexing_technique", diff --git 
a/api/libs/validators.py b/api/libs/validators.py new file mode 100644 index 0000000000..4d762e8116 --- /dev/null +++ b/api/libs/validators.py @@ -0,0 +1,5 @@ +def validate_description_length(description: str | None) -> str | None: + """Validate description length.""" + if description and len(description) > 400: + raise ValueError("Description cannot exceed 400 characters.") + return description diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json index 8c35dc7abb..00dda8b087 100644 --- a/api/pyrightconfig.json +++ b/api/pyrightconfig.json @@ -1,8 +1,8 @@ { "include": ["."], "exclude": [ - ".venv", "tests/", + ".venv", "migrations/", "core/rag", "extensions", diff --git a/api/tests/integration_tests/controllers/console/app/test_chat_message_permissions.py b/api/tests/integration_tests/controllers/console/app/test_chat_message_permissions.py index c8d353ad0a..da1524ff2e 100644 --- a/api/tests/integration_tests/controllers/console/app/test_chat_message_permissions.py +++ b/api/tests/integration_tests/controllers/console/app/test_chat_message_permissions.py @@ -11,8 +11,8 @@ from controllers.console.app import completion as completion_api from controllers.console.app import message as message_api from controllers.console.app import wraps from libs.datetime_utils import naive_utc_now -from models import Account, App, Tenant -from models.account import TenantAccountRole +from models import App, Tenant +from models.account import Account, TenantAccountJoin, TenantAccountRole from models.model import AppMode from services.app_generate_service import AppGenerateService @@ -31,9 +31,8 @@ class TestChatMessageApiPermissions: return app @pytest.fixture - def mock_account(self): + def mock_account(self, monkeypatch: pytest.MonkeyPatch): """Create a mock Account for testing.""" - account = Account() account.id = str(uuid.uuid4()) account.name = "Test User" @@ -42,12 +41,24 @@ class TestChatMessageApiPermissions: account.created_at = naive_utc_now() account.updated_at = naive_utc_now() - # Create mock tenant tenant = Tenant() tenant.id = str(uuid.uuid4()) tenant.name = "Test Tenant" - account._current_tenant = tenant + mock_session_instance = mock.Mock() + + mock_tenant_join = TenantAccountJoin(role=TenantAccountRole.OWNER) + monkeypatch.setattr(mock_session_instance, "scalar", mock.Mock(return_value=mock_tenant_join)) + + mock_scalars_result = mock.Mock() + mock_scalars_result.one.return_value = tenant + monkeypatch.setattr(mock_session_instance, "scalars", mock.Mock(return_value=mock_scalars_result)) + + mock_session_context = mock.Mock() + mock_session_context.__enter__.return_value = mock_session_instance + monkeypatch.setattr("models.account.Session", lambda _, expire_on_commit: mock_session_context) + + account.current_tenant = tenant return account @pytest.mark.parametrize( diff --git a/api/tests/integration_tests/controllers/console/app/test_description_validation.py b/api/tests/integration_tests/controllers/console/app/test_description_validation.py index 2d0ceac760..8160807e48 100644 --- a/api/tests/integration_tests/controllers/console/app/test_description_validation.py +++ b/api/tests/integration_tests/controllers/console/app/test_description_validation.py @@ -18,124 +18,87 @@ class TestAppDescriptionValidationUnit: """Unit tests for description validation function""" def test_validate_description_length_function(self): - """Test the _validate_description_length function directly""" - from controllers.console.app.app import _validate_description_length + """Test the 
validate_description_length function directly""" + from libs.validators import validate_description_length # Test valid descriptions - assert _validate_description_length("") == "" - assert _validate_description_length("x" * 400) == "x" * 400 - assert _validate_description_length(None) is None + assert validate_description_length("") == "" + assert validate_description_length("x" * 400) == "x" * 400 + assert validate_description_length(None) is None # Test invalid descriptions with pytest.raises(ValueError) as exc_info: - _validate_description_length("x" * 401) + validate_description_length("x" * 401) assert "Description cannot exceed 400 characters." in str(exc_info.value) with pytest.raises(ValueError) as exc_info: - _validate_description_length("x" * 500) + validate_description_length("x" * 500) assert "Description cannot exceed 400 characters." in str(exc_info.value) with pytest.raises(ValueError) as exc_info: - _validate_description_length("x" * 1000) + validate_description_length("x" * 1000) assert "Description cannot exceed 400 characters." in str(exc_info.value) - def test_validation_consistency_with_dataset(self): - """Test that App and Dataset validation functions are consistent""" - from controllers.console.app.app import _validate_description_length as app_validate - from controllers.console.datasets.datasets import _validate_description_length as dataset_validate - from controllers.service_api.dataset.dataset import _validate_description_length as service_dataset_validate - - # Test same valid inputs - valid_desc = "x" * 400 - assert app_validate(valid_desc) == dataset_validate(valid_desc) == service_dataset_validate(valid_desc) - assert app_validate("") == dataset_validate("") == service_dataset_validate("") - assert app_validate(None) == dataset_validate(None) == service_dataset_validate(None) - - # Test same invalid inputs produce same error - invalid_desc = "x" * 401 - - app_error = None - dataset_error = None - service_dataset_error = None - - try: - app_validate(invalid_desc) - except ValueError as e: - app_error = str(e) - - try: - dataset_validate(invalid_desc) - except ValueError as e: - dataset_error = str(e) - - try: - service_dataset_validate(invalid_desc) - except ValueError as e: - service_dataset_error = str(e) - - assert app_error == dataset_error == service_dataset_error - assert app_error == "Description cannot exceed 400 characters." 
- def test_boundary_values(self): """Test boundary values for description validation""" - from controllers.console.app.app import _validate_description_length + from libs.validators import validate_description_length # Test exact boundary exactly_400 = "x" * 400 - assert _validate_description_length(exactly_400) == exactly_400 + assert validate_description_length(exactly_400) == exactly_400 # Test just over boundary just_over_400 = "x" * 401 with pytest.raises(ValueError): - _validate_description_length(just_over_400) + validate_description_length(just_over_400) # Test just under boundary just_under_400 = "x" * 399 - assert _validate_description_length(just_under_400) == just_under_400 + assert validate_description_length(just_under_400) == just_under_400 def test_edge_cases(self): """Test edge cases for description validation""" - from controllers.console.app.app import _validate_description_length + from libs.validators import validate_description_length # Test None input - assert _validate_description_length(None) is None + assert validate_description_length(None) is None # Test empty string - assert _validate_description_length("") == "" + assert validate_description_length("") == "" # Test single character - assert _validate_description_length("a") == "a" + assert validate_description_length("a") == "a" # Test unicode characters unicode_desc = "测试" * 200 # 400 characters in Chinese - assert _validate_description_length(unicode_desc) == unicode_desc + assert validate_description_length(unicode_desc) == unicode_desc # Test unicode over limit unicode_over = "测试" * 201 # 402 characters with pytest.raises(ValueError): - _validate_description_length(unicode_over) + validate_description_length(unicode_over) def test_whitespace_handling(self): """Test how validation handles whitespace""" - from controllers.console.app.app import _validate_description_length + from libs.validators import validate_description_length # Test description with spaces spaces_400 = " " * 400 - assert _validate_description_length(spaces_400) == spaces_400 + assert validate_description_length(spaces_400) == spaces_400 # Test description with spaces over limit spaces_401 = " " * 401 with pytest.raises(ValueError): - _validate_description_length(spaces_401) + validate_description_length(spaces_401) # Test mixed content mixed_400 = "a" * 200 + " " * 200 - assert _validate_description_length(mixed_400) == mixed_400 + assert validate_description_length(mixed_400) == mixed_400 # Test mixed over limit mixed_401 = "a" * 200 + " " * 201 with pytest.raises(ValueError): - _validate_description_length(mixed_401) + validate_description_length(mixed_401) if __name__ == "__main__": diff --git a/api/tests/integration_tests/controllers/console/app/test_model_config_permissions.py b/api/tests/integration_tests/controllers/console/app/test_model_config_permissions.py index ca4d452963..c0fd56ef63 100644 --- a/api/tests/integration_tests/controllers/console/app/test_model_config_permissions.py +++ b/api/tests/integration_tests/controllers/console/app/test_model_config_permissions.py @@ -9,8 +9,8 @@ from flask.testing import FlaskClient from controllers.console.app import model_config as model_config_api from controllers.console.app import wraps from libs.datetime_utils import naive_utc_now -from models import Account, App, Tenant -from models.account import TenantAccountRole +from models import App, Tenant +from models.account import Account, TenantAccountJoin, TenantAccountRole from models.model import AppMode from 
services.app_model_config_service import AppModelConfigService

@@ -30,9 +30,8 @@ class TestModelConfigResourcePermissions:
         return app

     @pytest.fixture
-    def mock_account(self):
+    def mock_account(self, monkeypatch: pytest.MonkeyPatch):
         """Create a mock Account for testing."""
-        account = Account()
         account.id = str(uuid.uuid4())
         account.name = "Test User"
@@ -41,12 +40,24 @@ class TestModelConfigResourcePermissions:
         account.created_at = naive_utc_now()
         account.updated_at = naive_utc_now()

-        # Create mock tenant
         tenant = Tenant()
         tenant.id = str(uuid.uuid4())
         tenant.name = "Test Tenant"

-        account._current_tenant = tenant
+        mock_session_instance = mock.Mock()
+
+        mock_tenant_join = TenantAccountJoin(role=TenantAccountRole.OWNER)
+        monkeypatch.setattr(mock_session_instance, "scalar", mock.Mock(return_value=mock_tenant_join))
+
+        mock_scalars_result = mock.Mock()
+        mock_scalars_result.one.return_value = tenant
+        monkeypatch.setattr(mock_session_instance, "scalars", mock.Mock(return_value=mock_scalars_result))
+
+        mock_session_context = mock.Mock()
+        mock_session_context.__enter__.return_value = mock_session_instance
+        monkeypatch.setattr("models.account.Session", lambda _, expire_on_commit: mock_session_context)
+
+        account.current_tenant = tenant
         return account

     @pytest.mark.parametrize(
diff --git a/api/tests/unit_tests/controllers/console/app/test_description_validation.py b/api/tests/unit_tests/controllers/console/app/test_description_validation.py
index 178267e560..dcc408a21c 100644
--- a/api/tests/unit_tests/controllers/console/app/test_description_validation.py
+++ b/api/tests/unit_tests/controllers/console/app/test_description_validation.py
@@ -1,174 +1,53 @@
 import pytest

-from controllers.console.app.app import _validate_description_length as app_validate
-from controllers.console.datasets.datasets import _validate_description_length as dataset_validate
-from controllers.service_api.dataset.dataset import _validate_description_length as service_dataset_validate
+from libs.validators import validate_description_length


 class TestDescriptionValidationUnit:
-    """Unit tests for description validation functions in App and Dataset APIs"""
+    """Unit tests for the centralized description validation function."""

-    def test_app_validate_description_length_valid(self):
-        """Test App validation function with valid descriptions"""
+    def test_validate_description_length_valid(self):
+        """Test validation function with valid descriptions."""
         # Empty string should be valid
-        assert app_validate("") == ""
+        assert validate_description_length("") == ""

         # None should be valid
-        assert app_validate(None) is None
+        assert validate_description_length(None) is None

         # Short description should be valid
         short_desc = "Short description"
-        assert app_validate(short_desc) == short_desc
+        assert validate_description_length(short_desc) == short_desc

         # Exactly 400 characters should be valid
         exactly_400 = "x" * 400
-        assert app_validate(exactly_400) == exactly_400
+        assert validate_description_length(exactly_400) == exactly_400

         # Just under limit should be valid
         just_under = "x" * 399
-        assert app_validate(just_under) == just_under
+        assert validate_description_length(just_under) == just_under

-    def test_app_validate_description_length_invalid(self):
-        """Test App validation function with invalid descriptions"""
+    def test_validate_description_length_invalid(self):
+        """Test validation function with invalid descriptions."""
         # 401 characters should fail
         just_over = "x" * 401
         with pytest.raises(ValueError) as exc_info:
-            app_validate(just_over)
+            validate_description_length(just_over)
         assert "Description cannot exceed 400 characters." in str(exc_info.value)

         # 500 characters should fail
         way_over = "x" * 500
         with pytest.raises(ValueError) as exc_info:
-            app_validate(way_over)
+            validate_description_length(way_over)
         assert "Description cannot exceed 400 characters." in str(exc_info.value)

         # 1000 characters should fail
         very_long = "x" * 1000
         with pytest.raises(ValueError) as exc_info:
-            app_validate(very_long)
+            validate_description_length(very_long)
         assert "Description cannot exceed 400 characters." in str(exc_info.value)

-    def test_dataset_validate_description_length_valid(self):
-        """Test Dataset validation function with valid descriptions"""
-        # Empty string should be valid
-        assert dataset_validate("") == ""
-
-        # Short description should be valid
-        short_desc = "Short description"
-        assert dataset_validate(short_desc) == short_desc
-
-        # Exactly 400 characters should be valid
-        exactly_400 = "x" * 400
-        assert dataset_validate(exactly_400) == exactly_400
-
-        # Just under limit should be valid
-        just_under = "x" * 399
-        assert dataset_validate(just_under) == just_under
-
-    def test_dataset_validate_description_length_invalid(self):
-        """Test Dataset validation function with invalid descriptions"""
-        # 401 characters should fail
-        just_over = "x" * 401
-        with pytest.raises(ValueError) as exc_info:
-            dataset_validate(just_over)
-        assert "Description cannot exceed 400 characters." in str(exc_info.value)
-
-        # 500 characters should fail
-        way_over = "x" * 500
-        with pytest.raises(ValueError) as exc_info:
-            dataset_validate(way_over)
-        assert "Description cannot exceed 400 characters." in str(exc_info.value)
-
-    def test_service_dataset_validate_description_length_valid(self):
-        """Test Service Dataset validation function with valid descriptions"""
-        # Empty string should be valid
-        assert service_dataset_validate("") == ""
-
-        # None should be valid
-        assert service_dataset_validate(None) is None
-
-        # Short description should be valid
-        short_desc = "Short description"
-        assert service_dataset_validate(short_desc) == short_desc
-
-        # Exactly 400 characters should be valid
-        exactly_400 = "x" * 400
-        assert service_dataset_validate(exactly_400) == exactly_400
-
-        # Just under limit should be valid
-        just_under = "x" * 399
-        assert service_dataset_validate(just_under) == just_under
-
-    def test_service_dataset_validate_description_length_invalid(self):
-        """Test Service Dataset validation function with invalid descriptions"""
-        # 401 characters should fail
-        just_over = "x" * 401
-        with pytest.raises(ValueError) as exc_info:
-            service_dataset_validate(just_over)
-        assert "Description cannot exceed 400 characters." in str(exc_info.value)
-
-        # 500 characters should fail
-        way_over = "x" * 500
-        with pytest.raises(ValueError) as exc_info:
-            service_dataset_validate(way_over)
-        assert "Description cannot exceed 400 characters." in str(exc_info.value)
-
-    def test_app_dataset_validation_consistency(self):
-        """Test that App and Dataset validation functions behave identically"""
-        test_cases = [
-            "",  # Empty string
-            "Short description",  # Normal description
-            "x" * 100,  # Medium description
-            "x" * 400,  # Exactly at limit
-        ]
-
-        # Test valid cases produce same results
-        for test_desc in test_cases:
-            assert app_validate(test_desc) == dataset_validate(test_desc) == service_dataset_validate(test_desc)
-
-        # Test invalid cases produce same errors
-        invalid_cases = [
-            "x" * 401,  # Just over limit
-            "x" * 500,  # Way over limit
-            "x" * 1000,  # Very long
-        ]
-
-        for invalid_desc in invalid_cases:
-            app_error = None
-            dataset_error = None
-            service_dataset_error = None
-
-            # Capture App validation error
-            try:
-                app_validate(invalid_desc)
-            except ValueError as e:
-                app_error = str(e)
-
-            # Capture Dataset validation error
-            try:
-                dataset_validate(invalid_desc)
-            except ValueError as e:
-                dataset_error = str(e)
-
-            # Capture Service Dataset validation error
-            try:
-                service_dataset_validate(invalid_desc)
-            except ValueError as e:
-                service_dataset_error = str(e)
-
-            # All should produce errors
-            assert app_error is not None, f"App validation should fail for {len(invalid_desc)} characters"
-            assert dataset_error is not None, f"Dataset validation should fail for {len(invalid_desc)} characters"
-            error_msg = f"Service Dataset validation should fail for {len(invalid_desc)} characters"
-            assert service_dataset_error is not None, error_msg
-
-            # Errors should be identical
-            error_msg = f"Error messages should be identical for {len(invalid_desc)} characters"
-            assert app_error == dataset_error == service_dataset_error, error_msg
-            assert app_error == "Description cannot exceed 400 characters."
-
     def test_boundary_values(self):
-        """Test boundary values around the 400 character limit"""
+        """Test boundary values around the 400 character limit."""
         boundary_tests = [
             (0, True),  # Empty
             (1, True),  # Minimum
@@ -184,69 +63,45 @@ class TestDescriptionValidationUnit:

             if should_pass:
                 # Should not raise exception
-                assert app_validate(test_desc) == test_desc
-                assert dataset_validate(test_desc) == test_desc
-                assert service_dataset_validate(test_desc) == test_desc
+                assert validate_description_length(test_desc) == test_desc
             else:
                 # Should raise ValueError
                 with pytest.raises(ValueError):
-                    app_validate(test_desc)
-                with pytest.raises(ValueError):
-                    dataset_validate(test_desc)
-                with pytest.raises(ValueError):
-                    service_dataset_validate(test_desc)
+                    validate_description_length(test_desc)

     def test_special_characters(self):
         """Test validation with special characters, Unicode, etc."""
         # Unicode characters
         unicode_desc = "测试描述" * 100  # Chinese characters
         if len(unicode_desc) <= 400:
-            assert app_validate(unicode_desc) == unicode_desc
-            assert dataset_validate(unicode_desc) == unicode_desc
-            assert service_dataset_validate(unicode_desc) == unicode_desc
+            assert validate_description_length(unicode_desc) == unicode_desc

         # Special characters
         special_desc = "Special chars: !@#$%^&*()_+-=[]{}|;':\",./<>?" * 10
         if len(special_desc) <= 400:
-            assert app_validate(special_desc) == special_desc
-            assert dataset_validate(special_desc) == special_desc
-            assert service_dataset_validate(special_desc) == special_desc
+            assert validate_description_length(special_desc) == special_desc

         # Mixed content
         mixed_desc = "Mixed content: 测试 123 !@# " * 15
         if len(mixed_desc) <= 400:
-            assert app_validate(mixed_desc) == mixed_desc
-            assert dataset_validate(mixed_desc) == mixed_desc
-            assert service_dataset_validate(mixed_desc) == mixed_desc
+            assert validate_description_length(mixed_desc) == mixed_desc
         elif len(mixed_desc) > 400:
             with pytest.raises(ValueError):
-                app_validate(mixed_desc)
-            with pytest.raises(ValueError):
-                dataset_validate(mixed_desc)
-            with pytest.raises(ValueError):
-                service_dataset_validate(mixed_desc)
+                validate_description_length(mixed_desc)

     def test_whitespace_handling(self):
-        """Test validation with various whitespace scenarios"""
+        """Test validation with various whitespace scenarios."""
         # Leading/trailing whitespace
         whitespace_desc = "  Description with whitespace  "
         if len(whitespace_desc) <= 400:
-            assert app_validate(whitespace_desc) == whitespace_desc
-            assert dataset_validate(whitespace_desc) == whitespace_desc
-            assert service_dataset_validate(whitespace_desc) == whitespace_desc
+            assert validate_description_length(whitespace_desc) == whitespace_desc

         # Newlines and tabs
         multiline_desc = "Line 1\nLine 2\tTabbed content"
         if len(multiline_desc) <= 400:
-            assert app_validate(multiline_desc) == multiline_desc
-            assert dataset_validate(multiline_desc) == multiline_desc
-            assert service_dataset_validate(multiline_desc) == multiline_desc
+            assert validate_description_length(multiline_desc) == multiline_desc

         # Only whitespace over limit
         only_spaces = " " * 401
         with pytest.raises(ValueError):
-            app_validate(only_spaces)
-        with pytest.raises(ValueError):
-            dataset_validate(only_spaces)
-        with pytest.raises(ValueError):
-            service_dataset_validate(only_spaces)
+            validate_description_length(only_spaces)
From 98e4bfcda8ea2a7da3f9c62e0f054d39cc29990d Mon Sep 17 00:00:00 2001
From: hjlarry <hjlarry@163.com>
Date: Fri, 3 Oct 2025 23:36:56 +0800
Subject: [PATCH 119/126] do not switch to comment mode when clicking the
 comment icon

---
 web/app/components/workflow/hooks/use-workflow-comment.ts | 1 -
 1 file changed, 1 deletion(-)

diff --git a/web/app/components/workflow/hooks/use-workflow-comment.ts b/web/app/components/workflow/hooks/use-workflow-comment.ts
index 5b65023d25..73c8e9e947 100644
--- a/web/app/components/workflow/hooks/use-workflow-comment.ts
+++ b/web/app/components/workflow/hooks/use-workflow-comment.ts
@@ -128,7 +128,6 @@ export const useWorkflowComment = () => {
     setPendingComment(null)

     activeCommentIdRef.current = comment.id
-    setControlMode(ControlMode.Comment)
     setActiveCommentId(comment.id)

     const cachedDetail = commentDetailCacheRef.current[comment.id]

From c4e7cb75cd406b77d9db5ce7603374a81e2e0874 Mon Sep 17 00:00:00 2001
From: hjlarry <hjlarry@163.com>
Date: Sat, 4 Oct 2025 11:22:02 +0800
Subject: [PATCH 120/126] cache the mentionable users

---
 .../workflow/comment/mention-input.tsx        | 27 ++++++++++++++++---
 .../workflow/store/workflow/comment-slice.ts  | 20 +++++++++++++-
 2 files changed, 42 insertions(+), 5 deletions(-)

diff --git a/web/app/components/workflow/comment/mention-input.tsx b/web/app/components/workflow/comment/mention-input.tsx
index 3213942572..8fa43d47a9 100644
--- a/web/app/components/workflow/comment/mention-input.tsx
+++ b/web/app/components/workflow/comment/mention-input.tsx
@@ -11,6 +11,7 @@ import Button from '@/app/components/base/button'
 import Avatar from '@/app/components/base/avatar'
 import cn from '@/utils/classnames'
 import { type UserProfile, fetchMentionableUsers } from '@/service/workflow-comment'
+import { useStore, useWorkflowStore } from '../store'

 type MentionInputProps = {
   value: string
@@ -42,7 +43,12 @@ export const MentionInput: FC<MentionInputProps> = memo(({
   const appId = params.appId as string
   const textareaRef = useRef<HTMLTextAreaElement>(null)

-  const [mentionUsers, setMentionUsers] = useState<UserProfile[]>([])
+  const workflowStore = useWorkflowStore()
+  const mentionUsersFromStore = useStore(state => (
+    appId ? state.mentionableUsersCache[appId] : undefined
+  ))
+  const mentionUsers = mentionUsersFromStore ?? []
+
   const [showMentionDropdown, setShowMentionDropdown] = useState(false)
   const [mentionQuery, setMentionQuery] = useState('')
   const [mentionPosition, setMentionPosition] = useState(0)
@@ -121,15 +127,28 @@ export const MentionInput: FC<MentionInputProps> = memo(({
   }, [value, mentionNameList])

   const loadMentionableUsers = useCallback(async () => {
-    if (!appId) return
+    if (!appId)
+      return
+
+    const state = workflowStore.getState()
+    if (state.mentionableUsersCache[appId] !== undefined)
+      return
+
+    if (state.mentionableUsersLoading[appId])
+      return
+
+    state.setMentionableUsersLoading(appId, true)

     try {
       const users = await fetchMentionableUsers(appId)
-      setMentionUsers(users)
+      workflowStore.getState().setMentionableUsersCache(appId, users)
     }
     catch (error) {
       console.error('Failed to load mentionable users:', error)
     }
-  }, [appId])
+    finally {
+      workflowStore.getState().setMentionableUsersLoading(appId, false)
+    }
+  }, [appId, workflowStore])

   useEffect(() => {
     loadMentionableUsers()
diff --git a/web/app/components/workflow/store/workflow/comment-slice.ts b/web/app/components/workflow/store/workflow/comment-slice.ts
index b870020ba4..c0e9a7a0c0 100644
--- a/web/app/components/workflow/store/workflow/comment-slice.ts
+++ b/web/app/components/workflow/store/workflow/comment-slice.ts
@@ -1,5 +1,5 @@
 import type { StateCreator } from 'zustand'
-import type { WorkflowCommentDetail, WorkflowCommentList } from '@/service/workflow-comment'
+import type { UserProfile, WorkflowCommentDetail, WorkflowCommentList } from '@/service/workflow-comment'

 export type CommentSliceShape = {
   comments: WorkflowCommentList[]
@@ -12,6 +12,10 @@ export type CommentSliceShape = {
   setActiveCommentDetailLoading: (loading: boolean) => void
   commentDetailCache: Record<string, WorkflowCommentDetail>
   setCommentDetailCache: (cache: Record<string, WorkflowCommentDetail>) => void
+  mentionableUsersCache: Record<string, UserProfile[]>
+  setMentionableUsersCache: (appId: string, users: UserProfile[]) => void
+  mentionableUsersLoading: Record<string, boolean>
+  setMentionableUsersLoading: (appId: string, loading: boolean) => void
 }

 export const createCommentSlice: StateCreator<CommentSliceShape> = set => ({
@@ -25,4 +29,18 @@ export const createCommentSlice: StateCreator<CommentSliceShape> = set => ({
   setActiveCommentDetailLoading: activeCommentDetailLoading => set({ activeCommentDetailLoading }),
   commentDetailCache: {},
   setCommentDetailCache: commentDetailCache => set({ commentDetailCache }),
+  mentionableUsersCache: {},
+  setMentionableUsersCache: (appId, users) => set(state => ({
+    mentionableUsersCache: {
+      ...state.mentionableUsersCache,
+      [appId]: users,
+    },
+  })),
+  mentionableUsersLoading: {},
+  setMentionableUsersLoading: (appId, loading) => set(state => ({
+    mentionableUsersLoading: {
+      ...state.mentionableUsersLoading,
+      [appId]: loading,
+    },
+  })),
 })
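The patch above keys both the cached user list and a loading flag by appId, so several MentionInput instances share one fetch instead of each refetching the list. A minimal framework-free sketch of the same dedupe pattern (User, fetchUsers and loadUsersOnce are illustrative names, not Dify APIs):

// Sketch: per-key cache with an in-flight guard, assuming fetchUsers(appId) is any async loader.
type User = { id: string; name: string }

const cache = new Map<string, User[]>()
const loading = new Set<string>()

async function loadUsersOnce(appId: string, fetchUsers: (id: string) => Promise<User[]>): Promise<User[]> {
  const hit = cache.get(appId)
  if (hit)
    return hit // already cached: no network call
  if (loading.has(appId))
    return [] // a fetch is in flight; callers re-read from the cache later
  loading.add(appId)
  try {
    const users = await fetchUsers(appId)
    cache.set(appId, users)
    return users
  }
  finally {
    loading.delete(appId) // clear the guard on success or failure
  }
}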
From 827a1b181b5d5c475643bc1b2b38ee9b31abdf03 Mon Sep 17 00:00:00 2001
From: hjlarry <hjlarry@163.com>
Date: Sat, 4 Oct 2025 13:25:59 +0800
Subject: [PATCH 121/126] fix comment icon position

---
 web/app/components/workflow/hooks/use-workflow-comment.ts | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/web/app/components/workflow/hooks/use-workflow-comment.ts b/web/app/components/workflow/hooks/use-workflow-comment.ts
index 73c8e9e947..2eb6ca0d90 100644
--- a/web/app/components/workflow/hooks/use-workflow-comment.ts
+++ b/web/app/components/workflow/hooks/use-workflow-comment.ts
@@ -133,7 +133,15 @@ export const useWorkflowComment = () => {
     const cachedDetail = commentDetailCacheRef.current[comment.id]
     setActiveComment(cachedDetail || comment)

-    reactflow.setCenter(comment.position_x, comment.position_y, { zoom: 1, duration: 600 })
+    let horizontalOffsetPx = 220
+    const maxOffset = Math.max(0, (window.innerWidth / 2) - 60)
+    horizontalOffsetPx = Math.min(horizontalOffsetPx, maxOffset)
+
+    reactflow.setCenter(
+      comment.position_x + horizontalOffsetPx,
+      comment.position_y,
+      { zoom: 1, duration: 600 },
+    )

     if (!appId) return
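The offset above centers the viewport to the right of the comment, so the icon ends up left of center and the thread panel that opens beside it stays visible; the shift is clamped to half the window width minus a margin. The same math as a standalone helper (clampedCenterOffset is a hypothetical name; assumes a browser window):

// Sketch: clamp a horizontal centering offset to half the viewport width minus a margin.
function clampedCenterOffset(preferredPx: number, marginPx = 60): number {
  const maxOffset = Math.max(0, window.innerWidth / 2 - marginPx)
  return Math.min(preferredPx, maxOffset)
}

// Centering on (x + clampedCenterOffset(220)) keeps the target left of center,
// leaving room for an adjacent panel, even on narrow windows.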
From 37ecd4a0bc5ebd525ea3412fbe8c0848db290511 Mon Sep 17 00:00:00 2001
From: hjlarry <hjlarry@163.com>
Date: Sat, 4 Oct 2025 13:39:00 +0800
Subject: [PATCH 122/126] fix @ mention insertion in the comment input

---
 web/app/components/workflow/comment/mention-input.tsx | 8 ++++++--
 web/app/components/workflow/comment/thread.tsx        | 1 -
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/web/app/components/workflow/comment/mention-input.tsx b/web/app/components/workflow/comment/mention-input.tsx
index 8fa43d47a9..8f775b46cf 100644
--- a/web/app/components/workflow/comment/mention-input.tsx
+++ b/web/app/components/workflow/comment/mention-input.tsx
@@ -223,7 +223,10 @@ export const MentionInput: FC<MentionInputProps> = memo(({

     const beforeMention = value.slice(0, mentionPosition)
     const afterMention = value.slice(textarea.selectionStart || 0)
-    const newContent = `${beforeMention}@${user.name} ${afterMention}`
+
+    const needsSpaceBefore = mentionPosition > 0 && !/\s/.test(value[mentionPosition - 1])
+    const prefix = needsSpaceBefore ? ' ' : ''
+    const newContent = `${beforeMention}${prefix}@${user.name} ${afterMention}`

     onChange(newContent)
     setShowMentionDropdown(false)
@@ -232,7 +235,8 @@ export const MentionInput: FC<MentionInputProps> = memo(({
     setMentionedUserIds(newMentionedUserIds)

     setTimeout(() => {
-      const newCursorPos = mentionPosition + user.name.length + 2 // @ + name + space
+      const extraSpace = needsSpaceBefore ? 1 : 0
+      const newCursorPos = mentionPosition + extraSpace + user.name.length + 2 // (space) + @ + name + space
      textarea.setSelectionRange(newCursorPos, newCursorPos)
       textarea.focus()
     }, 0)
diff --git a/web/app/components/workflow/comment/thread.tsx b/web/app/components/workflow/comment/thread.tsx
index 2295eecac9..e20a649664 100644
--- a/web/app/components/workflow/comment/thread.tsx
+++ b/web/app/components/workflow/comment/thread.tsx
@@ -377,7 +377,6 @@ export const CommentThread: FC<CommentThreadProps> = memo(({
           placeholder={t('workflow.comments.placeholder.reply')}
           disabled={loading}
           loading={loading}
-          className='px-2'
         />
       </div>
     </div>
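The cursor arithmetic above is easy to get wrong, so here is the same insertion logic as a pure function with a worked example (insertMention is an illustrative name, not part of the codebase):

// Sketch: insert "@name " at index `at`, adding a leading space if the
// preceding character is not whitespace; returns the new cursor index.
function insertMention(text: string, at: number, name: string): { next: string; cursor: number } {
  const needsSpace = at > 0 && !/\s/.test(text[at - 1])
  const prefix = needsSpace ? ' ' : ''
  const inserted = `${prefix}@${name} `
  const next = text.slice(0, at) + inserted + text.slice(at)
  return { next, cursor: at + inserted.length } // (space) + @ + name + trailing space
}

// insertMention('hi', 2, 'alice') -> { next: 'hi @alice ', cursor: 10 }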
From bf713006350dd06aec248c43798107de06076834 Mon Sep 17 00:00:00 2001
From: hjlarry <hjlarry@163.com>
Date: Sat, 4 Oct 2025 14:36:10 +0800
Subject: [PATCH 123/126] improve comment cursor movement

---
 web/app/components/workflow/comment/cursor.tsx | 7 ++-----
 web/app/components/workflow/index.tsx          | 3 +--
 2 files changed, 3 insertions(+), 7 deletions(-)

diff --git a/web/app/components/workflow/comment/cursor.tsx b/web/app/components/workflow/comment/cursor.tsx
index aafd2a4fb9..56b5c24f16 100644
--- a/web/app/components/workflow/comment/cursor.tsx
+++ b/web/app/components/workflow/comment/cursor.tsx
@@ -4,12 +4,9 @@ import { useStore } from '../store'
 import { ControlMode } from '../types'
 import { Comment } from '@/app/components/base/icons/src/public/other'

-type CommentCursorProps = {
-  mousePosition: { elementX: number; elementY: number }
-}
-
-export const CommentCursor: FC<CommentCursorProps> = memo(({ mousePosition }) => {
+export const CommentCursor: FC = memo(() => {
   const controlMode = useStore(s => s.controlMode)
+  const mousePosition = useStore(s => s.mousePosition)

   if (controlMode !== ControlMode.Comment)
     return null
diff --git a/web/app/components/workflow/index.tsx b/web/app/components/workflow/index.tsx
index d1b026a0d6..7df1de179c 100644
--- a/web/app/components/workflow/index.tsx
+++ b/web/app/components/workflow/index.tsx
@@ -202,7 +202,6 @@ export const Workflow: FC<WorkflowProps> = memo(({
   } = useWorkflowComment()
   const showUserComments = useStore(s => s.showUserComments)
   const showUserCursors = useStore(s => s.showUserCursors)
-  const mousePosition = useStore(s => s.mousePosition)
   const { t } = useTranslation()

   eventEmitter?.useSubscription((v: any) => {
@@ -442,7 +441,7 @@ export const Workflow: FC<WorkflowProps> = memo(({
       )}
       <LimitTips />
       {controlMode === ControlMode.Comment && isMouseOverCanvas && (
-        <CommentCursor mousePosition={mousePosition} />
+        <CommentCursor />
       )}
       {pendingComment && (
         <CommentInput
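The change above moves the mousePosition subscription from the Workflow parent into the cursor component itself, so pointer moves re-render only the cursor rather than the whole canvas. A minimal zustand sketch of that selector-scoping pattern (the store shape and component names are illustrative):

// Sketch: subscribe to fast-changing state inside the leaf component.
import { create } from 'zustand'

type PointerState = {
  mousePosition: { elementX: number; elementY: number }
  setMousePosition: (p: { elementX: number; elementY: number }) => void
}

const usePointerStore = create<PointerState>(set => ({
  mousePosition: { elementX: 0, elementY: 0 },
  setMousePosition: mousePosition => set({ mousePosition }),
}))

// Only this component selects mousePosition, so only it re-renders on moves;
// a parent that never reads the selector is untouched.
function CursorBadge() {
  const { elementX, elementY } = usePointerStore(s => s.mousePosition)
  return <div style={{ transform: `translate(${elementX}px, ${elementY}px)` }} />
}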
From 6ce65de2cd5fd58e923abe272952eeb96e2d01d6 Mon Sep 17 00:00:00 2001
From: hjlarry <hjlarry@163.com>
Date: Sat, 4 Oct 2025 21:11:59 +0800
Subject: [PATCH 124/126] fix issues introduced by merging main

---
 .../workflow/comment/comment-preview.tsx      |  2 +-
 web/app/components/workflow/comment/thread.tsx |  2 +-
 .../components/workflow/hooks/use-workflow.ts | 16 +---------------
 web/app/components/workflow/index.tsx         |  1 -
 .../workflow/panel/comments-panel/index.tsx   |  2 +-
 web/package.json                              |  1 +
 web/pnpm-lock.yaml                            |  8 ++++++++
 7 files changed, 13 insertions(+), 19 deletions(-)

diff --git a/web/app/components/workflow/comment/comment-preview.tsx b/web/app/components/workflow/comment/comment-preview.tsx
index 5ed78aaab1..94b356edd8 100644
--- a/web/app/components/workflow/comment/comment-preview.tsx
+++ b/web/app/components/workflow/comment/comment-preview.tsx
@@ -4,7 +4,7 @@ import type { FC } from 'react'
 import { memo } from 'react'
 import { UserAvatarList } from '@/app/components/base/user-avatar-list'
 import type { WorkflowCommentList } from '@/service/workflow-comment'
-import { useFormatTimeFromNow } from '@/app/components/workflow/hooks'
+import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now'

 type CommentPreviewProps = {
   comment: WorkflowCommentList
diff --git a/web/app/components/workflow/comment/thread.tsx b/web/app/components/workflow/comment/thread.tsx
index e20a649664..b727d8e640 100644
--- a/web/app/components/workflow/comment/thread.tsx
+++ b/web/app/components/workflow/comment/thread.tsx
@@ -8,7 +8,7 @@ import { RiArrowDownSLine, RiArrowUpSLine, RiCheckboxCircleFill, RiCheckboxCircl
 import Avatar from '@/app/components/base/avatar'
 import Divider from '@/app/components/base/divider'
 import cn from '@/utils/classnames'
-import { useFormatTimeFromNow } from '@/app/components/workflow/hooks'
+import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now'
 import type { WorkflowCommentDetail, WorkflowCommentDetailReply } from '@/service/workflow-comment'
 import { useAppContext } from '@/context/app-context'
 import { MentionInput } from './mention-input'
diff --git a/web/app/components/workflow/hooks/use-workflow.ts b/web/app/components/workflow/hooks/use-workflow.ts
index 4bf255636b..bedc91d8df 100644
--- a/web/app/components/workflow/hooks/use-workflow.ts
+++ b/web/app/components/workflow/hooks/use-workflow.ts
@@ -50,9 +50,7 @@ export const useIsChatMode = () => {
 }

 export const useWorkflow = () => {
-  const { t } = useTranslation()
   const collaborativeWorkflow = useCollaborativeWorkflow()
-  const workflowStore = useWorkflowStore()
   const { getAvailableBlocks } = useAvailableBlocks()
   const { nodesMap } = useNodesMetaData()

@@ -258,18 +256,6 @@ export const useWorkflow = () => {
     return isUsed
   }, [isVarUsedInNodes])

-  const checkParallelLimit = useCallback((nodeId: string, nodeHandle = 'source') => {
-    const { edges } = collaborativeWorkflow.getState()
-    const connectedEdges = edges.filter(edge => edge.source === nodeId && edge.sourceHandle === nodeHandle)
-    if (connectedEdges.length > MAX_PARALLEL_LIMIT - 1) {
-      const { setShowTips } = workflowStore.getState()
-      setShowTips(t('workflow.common.parallelTip.limit', { num: MAX_PARALLEL_LIMIT }))
-      return false
-    }
-
-    return true
-  }, [collaborativeWorkflow, workflowStore, t])
-
   const getRootNodesById = useCallback((nodeId: string) => {
     const { nodes, edges } = collaborativeWorkflow.getState()
     const currentNode = nodes.find(node => node.id === nodeId)
@@ -373,7 +359,7 @@ export const useWorkflow = () => {
     }

     return !hasCycle(targetNode)
-  }, [collaborativeWorkflow, checkParallelLimit, getAvailableBlocks])
+  }, [collaborativeWorkflow, getAvailableBlocks])

   return {
     getNodeById,
diff --git a/web/app/components/workflow/index.tsx b/web/app/components/workflow/index.tsx
index db7ee17d53..006ba8186f 100644
--- a/web/app/components/workflow/index.tsx
+++ b/web/app/components/workflow/index.tsx
@@ -438,7 +438,6 @@ export const Workflow: FC<WorkflowProps> = memo(({
           content={showConfirm.desc}
         />
       )}
-      <LimitTips />
       {controlMode === ControlMode.Comment && isMouseOverCanvas && (
         <CommentCursor />
       )}
diff --git a/web/app/components/workflow/panel/comments-panel/index.tsx b/web/app/components/workflow/panel/comments-panel/index.tsx
index cefdb78516..1bb7e27c51 100644
--- a/web/app/components/workflow/panel/comments-panel/index.tsx
+++ b/web/app/components/workflow/panel/comments-panel/index.tsx
@@ -8,7 +8,7 @@ import cn from '@/utils/classnames'
 import { ControlMode } from '@/app/components/workflow/types'
 import { resolveWorkflowComment } from '@/service/workflow-comment'
 import { useParams } from 'next/navigation'
-import { useFormatTimeFromNow } from '@/app/components/workflow/hooks'
+import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now'
 import { useAppContext } from '@/context/app-context'
 import { collaborationManager } from '@/app/components/workflow/collaboration'
diff --git a/web/package.json b/web/package.json
index 01fd602380..90720b36cc 100644
--- a/web/package.json
+++ b/web/package.json
@@ -94,6 +94,7 @@
     "lexical": "^0.36.2",
     "line-clamp": "^1.0.0",
     "lodash-es": "^4.17.21",
+    "loro-crdt": "^1.8.2",
     "mermaid": "11.10.0",
     "mime": "^4.0.4",
     "mitt": "^3.0.1",
diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml
index 6c785b3099..17b3cebfb8 100644
--- a/web/pnpm-lock.yaml
+++ b/web/pnpm-lock.yaml
@@ -202,6 +202,9 @@ importers:
       lodash-es:
         specifier: ^4.17.21
        version: 4.17.21
+      loro-crdt:
+        specifier: ^1.8.2
+        version: 1.8.2
       mermaid:
         specifier: 11.10.0
         version: 11.10.0
@@ -6313,6 +6316,9 @@ packages:
     resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==}
     hasBin: true

+  loro-crdt@1.8.2:
+    resolution: {integrity: sha512-rv33Ma8ZHGvpik9it2Zty+EJw4x/jrbtw/PWSd0Pwm/qZV3mUlB8c0a61WtlPMd/AT6rQ34i1OSg0f2UrPz3Ww==}
+
   loupe@3.1.3:
     resolution: {integrity: sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==}

@@ -15691,6 +15697,8 @@ snapshots:
     dependencies:
       js-tokens: 4.0.0

+  loro-crdt@1.8.2: {}
+
   loupe@3.1.3: {}

   lower-case@2.0.2:
From 659cbc05a9581b08e95c9659c61b6b554c9658db Mon Sep 17 00:00:00 2001
From: hjlarry <hjlarry@163.com>
Date: Sat, 4 Oct 2025 21:24:27 +0800
Subject: [PATCH 125/126] fix the mention input dropdown at the bottom of the
 browser window

---
 web/app/components/workflow/comment/mention-input.tsx | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/web/app/components/workflow/comment/mention-input.tsx b/web/app/components/workflow/comment/mention-input.tsx
index 8f775b46cf..6972d935fa 100644
--- a/web/app/components/workflow/comment/mention-input.tsx
+++ b/web/app/components/workflow/comment/mention-input.tsx
@@ -164,12 +164,20 @@ export const MentionInput: FC<MentionInputProps> = memo(({

   const dropdownPosition = useMemo(() => {
     if (!showMentionDropdown || !textareaRef.current)
-      return { x: 0, y: 0 }
+      return { x: 0, y: 0, placement: 'bottom' as const }

     const textareaRect = textareaRef.current.getBoundingClientRect()
+    const dropdownHeight = 160 // max-h-40 = 10rem = 160px
+    const viewportHeight = window.innerHeight
+    const spaceBelow = viewportHeight - textareaRect.bottom
+    const spaceAbove = textareaRect.top
+
+    const shouldPlaceAbove = spaceBelow < dropdownHeight && spaceAbove > spaceBelow
+
     return {
       x: textareaRect.left,
-      y: textareaRect.bottom + 4,
+      y: shouldPlaceAbove ? textareaRect.top - 4 : textareaRect.bottom + 4,
+      placement: shouldPlaceAbove ? 'top' as const : 'bottom' as const,
     }
   }, [showMentionDropdown])

@@ -391,7 +399,9 @@ export const MentionInput: FC<MentionInputProps> = memo(({
           className="fixed z-[9999] max-h-40 w-64 overflow-y-auto rounded-lg border border-components-panel-border bg-components-panel-bg shadow-lg"
           style={{
             left: dropdownPosition.x,
-            top: dropdownPosition.y,
+            [dropdownPosition.placement === 'top' ? 'bottom' : 'top']: dropdownPosition.placement === 'top'
+              ? window.innerHeight - dropdownPosition.y
+              : dropdownPosition.y,
           }}
           data-mention-dropdown
         >
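The placement logic above flips the dropdown above the textarea only when the space below cannot fit it and the space above is the larger side. Extracted as a pure helper for clarity (a sketch, assuming the 160px height implied by max-h-40; dropdownPlacement is a hypothetical name):

// Sketch: pick 'top' or 'bottom' for a fixed-position dropdown from its anchor rect.
type Placement = 'top' | 'bottom'

function dropdownPlacement(anchor: DOMRect, dropdownHeight = 160, viewportHeight = window.innerHeight): Placement {
  const spaceBelow = viewportHeight - anchor.bottom
  const spaceAbove = anchor.top
  // Flip above only when below is too small AND above actually has more room.
  return spaceBelow < dropdownHeight && spaceAbove > spaceBelow ? 'top' : 'bottom'
}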
From 33d4c95470690640d135774ef489e4fca3854f2e Mon Sep 17 00:00:00 2001
From: hjlarry <hjlarry@163.com>
Date: Sun, 5 Oct 2025 10:17:04 +0800
Subject: [PATCH 126/126] allow updating a comment's position by dragging its
 icon

---
 .../workflow/comment/comment-icon.tsx         | 157 ++++++++++++++++--
 .../workflow/hooks/use-workflow-comment.ts    |  66 +++++++-
 web/app/components/workflow/index.tsx         |   3 +
 3 files changed, 207 insertions(+), 19 deletions(-)

diff --git a/web/app/components/workflow/comment/comment-icon.tsx b/web/app/components/workflow/comment/comment-icon.tsx
index 27db952f26..873a824eff 100644
--- a/web/app/components/workflow/comment/comment-icon.tsx
+++ b/web/app/components/workflow/comment/comment-icon.tsx
@@ -1,7 +1,7 @@
 'use client'

-import type { FC } from 'react'
-import { memo, useMemo, useState } from 'react'
+import type { FC, PointerEvent as ReactPointerEvent } from 'react'
+import { memo, useCallback, useMemo, useRef, useState } from 'react'
 import { useReactFlow, useViewport } from 'reactflow'
 import { UserAvatarList } from '@/app/components/base/user-avatar-list'
 import CommentPreview from './comment-preview'
@@ -11,17 +11,22 @@
 type CommentIconProps = {
   comment: WorkflowCommentList
   onClick: () => void
   isActive?: boolean
+  onPositionUpdate?: (position: { x: number; y: number }) => void
 }

-export const CommentIcon: FC<CommentIconProps> = memo(({ comment, onClick, isActive = false }) => {
-  const { flowToScreenPosition } = useReactFlow()
+export const CommentIcon: FC<CommentIconProps> = memo(({ comment, onClick, isActive = false, onPositionUpdate }) => {
+  const { flowToScreenPosition, screenToFlowPosition } = useReactFlow()
   const viewport = useViewport()
   const [showPreview, setShowPreview] = useState(false)
-
-  const handlePreviewClick = () => {
-    setShowPreview(false)
-    onClick()
-  }
+  const [dragPosition, setDragPosition] = useState<{ x: number; y: number } | null>(null)
+  const [isDragging, setIsDragging] = useState(false)
+  const dragStateRef = useRef<{
+    offsetX: number
+    offsetY: number
+    startX: number
+    startY: number
+    hasMoved: boolean
+  } | null>(null)

   const screenPosition = useMemo(() => {
     return flowToScreenPosition({
@@ -30,6 +35,108 @@
     })
   }, [comment.position_x, comment.position_y, viewport.x, viewport.y, viewport.zoom, flowToScreenPosition])

+  const effectivePosition = dragPosition ?? screenPosition
+
+  const handlePointerDown = useCallback((event: ReactPointerEvent<HTMLDivElement>) => {
+    if (event.button !== 0)
+      return
+
+    event.stopPropagation()
+    event.preventDefault()
+
+    dragStateRef.current = {
+      offsetX: event.clientX - screenPosition.x,
+      offsetY: event.clientY - screenPosition.y,
+      startX: event.clientX,
+      startY: event.clientY,
+      hasMoved: false,
+    }
+
+    setDragPosition(screenPosition)
+    setIsDragging(false)
+
+    if (event.currentTarget.dataset.role !== 'comment-preview')
+      setShowPreview(false)
+
+    if (event.currentTarget.setPointerCapture)
+      event.currentTarget.setPointerCapture(event.pointerId)
+  }, [screenPosition])
+
+  const handlePointerMove = useCallback((event: ReactPointerEvent<HTMLDivElement>) => {
+    const dragState = dragStateRef.current
+    if (!dragState)
+      return
+
+    event.stopPropagation()
+    event.preventDefault()
+
+    const nextX = event.clientX - dragState.offsetX
+    const nextY = event.clientY - dragState.offsetY
+
+    if (!dragState.hasMoved) {
+      const distance = Math.hypot(event.clientX - dragState.startX, event.clientY - dragState.startY)
+      if (distance > 4) {
+        dragState.hasMoved = true
+        setIsDragging(true)
+      }
+    }
+
+    setDragPosition({ x: nextX, y: nextY })
+  }, [])
+
+  const finishDrag = useCallback((event: ReactPointerEvent<HTMLDivElement>) => {
+    const dragState = dragStateRef.current
+    if (!dragState)
+      return false
+
+    if (event.currentTarget.hasPointerCapture?.(event.pointerId))
+      event.currentTarget.releasePointerCapture(event.pointerId)
+
+    dragStateRef.current = null
+    setDragPosition(null)
+    setIsDragging(false)
+    return dragState.hasMoved
+  }, [])
+
+  const handlePointerUp = useCallback((event: ReactPointerEvent<HTMLDivElement>) => {
+    event.stopPropagation()
+    event.preventDefault()
+
+    const finalScreenPosition = dragPosition ?? screenPosition
+    const didDrag = finishDrag(event)
+
+    setShowPreview(false)
+
+    if (didDrag) {
+      if (onPositionUpdate) {
+        const flowPosition = screenToFlowPosition({
+          x: finalScreenPosition.x,
+          y: finalScreenPosition.y,
+        })
+        onPositionUpdate(flowPosition)
+      }
+    }
+    else if (!isActive) {
+      onClick()
+    }
+  }, [dragPosition, finishDrag, isActive, onClick, onPositionUpdate, screenPosition, screenToFlowPosition])
+
+  const handlePointerCancel = useCallback((event: ReactPointerEvent<HTMLDivElement>) => {
+    event.stopPropagation()
+    event.preventDefault()
+    finishDrag(event)
+  }, [finishDrag])
+
+  const handleMouseEnter = useCallback(() => {
+    if (isActive || isDragging)
+      return
+    setShowPreview(true)
+  }, [isActive, isDragging])
+
+  const handleMouseLeave = useCallback(() => {
+    setShowPreview(false)
+  }, [])
+
   // Calculate dynamic width based on number of participants
   const participantCount = comment.participants?.length || 0
   const maxVisible = Math.min(3, participantCount)
@@ -42,21 +149,29 @@
     8 + avatarSize + Math.max(0, (showCount ? 2 : maxVisible - 1)) * (avatarSize - avatarSpacing) + 8,
   )

+  const pointerEventHandlers = useMemo(() => ({
+    onPointerDown: handlePointerDown,
+    onPointerMove: handlePointerMove,
+    onPointerUp: handlePointerUp,
+    onPointerCancel: handlePointerCancel,
+  }), [handlePointerCancel, handlePointerDown, handlePointerMove, handlePointerUp])
+
   return (
     <>
       <div
         className="absolute z-10"
         style={{
-          left: screenPosition.x,
-          top: screenPosition.y,
+          left: effectivePosition.x,
+          top: effectivePosition.y,
           transform: 'translate(-50%, -50%)',
         }}
+        data-role='comment-marker'
+        {...pointerEventHandlers}
       >
         <div
-          className={isActive ? '' : 'cursor-pointer'}
-          onClick={isActive ? undefined : onClick}
-          onMouseEnter={isActive ? undefined : () => setShowPreview(true)}
-          onMouseLeave={isActive ? undefined : () => setShowPreview(false)}
+          className={isActive ? (isDragging ? 'cursor-grabbing' : '') : isDragging ? 'cursor-grabbing' : 'cursor-pointer'}
+          onMouseEnter={handleMouseEnter}
+          onMouseLeave={handleMouseLeave}
         >
           <div
             className={'relative h-10 overflow-hidden rounded-br-full rounded-tl-full rounded-tr-full'}
@@ -84,14 +199,19 @@
         <div
           className="absolute z-20"
           style={{
-            left: screenPosition.x - dynamicWidth / 2,
-            top: screenPosition.y + 20,
+            left: (dragPosition ?? screenPosition).x - dynamicWidth / 2,
+            top: (dragPosition ?? screenPosition).y + 20,
             transform: 'translateY(-100%)',
           }}
+          data-role='comment-preview'
+          {...pointerEventHandlers}
          onMouseEnter={() => setShowPreview(true)}
           onMouseLeave={() => setShowPreview(false)}
         >
-          <CommentPreview comment={comment} onClick={handlePreviewClick} />
+          <CommentPreview comment={comment} onClick={() => {
+            setShowPreview(false)
+            onClick()
+          }} />
         </div>
       )}
     </>
@@ -103,6 +223,7 @@
     && prevProps.comment.position_y === nextProps.comment.position_y
     && prevProps.onClick === nextProps.onClick
     && prevProps.isActive === nextProps.isActive
+    && prevProps.onPositionUpdate === nextProps.onPositionUpdate
   )
 })
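The handlers above disambiguate a click from a drag using pointer capture and a 4px movement threshold. A framework-free sketch of the same pattern (makeDraggable is an illustrative name; a real integration would also convert the final screen position back to flow coordinates, as the component does with screenToFlowPosition):

// Sketch: pointer-capture drag with a click/drag threshold.
function makeDraggable(el: HTMLElement, onDragEnd: (x: number, y: number) => void, onClick: () => void) {
  let start: { x: number; y: number } | null = null
  let moved = false

  el.addEventListener('pointerdown', (e) => {
    if (e.button !== 0) return
    start = { x: e.clientX, y: e.clientY }
    moved = false
    el.setPointerCapture(e.pointerId) // keep receiving moves even off the element
  })

  el.addEventListener('pointermove', (e) => {
    if (!start) return
    // Only treat it as a drag once the pointer travels more than 4px.
    if (!moved && Math.hypot(e.clientX - start.x, e.clientY - start.y) > 4)
      moved = true
    if (moved)
      el.style.transform = `translate(${e.clientX - start.x}px, ${e.clientY - start.y}px)`
  })

  el.addEventListener('pointerup', (e) => {
    if (!start) return
    el.releasePointerCapture(e.pointerId)
    if (moved)
      onDragEnd(e.clientX, e.clientY) // a drag commits the new position
    else
      onClick() // a plain click opens the item
    start = null
  })
}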
diff --git a/web/app/components/workflow/hooks/use-workflow-comment.ts b/web/app/components/workflow/hooks/use-workflow-comment.ts
index 2eb6ca0d90..64df63b2f2 100644
--- a/web/app/components/workflow/hooks/use-workflow-comment.ts
+++ b/web/app/components/workflow/hooks/use-workflow-comment.ts
@@ -4,7 +4,7 @@ import { useReactFlow } from 'reactflow'
 import { useStore } from '../store'
 import { ControlMode } from '../types'
 import type { WorkflowCommentDetail, WorkflowCommentList } from '@/service/workflow-comment'
-import { createWorkflowComment, createWorkflowCommentReply, deleteWorkflowComment, deleteWorkflowCommentReply, fetchWorkflowComment, fetchWorkflowComments, resolveWorkflowComment, updateWorkflowCommentReply } from '@/service/workflow-comment'
+import { createWorkflowComment, createWorkflowCommentReply, deleteWorkflowComment, deleteWorkflowCommentReply, fetchWorkflowComment, fetchWorkflowComments, resolveWorkflowComment, updateWorkflowComment, updateWorkflowCommentReply } from '@/service/workflow-comment'
 import { collaborationManager } from '@/app/components/workflow/collaboration'

 export const useWorkflowComment = () => {
@@ -229,6 +229,69 @@ export const useWorkflowComment = () => {
     }
   }, [appId, comments, handleCommentIconClick, loadComments, setActiveComment, setActiveCommentId, setActiveCommentLoading, setCommentDetailCache])

+  const handleCommentPositionUpdate = useCallback(async (commentId: string, position: { x: number; y: number }) => {
+    if (!appId) return
+
+    const targetComment = comments.find(c => c.id === commentId)
+    if (!targetComment) return
+
+    const nextPosition = {
+      position_x: position.x,
+      position_y: position.y,
+    }
+
+    const previousComments = comments
+    const updatedComments = comments.map(c =>
+      c.id === commentId
+        ? { ...c, ...nextPosition }
+        : c,
+    )
+    setComments(updatedComments)
+
+    const cachedDetail = commentDetailCacheRef.current[commentId]
+    const updatedDetail = cachedDetail ? { ...cachedDetail, ...nextPosition } : null
+    if (updatedDetail) {
+      commentDetailCacheRef.current = {
+        ...commentDetailCacheRef.current,
+        [commentId]: updatedDetail,
+      }
+      setCommentDetailCache(commentDetailCacheRef.current)
+
+      if (activeCommentIdRef.current === commentId)
+        setActiveComment(updatedDetail)
+    }
+    else if (activeComment?.id === commentId) {
+      setActiveComment({ ...activeComment, ...nextPosition })
+    }
+
+    try {
+      await updateWorkflowComment(appId, commentId, {
+        content: targetComment.content,
+        position_x: nextPosition.position_x,
+        position_y: nextPosition.position_y,
+      })
+      collaborationManager.emitCommentsUpdate(appId)
+    }
+    catch (error) {
+      console.error('Failed to update comment position:', error)
+      setComments(previousComments)
+
+      if (cachedDetail) {
+        commentDetailCacheRef.current = {
+          ...commentDetailCacheRef.current,
+          [commentId]: cachedDetail,
+        }
+        setCommentDetailCache(commentDetailCacheRef.current)
+
+        if (activeCommentIdRef.current === commentId)
+          setActiveComment(cachedDetail)
+      }
+      else if (activeComment?.id === commentId) {
+        setActiveComment(activeComment)
+      }
+    }
+  }, [activeComment, appId, comments, setComments, setCommentDetailCache, setActiveComment])
+
   const handleCommentReply = useCallback(async (commentId: string, content: string, mentionedUserIds: string[] = []) => {
     if (!appId) return
     const trimmed = content.trim()
@@ -336,6 +399,7 @@ export const useWorkflowComment = () => {
     handleCommentReply,
     handleCommentReplyUpdate,
     handleCommentReplyDelete,
+    handleCommentPositionUpdate,
     refreshActiveComment,
     handleCreateComment,
     loadComments,
diff --git a/web/app/components/workflow/index.tsx b/web/app/components/workflow/index.tsx
index 006ba8186f..560fa87cd2 100644
--- a/web/app/components/workflow/index.tsx
+++ b/web/app/components/workflow/index.tsx
@@ -198,6 +198,7 @@ export const Workflow: FC<WorkflowProps> = memo(({
     handleCommentReply,
     handleCommentReplyUpdate,
     handleCommentReplyDelete,
+    handleCommentPositionUpdate,
   } = useWorkflowComment()
   const showUserComments = useStore(s => s.showUserComments)
   const showUserCursors = useStore(s => s.showUserCursors)
@@ -461,6 +462,7 @@ export const Workflow: FC<WorkflowProps> = memo(({
               comment={comment}
               onClick={() => handleCommentIconClick(comment)}
               isActive={true}
+              onPositionUpdate={position => handleCommentPositionUpdate(comment.id, position)}
             />
             <CommentThread
               key={`${comment.id}-thread`}
@@ -486,6 +488,7 @@ export const Workflow: FC<WorkflowProps> = memo(({
               key={comment.id}
               comment={comment}
               onClick={() => handleCommentIconClick(comment)}
+              onPositionUpdate={position => handleCommentPositionUpdate(comment.id, position)}
             />
           ) : null
         })}
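handleCommentPositionUpdate above follows an optimistic-update-with-rollback shape: mutate local state first so the drag feels instant, persist in the background, and restore the snapshot if the server rejects the move. A generic sketch of that shape (Positioned, persist and moveOptimistically are illustrative names):

// Sketch: optimistic update with rollback on failure.
type Positioned = { id: string; x: number; y: number }

async function moveOptimistically(
  items: Positioned[],
  id: string,
  next: { x: number; y: number },
  setItems: (items: Positioned[]) => void,
  persist: (id: string, next: { x: number; y: number }) => Promise<void>,
) {
  const previous = items
  // 1. Update local state immediately so the UI feels instant.
  setItems(items.map(it => (it.id === id ? { ...it, ...next } : it)))
  try {
    // 2. Persist in the background.
    await persist(id, next)
  }
  catch (error) {
    // 3. Roll back to the snapshot if the save fails.
    console.error('Failed to persist position:', error)
    setItems(previous)
  }
}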