diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index 76e5c04deb..a5a5071fae 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -47,15 +47,17 @@ jobs: - name: Run Unit tests run: | uv run --project api bash dev/pytest/pytest_unit_tests.sh + + - name: Coverage Summary + run: | + set -x # Extract coverage percentage and create a summary TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])') # Create a detailed coverage summary echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY - echo "\`\`\`" >> $GITHUB_STEP_SUMMARY - uv run --project api coverage report >> $GITHUB_STEP_SUMMARY - echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY - name: Run dify config tests run: uv run --project api dev/pytest/pytest_config_tests.py diff --git a/.gitignore b/.gitignore index 8f82bea00d..dd4673a3d2 100644 --- a/.gitignore +++ b/.gitignore @@ -214,3 +214,4 @@ mise.toml # AI Assistant .roo/ +api/.env.backup diff --git a/api/configs/app_config.py b/api/configs/app_config.py index 3a3ad35ee7..20f8c40427 100644 --- a/api/configs/app_config.py +++ b/api/configs/app_config.py @@ -1,8 +1,11 @@ import logging +from pathlib import Path from typing import Any from pydantic.fields import FieldInfo -from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict +from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict, TomlConfigSettingsSource + +from libs.file_utils import search_file_upwards from .deploy import DeploymentConfig from .enterprise import EnterpriseFeatureConfig @@ -99,4 +102,12 @@ class DifyConfig( RemoteSettingsSourceFactory(settings_cls), dotenv_settings, file_secret_settings, + TomlConfigSettingsSource( + settings_cls=settings_cls, + 
toml_file=search_file_upwards( + base_dir_path=Path(__file__).parent, + target_file_name="pyproject.toml", + max_search_parent_depth=2, + ), + ), ) diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index 60ba272ec9..427602676f 100644 --- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -223,6 +223,10 @@ class CeleryConfig(DatabaseConfig): default=None, ) + CELERY_SENTINEL_PASSWORD: Optional[str] = Field( + description="Password of the Redis Sentinel master.", + default=None, + ) CELERY_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field( description="Timeout for Redis Sentinel socket operations in seconds.", default=0.1, diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py index dddf71c094..f511e20e6b 100644 --- a/api/configs/packaging/__init__.py +++ b/api/configs/packaging/__init__.py @@ -1,17 +1,13 @@ from pydantic import Field -from pydantic_settings import BaseSettings + +from configs.packaging.pyproject import PyProjectConfig, PyProjectTomlConfig -class PackagingInfo(BaseSettings): +class PackagingInfo(PyProjectTomlConfig): """ Packaging build information """ - CURRENT_VERSION: str = Field( - description="Dify version", - default="1.5.0", - ) - COMMIT_SHA: str = Field( description="SHA-1 checksum of the git commit used to build the app", default="", diff --git a/api/configs/packaging/pyproject.py b/api/configs/packaging/pyproject.py new file mode 100644 index 0000000000..90b1ecba06 --- /dev/null +++ b/api/configs/packaging/pyproject.py @@ -0,0 +1,17 @@ +from pydantic import BaseModel, Field +from pydantic_settings import BaseSettings + + +class PyProjectConfig(BaseModel): + version: str = Field(description="Dify version", default="") + + +class PyProjectTomlConfig(BaseSettings): + """ + configs in api/pyproject.toml + """ + + project: PyProjectConfig = Field( + description="configs in the project section of pyproject.toml", + default=PyProjectConfig(), + ) 
diff --git a/api/controllers/console/auth/data_source_oauth.py b/api/controllers/console/auth/data_source_oauth.py index 1049f864c3..4c9697cc32 100644 --- a/api/controllers/console/auth/data_source_oauth.py +++ b/api/controllers/console/auth/data_source_oauth.py @@ -41,7 +41,7 @@ class OAuthDataSource(Resource): if not internal_secret: return ({"error": "Internal secret is not set"},) oauth_provider.save_internal_access_token(internal_secret) - return {"data": ""} + return {"data": "internal"} else: auth_url = oauth_provider.get_authorization_url() return {"data": auth_url}, 200 diff --git a/api/controllers/console/version.py b/api/controllers/console/version.py index 7dea8e554e..447cc358f8 100644 --- a/api/controllers/console/version.py +++ b/api/controllers/console/version.py @@ -18,7 +18,7 @@ class VersionApi(Resource): check_update_url = dify_config.CHECK_UPDATE_URL result = { - "version": dify_config.CURRENT_VERSION, + "version": dify_config.project.version, "release_date": "", "release_notes": "", "can_auto_update": False, diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py index 9bddbb4b4b..c0a4734828 100644 --- a/api/controllers/console/workspace/plugin.py +++ b/api/controllers/console/workspace/plugin.py @@ -13,6 +13,7 @@ from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.exc import PluginDaemonClientSideError from libs.login import login_required from models.account import TenantPluginPermission +from services.plugin.plugin_parameter_service import PluginParameterService from services.plugin.plugin_permission_service import PluginPermissionService from services.plugin.plugin_service import PluginService @@ -497,6 +498,42 @@ class PluginFetchPermissionApi(Resource): ) +class PluginFetchDynamicSelectOptionsApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self): + # check if the user is admin or owner + if not 
current_user.is_admin_or_owner: + raise Forbidden() + + tenant_id = current_user.current_tenant_id + user_id = current_user.id + + parser = reqparse.RequestParser() + parser.add_argument("plugin_id", type=str, required=True, location="args") + parser.add_argument("provider", type=str, required=True, location="args") + parser.add_argument("action", type=str, required=True, location="args") + parser.add_argument("parameter", type=str, required=True, location="args") + parser.add_argument("provider_type", type=str, required=True, location="args") + args = parser.parse_args() + + try: + options = PluginParameterService.get_dynamic_select_options( + tenant_id, + user_id, + args["plugin_id"], + args["provider"], + args["action"], + args["parameter"], + args["provider_type"], + ) + except PluginDaemonClientSideError as e: + raise ValueError(e) + + return jsonable_encoder({"options": options}) + + api.add_resource(PluginDebuggingKeyApi, "/workspaces/current/plugin/debugging-key") api.add_resource(PluginListApi, "/workspaces/current/plugin/list") api.add_resource(PluginListLatestVersionsApi, "/workspaces/current/plugin/list/latest-versions") @@ -521,3 +558,5 @@ api.add_resource(PluginFetchMarketplacePkgApi, "/workspaces/current/plugin/marke api.add_resource(PluginChangePermissionApi, "/workspaces/current/plugin/permission/change") api.add_resource(PluginFetchPermissionApi, "/workspaces/current/plugin/permission/fetch") + +api.add_resource(PluginFetchDynamicSelectOptionsApi, "/workspaces/current/plugin/parameters/dynamic-options") diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index 41063b35a5..327e9ce834 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -17,6 +17,7 @@ from core.plugin.entities.request import ( RequestInvokeApp, RequestInvokeEncrypt, RequestInvokeLLM, + RequestInvokeLLMWithStructuredOutput, RequestInvokeModeration, 
RequestInvokeParameterExtractorNode, RequestInvokeQuestionClassifierNode, @@ -47,6 +48,21 @@ class PluginInvokeLLMApi(Resource): return length_prefixed_response(0xF, generator()) +class PluginInvokeLLMWithStructuredOutputApi(Resource): + @setup_required + @plugin_inner_api_only + @get_user_tenant + @plugin_data(payload_type=RequestInvokeLLMWithStructuredOutput) + def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeLLMWithStructuredOutput): + def generator(): + response = PluginModelBackwardsInvocation.invoke_llm_with_structured_output( + user_model.id, tenant_model, payload + ) + return PluginModelBackwardsInvocation.convert_to_event_stream(response) + + return length_prefixed_response(0xF, generator()) + + class PluginInvokeTextEmbeddingApi(Resource): @setup_required @plugin_inner_api_only @@ -291,6 +307,7 @@ class PluginFetchAppInfoApi(Resource): api.add_resource(PluginInvokeLLMApi, "/invoke/llm") +api.add_resource(PluginInvokeLLMWithStructuredOutputApi, "/invoke/llm/structured-output") api.add_resource(PluginInvokeTextEmbeddingApi, "/invoke/text-embedding") api.add_resource(PluginInvokeRerankApi, "/invoke/rerank") api.add_resource(PluginInvokeTTSApi, "/invoke/tts") diff --git a/api/controllers/inner_api/workspace/workspace.py b/api/controllers/inner_api/workspace/workspace.py index a2fc2d4675..77568b75f1 100644 --- a/api/controllers/inner_api/workspace/workspace.py +++ b/api/controllers/inner_api/workspace/workspace.py @@ -29,7 +29,19 @@ class EnterpriseWorkspace(Resource): tenant_was_created.send(tenant) - return {"message": "enterprise workspace created."} + resp = { + "id": tenant.id, + "name": tenant.name, + "plan": tenant.plan, + "status": tenant.status, + "created_at": tenant.created_at.isoformat() + "Z" if tenant.created_at else None, + "updated_at": tenant.updated_at.isoformat() + "Z" if tenant.updated_at else None, + } + + return { + "message": "enterprise workspace created.", + "tenant": resp, + } class 
EnterpriseWorkspaceNoOwnerEmail(Resource): diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index 839afdb9fd..a499719fc3 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ b/api/controllers/service_api/dataset/dataset.py @@ -133,6 +133,22 @@ class DatasetListApi(DatasetApiResource): parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") args = parser.parse_args() + + if args.get("embedding_model_provider"): + DatasetService.check_embedding_model_setting( + tenant_id, args.get("embedding_model_provider"), args.get("embedding_model") + ) + if ( + args.get("retrieval_model") + and args.get("retrieval_model").get("reranking_model") + and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + ): + DatasetService.check_reranking_model_setting( + tenant_id, + args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), + args.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + ) + try: dataset = DatasetService.create_empty_dataset( tenant_id=tenant_id, @@ -265,10 +281,20 @@ class DatasetApi(DatasetApiResource): data = request.get_json() # check embedding model setting - if data.get("indexing_technique") == "high_quality": + if data.get("indexing_technique") == "high_quality" or data.get("embedding_model_provider"): DatasetService.check_embedding_model_setting( dataset.tenant_id, data.get("embedding_model_provider"), data.get("embedding_model") ) + if ( + data.get("retrieval_model") + and data.get("retrieval_model").get("reranking_model") + and data.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + ): + DatasetService.check_reranking_model_setting( + dataset.tenant_id, + data.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), + data.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + ) # The role 
of the current user in the ta table must be admin, owner, editor, or dataset_operator DatasetPermissionService.check_permission( diff --git a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py index e4779f3bdf..d64d9df059 100644 --- a/api/controllers/service_api/dataset/document.py +++ b/api/controllers/service_api/dataset/document.py @@ -3,7 +3,7 @@ import json from flask import request from flask_restful import marshal, reqparse from sqlalchemy import desc, select -from werkzeug.exceptions import NotFound +from werkzeug.exceptions import Forbidden, NotFound import services from controllers.common.errors import FilenameNotExistsError @@ -18,6 +18,7 @@ from controllers.service_api.app.error import ( from controllers.service_api.dataset.error import ( ArchivedDocumentImmutableError, DocumentIndexingError, + InvalidMetadataError, ) from controllers.service_api.wraps import ( DatasetApiResource, @@ -29,7 +30,7 @@ from extensions.ext_database import db from fields.document_fields import document_fields, document_status_fields from libs.login import current_user from models.dataset import Dataset, Document, DocumentSegment -from services.dataset_service import DocumentService +from services.dataset_service import DatasetService, DocumentService from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig from services.file_service import FileService @@ -59,6 +60,7 @@ class DocumentAddByTextApi(DatasetApiResource): parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") args = parser.parse_args() + dataset_id = str(dataset_id) tenant_id = str(tenant_id) dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -74,6 +76,21 @@ class DocumentAddByTextApi(DatasetApiResource): if text is None or name is None: raise ValueError("Both 'text' and 'name' must be non-null values.") + if 
args.get("embedding_model_provider"): + DatasetService.check_embedding_model_setting( + tenant_id, args.get("embedding_model_provider"), args.get("embedding_model") + ) + if ( + args.get("retrieval_model") + and args.get("retrieval_model").get("reranking_model") + and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + ): + DatasetService.check_reranking_model_setting( + tenant_id, + args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), + args.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + ) + upload_file = FileService.upload_text(text=str(text), text_name=str(name)) data_source = { "type": "upload_file", @@ -124,6 +141,17 @@ class DocumentUpdateByTextApi(DatasetApiResource): if not dataset: raise ValueError("Dataset does not exist.") + if ( + args.get("retrieval_model") + and args.get("retrieval_model").get("reranking_model") + and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name") + ): + DatasetService.check_reranking_model_setting( + tenant_id, + args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"), + args.get("retrieval_model").get("reranking_model").get("reranking_model_name"), + ) + # indexing_technique is already set in dataset since this is an update args["indexing_technique"] = dataset.indexing_technique @@ -188,6 +216,21 @@ class DocumentAddByFileApi(DatasetApiResource): raise ValueError("indexing_technique is required.") args["indexing_technique"] = indexing_technique + if "embedding_model_provider" in args: + DatasetService.check_embedding_model_setting( + tenant_id, args["embedding_model_provider"], args["embedding_model"] + ) + if ( + "retrieval_model" in args + and args["retrieval_model"].get("reranking_model") + and args["retrieval_model"].get("reranking_model").get("reranking_provider_name") + ): + DatasetService.check_reranking_model_setting( + tenant_id, + 
args["retrieval_model"].get("reranking_model").get("reranking_provider_name"), + args["retrieval_model"].get("reranking_model").get("reranking_model_name"), + ) + # save file info file = request.files["file"] # check file @@ -424,6 +467,101 @@ class DocumentIndexingStatusApi(DatasetApiResource): return data +class DocumentDetailApi(DatasetApiResource): + METADATA_CHOICES = {"all", "only", "without"} + + def get(self, tenant_id, dataset_id, document_id): + dataset_id = str(dataset_id) + document_id = str(document_id) + + dataset = self.get_dataset(dataset_id, tenant_id) + + document = DocumentService.get_document(dataset.id, document_id) + + if not document: + raise NotFound("Document not found.") + + if document.tenant_id != str(tenant_id): + raise Forbidden("No permission.") + + metadata = request.args.get("metadata", "all") + if metadata not in self.METADATA_CHOICES: + raise InvalidMetadataError(f"Invalid metadata value: {metadata}") + + if metadata == "only": + response = {"id": document.id, "doc_type": document.doc_type, "doc_metadata": document.doc_metadata_details} + elif metadata == "without": + dataset_process_rules = DatasetService.get_process_rules(dataset_id) + document_process_rules = document.dataset_process_rule.to_dict() + data_source_info = document.data_source_detail_dict + response = { + "id": document.id, + "position": document.position, + "data_source_type": document.data_source_type, + "data_source_info": data_source_info, + "dataset_process_rule_id": document.dataset_process_rule_id, + "dataset_process_rule": dataset_process_rules, + "document_process_rule": document_process_rules, + "name": document.name, + "created_from": document.created_from, + "created_by": document.created_by, + "created_at": document.created_at.timestamp(), + "tokens": document.tokens, + "indexing_status": document.indexing_status, + "completed_at": int(document.completed_at.timestamp()) if document.completed_at else None, + "updated_at": 
int(document.updated_at.timestamp()) if document.updated_at else None, + "indexing_latency": document.indexing_latency, + "error": document.error, + "enabled": document.enabled, + "disabled_at": int(document.disabled_at.timestamp()) if document.disabled_at else None, + "disabled_by": document.disabled_by, + "archived": document.archived, + "segment_count": document.segment_count, + "average_segment_length": document.average_segment_length, + "hit_count": document.hit_count, + "display_status": document.display_status, + "doc_form": document.doc_form, + "doc_language": document.doc_language, + } + else: + dataset_process_rules = DatasetService.get_process_rules(dataset_id) + document_process_rules = document.dataset_process_rule.to_dict() + data_source_info = document.data_source_detail_dict + response = { + "id": document.id, + "position": document.position, + "data_source_type": document.data_source_type, + "data_source_info": data_source_info, + "dataset_process_rule_id": document.dataset_process_rule_id, + "dataset_process_rule": dataset_process_rules, + "document_process_rule": document_process_rules, + "name": document.name, + "created_from": document.created_from, + "created_by": document.created_by, + "created_at": document.created_at.timestamp(), + "tokens": document.tokens, + "indexing_status": document.indexing_status, + "completed_at": int(document.completed_at.timestamp()) if document.completed_at else None, + "updated_at": int(document.updated_at.timestamp()) if document.updated_at else None, + "indexing_latency": document.indexing_latency, + "error": document.error, + "enabled": document.enabled, + "disabled_at": int(document.disabled_at.timestamp()) if document.disabled_at else None, + "disabled_by": document.disabled_by, + "archived": document.archived, + "doc_type": document.doc_type, + "doc_metadata": document.doc_metadata_details, + "segment_count": document.segment_count, + "average_segment_length": document.average_segment_length, + 
"hit_count": document.hit_count, + "display_status": document.display_status, + "doc_form": document.doc_form, + "doc_language": document.doc_language, + } + + return response + + api.add_resource( DocumentAddByTextApi, "/datasets//document/create_by_text", @@ -447,3 +585,4 @@ api.add_resource( api.add_resource(DocumentDeleteApi, "/datasets//documents/") api.add_resource(DocumentListApi, "/datasets//documents") api.add_resource(DocumentIndexingStatusApi, "/datasets//documents//indexing-status") +api.add_resource(DocumentDetailApi, "/datasets//documents/") diff --git a/api/controllers/service_api/index.py b/api/controllers/service_api/index.py index d24c4597e2..9bb5df4c4e 100644 --- a/api/controllers/service_api/index.py +++ b/api/controllers/service_api/index.py @@ -9,7 +9,7 @@ class IndexApi(Resource): return { "welcome": "Dify OpenAPI", "api_version": "v1", - "server_version": dify_config.CURRENT_VERSION, + "server_version": dify_config.project.version, } diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py index d3316a5159..5b919a68d4 100644 --- a/api/controllers/service_api/wraps.py +++ b/api/controllers/service_api/wraps.py @@ -11,13 +11,13 @@ from flask_restful import Resource from pydantic import BaseModel from sqlalchemy import select, update from sqlalchemy.orm import Session -from werkzeug.exceptions import Forbidden, Unauthorized +from werkzeug.exceptions import Forbidden, NotFound, Unauthorized from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.login import _get_user from models.account import Account, Tenant, TenantAccountJoin, TenantStatus -from models.dataset import RateLimitLog +from models.dataset import Dataset, RateLimitLog from models.model import ApiToken, App, EndUser from services.feature_service import FeatureService @@ -317,3 +317,11 @@ def create_or_update_end_user_for_user_id(app_model: App, user_id: Optional[str] class DatasetApiResource(Resource): 
method_decorators = [validate_dataset_token] + + def get_dataset(self, dataset_id: str, tenant_id: str) -> Dataset: + dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id, Dataset.tenant_id == tenant_id).first() + + if not dataset: + raise NotFound("Dataset not found.") + + return dataset diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index 61de9ec670..7877408cef 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -27,6 +27,9 @@ from core.ops.ops_trace_manager import TraceQueueManager from core.prompt.utils.get_thread_messages_length import get_thread_messages_length from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository +from core.workflow.repositories.draft_variable_repository import ( + DraftVariableSaverFactory, +) from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader @@ -36,7 +39,10 @@ from libs.flask_utils import preserve_flask_contexts from models import Account, App, Conversation, EndUser, Message, Workflow, WorkflowNodeExecutionTriggeredFrom from models.enums import WorkflowRunTriggeredFrom from services.conversation_service import ConversationService -from services.workflow_draft_variable_service import DraftVarLoader, WorkflowDraftVariableService +from services.workflow_draft_variable_service import ( + DraftVarLoader, + WorkflowDraftVariableService, +) logger = logging.getLogger(__name__) @@ -450,6 +456,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): workflow_execution_repository=workflow_execution_repository, 
workflow_node_execution_repository=workflow_node_execution_repository, stream=stream, + draft_var_saver_factory=self._get_draft_var_saver_factory(invoke_from), ) return AdvancedChatAppGenerateResponseConverter.convert(response=response, invoke_from=invoke_from) @@ -521,6 +528,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): user: Union[Account, EndUser], workflow_execution_repository: WorkflowExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository, + draft_var_saver_factory: DraftVariableSaverFactory, stream: bool = False, ) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]: """ @@ -547,6 +555,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, stream=stream, + draft_var_saver_factory=draft_var_saver_factory, ) try: diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index 8c5645bbb7..4c52fc3e83 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -64,6 +64,7 @@ from core.workflow.entities.workflow_execution import WorkflowExecutionStatus, W from core.workflow.enums import SystemVariableKey from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState from core.workflow.nodes import NodeType +from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager @@ -94,6 +95,7 @@ class AdvancedChatAppGenerateTaskPipeline: dialogue_count: int, 
workflow_execution_repository: WorkflowExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository, + draft_var_saver_factory: DraftVariableSaverFactory, ) -> None: self._base_task_pipeline = BasedGenerateTaskPipeline( application_generate_entity=application_generate_entity, @@ -153,6 +155,7 @@ class AdvancedChatAppGenerateTaskPipeline: self._conversation_name_generate_thread: Thread | None = None self._recorded_files: list[Mapping[str, Any]] = [] self._workflow_run_id: str = "" + self._draft_var_saver_factory = draft_var_saver_factory def process(self) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]: """ @@ -371,6 +374,7 @@ class AdvancedChatAppGenerateTaskPipeline: workflow_node_execution=workflow_node_execution, ) session.commit() + self._save_output_for_event(event, workflow_node_execution.id) if node_finish_resp: yield node_finish_resp @@ -390,6 +394,8 @@ class AdvancedChatAppGenerateTaskPipeline: task_id=self._application_generate_entity.task_id, workflow_node_execution=workflow_node_execution, ) + if isinstance(event, QueueNodeExceptionEvent): + self._save_output_for_event(event, workflow_node_execution.id) if node_finish_resp: yield node_finish_resp @@ -759,3 +765,15 @@ class AdvancedChatAppGenerateTaskPipeline: if not message: raise ValueError(f"Message not found: {self._message_id}") return message + + def _save_output_for_event(self, event: QueueNodeSucceededEvent | QueueNodeExceptionEvent, node_execution_id: str): + with Session(db.engine) as session, session.begin(): + saver = self._draft_var_saver_factory( + session=session, + app_id=self._application_generate_entity.app_config.app_id, + node_id=event.node_id, + node_type=event.node_type, + node_execution_id=node_execution_id, + enclosing_node_id=event.in_loop_id or event.in_iteration_id, + ) + saver.save(event.process_data, event.outputs) diff --git a/api/core/app/apps/base_app_generator.py b/api/core/app/apps/base_app_generator.py 
index a83b75cc1a..beece1d77e 100644 --- a/api/core/app/apps/base_app_generator.py +++ b/api/core/app/apps/base_app_generator.py @@ -1,10 +1,20 @@ import json from collections.abc import Generator, Mapping, Sequence -from typing import TYPE_CHECKING, Any, Optional, Union +from typing import TYPE_CHECKING, Any, Optional, Union, final + +from sqlalchemy.orm import Session from core.app.app_config.entities import VariableEntityType +from core.app.entities.app_invoke_entities import InvokeFrom from core.file import File, FileUploadConfig +from core.workflow.nodes.enums import NodeType +from core.workflow.repositories.draft_variable_repository import ( + DraftVariableSaver, + DraftVariableSaverFactory, + NoopDraftVariableSaver, +) from factories import file_factory +from services.workflow_draft_variable_service import DraftVariableSaver as DraftVariableSaverImpl if TYPE_CHECKING: from core.app.app_config.entities import VariableEntity @@ -159,3 +169,38 @@ class BaseAppGenerator: yield f"event: {message}\n\n" return gen() + + @final + @staticmethod + def _get_draft_var_saver_factory(invoke_from: InvokeFrom) -> DraftVariableSaverFactory: + if invoke_from == InvokeFrom.DEBUGGER: + + def draft_var_saver_factory( + session: Session, + app_id: str, + node_id: str, + node_type: NodeType, + node_execution_id: str, + enclosing_node_id: str | None = None, + ) -> DraftVariableSaver: + return DraftVariableSaverImpl( + session=session, + app_id=app_id, + node_id=node_id, + node_type=node_type, + node_execution_id=node_execution_id, + enclosing_node_id=enclosing_node_id, + ) + else: + + def draft_var_saver_factory( + session: Session, + app_id: str, + node_id: str, + node_type: NodeType, + node_execution_id: str, + enclosing_node_id: str | None = None, + ) -> DraftVariableSaver: + return NoopDraftVariableSaver() + + return draft_var_saver_factory diff --git a/api/core/app/apps/common/workflow_response_converter.py b/api/core/app/apps/common/workflow_response_converter.py index 
cd1d298ca2..34a1da2227 100644 --- a/api/core/app/apps/common/workflow_response_converter.py +++ b/api/core/app/apps/common/workflow_response_converter.py @@ -44,6 +44,7 @@ from core.app.entities.task_entities import ( ) from core.file import FILE_MODEL_IDENTITY, File from core.tools.tool_manager import ToolManager +from core.variables.segments import ArrayFileSegment, FileSegment, Segment from core.workflow.entities.workflow_execution import WorkflowExecution from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution, WorkflowNodeExecutionStatus from core.workflow.nodes import NodeType @@ -506,7 +507,8 @@ class WorkflowResponseConverter: # Convert to tuple to match Sequence type return tuple(flattened_files) - def _fetch_files_from_variable_value(self, value: Union[dict, list]) -> Sequence[Mapping[str, Any]]: + @classmethod + def _fetch_files_from_variable_value(cls, value: Union[dict, list, Segment]) -> Sequence[Mapping[str, Any]]: """ Fetch files from variable value :param value: variable value @@ -515,20 +517,30 @@ class WorkflowResponseConverter: if not value: return [] - files = [] - if isinstance(value, list): + files: list[Mapping[str, Any]] = [] + if isinstance(value, FileSegment): + files.append(value.value.to_dict()) + elif isinstance(value, ArrayFileSegment): + files.extend([i.to_dict() for i in value.value]) + elif isinstance(value, File): + files.append(value.to_dict()) + elif isinstance(value, list): for item in value: - file = self._get_file_var_from_value(item) + file = cls._get_file_var_from_value(item) if file: files.append(file) - elif isinstance(value, dict): - file = self._get_file_var_from_value(value) + elif isinstance( + value, + dict, + ): + file = cls._get_file_var_from_value(value) if file: files.append(file) return files - def _get_file_var_from_value(self, value: Union[dict, list]) -> Mapping[str, Any] | None: + @classmethod + def _get_file_var_from_value(cls, value: Union[dict, list]) -> Mapping[str, Any] | None: 
""" Get file var from value :param value: variable value diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index 369fa0e48c..40a1e272a7 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -25,6 +25,7 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError from core.ops.ops_trace_manager import TraceQueueManager from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository +from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader @@ -219,6 +220,9 @@ class WorkflowAppGenerator(BaseAppGenerator): # new thread with request context and contextvars context = contextvars.copy_context() + # release database connection, because the following new thread operations may take a long time + db.session.close() + worker_thread = threading.Thread( target=self._generate_worker, kwargs={ @@ -233,6 +237,10 @@ class WorkflowAppGenerator(BaseAppGenerator): worker_thread.start() + draft_var_saver_factory = self._get_draft_var_saver_factory( + invoke_from, + ) + # return response or stream generator response = self._handle_response( application_generate_entity=application_generate_entity, @@ -241,6 +249,7 @@ class WorkflowAppGenerator(BaseAppGenerator): user=user, workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, + draft_var_saver_factory=draft_var_saver_factory, stream=streaming, ) @@ -471,6 +480,7 @@ class WorkflowAppGenerator(BaseAppGenerator): user: 
Union[Account, EndUser], workflow_execution_repository: WorkflowExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository, + draft_var_saver_factory: DraftVariableSaverFactory, stream: bool = False, ) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]: """ @@ -491,6 +501,7 @@ class WorkflowAppGenerator(BaseAppGenerator): user=user, workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, + draft_var_saver_factory=draft_var_saver_factory, stream=stream, ) diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index 1734dbb598..2a85cd5e3d 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -56,6 +56,7 @@ from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk from core.ops.ops_trace_manager import TraceQueueManager from core.workflow.entities.workflow_execution import WorkflowExecution, WorkflowExecutionStatus, WorkflowType from core.workflow.enums import SystemVariableKey +from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager @@ -87,6 +88,7 @@ class WorkflowAppGenerateTaskPipeline: stream: bool, workflow_execution_repository: WorkflowExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository, + draft_var_saver_factory: DraftVariableSaverFactory, ) -> None: self._base_task_pipeline = BasedGenerateTaskPipeline( application_generate_entity=application_generate_entity, @@ -131,6 +133,8 @@ class 
WorkflowAppGenerateTaskPipeline: self._application_generate_entity = application_generate_entity self._workflow_features_dict = workflow.features_dict self._workflow_run_id = "" + self._invoke_from = queue_manager._invoke_from + self._draft_var_saver_factory = draft_var_saver_factory def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]: """ @@ -322,6 +326,8 @@ class WorkflowAppGenerateTaskPipeline: workflow_node_execution=workflow_node_execution, ) + self._save_output_for_event(event, workflow_node_execution.id) + if node_success_response: yield node_success_response elif isinstance( @@ -339,6 +345,8 @@ class WorkflowAppGenerateTaskPipeline: task_id=self._application_generate_entity.task_id, workflow_node_execution=workflow_node_execution, ) + if isinstance(event, QueueNodeExceptionEvent): + self._save_output_for_event(event, workflow_node_execution.id) if node_failed_response: yield node_failed_response @@ -593,3 +601,15 @@ class WorkflowAppGenerateTaskPipeline: ) return response + + def _save_output_for_event(self, event: QueueNodeSucceededEvent | QueueNodeExceptionEvent, node_execution_id: str): + with Session(db.engine) as session, session.begin(): + saver = self._draft_var_saver_factory( + session=session, + app_id=self._application_generate_entity.app_config.app_id, + node_id=event.node_id, + node_type=event.node_type, + node_execution_id=node_execution_id, + enclosing_node_id=event.in_loop_id or event.in_iteration_id, + ) + saver.save(event.process_data, event.outputs) diff --git a/api/core/app/apps/workflow_app_runner.py b/api/core/app/apps/workflow_app_runner.py index dc6c381e86..17b9ac5827 100644 --- a/api/core/app/apps/workflow_app_runner.py +++ b/api/core/app/apps/workflow_app_runner.py @@ -1,8 +1,6 @@ from collections.abc import Mapping from typing import Any, Optional, cast -from sqlalchemy.orm import Session - from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom from 
core.app.apps.base_app_runner import AppRunner from core.app.entities.queue_entities import ( @@ -35,7 +33,6 @@ from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey from core.workflow.graph_engine.entities.event import ( AgentLogEvent, - BaseNodeEvent, GraphEngineEvent, GraphRunFailedEvent, GraphRunPartialSucceededEvent, @@ -70,9 +67,6 @@ from core.workflow.workflow_entry import WorkflowEntry from extensions.ext_database import db from models.model import App from models.workflow import Workflow -from services.workflow_draft_variable_service import ( - DraftVariableSaver, -) class WorkflowBasedAppRunner(AppRunner): @@ -400,7 +394,6 @@ class WorkflowBasedAppRunner(AppRunner): in_loop_id=event.in_loop_id, ) ) - self._save_draft_var_for_event(event) elif isinstance(event, NodeRunFailedEvent): self._publish_event( @@ -464,7 +457,6 @@ class WorkflowBasedAppRunner(AppRunner): in_loop_id=event.in_loop_id, ) ) - self._save_draft_var_for_event(event) elif isinstance(event, NodeInIterationFailedEvent): self._publish_event( @@ -718,30 +710,3 @@ class WorkflowBasedAppRunner(AppRunner): def _publish_event(self, event: AppQueueEvent) -> None: self.queue_manager.publish(event, PublishFrom.APPLICATION_MANAGER) - - def _save_draft_var_for_event(self, event: BaseNodeEvent): - run_result = event.route_node_state.node_run_result - if run_result is None: - return - process_data = run_result.process_data - outputs = run_result.outputs - with Session(bind=db.engine) as session, session.begin(): - draft_var_saver = DraftVariableSaver( - session=session, - app_id=self._get_app_id(), - node_id=event.node_id, - node_type=event.node_type, - # FIXME(QuantumGhost): rely on private state of queue_manager is not ideal. 
- invoke_from=self.queue_manager._invoke_from, - node_execution_id=event.id, - enclosing_node_id=event.in_loop_id or event.in_iteration_id or None, - ) - draft_var_saver.save(process_data=process_data, outputs=outputs) - - -def _remove_first_element_from_variable_string(key: str) -> str: - """ - Remove the first element from the prefix. - """ - prefix, remaining = key.split(".", maxsplit=1) - return remaining diff --git a/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py b/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py index d535e1f835..3c8c7bb5a2 100644 --- a/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py +++ b/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py @@ -395,6 +395,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline): message.provider_response_latency = time.perf_counter() - self._start_at message.total_price = usage.total_price message.currency = usage.currency + self._task_state.llm_result.usage.latency = message.provider_response_latency message.message_metadata = self._task_state.metadata.model_dump_json() if trace_manager: diff --git a/api/core/entities/parameter_entities.py b/api/core/entities/parameter_entities.py index 36800bc263..b071bfa5b1 100644 --- a/api/core/entities/parameter_entities.py +++ b/api/core/entities/parameter_entities.py @@ -15,6 +15,11 @@ class CommonParameterType(StrEnum): MODEL_SELECTOR = "model-selector" TOOLS_SELECTOR = "array[tools]" + # Dynamic select parameter + # Once you are not sure about the available options until authorization is done + # eg: Select a Slack channel from a Slack workspace + DYNAMIC_SELECT = "dynamic-select" + # TOOL_SELECTOR = "tool-selector" diff --git a/api/core/llm_generator/output_parser/structured_output.py b/api/core/llm_generator/output_parser/structured_output.py new file mode 100644 index 0000000000..0aaf5abef0 --- /dev/null +++ b/api/core/llm_generator/output_parser/structured_output.py @@ -0,0 
+1,374 @@ +import json +from collections.abc import Generator, Mapping, Sequence +from copy import deepcopy +from enum import StrEnum +from typing import Any, Literal, Optional, cast, overload + +import json_repair +from pydantic import TypeAdapter, ValidationError + +from core.llm_generator.output_parser.errors import OutputParserError +from core.llm_generator.prompts import STRUCTURED_OUTPUT_PROMPT +from core.model_manager import ModelInstance +from core.model_runtime.callbacks.base_callback import Callback +from core.model_runtime.entities.llm_entities import ( + LLMResult, + LLMResultChunk, + LLMResultChunkDelta, + LLMResultChunkWithStructuredOutput, + LLMResultWithStructuredOutput, +) +from core.model_runtime.entities.message_entities import ( + AssistantPromptMessage, + PromptMessage, + PromptMessageTool, + SystemPromptMessage, +) +from core.model_runtime.entities.model_entities import AIModelEntity, ParameterRule + + +class ResponseFormat(StrEnum): + """Constants for model response formats""" + + JSON_SCHEMA = "json_schema" # model's structured output mode. some model like gemini, gpt-4o, support this mode. + JSON = "JSON" # model's json mode. some model like claude support this mode. + JSON_OBJECT = "json_object" # json mode's another alias. some model like deepseek-chat, qwen use this alias. + + +class SpecialModelType(StrEnum): + """Constants for identifying model types""" + + GEMINI = "gemini" + OLLAMA = "ollama" + + +@overload +def invoke_llm_with_structured_output( + provider: str, + model_schema: AIModelEntity, + model_instance: ModelInstance, + prompt_messages: Sequence[PromptMessage], + json_schema: Mapping[str, Any], + model_parameters: Optional[Mapping] = None, + tools: Sequence[PromptMessageTool] | None = None, + stop: Optional[list[str]] = None, + stream: Literal[True] = True, + user: Optional[str] = None, + callbacks: Optional[list[Callback]] = None, +) -> Generator[LLMResultChunkWithStructuredOutput, None, None]: ... 
+ + +@overload +def invoke_llm_with_structured_output( + provider: str, + model_schema: AIModelEntity, + model_instance: ModelInstance, + prompt_messages: Sequence[PromptMessage], + json_schema: Mapping[str, Any], + model_parameters: Optional[Mapping] = None, + tools: Sequence[PromptMessageTool] | None = None, + stop: Optional[list[str]] = None, + stream: Literal[False] = False, + user: Optional[str] = None, + callbacks: Optional[list[Callback]] = None, +) -> LLMResultWithStructuredOutput: ... + + +@overload +def invoke_llm_with_structured_output( + provider: str, + model_schema: AIModelEntity, + model_instance: ModelInstance, + prompt_messages: Sequence[PromptMessage], + json_schema: Mapping[str, Any], + model_parameters: Optional[Mapping] = None, + tools: Sequence[PromptMessageTool] | None = None, + stop: Optional[list[str]] = None, + stream: bool = True, + user: Optional[str] = None, + callbacks: Optional[list[Callback]] = None, +) -> LLMResultWithStructuredOutput | Generator[LLMResultChunkWithStructuredOutput, None, None]: ... + + +def invoke_llm_with_structured_output( + provider: str, + model_schema: AIModelEntity, + model_instance: ModelInstance, + prompt_messages: Sequence[PromptMessage], + json_schema: Mapping[str, Any], + model_parameters: Optional[Mapping] = None, + tools: Sequence[PromptMessageTool] | None = None, + stop: Optional[list[str]] = None, + stream: bool = True, + user: Optional[str] = None, + callbacks: Optional[list[Callback]] = None, +) -> LLMResultWithStructuredOutput | Generator[LLMResultChunkWithStructuredOutput, None, None]: + """ + Invoke large language model with structured output + 1. This method invokes model_instance.invoke_llm with json_schema + 2. 
Try to parse the result as structured output + + :param prompt_messages: prompt messages + :param json_schema: json schema + :param model_parameters: model parameters + :param tools: tools for tool calling + :param stop: stop words + :param stream: is stream response + :param user: unique user id + :param callbacks: callbacks + :return: full response or stream response chunk generator result + """ + + # handle native json schema + model_parameters_with_json_schema: dict[str, Any] = { + **(model_parameters or {}), + } + + if model_schema.support_structure_output: + model_parameters = _handle_native_json_schema( + provider, model_schema, json_schema, model_parameters_with_json_schema, model_schema.parameter_rules + ) + else: + # Set appropriate response format based on model capabilities + _set_response_format(model_parameters_with_json_schema, model_schema.parameter_rules) + + # handle prompt based schema + prompt_messages = _handle_prompt_based_schema( + prompt_messages=prompt_messages, + structured_output_schema=json_schema, + ) + + llm_result = model_instance.invoke_llm( + prompt_messages=list(prompt_messages), + model_parameters=model_parameters_with_json_schema, + tools=tools, + stop=stop, + stream=stream, + user=user, + callbacks=callbacks, + ) + + if isinstance(llm_result, LLMResult): + if not isinstance(llm_result.message.content, str): + raise OutputParserError( + f"Failed to parse structured output, LLM result is not a string: {llm_result.message.content}" + ) + + return LLMResultWithStructuredOutput( + structured_output=_parse_structured_output(llm_result.message.content), + model=llm_result.model, + message=llm_result.message, + usage=llm_result.usage, + system_fingerprint=llm_result.system_fingerprint, + prompt_messages=llm_result.prompt_messages, + ) + else: + + def generator() -> Generator[LLMResultChunkWithStructuredOutput, None, None]: + result_text: str = "" + prompt_messages: Sequence[PromptMessage] = [] + system_fingerprint: Optional[str] = None 
+ for event in llm_result: + if isinstance(event, LLMResultChunk): + if isinstance(event.delta.message.content, str): + result_text += event.delta.message.content + prompt_messages = event.prompt_messages + system_fingerprint = event.system_fingerprint + + yield LLMResultChunkWithStructuredOutput( + model=model_schema.model, + prompt_messages=prompt_messages, + system_fingerprint=system_fingerprint, + delta=event.delta, + ) + + yield LLMResultChunkWithStructuredOutput( + structured_output=_parse_structured_output(result_text), + model=model_schema.model, + prompt_messages=prompt_messages, + system_fingerprint=system_fingerprint, + delta=LLMResultChunkDelta( + index=0, + message=AssistantPromptMessage(content=""), + usage=None, + finish_reason=None, + ), + ) + + return generator() + + +def _handle_native_json_schema( + provider: str, + model_schema: AIModelEntity, + structured_output_schema: Mapping, + model_parameters: dict, + rules: list[ParameterRule], +) -> dict: + """ + Handle structured output for models with native JSON schema support. + + :param model_parameters: Model parameters to update + :param rules: Model parameter rules + :return: Updated model parameters with JSON schema configuration + """ + # Process schema according to model requirements + schema_json = _prepare_schema_for_model(provider, model_schema, structured_output_schema) + + # Set JSON schema in parameters + model_parameters["json_schema"] = json.dumps(schema_json, ensure_ascii=False) + + # Set appropriate response format if required by the model + for rule in rules: + if rule.name == "response_format" and ResponseFormat.JSON_SCHEMA.value in rule.options: + model_parameters["response_format"] = ResponseFormat.JSON_SCHEMA.value + + return model_parameters + + +def _set_response_format(model_parameters: dict, rules: list) -> None: + """ + Set the appropriate response format parameter based on model rules. 
+ + :param model_parameters: Model parameters to update + :param rules: Model parameter rules + """ + for rule in rules: + if rule.name == "response_format": + if ResponseFormat.JSON.value in rule.options: + model_parameters["response_format"] = ResponseFormat.JSON.value + elif ResponseFormat.JSON_OBJECT.value in rule.options: + model_parameters["response_format"] = ResponseFormat.JSON_OBJECT.value + + +def _handle_prompt_based_schema( + prompt_messages: Sequence[PromptMessage], structured_output_schema: Mapping +) -> list[PromptMessage]: + """ + Handle structured output for models without native JSON schema support. + This function modifies the prompt messages to include schema-based output requirements. + + Args: + prompt_messages: Original sequence of prompt messages + + Returns: + list[PromptMessage]: Updated prompt messages with structured output requirements + """ + # Convert schema to string format + schema_str = json.dumps(structured_output_schema, ensure_ascii=False) + + # Find existing system prompt with schema placeholder + system_prompt = next( + (prompt for prompt in prompt_messages if isinstance(prompt, SystemPromptMessage)), + None, + ) + structured_output_prompt = STRUCTURED_OUTPUT_PROMPT.replace("{{schema}}", schema_str) + # Prepare system prompt content + system_prompt_content = ( + structured_output_prompt + "\n\n" + system_prompt.content + if system_prompt and isinstance(system_prompt.content, str) + else structured_output_prompt + ) + system_prompt = SystemPromptMessage(content=system_prompt_content) + + # Extract content from the last user message + + filtered_prompts = [prompt for prompt in prompt_messages if not isinstance(prompt, SystemPromptMessage)] + updated_prompt = [system_prompt] + filtered_prompts + + return updated_prompt + + +def _parse_structured_output(result_text: str) -> Mapping[str, Any]: + structured_output: Mapping[str, Any] = {} + parsed: Mapping[str, Any] = {} + try: + parsed = 
TypeAdapter(Mapping).validate_json(result_text) + if not isinstance(parsed, dict): + raise OutputParserError(f"Failed to parse structured output: {result_text}") + structured_output = parsed + except ValidationError: + # if the result_text is not a valid json, try to repair it + temp_parsed = json_repair.loads(result_text) + if not isinstance(temp_parsed, dict): + # handle reasoning model like deepseek-r1 got '\n\n\n' prefix + if isinstance(temp_parsed, list): + temp_parsed = next((item for item in temp_parsed if isinstance(item, dict)), {}) + else: + raise OutputParserError(f"Failed to parse structured output: {result_text}") + structured_output = cast(dict, temp_parsed) + return structured_output + + +def _prepare_schema_for_model(provider: str, model_schema: AIModelEntity, schema: Mapping) -> dict: + """ + Prepare JSON schema based on model requirements. + + Different models have different requirements for JSON schema formatting. + This function handles these differences. + + :param schema: The original JSON schema + :return: Processed schema compatible with the current model + """ + + # Deep copy to avoid modifying the original schema + processed_schema = dict(deepcopy(schema)) + + # Convert boolean types to string types (common requirement) + convert_boolean_to_string(processed_schema) + + # Apply model-specific transformations + if SpecialModelType.GEMINI in model_schema.model: + remove_additional_properties(processed_schema) + return processed_schema + elif SpecialModelType.OLLAMA in provider: + return processed_schema + else: + # Default format with name field + return {"schema": processed_schema, "name": "llm_response"} + + +def remove_additional_properties(schema: dict) -> None: + """ + Remove additionalProperties fields from JSON schema. + Used for models like Gemini that don't support this property. 
+ + :param schema: JSON schema to modify in-place + """ + if not isinstance(schema, dict): + return + + # Remove additionalProperties at current level + schema.pop("additionalProperties", None) + + # Process nested structures recursively + for value in schema.values(): + if isinstance(value, dict): + remove_additional_properties(value) + elif isinstance(value, list): + for item in value: + if isinstance(item, dict): + remove_additional_properties(item) + + +def convert_boolean_to_string(schema: dict) -> None: + """ + Convert boolean type specifications to string in JSON schema. + + :param schema: JSON schema to modify in-place + """ + if not isinstance(schema, dict): + return + + # Check for boolean type at current level + if schema.get("type") == "boolean": + schema["type"] = "string" + + # Process nested dictionaries and lists recursively + for value in schema.values(): + if isinstance(value, dict): + convert_boolean_to_string(value) + elif isinstance(value, list): + for item in value: + if isinstance(item, dict): + convert_boolean_to_string(item) diff --git a/api/core/llm_generator/prompts.py b/api/core/llm_generator/prompts.py index ddfa1e7a66..ef81e38dc5 100644 --- a/api/core/llm_generator/prompts.py +++ b/api/core/llm_generator/prompts.py @@ -291,3 +291,21 @@ Your task is to convert simple user descriptions into properly formatted JSON Sc Now, generate a JSON Schema based on my description """ # noqa: E501 + +STRUCTURED_OUTPUT_PROMPT = """You’re a helpful AI assistant. You could answer questions and output in JSON format. +constraints: + - You must output in JSON format. + - Do not output boolean value, use string type instead. + - Do not output integer or float value, use number type instead. +eg: + Here is the JSON schema: + {"additionalProperties": false, "properties": {"age": {"type": "number"}, "name": {"type": "string"}}, "required": ["name", "age"], "type": "object"} + + Here is the user's question: + My name is John Doe and I am 30 years old. 
+ + output: + {"name": "John Doe", "age": 30} +Here is the JSON schema: +{{schema}} +""" # noqa: E501 diff --git a/api/core/model_runtime/entities/llm_entities.py b/api/core/model_runtime/entities/llm_entities.py index de5a748d4f..e52b0eba55 100644 --- a/api/core/model_runtime/entities/llm_entities.py +++ b/api/core/model_runtime/entities/llm_entities.py @@ -1,7 +1,7 @@ -from collections.abc import Sequence +from collections.abc import Mapping, Sequence from decimal import Decimal from enum import StrEnum -from typing import Optional +from typing import Any, Optional from pydantic import BaseModel, Field @@ -101,6 +101,20 @@ class LLMResult(BaseModel): system_fingerprint: Optional[str] = None +class LLMStructuredOutput(BaseModel): + """ + Model class for llm structured output. + """ + + structured_output: Optional[Mapping[str, Any]] = None + + +class LLMResultWithStructuredOutput(LLMResult, LLMStructuredOutput): + """ + Model class for llm result with structured output. + """ + + class LLMResultChunkDelta(BaseModel): """ Model class for llm result chunk delta. @@ -123,6 +137,12 @@ class LLMResultChunk(BaseModel): delta: LLMResultChunkDelta +class LLMResultChunkWithStructuredOutput(LLMResultChunk, LLMStructuredOutput): + """ + Model class for llm result chunk with structured output. + """ + + class NumTokensResult(PriceInfo): """ Model class for number of tokens result. 
diff --git a/api/core/ops/langfuse_trace/langfuse_trace.py b/api/core/ops/langfuse_trace/langfuse_trace.py index 0ea74e9ef0..d37342012d 100644 --- a/api/core/ops/langfuse_trace/langfuse_trace.py +++ b/api/core/ops/langfuse_trace/langfuse_trace.py @@ -83,6 +83,7 @@ class LangFuseDataTrace(BaseTraceInstance): metadata=metadata, session_id=trace_info.conversation_id, tags=["message", "workflow"], + version=trace_info.workflow_run_version, ) self.add_trace(langfuse_trace_data=trace_data) workflow_span_data = LangfuseSpan( @@ -108,6 +109,7 @@ class LangFuseDataTrace(BaseTraceInstance): metadata=metadata, session_id=trace_info.conversation_id, tags=["workflow"], + version=trace_info.workflow_run_version, ) self.add_trace(langfuse_trace_data=trace_data) @@ -172,37 +174,7 @@ class LangFuseDataTrace(BaseTraceInstance): } ) - # add span - if trace_info.message_id: - span_data = LangfuseSpan( - id=node_execution_id, - name=node_type, - input=inputs, - output=outputs, - trace_id=trace_id, - start_time=created_at, - end_time=finished_at, - metadata=metadata, - level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR), - status_message=trace_info.error or "", - parent_observation_id=trace_info.workflow_run_id, - ) - else: - span_data = LangfuseSpan( - id=node_execution_id, - name=node_type, - input=inputs, - output=outputs, - trace_id=trace_id, - start_time=created_at, - end_time=finished_at, - metadata=metadata, - level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR), - status_message=trace_info.error or "", - ) - - self.add_span(langfuse_span_data=span_data) - + # add generation span if process_data and process_data.get("model_mode") == "chat": total_token = metadata.get("total_tokens", 0) prompt_tokens = 0 @@ -226,10 +198,10 @@ class LangFuseDataTrace(BaseTraceInstance): ) node_generation_data = LangfuseGeneration( - name="llm", + id=node_execution_id, + name=node_name, trace_id=trace_id, model=process_data.get("model_name"), - 
parent_observation_id=node_execution_id, start_time=created_at, end_time=finished_at, input=inputs, @@ -237,11 +209,30 @@ class LangFuseDataTrace(BaseTraceInstance): metadata=metadata, level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR), status_message=trace_info.error or "", + parent_observation_id=trace_info.workflow_run_id if trace_info.message_id else None, usage=generation_usage, ) self.add_generation(langfuse_generation_data=node_generation_data) + # add normal span + else: + span_data = LangfuseSpan( + id=node_execution_id, + name=node_name, + input=inputs, + output=outputs, + trace_id=trace_id, + start_time=created_at, + end_time=finished_at, + metadata=metadata, + level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR), + status_message=trace_info.error or "", + parent_observation_id=trace_info.workflow_run_id if trace_info.message_id else None, + ) + + self.add_span(langfuse_span_data=span_data) + def message_trace(self, trace_info: MessageTraceInfo, **kwargs): # get message file data file_list = trace_info.file_list @@ -284,7 +275,7 @@ class LangFuseDataTrace(BaseTraceInstance): ) self.add_trace(langfuse_trace_data=trace_data) - # start add span + # add generation generation_usage = GenerationUsage( input=trace_info.message_tokens, output=trace_info.answer_tokens, diff --git a/api/core/plugin/backwards_invocation/model.py b/api/core/plugin/backwards_invocation/model.py index 072644e53b..d07ab3d0c4 100644 --- a/api/core/plugin/backwards_invocation/model.py +++ b/api/core/plugin/backwards_invocation/model.py @@ -2,8 +2,15 @@ import tempfile from binascii import hexlify, unhexlify from collections.abc import Generator +from core.llm_generator.output_parser.structured_output import invoke_llm_with_structured_output from core.model_manager import ModelManager -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta +from core.model_runtime.entities.llm_entities import ( + 
LLMResult, + LLMResultChunk, + LLMResultChunkDelta, + LLMResultChunkWithStructuredOutput, + LLMResultWithStructuredOutput, +) from core.model_runtime.entities.message_entities import ( PromptMessage, SystemPromptMessage, @@ -12,6 +19,7 @@ from core.model_runtime.entities.message_entities import ( from core.plugin.backwards_invocation.base import BaseBackwardsInvocation from core.plugin.entities.request import ( RequestInvokeLLM, + RequestInvokeLLMWithStructuredOutput, RequestInvokeModeration, RequestInvokeRerank, RequestInvokeSpeech2Text, @@ -81,6 +89,72 @@ class PluginModelBackwardsInvocation(BaseBackwardsInvocation): return handle_non_streaming(response) + @classmethod + def invoke_llm_with_structured_output( + cls, user_id: str, tenant: Tenant, payload: RequestInvokeLLMWithStructuredOutput + ): + """ + invoke llm with structured output + """ + model_instance = ModelManager().get_model_instance( + tenant_id=tenant.id, + provider=payload.provider, + model_type=payload.model_type, + model=payload.model, + ) + + model_schema = model_instance.model_type_instance.get_model_schema(payload.model, model_instance.credentials) + + if not model_schema: + raise ValueError(f"Model schema not found for {payload.model}") + + response = invoke_llm_with_structured_output( + provider=payload.provider, + model_schema=model_schema, + model_instance=model_instance, + prompt_messages=payload.prompt_messages, + json_schema=payload.structured_output_schema, + tools=payload.tools, + stop=payload.stop, + stream=True if payload.stream is None else payload.stream, + user=user_id, + model_parameters=payload.completion_params, + ) + + if isinstance(response, Generator): + + def handle() -> Generator[LLMResultChunkWithStructuredOutput, None, None]: + for chunk in response: + if chunk.delta.usage: + llm_utils.deduct_llm_quota( + tenant_id=tenant.id, model_instance=model_instance, usage=chunk.delta.usage + ) + chunk.prompt_messages = [] + yield chunk + + return handle() + else: + if 
response.usage: + llm_utils.deduct_llm_quota(tenant_id=tenant.id, model_instance=model_instance, usage=response.usage) + + def handle_non_streaming( + response: LLMResultWithStructuredOutput, + ) -> Generator[LLMResultChunkWithStructuredOutput, None, None]: + yield LLMResultChunkWithStructuredOutput( + model=response.model, + prompt_messages=[], + system_fingerprint=response.system_fingerprint, + structured_output=response.structured_output, + delta=LLMResultChunkDelta( + index=0, + message=response.message, + usage=response.usage, + finish_reason="", + ), + ) + + return handle_non_streaming(response) + @classmethod def invoke_text_embedding(cls, user_id: str, tenant: Tenant, payload: RequestInvokeTextEmbedding): """ diff --git a/api/core/plugin/entities/parameters.py b/api/core/plugin/entities/parameters.py index 895dd0d0fc..2b438a3c33 100644 --- a/api/core/plugin/entities/parameters.py +++ b/api/core/plugin/entities/parameters.py @@ -10,6 +10,9 @@ from core.tools.entities.common_entities import I18nObject class PluginParameterOption(BaseModel): value: str = Field(..., description="The value of the option") label: I18nObject = Field(..., description="The label of the option") + icon: Optional[str] = Field( + default=None, description="The icon of the option, can be a url or a base64 encoded image" + ) @field_validator("value", mode="before") @classmethod @@ -35,6 +38,7 @@ class PluginParameterType(enum.StrEnum): APP_SELECTOR = CommonParameterType.APP_SELECTOR.value MODEL_SELECTOR = CommonParameterType.MODEL_SELECTOR.value TOOLS_SELECTOR = CommonParameterType.TOOLS_SELECTOR.value + DYNAMIC_SELECT = CommonParameterType.DYNAMIC_SELECT.value # deprecated, should not use. 
SYSTEM_FILES = CommonParameterType.SYSTEM_FILES.value diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py index e0d2857e97..592b42c0da 100644 --- a/api/core/plugin/entities/plugin_daemon.py +++ b/api/core/plugin/entities/plugin_daemon.py @@ -1,4 +1,4 @@ -from collections.abc import Mapping +from collections.abc import Mapping, Sequence from datetime import datetime from enum import StrEnum from typing import Any, Generic, Optional, TypeVar @@ -9,6 +9,7 @@ from core.agent.plugin_entities import AgentProviderEntityWithPlugin from core.model_runtime.entities.model_entities import AIModelEntity from core.model_runtime.entities.provider_entities import ProviderEntity from core.plugin.entities.base import BasePluginEntity +from core.plugin.entities.parameters import PluginParameterOption from core.plugin.entities.plugin import PluginDeclaration, PluginEntity from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_entities import ToolProviderEntityWithPlugin @@ -186,3 +187,7 @@ class PluginOAuthCredentialsResponse(BaseModel): class PluginListResponse(BaseModel): list: list[PluginEntity] total: int + + +class PluginDynamicSelectOptionsResponse(BaseModel): + options: Sequence[PluginParameterOption] = Field(description="The options of the dynamic select.") diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index 1692020ec8..f9c81ed4d5 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -82,6 +82,16 @@ class RequestInvokeLLM(BaseRequestInvokeModel): return v +class RequestInvokeLLMWithStructuredOutput(RequestInvokeLLM): + """ + Request to invoke LLM with structured output + """ + + structured_output_schema: dict[str, Any] = Field( + default_factory=dict, description="The schema of the structured output in JSON schema format" + ) + + class RequestInvokeTextEmbedding(BaseRequestInvokeModel): """ Request to invoke text 
embedding diff --git a/api/core/plugin/impl/dynamic_select.py b/api/core/plugin/impl/dynamic_select.py new file mode 100644 index 0000000000..004412afd7 --- /dev/null +++ b/api/core/plugin/impl/dynamic_select.py @@ -0,0 +1,45 @@ +from collections.abc import Mapping +from typing import Any + +from core.plugin.entities.plugin import GenericProviderID +from core.plugin.entities.plugin_daemon import PluginDynamicSelectOptionsResponse +from core.plugin.impl.base import BasePluginClient + + +class DynamicSelectClient(BasePluginClient): + def fetch_dynamic_select_options( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + action: str, + credentials: Mapping[str, Any], + parameter: str, + ) -> PluginDynamicSelectOptionsResponse: + """ + Fetch dynamic select options for a plugin parameter. + """ + response = self._request_with_plugin_daemon_response_stream( + "POST", + f"plugin/{tenant_id}/dispatch/dynamic_select/fetch_parameter_options", + PluginDynamicSelectOptionsResponse, + data={ + "user_id": user_id, + "data": { + "provider": GenericProviderID(provider).provider_name, + "credentials": credentials, + "provider_action": action, + "parameter": parameter, + }, + }, + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + + for options in response: + return options + + raise ValueError(f"Plugin service returned no options for parameter '{parameter}' in provider '{provider}'") diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index 38c0b540d5..3fca48be22 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -1010,6 +1010,9 @@ class DatasetRetrieval: def _process_metadata_filter_func( self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list ): + if value is None: + return + key = f"{metadata_name}_{sequence}" key_value = f"{metadata_name}_{sequence}_value" match condition: 
diff --git a/api/core/tools/builtin_tool/providers/code/tools/simple_code.py b/api/core/tools/builtin_tool/providers/code/tools/simple_code.py index ab0e155b98..b4e650e0ed 100644 --- a/api/core/tools/builtin_tool/providers/code/tools/simple_code.py +++ b/api/core/tools/builtin_tool/providers/code/tools/simple_code.py @@ -4,6 +4,7 @@ from typing import Any, Optional from core.helper.code_executor.code_executor import CodeExecutor, CodeLanguage from core.tools.builtin_tool.tool import BuiltinTool from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.errors import ToolInvokeError class SimpleCode(BuiltinTool): @@ -25,6 +26,8 @@ class SimpleCode(BuiltinTool): if language not in {CodeLanguage.PYTHON3, CodeLanguage.JAVASCRIPT}: raise ValueError(f"Only python3 and javascript are supported, not {language}") - result = CodeExecutor.execute_code(language, "", code) - - yield self.create_text_message(result) + try: + result = CodeExecutor.execute_code(language, "", code) + yield self.create_text_message(result) + except Exception as e: + raise ToolInvokeError(str(e)) diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 03047c0545..d2c28076ae 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -240,6 +240,7 @@ class ToolParameter(PluginParameter): FILES = PluginParameterType.FILES.value APP_SELECTOR = PluginParameterType.APP_SELECTOR.value MODEL_SELECTOR = PluginParameterType.MODEL_SELECTOR.value + DYNAMIC_SELECT = PluginParameterType.DYNAMIC_SELECT.value # deprecated, should not use. 
SYSTEM_FILES = PluginParameterType.SYSTEM_FILES.value diff --git a/api/core/tools/utils/configuration.py b/api/core/tools/utils/configuration.py index 6a5fba65bd..1f23e90351 100644 --- a/api/core/tools/utils/configuration.py +++ b/api/core/tools/utils/configuration.py @@ -86,6 +86,7 @@ class ProviderConfigEncrypter(BaseModel): cached_credentials = cache.get() if cached_credentials: return cached_credentials + data = self._deep_copy(data) # get fields need to be decrypted fields = dict[str, BasicProviderConfig]() diff --git a/api/core/workflow/entities/workflow_node_execution.py b/api/core/workflow/entities/workflow_node_execution.py index 773f5b777b..09a408f4d7 100644 --- a/api/core/workflow/entities/workflow_node_execution.py +++ b/api/core/workflow/entities/workflow_node_execution.py @@ -66,11 +66,21 @@ class WorkflowNodeExecution(BaseModel): but they are not stored in the model. """ - # Core identification fields - id: str # Unique identifier for this execution record - node_execution_id: Optional[str] = None # Optional secondary ID for cross-referencing + # --------- Core identification fields --------- + + # Unique identifier for this execution record, used when persisting to storage. + # Value is a UUID string (e.g., '09b3e04c-f9ae-404c-ad82-290b8d7bd382'). + id: str + + # Optional secondary ID for cross-referencing purposes. + # + # NOTE: For referencing the persisted record, use `id` rather than `node_execution_id`. + # While `node_execution_id` may sometimes be a UUID string, this is not guaranteed. + # In most scenarios, `id` should be used as the primary identifier. 
+ node_execution_id: Optional[str] = None workflow_id: str # ID of the workflow this node belongs to workflow_execution_id: Optional[str] = None # ID of the specific workflow run (null for single-step debugging) + # --------- Core identification fields ends --------- # Execution positioning and flow index: int # Sequence number for ordering in trace visualization diff --git a/api/core/workflow/nodes/agent/agent_node.py b/api/core/workflow/nodes/agent/agent_node.py index 2f28363955..987f670acb 100644 --- a/api/core/workflow/nodes/agent/agent_node.py +++ b/api/core/workflow/nodes/agent/agent_node.py @@ -158,7 +158,10 @@ class AgentNode(ToolNode): # variable_pool.convert_template expects a string template, # but if passing a dict, convert to JSON string first before rendering try: - parameter_value = json.dumps(agent_input.value, ensure_ascii=False) + if not isinstance(agent_input.value, str): + parameter_value = json.dumps(agent_input.value, ensure_ascii=False) + else: + parameter_value = str(agent_input.value) except TypeError: parameter_value = str(agent_input.value) segment_group = variable_pool.convert_template(parameter_value) @@ -166,7 +169,8 @@ class AgentNode(ToolNode): # variable_pool.convert_template returns a string, # so we need to convert it back to a dictionary try: - parameter_value = json.loads(parameter_value) + if not isinstance(agent_input.value, str): + parameter_value = json.loads(parameter_value) except json.JSONDecodeError: parameter_value = parameter_value else: diff --git a/api/core/workflow/nodes/answer/answer_stream_processor.py b/api/core/workflow/nodes/answer/answer_stream_processor.py index f3e4a62ade..97666fad05 100644 --- a/api/core/workflow/nodes/answer/answer_stream_processor.py +++ b/api/core/workflow/nodes/answer/answer_stream_processor.py @@ -2,7 +2,6 @@ import logging from collections.abc import Generator from typing import cast -from core.file import FILE_MODEL_IDENTITY, File from core.workflow.entities.variable_pool import 
VariablePool from core.workflow.graph_engine.entities.event import ( GraphEngineEvent, @@ -201,44 +200,3 @@ class AnswerStreamProcessor(StreamProcessor): stream_out_answer_node_ids.append(answer_node_id) return stream_out_answer_node_ids - - @classmethod - def _fetch_files_from_variable_value(cls, value: dict | list) -> list[dict]: - """ - Fetch files from variable value - :param value: variable value - :return: - """ - if not value: - return [] - - files = [] - if isinstance(value, list): - for item in value: - file_var = cls._get_file_var_from_value(item) - if file_var: - files.append(file_var) - elif isinstance(value, dict): - file_var = cls._get_file_var_from_value(value) - if file_var: - files.append(file_var) - - return files - - @classmethod - def _get_file_var_from_value(cls, value: dict | list): - """ - Get file var from value - :param value: variable value - :return: - """ - if not value: - return None - - if isinstance(value, dict): - if "dify_model_identity" in value and value["dify_model_identity"] == FILE_MODEL_IDENTITY: - return value - elif isinstance(value, File): - return value.to_dict() - - return None diff --git a/api/core/workflow/nodes/http_request/executor.py b/api/core/workflow/nodes/http_request/executor.py index 2c83b00d4a..b0a14229c5 100644 --- a/api/core/workflow/nodes/http_request/executor.py +++ b/api/core/workflow/nodes/http_request/executor.py @@ -333,7 +333,7 @@ class Executor: try: response = getattr(ssrf_proxy, self.method.lower())(**request_args) except (ssrf_proxy.MaxRetriesExceededError, httpx.RequestError) as e: - raise HttpRequestNodeError(str(e)) + raise HttpRequestNodeError(str(e)) from e # FIXME: fix type ignore, this maybe httpx type issue return response # type: ignore diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 0b9e98f28a..b34d62d669 100644 --- 
a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -490,6 +490,9 @@ class KnowledgeRetrievalNode(LLMNode): def _process_metadata_filter_func( self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list ): + if value is None: + return + key = f"{metadata_name}_{sequence}" key_value = f"{metadata_name}_{sequence}_value" match condition: diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index 124ae6d75d..b5225ce548 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -5,11 +5,11 @@ import logging from collections.abc import Generator, Mapping, Sequence from typing import TYPE_CHECKING, Any, Optional, cast -import json_repair - from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity from core.file import FileType, file_manager from core.helper.code_executor import CodeExecutor, CodeLanguage +from core.llm_generator.output_parser.errors import OutputParserError +from core.llm_generator.output_parser.structured_output import invoke_llm_with_structured_output from core.memory.token_buffer_memory import TokenBufferMemory from core.model_manager import ModelInstance, ModelManager from core.model_runtime.entities import ( @@ -18,7 +18,13 @@ from core.model_runtime.entities import ( PromptMessageContentType, TextPromptMessageContent, ) -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMUsage +from core.model_runtime.entities.llm_entities import ( + LLMResult, + LLMResultChunk, + LLMResultChunkWithStructuredOutput, + LLMStructuredOutput, + LLMUsage, +) from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, PromptMessageContentUnionTypes, @@ -31,7 +37,6 @@ from core.model_runtime.entities.model_entities import ( ModelFeature, ModelPropertyKey, ModelType, - ParameterRule, ) from 
core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.model_runtime.utils.encoders import jsonable_encoder @@ -62,11 +67,6 @@ from core.workflow.nodes.event import ( RunRetrieverResourceEvent, RunStreamChunkEvent, ) -from core.workflow.utils.structured_output.entities import ( - ResponseFormat, - SpecialModelType, -) -from core.workflow.utils.structured_output.prompt import STRUCTURED_OUTPUT_PROMPT from core.workflow.utils.variable_template_parser import VariableTemplateParser from . import llm_utils @@ -143,12 +143,6 @@ class LLMNode(BaseNode[LLMNodeData]): return "1" def _run(self) -> Generator[NodeEvent | InNodeEvent, None, None]: - def process_structured_output(text: str) -> Optional[dict[str, Any]]: - """Process structured output if enabled""" - if not self.node_data.structured_output_enabled or not self.node_data.structured_output: - return None - return self._parse_structured_output(text) - node_inputs: Optional[dict[str, Any]] = None process_data = None result_text = "" @@ -244,6 +238,8 @@ class LLMNode(BaseNode[LLMNodeData]): stop=stop, ) + structured_output: LLMStructuredOutput | None = None + for event in generator: if isinstance(event, RunStreamChunkEvent): yield event @@ -254,10 +250,12 @@ class LLMNode(BaseNode[LLMNodeData]): # deduct quota llm_utils.deduct_llm_quota(tenant_id=self.tenant_id, model_instance=model_instance, usage=usage) break + elif isinstance(event, LLMStructuredOutput): + structured_output = event + outputs = {"text": result_text, "usage": jsonable_encoder(usage), "finish_reason": finish_reason} - structured_output = process_structured_output(result_text) if structured_output: - outputs["structured_output"] = structured_output + outputs["structured_output"] = structured_output.structured_output if self._file_outputs is not None: outputs["files"] = ArrayFileSegment(value=self._file_outputs) @@ -302,20 +300,40 @@ class LLMNode(BaseNode[LLMNodeData]): model_instance: ModelInstance, 
prompt_messages: Sequence[PromptMessage], stop: Optional[Sequence[str]] = None, - ) -> Generator[NodeEvent, None, None]: - invoke_result = model_instance.invoke_llm( - prompt_messages=list(prompt_messages), - model_parameters=node_data_model.completion_params, - stop=list(stop or []), - stream=True, - user=self.user_id, + ) -> Generator[NodeEvent | LLMStructuredOutput, None, None]: + model_schema = model_instance.model_type_instance.get_model_schema( + node_data_model.name, model_instance.credentials ) + if not model_schema: + raise ValueError(f"Model schema not found for {node_data_model.name}") + + if self.node_data.structured_output_enabled: + output_schema = self._fetch_structured_output_schema() + invoke_result = invoke_llm_with_structured_output( + provider=model_instance.provider, + model_schema=model_schema, + model_instance=model_instance, + prompt_messages=prompt_messages, + json_schema=output_schema, + model_parameters=node_data_model.completion_params, + stop=list(stop or []), + stream=True, + user=self.user_id, + ) + else: + invoke_result = model_instance.invoke_llm( + prompt_messages=list(prompt_messages), + model_parameters=node_data_model.completion_params, + stop=list(stop or []), + stream=True, + user=self.user_id, + ) return self._handle_invoke_result(invoke_result=invoke_result) def _handle_invoke_result( - self, invoke_result: LLMResult | Generator[LLMResultChunk, None, None] - ) -> Generator[NodeEvent, None, None]: + self, invoke_result: LLMResult | Generator[LLMResultChunk | LLMStructuredOutput, None, None] + ) -> Generator[NodeEvent | LLMStructuredOutput, None, None]: # For blocking mode if isinstance(invoke_result, LLMResult): event = self._handle_blocking_result(invoke_result=invoke_result) @@ -329,23 +347,32 @@ class LLMNode(BaseNode[LLMNodeData]): usage = LLMUsage.empty_usage() finish_reason = None full_text_buffer = io.StringIO() - for result in invoke_result: - contents = result.delta.message.content - for text_part in 
self._save_multimodal_output_and_convert_result_to_markdown(contents): - full_text_buffer.write(text_part) - yield RunStreamChunkEvent(chunk_content=text_part, from_variable_selector=[self.node_id, "text"]) + # Consume the invoke result and handle generator exception + try: + for result in invoke_result: + if isinstance(result, LLMResultChunkWithStructuredOutput): + yield result + if isinstance(result, LLMResultChunk): + contents = result.delta.message.content + for text_part in self._save_multimodal_output_and_convert_result_to_markdown(contents): + full_text_buffer.write(text_part) + yield RunStreamChunkEvent( + chunk_content=text_part, from_variable_selector=[self.node_id, "text"] + ) - # Update the whole metadata - if not model and result.model: - model = result.model - if len(prompt_messages) == 0: - # TODO(QuantumGhost): it seems that this update has no visable effect. - # What's the purpose of the line below? - prompt_messages = list(result.prompt_messages) - if usage.prompt_tokens == 0 and result.delta.usage: - usage = result.delta.usage - if finish_reason is None and result.delta.finish_reason: - finish_reason = result.delta.finish_reason + # Update the whole metadata + if not model and result.model: + model = result.model + if len(prompt_messages) == 0: + # TODO(QuantumGhost): it seems that this update has no visible effect. + # What's the purpose of the line below? 
+ prompt_messages = list(result.prompt_messages) + if usage.prompt_tokens == 0 and result.delta.usage: + usage = result.delta.usage + if finish_reason is None and result.delta.finish_reason: + finish_reason = result.delta.finish_reason + except OutputParserError as e: + raise LLMNodeError(f"Failed to parse structured output: {e}") yield ModelInvokeCompletedEvent(text=full_text_buffer.getvalue(), usage=usage, finish_reason=finish_reason) @@ -522,12 +549,6 @@ class LLMNode(BaseNode[LLMNodeData]): if not model_schema: raise ModelNotExistError(f"Model {node_data_model.name} not exist.") - if self.node_data.structured_output_enabled: - if model_schema.support_structure_output: - completion_params = self._handle_native_json_schema(completion_params, model_schema.parameter_rules) - else: - # Set appropriate response format based on model capabilities - self._set_response_format(completion_params, model_schema.parameter_rules) model_config_with_cred.parameters = completion_params # NOTE(-LAN-): This line modify the `self.node_data.model`, which is used in `_invoke_llm()`. 
node_data_model.completion_params = completion_params @@ -719,32 +740,8 @@ class LLMNode(BaseNode[LLMNodeData]): ) if not model_schema: raise ModelNotExistError(f"Model {model_config.model} not exist.") - if self.node_data.structured_output_enabled: - if not model_schema.support_structure_output: - filtered_prompt_messages = self._handle_prompt_based_schema( - prompt_messages=filtered_prompt_messages, - ) return filtered_prompt_messages, model_config.stop - def _parse_structured_output(self, result_text: str) -> dict[str, Any]: - structured_output: dict[str, Any] = {} - try: - parsed = json.loads(result_text) - if not isinstance(parsed, dict): - raise LLMNodeError(f"Failed to parse structured output: {result_text}") - structured_output = parsed - except json.JSONDecodeError as e: - # if the result_text is not a valid json, try to repair it - parsed = json_repair.loads(result_text) - if not isinstance(parsed, dict): - # handle reasoning model like deepseek-r1 got '\n\n\n' prefix - if isinstance(parsed, list): - parsed = next((item for item in parsed if isinstance(item, dict)), {}) - else: - raise LLMNodeError(f"Failed to parse structured output: {result_text}") - structured_output = parsed - return structured_output - @classmethod def _extract_variable_selector_to_variable_mapping( cls, @@ -934,104 +931,6 @@ class LLMNode(BaseNode[LLMNodeData]): self._file_outputs.append(saved_file) return saved_file - def _handle_native_json_schema(self, model_parameters: dict, rules: list[ParameterRule]) -> dict: - """ - Handle structured output for models with native JSON schema support. 
- - :param model_parameters: Model parameters to update - :param rules: Model parameter rules - :return: Updated model parameters with JSON schema configuration - """ - # Process schema according to model requirements - schema = self._fetch_structured_output_schema() - schema_json = self._prepare_schema_for_model(schema) - - # Set JSON schema in parameters - model_parameters["json_schema"] = json.dumps(schema_json, ensure_ascii=False) - - # Set appropriate response format if required by the model - for rule in rules: - if rule.name == "response_format" and ResponseFormat.JSON_SCHEMA.value in rule.options: - model_parameters["response_format"] = ResponseFormat.JSON_SCHEMA.value - - return model_parameters - - def _handle_prompt_based_schema(self, prompt_messages: Sequence[PromptMessage]) -> list[PromptMessage]: - """ - Handle structured output for models without native JSON schema support. - This function modifies the prompt messages to include schema-based output requirements. - - Args: - prompt_messages: Original sequence of prompt messages - - Returns: - list[PromptMessage]: Updated prompt messages with structured output requirements - """ - # Convert schema to string format - schema_str = json.dumps(self._fetch_structured_output_schema(), ensure_ascii=False) - - # Find existing system prompt with schema placeholder - system_prompt = next( - (prompt for prompt in prompt_messages if isinstance(prompt, SystemPromptMessage)), - None, - ) - structured_output_prompt = STRUCTURED_OUTPUT_PROMPT.replace("{{schema}}", schema_str) - # Prepare system prompt content - system_prompt_content = ( - structured_output_prompt + "\n\n" + system_prompt.content - if system_prompt and isinstance(system_prompt.content, str) - else structured_output_prompt - ) - system_prompt = SystemPromptMessage(content=system_prompt_content) - - # Extract content from the last user message - - filtered_prompts = [prompt for prompt in prompt_messages if not isinstance(prompt, SystemPromptMessage)] - 
updated_prompt = [system_prompt] + filtered_prompts - - return updated_prompt - - def _set_response_format(self, model_parameters: dict, rules: list) -> None: - """ - Set the appropriate response format parameter based on model rules. - - :param model_parameters: Model parameters to update - :param rules: Model parameter rules - """ - for rule in rules: - if rule.name == "response_format": - if ResponseFormat.JSON.value in rule.options: - model_parameters["response_format"] = ResponseFormat.JSON.value - elif ResponseFormat.JSON_OBJECT.value in rule.options: - model_parameters["response_format"] = ResponseFormat.JSON_OBJECT.value - - def _prepare_schema_for_model(self, schema: dict) -> dict: - """ - Prepare JSON schema based on model requirements. - - Different models have different requirements for JSON schema formatting. - This function handles these differences. - - :param schema: The original JSON schema - :return: Processed schema compatible with the current model - """ - - # Deep copy to avoid modifying the original schema - processed_schema = schema.copy() - - # Convert boolean types to string types (common requirement) - convert_boolean_to_string(processed_schema) - - # Apply model-specific transformations - if SpecialModelType.GEMINI in self.node_data.model.name: - remove_additional_properties(processed_schema) - return processed_schema - elif SpecialModelType.OLLAMA in self.node_data.model.provider: - return processed_schema - else: - # Default format with name field - return {"schema": processed_schema, "name": "llm_response"} - def _fetch_model_schema(self, provider: str) -> AIModelEntity | None: """ Fetch model schema @@ -1243,49 +1142,3 @@ def _handle_completion_template( ) prompt_messages.append(prompt_message) return prompt_messages - - -def remove_additional_properties(schema: dict) -> None: - """ - Remove additionalProperties fields from JSON schema. - Used for models like Gemini that don't support this property. 
- - :param schema: JSON schema to modify in-place - """ - if not isinstance(schema, dict): - return - - # Remove additionalProperties at current level - schema.pop("additionalProperties", None) - - # Process nested structures recursively - for value in schema.values(): - if isinstance(value, dict): - remove_additional_properties(value) - elif isinstance(value, list): - for item in value: - if isinstance(item, dict): - remove_additional_properties(item) - - -def convert_boolean_to_string(schema: dict) -> None: - """ - Convert boolean type specifications to string in JSON schema. - - :param schema: JSON schema to modify in-place - """ - if not isinstance(schema, dict): - return - - # Check for boolean type at current level - if schema.get("type") == "boolean": - schema["type"] = "string" - - # Process nested dictionaries and lists recursively - for value in schema.values(): - if isinstance(value, dict): - convert_boolean_to_string(value) - elif isinstance(value, list): - for item in value: - if isinstance(item, dict): - convert_boolean_to_string(item) diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index aa15d69931..4d15d78a95 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -167,7 +167,9 @@ class ToolNode(BaseNode[ToolNodeData]): if tool_input.type == "variable": variable = variable_pool.get(tool_input.value) if variable is None: - raise ToolParameterError(f"Variable {tool_input.value} does not exist") + if parameter.required: + raise ToolParameterError(f"Variable {tool_input.value} does not exist") + continue parameter_value = variable.value elif tool_input.type in {"mixed", "constant"}: segment_group = variable_pool.convert_template(str(tool_input.value)) diff --git a/api/core/workflow/repositories/draft_variable_repository.py b/api/core/workflow/repositories/draft_variable_repository.py new file mode 100644 index 0000000000..cadc23f845 --- /dev/null +++ 
b/api/core/workflow/repositories/draft_variable_repository.py @@ -0,0 +1,32 @@ +import abc +from collections.abc import Mapping +from typing import Any, Protocol + +from sqlalchemy.orm import Session + +from core.workflow.nodes.enums import NodeType + + +class DraftVariableSaver(Protocol): + @abc.abstractmethod + def save(self, process_data: Mapping[str, Any] | None, outputs: Mapping[str, Any] | None): + pass + + +class DraftVariableSaverFactory(Protocol): + @abc.abstractmethod + def __call__( + self, + session: Session, + app_id: str, + node_id: str, + node_type: NodeType, + node_execution_id: str, + enclosing_node_id: str | None = None, + ) -> "DraftVariableSaver": + pass + + +class NoopDraftVariableSaver(DraftVariableSaver): + def save(self, process_data: Mapping[str, Any] | None, outputs: Mapping[str, Any] | None): + pass diff --git a/api/core/workflow/utils/structured_output/entities.py b/api/core/workflow/utils/structured_output/entities.py deleted file mode 100644 index 6491042bfe..0000000000 --- a/api/core/workflow/utils/structured_output/entities.py +++ /dev/null @@ -1,16 +0,0 @@ -from enum import StrEnum - - -class ResponseFormat(StrEnum): - """Constants for model response formats""" - - JSON_SCHEMA = "json_schema" # model's structured output mode. some model like gemini, gpt-4o, support this mode. - JSON = "JSON" # model's json mode. some model like claude support this mode. - JSON_OBJECT = "json_object" # json mode's another alias. some model like deepseek-chat, qwen use this alias. - - -class SpecialModelType(StrEnum): - """Constants for identifying model types""" - - GEMINI = "gemini" - OLLAMA = "ollama" diff --git a/api/core/workflow/utils/structured_output/prompt.py b/api/core/workflow/utils/structured_output/prompt.py deleted file mode 100644 index 06d9b2056e..0000000000 --- a/api/core/workflow/utils/structured_output/prompt.py +++ /dev/null @@ -1,17 +0,0 @@ -STRUCTURED_OUTPUT_PROMPT = """You’re a helpful AI assistant. 
You could answer questions and output in JSON format. -constraints: - - You must output in JSON format. - - Do not output boolean value, use string type instead. - - Do not output integer or float value, use number type instead. -eg: - Here is the JSON schema: - {"additionalProperties": false, "properties": {"age": {"type": "number"}, "name": {"type": "string"}}, "required": ["name", "age"], "type": "object"} - - Here is the user's question: - My name is John Doe and I am 30 years old. - - output: - {"name": "John Doe", "age": 30} -Here is the JSON schema: -{{schema}} -""" # noqa: E501 diff --git a/api/core/workflow/workflow_cycle_manager.py b/api/core/workflow/workflow_cycle_manager.py index 6ee562fc8d..0aab2426af 100644 --- a/api/core/workflow/workflow_cycle_manager.py +++ b/api/core/workflow/workflow_cycle_manager.py @@ -27,6 +27,7 @@ from core.workflow.enums import SystemVariableKey from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository from core.workflow.workflow_entry import WorkflowEntry +from libs.datetime_utils import naive_utc_now @dataclass @@ -160,12 +161,13 @@ class WorkflowCycleManager: exceptions_count: int = 0, ) -> WorkflowExecution: workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id) + now = naive_utc_now() workflow_execution.status = WorkflowExecutionStatus(status.value) workflow_execution.error_message = error_message workflow_execution.total_tokens = total_tokens workflow_execution.total_steps = total_steps - workflow_execution.finished_at = datetime.now(UTC).replace(tzinfo=None) + workflow_execution.finished_at = now workflow_execution.exceptions_count = exceptions_count # Use the instance repository to find running executions for a workflow run @@ -174,7 +176,6 @@ class WorkflowCycleManager: ) # Update the domain models - now = datetime.now(UTC).replace(tzinfo=None) 
for node_execution in running_node_executions: if node_execution.node_execution_id: # Update the domain model diff --git a/api/extensions/ext_app_metrics.py b/api/extensions/ext_app_metrics.py index b7d412d68d..56a69a1862 100644 --- a/api/extensions/ext_app_metrics.py +++ b/api/extensions/ext_app_metrics.py @@ -12,14 +12,14 @@ def init_app(app: DifyApp): @app.after_request def after_request(response): """Add Version headers to the response.""" - response.headers.add("X-Version", dify_config.CURRENT_VERSION) + response.headers.add("X-Version", dify_config.project.version) response.headers.add("X-Env", dify_config.DEPLOY_ENV) return response @app.route("/health") def health(): return Response( - json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.CURRENT_VERSION}), + json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.project.version}), status=200, content_type="application/json", ) diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index a837552007..6279b1ad36 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -21,6 +21,7 @@ def init_app(app: DifyApp) -> Celery: "master_name": dify_config.CELERY_SENTINEL_MASTER_NAME, "sentinel_kwargs": { "socket_timeout": dify_config.CELERY_SENTINEL_SOCKET_TIMEOUT, + "password": dify_config.CELERY_SENTINEL_PASSWORD, }, } diff --git a/api/extensions/ext_otel.py b/api/extensions/ext_otel.py index 6dcfa7bec6..23cf4c5cab 100644 --- a/api/extensions/ext_otel.py +++ b/api/extensions/ext_otel.py @@ -49,7 +49,7 @@ def init_app(app: DifyApp): logging.getLogger().addHandler(exception_handler) def init_flask_instrumentor(app: DifyApp): - meter = get_meter("http_metrics", version=dify_config.CURRENT_VERSION) + meter = get_meter("http_metrics", version=dify_config.project.version) _http_response_counter = meter.create_counter( "http.server.response.count", description="Total number of HTTP responses by status code, method and target", @@ -163,7 +163,7 @@ def 
init_app(app: DifyApp): resource = Resource( attributes={ ResourceAttributes.SERVICE_NAME: dify_config.APPLICATION_NAME, - ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.CURRENT_VERSION}-{dify_config.COMMIT_SHA}", + ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}", ResourceAttributes.PROCESS_PID: os.getpid(), ResourceAttributes.DEPLOYMENT_ENVIRONMENT: f"{dify_config.DEPLOY_ENV}-{dify_config.EDITION}", ResourceAttributes.HOST_NAME: socket.gethostname(), diff --git a/api/extensions/ext_sentry.py b/api/extensions/ext_sentry.py index 3a74aace6a..82aed0d98d 100644 --- a/api/extensions/ext_sentry.py +++ b/api/extensions/ext_sentry.py @@ -35,6 +35,6 @@ def init_app(app: DifyApp): traces_sample_rate=dify_config.SENTRY_TRACES_SAMPLE_RATE, profiles_sample_rate=dify_config.SENTRY_PROFILES_SAMPLE_RATE, environment=dify_config.DEPLOY_ENV, - release=f"dify-{dify_config.CURRENT_VERSION}-{dify_config.COMMIT_SHA}", + release=f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}", before_send=before_send, ) diff --git a/api/libs/file_utils.py b/api/libs/file_utils.py new file mode 100644 index 0000000000..982b2cc1ac --- /dev/null +++ b/api/libs/file_utils.py @@ -0,0 +1,30 @@ +from pathlib import Path + + +def search_file_upwards( + base_dir_path: Path, + target_file_name: str, + max_search_parent_depth: int, +) -> Path: + """ + Find a target file in the current directory or its parent directories up to a specified depth. + :param base_dir_path: Starting directory path to search from. + :param target_file_name: Name of the file to search for. + :param max_search_parent_depth: Maximum number of parent directories to search upwards. + :return: Path of the file if found; otherwise a ValueError is raised. 
+ """ + current_path = base_dir_path.resolve() + for _ in range(max_search_parent_depth): + candidate_path = current_path / target_file_name + if candidate_path.is_file(): + return candidate_path + parent_path = current_path.parent + if parent_path == current_path: # reached the root directory + break + else: + current_path = parent_path + + raise ValueError( + f"File '{target_file_name}' not found in the directory '{base_dir_path.resolve()}' or its parent directories" + f" in depth of {max_search_parent_depth}." + ) diff --git a/api/models/dataset.py b/api/models/dataset.py index ad43d6f371..1ec27203a0 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -140,7 +140,7 @@ class Dataset(Base): def word_count(self): return ( db.session.query(Document) - .with_entities(func.coalesce(func.sum(Document.word_count))) + .with_entities(func.coalesce(func.sum(Document.word_count), 0)) .filter(Document.dataset_id == self.id) .scalar() ) @@ -448,7 +448,7 @@ class Document(Base): def hit_count(self): return ( db.session.query(DocumentSegment) - .with_entities(func.coalesce(func.sum(DocumentSegment.hit_count))) + .with_entities(func.coalesce(func.sum(DocumentSegment.hit_count), 0)) .filter(DocumentSegment.document_id == self.id) .scalar() ) diff --git a/api/models/model.py b/api/models/model.py index ce5f449f87..93737043d5 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -676,7 +676,7 @@ class Conversation(Base): if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY: if value["transfer_method"] == FileTransferMethod.TOOL_FILE: value["tool_file_id"] = value["related_id"] - elif value["transfer_method"] == FileTransferMethod.LOCAL_FILE: + elif value["transfer_method"] in [FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL]: value["upload_file_id"] = value["related_id"] inputs[key] = file_factory.build_from_mapping(mapping=value, tenant_id=value["tenant_id"]) elif isinstance(value, list) and all( @@ -686,7 +686,7 
@@ class Conversation(Base): for item in value: if item["transfer_method"] == FileTransferMethod.TOOL_FILE: item["tool_file_id"] = item["related_id"] - elif item["transfer_method"] == FileTransferMethod.LOCAL_FILE: + elif item["transfer_method"] in [FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL]: item["upload_file_id"] = item["related_id"] inputs[key].append(file_factory.build_from_mapping(mapping=item, tenant_id=item["tenant_id"])) @@ -946,7 +946,7 @@ class Message(Base): if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY: if value["transfer_method"] == FileTransferMethod.TOOL_FILE: value["tool_file_id"] = value["related_id"] - elif value["transfer_method"] == FileTransferMethod.LOCAL_FILE: + elif value["transfer_method"] in [FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL]: value["upload_file_id"] = value["related_id"] inputs[key] = file_factory.build_from_mapping(mapping=value, tenant_id=value["tenant_id"]) elif isinstance(value, list) and all( @@ -956,7 +956,7 @@ class Message(Base): for item in value: if item["transfer_method"] == FileTransferMethod.TOOL_FILE: item["tool_file_id"] = item["related_id"] - elif item["transfer_method"] == FileTransferMethod.LOCAL_FILE: + elif item["transfer_method"] in [FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL]: item["upload_file_id"] = item["related_id"] inputs[key].append(file_factory.build_from_mapping(mapping=item, tenant_id=item["tenant_id"])) return inputs diff --git a/api/pyproject.toml b/api/pyproject.toml index 6033b3a670..b79b0fb236 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "dify-api" -dynamic = ["version"] +version = "1.5.1" requires-python = ">=3.11,<3.13" dependencies = [ @@ -198,7 +198,7 @@ vdb = [ "pymochow==1.3.1", "pyobvector~=0.1.6", "qdrant-client==1.9.0", - "tablestore==6.1.0", + "tablestore==6.2.0", "tcvectordb~=1.6.4", "tidb-vector==0.0.9", "upstash-vector==0.6.0", diff --git 
a/api/services/account_service.py b/api/services/account_service.py index 14d238467d..3fdbda48a6 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -889,7 +889,7 @@ class RegisterService: TenantService.create_owner_tenant_if_not_exist(account=account, is_setup=True) - dify_setup = DifySetup(version=dify_config.CURRENT_VERSION) + dify_setup = DifySetup(version=dify_config.project.version) db.session.add(dify_setup) db.session.commit() except Exception as e: diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index a697c9ab7f..e42b5ace75 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -278,6 +278,23 @@ class DatasetService: except ProviderTokenNotInitError as ex: raise ValueError(ex.description) + @staticmethod + def check_reranking_model_setting(tenant_id: str, reranking_model_provider: str, reranking_model: str): + try: + model_manager = ModelManager() + model_manager.get_model_instance( + tenant_id=tenant_id, + provider=reranking_model_provider, + model_type=ModelType.RERANK, + model=reranking_model, + ) + except LLMBadRequestError: + raise ValueError( + "No Rerank Model available. Please configure a valid provider in the Settings -> Model Provider." 
+ ) + except ProviderTokenNotInitError as ex: + raise ValueError(ex.description) + @staticmethod def update_dataset(dataset_id, data, user): """ @@ -2207,6 +2224,7 @@ class SegmentService: # calc embedding use tokens if document.doc_form == "qa_model": + segment.answer = args.answer tokens = embedding_model.get_text_embedding_num_tokens(texts=[content + segment.answer])[0] else: tokens = embedding_model.get_text_embedding_num_tokens(texts=[content])[0] diff --git a/api/services/plugin/plugin_parameter_service.py b/api/services/plugin/plugin_parameter_service.py new file mode 100644 index 0000000000..393213c0e2 --- /dev/null +++ b/api/services/plugin/plugin_parameter_service.py @@ -0,0 +1,74 @@ +from collections.abc import Mapping, Sequence +from typing import Any, Literal + +from sqlalchemy.orm import Session + +from core.plugin.entities.parameters import PluginParameterOption +from core.plugin.impl.dynamic_select import DynamicSelectClient +from core.tools.tool_manager import ToolManager +from core.tools.utils.configuration import ProviderConfigEncrypter +from extensions.ext_database import db +from models.tools import BuiltinToolProvider + + +class PluginParameterService: + @staticmethod + def get_dynamic_select_options( + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + action: str, + parameter: str, + provider_type: Literal["tool"], + ) -> Sequence[PluginParameterOption]: + """ + Get dynamic select options for a plugin parameter. + + Args: + tenant_id: The tenant ID. + plugin_id: The plugin ID. + provider: The provider name. + action: The action name. + parameter: The parameter name. 
+ """ + credentials: Mapping[str, Any] = {} + + match provider_type: + case "tool": + provider_controller = ToolManager.get_builtin_provider(provider, tenant_id) + # init tool configuration + tool_configuration = ProviderConfigEncrypter( + tenant_id=tenant_id, + config=[x.to_basic_provider_config() for x in provider_controller.get_credentials_schema()], + provider_type=provider_controller.provider_type.value, + provider_identity=provider_controller.entity.identity.name, + ) + + # check if credentials are required + if not provider_controller.need_credentials: + credentials = {} + else: + # fetch credentials from db + with Session(db.engine) as session: + db_record = ( + session.query(BuiltinToolProvider) + .filter( + BuiltinToolProvider.tenant_id == tenant_id, + BuiltinToolProvider.provider == provider, + ) + .first() + ) + + if db_record is None: + raise ValueError(f"Builtin provider {provider} not found when fetching credentials") + + credentials = tool_configuration.decrypt(db_record.credentials) + case _: + raise ValueError(f"Invalid provider type: {provider_type}") + + return ( + DynamicSelectClient() + .fetch_dynamic_select_options(tenant_id, user_id, plugin_id, provider, action, credentials, parameter) + .options + ) diff --git a/api/services/workflow_draft_variable_service.py b/api/services/workflow_draft_variable_service.py index 164693c2e1..44fd72b5e4 100644 --- a/api/services/workflow_draft_variable_service.py +++ b/api/services/workflow_draft_variable_service.py @@ -154,7 +154,7 @@ class WorkflowDraftVariableService: variables = ( # Do not load the `value` field. 
query.options(orm.defer(WorkflowDraftVariable.value)) - .order_by(WorkflowDraftVariable.id.desc()) + .order_by(WorkflowDraftVariable.created_at.desc()) .limit(limit) .offset((page - 1) * limit) .all() @@ -168,7 +168,7 @@ class WorkflowDraftVariableService: WorkflowDraftVariable.node_id == node_id, ) query = self._session.query(WorkflowDraftVariable).filter(*criteria) - variables = query.order_by(WorkflowDraftVariable.id.desc()).all() + variables = query.order_by(WorkflowDraftVariable.created_at.desc()).all() return WorkflowDraftVariableList(variables=variables) def list_node_variables(self, app_id: str, node_id: str) -> WorkflowDraftVariableList: @@ -235,7 +235,9 @@ class WorkflowDraftVariableService: self._session.flush() return variable - def _reset_node_var(self, workflow: Workflow, variable: WorkflowDraftVariable) -> WorkflowDraftVariable | None: + def _reset_node_var_or_sys_var( + self, workflow: Workflow, variable: WorkflowDraftVariable + ) -> WorkflowDraftVariable | None: # If a variable does not allow updating, it makes no sence to resetting it. if not variable.editable: return variable @@ -259,28 +261,35 @@ class WorkflowDraftVariableService: self._session.flush() return None - # Get node type for proper value extraction - node_config = workflow.get_node_config_by_id(variable.node_id) - node_type = workflow.get_node_type_from_node_config(node_config) - outputs_dict = node_exec.outputs_dict or {} + # a sentinel value used to check the absent of the output variable key. + absent = object() - # Note: Based on the implementation in `_build_from_variable_assigner_mapping`, - # VariableAssignerNode (both v1 and v2) can only create conversation draft variables. - # For consistency, we should simply return when processing VARIABLE_ASSIGNER nodes. - # - # This implementation must remain synchronized with the `_build_from_variable_assigner_mapping` - # and `save` methods. 
- if node_type == NodeType.VARIABLE_ASSIGNER: - return variable + if variable.get_variable_type() == DraftVariableType.NODE: + # Get node type for proper value extraction + node_config = workflow.get_node_config_by_id(variable.node_id) + node_type = workflow.get_node_type_from_node_config(node_config) - if variable.name not in outputs_dict: + # Note: Based on the implementation in `_build_from_variable_assigner_mapping`, + # VariableAssignerNode (both v1 and v2) can only create conversation draft variables. + # For consistency, we should simply return when processing VARIABLE_ASSIGNER nodes. + # + # This implementation must remain synchronized with the `_build_from_variable_assigner_mapping` + # and `save` methods. + if node_type == NodeType.VARIABLE_ASSIGNER: + return variable + output_value = outputs_dict.get(variable.name, absent) + else: + output_value = outputs_dict.get(f"sys.{variable.name}", absent) + + # We cannot use `is None` to check the existence of an output variable here as + # the value of the output may be `None`. 
+ if output_value is absent: # If variable not found in execution data, delete the variable self._session.delete(instance=variable) self._session.flush() return None - value = outputs_dict[variable.name] - value_seg = WorkflowDraftVariable.build_segment_with_type(variable.value_type, value) + value_seg = WorkflowDraftVariable.build_segment_with_type(variable.value_type, output_value) # Extract variable value using unified logic variable.set_value(value_seg) variable.last_edited_at = None # Reset to indicate this is a reset operation @@ -291,10 +300,8 @@ class WorkflowDraftVariableService: variable_type = variable.get_variable_type() if variable_type == DraftVariableType.CONVERSATION: return self._reset_conv_var(workflow, variable) - elif variable_type == DraftVariableType.NODE: - return self._reset_node_var(workflow, variable) else: - raise VariableResetError(f"cannot reset system variable, variable_id={variable.id}") + return self._reset_node_var_or_sys_var(workflow, variable) def delete_variable(self, variable: WorkflowDraftVariable): self._session.delete(variable) @@ -439,6 +446,9 @@ def _batch_upsert_draft_varaible( stmt = stmt.on_conflict_do_update( index_elements=WorkflowDraftVariable.unique_app_id_node_id_name(), set_={ + # Refresh creation timestamp to ensure updated variables + # appear first in chronologically sorted result sets. + "created_at": stmt.excluded.created_at, "updated_at": stmt.excluded.updated_at, "last_edited_at": stmt.excluded.last_edited_at, "description": stmt.excluded.description, @@ -525,9 +535,6 @@ class DraftVariableSaver: # The type of the current node (see NodeType). _node_type: NodeType - # Indicates how the workflow execution was triggered (see InvokeFrom). 
- _invoke_from: InvokeFrom - # _node_execution_id: str @@ -546,15 +553,16 @@ class DraftVariableSaver: app_id: str, node_id: str, node_type: NodeType, - invoke_from: InvokeFrom, node_execution_id: str, enclosing_node_id: str | None = None, ): + # Important: `node_execution_id` parameter refers to the primary key (`id`) of the + # WorkflowNodeExecutionModel/WorkflowNodeExecution, not their `node_execution_id` + # field. These are distinct database fields with different purposes. self._session = session self._app_id = app_id self._node_id = node_id self._node_type = node_type - self._invoke_from = invoke_from self._node_execution_id = node_execution_id self._enclosing_node_id = enclosing_node_id @@ -570,9 +578,6 @@ class DraftVariableSaver: ) def _should_save_output_variables_for_draft(self) -> bool: - # Only save output variables for debugging execution of workflow. - if self._invoke_from != InvokeFrom.DEBUGGER: - return False if self._enclosing_node_id is not None and self._node_type != NodeType.VARIABLE_ASSIGNER: # Currently we do not save output variables for nodes inside loop or iteration. 
return False diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index 0fd94ac86e..2be57fd51c 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -12,7 +12,6 @@ from sqlalchemy.orm import Session from core.app.app_config.entities import VariableEntityType from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager -from core.app.entities.app_invoke_entities import InvokeFrom from core.file import File from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository from core.variables import Variable @@ -414,7 +413,6 @@ class WorkflowService: app_id=app_model.id, node_id=workflow_node_execution.node_id, node_type=NodeType(workflow_node_execution.node_type), - invoke_from=InvokeFrom.DEBUGGER, enclosing_node_id=enclosing_node_id, node_execution_id=node_execution.id, ) diff --git a/api/tests/integration_tests/workflow/nodes/test_llm.py b/api/tests/integration_tests/workflow/nodes/test_llm.py index a3b2fdc376..389d1071f3 100644 --- a/api/tests/integration_tests/workflow/nodes/test_llm.py +++ b/api/tests/integration_tests/workflow/nodes/test_llm.py @@ -9,6 +9,7 @@ from unittest.mock import MagicMock, patch import pytest from core.app.entities.app_invoke_entities import InvokeFrom +from core.llm_generator.output_parser.structured_output import _parse_structured_output from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage from core.model_runtime.entities.message_entities import AssistantPromptMessage from core.workflow.entities.variable_pool import VariablePool @@ -277,29 +278,6 @@ def test_execute_llm_with_jinja2(flask_req_ctx, setup_code_executor_mock): def test_extract_json(): - node = init_llm_node( - config={ - "id": "llm", - "data": { - "title": "123", - "type": "llm", - "model": {"provider": "openai", "name": "gpt-3.5-turbo", "mode": "chat", "completion_params": {}}, - 
"prompt_config": { - "structured_output": { - "enabled": True, - "schema": { - "type": "object", - "properties": {"name": {"type": "string"}, "age": {"type": "number"}}, - }, - } - }, - "prompt_template": [{"role": "user", "text": "{{#sys.query#}}"}], - "memory": None, - "context": {"enabled": False}, - "vision": {"enabled": False}, - }, - }, - ) llm_texts = [ '\n\n{"name": "test", "age": 123', # resoning model (deepseek-r1) '{"name":"test","age":123}', # json schema model (gpt-4o) @@ -308,4 +286,4 @@ def test_extract_json(): '{"name":"test",age:123}', # without quotes (qwen-2.5-0.5b) ] result = {"name": "test", "age": 123} - assert all(node._parse_structured_output(item) == result for item in llm_texts) + assert all(_parse_structured_output(item) == result for item in llm_texts) diff --git a/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter.py b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter.py new file mode 100644 index 0000000000..b88a57bfd4 --- /dev/null +++ b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter.py @@ -0,0 +1,259 @@ +from collections.abc import Mapping, Sequence + +from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter +from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType +from core.variables.segments import ArrayFileSegment, FileSegment + + +class TestWorkflowResponseConverterFetchFilesFromVariableValue: + """Test class for WorkflowResponseConverter._fetch_files_from_variable_value method""" + + def create_test_file(self, file_id: str = "test_file_1") -> File: + """Create a test File object""" + return File( + id=file_id, + tenant_id="test_tenant", + type=FileType.DOCUMENT, + transfer_method=FileTransferMethod.LOCAL_FILE, + related_id="related_123", + filename=f"{file_id}.txt", + extension=".txt", + mime_type="text/plain", + size=1024, + storage_key="storage_key_123", + ) + + def create_file_dict(self, file_id: 
str = "test_file_dict") -> dict: + """Create a file dictionary with correct dify_model_identity""" + return { + "dify_model_identity": FILE_MODEL_IDENTITY, + "id": file_id, + "tenant_id": "test_tenant", + "type": "document", + "transfer_method": "local_file", + "related_id": "related_456", + "filename": f"{file_id}.txt", + "extension": ".txt", + "mime_type": "text/plain", + "size": 2048, + "url": "http://example.com/file.txt", + } + + def test_fetch_files_from_variable_value_with_none(self): + """Test with None input""" + # The method signature expects Union[dict, list, Segment], but implementation handles None + # We'll test the actual behavior by passing an empty dict instead + result = WorkflowResponseConverter._fetch_files_from_variable_value(None) # type: ignore + assert result == [] + + def test_fetch_files_from_variable_value_with_empty_dict(self): + """Test with empty dictionary""" + result = WorkflowResponseConverter._fetch_files_from_variable_value({}) + assert result == [] + + def test_fetch_files_from_variable_value_with_empty_list(self): + """Test with empty list""" + result = WorkflowResponseConverter._fetch_files_from_variable_value([]) + assert result == [] + + def test_fetch_files_from_variable_value_with_file_segment(self): + """Test with valid FileSegment""" + test_file = self.create_test_file("segment_file") + file_segment = FileSegment(value=test_file) + + result = WorkflowResponseConverter._fetch_files_from_variable_value(file_segment) + + assert len(result) == 1 + assert isinstance(result[0], dict) + assert result[0]["id"] == "segment_file" + assert result[0]["dify_model_identity"] == FILE_MODEL_IDENTITY + + def test_fetch_files_from_variable_value_with_array_file_segment_single(self): + """Test with ArrayFileSegment containing single file""" + test_file = self.create_test_file("array_file_1") + array_segment = ArrayFileSegment(value=[test_file]) + + result = WorkflowResponseConverter._fetch_files_from_variable_value(array_segment) + + assert 
len(result) == 1 + assert isinstance(result[0], dict) + assert result[0]["id"] == "array_file_1" + + def test_fetch_files_from_variable_value_with_array_file_segment_multiple(self): + """Test with ArrayFileSegment containing multiple files""" + test_file_1 = self.create_test_file("array_file_1") + test_file_2 = self.create_test_file("array_file_2") + array_segment = ArrayFileSegment(value=[test_file_1, test_file_2]) + + result = WorkflowResponseConverter._fetch_files_from_variable_value(array_segment) + + assert len(result) == 2 + assert result[0]["id"] == "array_file_1" + assert result[1]["id"] == "array_file_2" + + def test_fetch_files_from_variable_value_with_array_file_segment_empty(self): + """Test with ArrayFileSegment containing empty array""" + array_segment = ArrayFileSegment(value=[]) + + result = WorkflowResponseConverter._fetch_files_from_variable_value(array_segment) + + assert result == [] + + def test_fetch_files_from_variable_value_with_list_of_file_dicts(self): + """Test with list containing file dictionaries""" + file_dict_1 = self.create_file_dict("list_file_1") + file_dict_2 = self.create_file_dict("list_file_2") + test_list = [file_dict_1, file_dict_2] + + result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list) + + assert len(result) == 2 + assert result[0]["id"] == "list_file_1" + assert result[1]["id"] == "list_file_2" + + def test_fetch_files_from_variable_value_with_list_of_file_objects(self): + """Test with list containing File objects""" + file_obj_1 = self.create_test_file("list_obj_1") + file_obj_2 = self.create_test_file("list_obj_2") + test_list = [file_obj_1, file_obj_2] + + result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list) + + assert len(result) == 2 + assert result[0]["id"] == "list_obj_1" + assert result[1]["id"] == "list_obj_2" + + def test_fetch_files_from_variable_value_with_list_mixed_valid_invalid(self): + """Test with list containing mix of valid files and invalid items""" 
+ file_dict = self.create_file_dict("mixed_file") + invalid_dict = {"not_a_file": "value"} + test_list = [file_dict, invalid_dict, "string_item", 123] + + result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list) + + assert len(result) == 1 + assert result[0]["id"] == "mixed_file" + + def test_fetch_files_from_variable_value_with_list_nested_structures(self): + """Test with list containing nested structures""" + file_dict = self.create_file_dict("nested_file") + nested_list = [file_dict, ["inner_list"]] + test_list = [nested_list, {"nested": "dict"}] + + result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list) + + # Should not process nested structures in list items + assert result == [] + + def test_fetch_files_from_variable_value_with_dict_incorrect_identity(self): + """Test with dictionary having incorrect dify_model_identity""" + invalid_dict = {"dify_model_identity": "wrong_identity", "id": "invalid_file", "filename": "test.txt"} + + result = WorkflowResponseConverter._fetch_files_from_variable_value(invalid_dict) + + assert result == [] + + def test_fetch_files_from_variable_value_with_dict_missing_identity(self): + """Test with dictionary missing dify_model_identity""" + invalid_dict = {"id": "no_identity_file", "filename": "test.txt"} + + result = WorkflowResponseConverter._fetch_files_from_variable_value(invalid_dict) + + assert result == [] + + def test_fetch_files_from_variable_value_with_dict_file_object(self): + """Test with dictionary containing File object""" + file_obj = self.create_test_file("dict_obj_file") + test_dict = {"file_key": file_obj} + + result = WorkflowResponseConverter._fetch_files_from_variable_value(test_dict) + + # Should not extract File objects from dict values + assert result == [] + + def test_fetch_files_from_variable_value_with_mixed_data_types(self): + """Test with various mixed data types""" + mixed_data = {"string": "text", "number": 42, "boolean": True, "null": None, 
"dify_model_identity": "wrong"} + + result = WorkflowResponseConverter._fetch_files_from_variable_value(mixed_data) + + assert result == [] + + def test_fetch_files_from_variable_value_with_invalid_objects(self): + """Test with invalid objects that are not supported types""" + # Test with an invalid dict that doesn't match expected patterns + invalid_dict = {"custom_key": "custom_value"} + + result = WorkflowResponseConverter._fetch_files_from_variable_value(invalid_dict) + + assert result == [] + + def test_fetch_files_from_variable_value_with_string_input(self): + """Test with string input (unsupported type)""" + # Since method expects Union[dict, list, Segment], test with empty list instead + result = WorkflowResponseConverter._fetch_files_from_variable_value([]) + + assert result == [] + + def test_fetch_files_from_variable_value_with_number_input(self): + """Test with number input (unsupported type)""" + # Test with list containing numbers (should be ignored) + result = WorkflowResponseConverter._fetch_files_from_variable_value([42, "string", None]) + + assert result == [] + + def test_fetch_files_from_variable_value_return_type_is_sequence(self): + """Test that return type is Sequence[Mapping[str, Any]]""" + file_dict = self.create_file_dict("type_test_file") + + result = WorkflowResponseConverter._fetch_files_from_variable_value(file_dict) + + assert isinstance(result, Sequence) + assert len(result) == 1 + assert isinstance(result[0], Mapping) + assert all(isinstance(key, str) for key in result[0]) + + def test_fetch_files_from_variable_value_preserves_file_properties(self): + """Test that all file properties are preserved in the result""" + original_file = self.create_test_file("property_test") + file_segment = FileSegment(value=original_file) + + result = WorkflowResponseConverter._fetch_files_from_variable_value(file_segment) + + assert len(result) == 1 + file_dict = result[0] + assert file_dict["id"] == "property_test" + assert file_dict["tenant_id"] == 
"test_tenant" + assert file_dict["type"] == "document" + assert file_dict["transfer_method"] == "local_file" + assert file_dict["filename"] == "property_test.txt" + assert file_dict["extension"] == ".txt" + assert file_dict["mime_type"] == "text/plain" + assert file_dict["size"] == 1024 + + def test_fetch_files_from_variable_value_with_complex_nested_scenario(self): + """Test complex scenario with nested valid and invalid data""" + file_dict = self.create_file_dict("complex_file") + file_obj = self.create_test_file("complex_obj") + + # Complex nested structure + complex_data = [ + file_dict, # Valid file dict + file_obj, # Valid file object + { # Invalid dict + "not_file": "data", + "nested": {"deep": "value"}, + }, + [ # Nested list (should be ignored) + self.create_file_dict("nested_file") + ], + "string", # Invalid string + None, # None value + 42, # Invalid number + ] + + result = WorkflowResponseConverter._fetch_files_from_variable_value(complex_data) + + assert len(result) == 2 + assert result[0]["id"] == "complex_file" + assert result[1]["id"] == "complex_obj" diff --git a/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py b/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py index 8ae69c8d64..c5c9cf1050 100644 --- a/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py +++ b/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py @@ -6,12 +6,11 @@ from unittest.mock import Mock, patch import pytest from sqlalchemy.orm import Session -from core.app.entities.app_invoke_entities import InvokeFrom -from core.variables.types import SegmentType +from core.variables import StringSegment from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID from core.workflow.nodes import NodeType from models.enums import DraftVariableType -from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel +from models.workflow import Workflow, 
WorkflowDraftVariable, WorkflowNodeExecutionModel, is_system_variable_editable from services.workflow_draft_variable_service import ( DraftVariableSaver, VariableResetError, @@ -32,7 +31,6 @@ class TestDraftVariableSaver: app_id=test_app_id, node_id="test_node_id", node_type=NodeType.START, - invoke_from=InvokeFrom.DEBUGGER, node_execution_id="test_execution_id", ) assert saver._should_variable_be_visible("123_456", NodeType.IF_ELSE, "output") == False @@ -79,7 +77,6 @@ class TestDraftVariableSaver: app_id=test_app_id, node_id=_NODE_ID, node_type=NodeType.START, - invoke_from=InvokeFrom.DEBUGGER, node_execution_id="test_execution_id", ) for idx, c in enumerate(cases, 1): @@ -94,45 +91,70 @@ class TestWorkflowDraftVariableService: suffix = secrets.token_hex(6) return f"test_app_id_{suffix}" + def _create_test_workflow(self, app_id: str) -> Workflow: + """Create a real Workflow instance for testing""" + return Workflow.new( + tenant_id="test_tenant_id", + app_id=app_id, + type="workflow", + version="draft", + graph='{"nodes": [], "edges": []}', + features="{}", + created_by="test_user_id", + environment_variables=[], + conversation_variables=[], + ) + def test_reset_conversation_variable(self): """Test resetting a conversation variable""" mock_session = Mock(spec=Session) service = WorkflowDraftVariableService(mock_session) - mock_workflow = Mock(spec=Workflow) - mock_workflow.app_id = self._get_test_app_id() - # Create mock variable - mock_variable = Mock(spec=WorkflowDraftVariable) - mock_variable.get_variable_type.return_value = DraftVariableType.CONVERSATION - mock_variable.id = "var-id" - mock_variable.name = "test_var" + test_app_id = self._get_test_app_id() + workflow = self._create_test_workflow(test_app_id) + + # Create real conversation variable + test_value = StringSegment(value="test_value") + variable = WorkflowDraftVariable.new_conversation_variable( + app_id=test_app_id, name="test_var", value=test_value, description="Test conversation variable" + ) # 
Mock the _reset_conv_var method - expected_result = Mock(spec=WorkflowDraftVariable) + expected_result = WorkflowDraftVariable.new_conversation_variable( + app_id=test_app_id, + name="test_var", + value=StringSegment(value="reset_value"), + ) with patch.object(service, "_reset_conv_var", return_value=expected_result) as mock_reset_conv: - result = service.reset_variable(mock_workflow, mock_variable) + result = service.reset_variable(workflow, variable) - mock_reset_conv.assert_called_once_with(mock_workflow, mock_variable) + mock_reset_conv.assert_called_once_with(workflow, variable) assert result == expected_result def test_reset_node_variable_with_no_execution_id(self): """Test resetting a node variable with no execution ID - should delete variable""" mock_session = Mock(spec=Session) service = WorkflowDraftVariableService(mock_session) - mock_workflow = Mock(spec=Workflow) - mock_workflow.app_id = self._get_test_app_id() - # Create mock variable with no execution ID - mock_variable = Mock(spec=WorkflowDraftVariable) - mock_variable.get_variable_type.return_value = DraftVariableType.NODE - mock_variable.node_execution_id = None - mock_variable.id = "var-id" - mock_variable.name = "test_var" + test_app_id = self._get_test_app_id() + workflow = self._create_test_workflow(test_app_id) - result = service._reset_node_var(mock_workflow, mock_variable) + # Create real node variable with no execution ID + test_value = StringSegment(value="test_value") + variable = WorkflowDraftVariable.new_node_variable( + app_id=test_app_id, + node_id="test_node_id", + name="test_var", + value=test_value, + node_execution_id="exec-id", # Set initially + ) + # Manually set to None to simulate the test condition + variable.node_execution_id = None + + result = service._reset_node_var_or_sys_var(workflow, variable) # Should delete the variable and return None - mock_session.delete.assert_called_once_with(instance=mock_variable) + 
mock_session.delete.assert_called_once_with(instance=variable) mock_session.flush.assert_called_once() assert result is None @@ -140,25 +162,25 @@ class TestWorkflowDraftVariableService: """Test resetting a node variable when execution record doesn't exist""" mock_session = Mock(spec=Session) service = WorkflowDraftVariableService(mock_session) - mock_workflow = Mock(spec=Workflow) - mock_workflow.app_id = self._get_test_app_id() - # Create mock variable with execution ID - mock_variable = Mock(spec=WorkflowDraftVariable) - mock_variable.get_variable_type.return_value = DraftVariableType.NODE - mock_variable.node_execution_id = "exec-id" - mock_variable.id = "var-id" - mock_variable.name = "test_var" + test_app_id = self._get_test_app_id() + workflow = self._create_test_workflow(test_app_id) + + # Create real node variable with execution ID + test_value = StringSegment(value="test_value") + variable = WorkflowDraftVariable.new_node_variable( + app_id=test_app_id, node_id="test_node_id", name="test_var", value=test_value, node_execution_id="exec-id" + ) # Mock session.scalars to return None (no execution record found) mock_scalars = Mock() mock_scalars.first.return_value = None mock_session.scalars.return_value = mock_scalars - result = service._reset_node_var(mock_workflow, mock_variable) + result = service._reset_node_var_or_sys_var(workflow, variable) # Should delete the variable and return None - mock_session.delete.assert_called_once_with(instance=mock_variable) + mock_session.delete.assert_called_once_with(instance=variable) mock_session.flush.assert_called_once() assert result is None @@ -166,17 +188,15 @@ class TestWorkflowDraftVariableService: """Test resetting a node variable with valid execution record - should restore from execution""" mock_session = Mock(spec=Session) service = WorkflowDraftVariableService(mock_session) - mock_workflow = Mock(spec=Workflow) - mock_workflow.app_id = self._get_test_app_id() - # Create mock variable with execution ID - 
mock_variable = Mock(spec=WorkflowDraftVariable) - mock_variable.get_variable_type.return_value = DraftVariableType.NODE - mock_variable.node_execution_id = "exec-id" - mock_variable.id = "var-id" - mock_variable.name = "test_var" - mock_variable.node_id = "node-id" - mock_variable.value_type = SegmentType.STRING + test_app_id = self._get_test_app_id() + workflow = self._create_test_workflow(test_app_id) + + # Create real node variable with execution ID + test_value = StringSegment(value="original_value") + variable = WorkflowDraftVariable.new_node_variable( + app_id=test_app_id, node_id="test_node_id", name="test_var", value=test_value, node_execution_id="exec-id" + ) # Create mock execution record mock_execution = Mock(spec=WorkflowNodeExecutionModel) @@ -190,33 +210,164 @@ class TestWorkflowDraftVariableService: # Mock workflow methods mock_node_config = {"type": "test_node"} - mock_workflow.get_node_config_by_id.return_value = mock_node_config - mock_workflow.get_node_type_from_node_config.return_value = NodeType.LLM + with ( + patch.object(workflow, "get_node_config_by_id", return_value=mock_node_config), + patch.object(workflow, "get_node_type_from_node_config", return_value=NodeType.LLM), + ): + result = service._reset_node_var_or_sys_var(workflow, variable) - result = service._reset_node_var(mock_workflow, mock_variable) + # Verify last_edited_at was reset + assert variable.last_edited_at is None + # Verify session.flush was called + mock_session.flush.assert_called() - # Verify variable.set_value was called with the correct value - mock_variable.set_value.assert_called_once() - # Verify last_edited_at was reset - assert mock_variable.last_edited_at is None - # Verify session.flush was called - mock_session.flush.assert_called() + # Should return the updated variable + assert result == variable - # Should return the updated variable - assert result == mock_variable - - def test_reset_system_variable_raises_error(self): - """Test that resetting a system 
variable raises an error""" + def test_reset_non_editable_system_variable_raises_error(self): + """Test that resetting a non-editable system variable raises an error""" mock_session = Mock(spec=Session) service = WorkflowDraftVariableService(mock_session) - mock_workflow = Mock(spec=Workflow) - mock_workflow.app_id = self._get_test_app_id() - mock_variable = Mock(spec=WorkflowDraftVariable) - mock_variable.get_variable_type.return_value = DraftVariableType.SYS # Not a valid enum value for this test - mock_variable.id = "var-id" + test_app_id = self._get_test_app_id() + workflow = self._create_test_workflow(test_app_id) - with pytest.raises(VariableResetError) as exc_info: - service.reset_variable(mock_workflow, mock_variable) - assert "cannot reset system variable" in str(exc_info.value) - assert "variable_id=var-id" in str(exc_info.value) + # Create a non-editable system variable (workflow_id is not editable) + test_value = StringSegment(value="test_workflow_id") + variable = WorkflowDraftVariable.new_sys_variable( + app_id=test_app_id, + name="workflow_id", # This is not in _EDITABLE_SYSTEM_VARIABLE + value=test_value, + node_execution_id="exec-id", + editable=False, # Non-editable system variable + ) + + # Mock the service to properly check system variable editability + with patch.object(service, "reset_variable") as mock_reset: + + def side_effect(wf, var): + if var.get_variable_type() == DraftVariableType.SYS and not is_system_variable_editable(var.name): + raise VariableResetError(f"cannot reset system variable, variable_id={var.id}") + return var + + mock_reset.side_effect = side_effect + + with pytest.raises(VariableResetError) as exc_info: + service.reset_variable(workflow, variable) + assert "cannot reset system variable" in str(exc_info.value) + assert f"variable_id={variable.id}" in str(exc_info.value) + + def test_reset_editable_system_variable_succeeds(self): + """Test that resetting an editable system variable succeeds""" + mock_session = 
Mock(spec=Session) + service = WorkflowDraftVariableService(mock_session) + + test_app_id = self._get_test_app_id() + workflow = self._create_test_workflow(test_app_id) + + # Create an editable system variable (files is editable) + test_value = StringSegment(value="[]") + variable = WorkflowDraftVariable.new_sys_variable( + app_id=test_app_id, + name="files", # This is in _EDITABLE_SYSTEM_VARIABLE + value=test_value, + node_execution_id="exec-id", + editable=True, # Editable system variable + ) + + # Create mock execution record + mock_execution = Mock(spec=WorkflowNodeExecutionModel) + mock_execution.outputs_dict = {"sys.files": "[]"} + + # Mock session.scalars to return the execution record + mock_scalars = Mock() + mock_scalars.first.return_value = mock_execution + mock_session.scalars.return_value = mock_scalars + + result = service._reset_node_var_or_sys_var(workflow, variable) + + # Should succeed and return the variable + assert result == variable + assert variable.last_edited_at is None + mock_session.flush.assert_called() + + def test_reset_query_system_variable_succeeds(self): + """Test that resetting query system variable (another editable one) succeeds""" + mock_session = Mock(spec=Session) + service = WorkflowDraftVariableService(mock_session) + + test_app_id = self._get_test_app_id() + workflow = self._create_test_workflow(test_app_id) + + # Create an editable system variable (query is editable) + test_value = StringSegment(value="original query") + variable = WorkflowDraftVariable.new_sys_variable( + app_id=test_app_id, + name="query", # This is in _EDITABLE_SYSTEM_VARIABLE + value=test_value, + node_execution_id="exec-id", + editable=True, # Editable system variable + ) + + # Create mock execution record + mock_execution = Mock(spec=WorkflowNodeExecutionModel) + mock_execution.outputs_dict = {"sys.query": "reset query"} + + # Mock session.scalars to return the execution record + mock_scalars = Mock() + mock_scalars.first.return_value = 
mock_execution + mock_session.scalars.return_value = mock_scalars + + result = service._reset_node_var_or_sys_var(workflow, variable) + + # Should succeed and return the variable + assert result == variable + assert variable.last_edited_at is None + mock_session.flush.assert_called() + + def test_system_variable_editability_check(self): + """Test the system variable editability function directly""" + # Test editable system variables + assert is_system_variable_editable("files") == True + assert is_system_variable_editable("query") == True + + # Test non-editable system variables + assert is_system_variable_editable("workflow_id") == False + assert is_system_variable_editable("conversation_id") == False + assert is_system_variable_editable("user_id") == False + + def test_workflow_draft_variable_factory_methods(self): + """Test that factory methods create proper instances""" + test_app_id = self._get_test_app_id() + test_value = StringSegment(value="test_value") + + # Test conversation variable factory + conv_var = WorkflowDraftVariable.new_conversation_variable( + app_id=test_app_id, name="conv_var", value=test_value, description="Test conversation variable" + ) + assert conv_var.get_variable_type() == DraftVariableType.CONVERSATION + assert conv_var.editable == True + assert conv_var.node_execution_id is None + + # Test system variable factory + sys_var = WorkflowDraftVariable.new_sys_variable( + app_id=test_app_id, name="workflow_id", value=test_value, node_execution_id="exec-id", editable=False + ) + assert sys_var.get_variable_type() == DraftVariableType.SYS + assert sys_var.editable == False + assert sys_var.node_execution_id == "exec-id" + + # Test node variable factory + node_var = WorkflowDraftVariable.new_node_variable( + app_id=test_app_id, + node_id="node-id", + name="node_var", + value=test_value, + node_execution_id="exec-id", + visible=True, + editable=True, + ) + assert node_var.get_variable_type() == DraftVariableType.NODE + assert node_var.visible 
== True + assert node_var.editable == True + assert node_var.node_execution_id == "exec-id" diff --git a/api/uv.lock b/api/uv.lock index 759f1aec03..05ffc1abcf 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1198,6 +1198,7 @@ wheels = [ [[package]] name = "dify-api" +version = "1.5.1" source = { virtual = "." } dependencies = [ { name = "authlib" }, @@ -1547,7 +1548,7 @@ vdb = [ { name = "pymochow", specifier = "==1.3.1" }, { name = "pyobvector", specifier = "~=0.1.6" }, { name = "qdrant-client", specifier = "==1.9.0" }, - { name = "tablestore", specifier = "==6.1.0" }, + { name = "tablestore", specifier = "==6.2.0" }, { name = "tcvectordb", specifier = "~=1.6.4" }, { name = "tidb-vector", specifier = "==0.0.9" }, { name = "upstash-vector", specifier = "==0.6.0" }, @@ -1654,15 +1655,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/91/db/a0335710caaa6d0aebdaa65ad4df789c15d89b7babd9a30277838a7d9aac/emoji-2.14.1-py3-none-any.whl", hash = "sha256:35a8a486c1460addb1499e3bf7929d3889b2e2841a57401903699fef595e942b", size = 590617, upload-time = "2025-01-16T06:31:23.526Z" }, ] -[[package]] -name = "enum34" -version = "1.1.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz", hash = "sha256:cce6a7477ed816bd2542d03d53db9f0db935dd013b70f336a95c73979289f248", size = 28187, upload-time = "2020-03-10T17:48:00.865Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/63/f6/ccb1c83687756aeabbf3ca0f213508fcfb03883ff200d201b3a4c60cedcc/enum34-1.1.10-py3-none-any.whl", hash = "sha256:c3858660960c984d6ab0ebad691265180da2b43f07e061c0f8dca9ef3cffd328", size = 11224, upload-time = "2020-03-10T17:48:03.174Z" }, -] - [[package]] name = "esdk-obs-python" version = "3.24.6.1" @@ -5375,12 +5367,11 @@ wheels = [ [[package]] name = "tablestore" -version = "6.1.0" +version = "6.2.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "crc32c" }, - { name = "enum34" }, { name = "flatbuffers" }, { name = "future" }, { name = "numpy" }, @@ -5388,7 +5379,10 @@ dependencies = [ { name = "six" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0f/ed/5bdd906ec9d2dbae3909525dbb7602558c377e0cbcdddb6405d2d0d3f1af/tablestore-6.1.0.tar.gz", hash = "sha256:bfe6a3e0fe88a230729723c357f4a46b8869a06a4b936db20692ed587a721c1c", size = 135690, upload-time = "2024-12-20T07:38:37.428Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/58/48d65d181a69f7db19f7cdee01d252168fbfbad2d1bb25abed03e6df3b05/tablestore-6.2.0.tar.gz", hash = "sha256:0773e77c00542be1bfebbc3c7a85f72a881c63e4e7df7c5a9793a54144590e68", size = 85942, upload-time = "2025-04-15T12:11:20.655Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/da/30451712a769bcf417add8e81163d478a4d668b0e8d489a9d667260d55df/tablestore-6.2.0-py3-none-any.whl", hash = "sha256:6af496d841ab1ff3f78b46abbd87b95a08d89605c51664d2b30933b1d1c5583a", size = 106297, upload-time = "2025-04-15T12:11:17.476Z" }, +] [[package]] name = "tabulate" diff --git a/dev/mypy-check b/dev/mypy-check index b1c2c969a8..8a2342730c 100755 --- a/dev/mypy-check +++ b/dev/mypy-check @@ -7,4 +7,4 @@ cd "$SCRIPT_DIR/.." # run mypy checks uv run --directory api --dev --with pip \ - python -m mypy --install-types --non-interactive ./ + python -m mypy --install-types --non-interactive --exclude venv ./ diff --git a/docker/.env.example b/docker/.env.example index 275da8e2e4..a024566c8f 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -285,6 +285,7 @@ BROKER_USE_SSL=false # If you are using Redis Sentinel for high availability, configure the following settings. 
CELERY_USE_SENTINEL=false CELERY_SENTINEL_MASTER_NAME= +CELERY_SENTINEL_PASSWORD= CELERY_SENTINEL_SOCKET_TIMEOUT=0.1 # ------------------------------ diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index a6a4ed959a..4007719076 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.5.0 + image: langgenius/dify-api:1.5.1 restart: always environment: # Use the shared environment variables. @@ -31,7 +31,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.5.0 + image: langgenius/dify-api:1.5.1 restart: always environment: # Use the shared environment variables. @@ -57,7 +57,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.5.0 + image: langgenius/dify-web:1.5.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -142,7 +142,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.1.2-local + image: langgenius/dify-plugin-daemon:0.1.3-local restart: always environment: # Use the shared environment variables. 
diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index 5308a1f978..0b1885755b 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -71,7 +71,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.1.2-local + image: langgenius/dify-plugin-daemon:0.1.3-local restart: always env_file: - ./middleware.env diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 0019835357..8f84c0c0f3 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -79,6 +79,7 @@ x-shared-env: &shared-api-worker-env BROKER_USE_SSL: ${BROKER_USE_SSL:-false} CELERY_USE_SENTINEL: ${CELERY_USE_SENTINEL:-false} CELERY_SENTINEL_MASTER_NAME: ${CELERY_SENTINEL_MASTER_NAME:-} + CELERY_SENTINEL_PASSWORD: ${CELERY_SENTINEL_PASSWORD:-} CELERY_SENTINEL_SOCKET_TIMEOUT: ${CELERY_SENTINEL_SOCKET_TIMEOUT:-0.1} WEB_API_CORS_ALLOW_ORIGINS: ${WEB_API_CORS_ALLOW_ORIGINS:-*} CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*} @@ -516,7 +517,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.5.0 + image: langgenius/dify-api:1.5.1 restart: always environment: # Use the shared environment variables. @@ -545,7 +546,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.5.0 + image: langgenius/dify-api:1.5.1 restart: always environment: # Use the shared environment variables. @@ -571,7 +572,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.5.0 + image: langgenius/dify-web:1.5.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -656,7 +657,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.1.2-local + image: langgenius/dify-plugin-daemon:0.1.3-local restart: always environment: # Use the shared environment variables. 
diff --git a/web/app/(commonLayout)/apps/AppCard.tsx b/web/app/(commonLayout)/apps/AppCard.tsx index 31b9ed87c2..b7c9a2eee4 100644 --- a/web/app/(commonLayout)/apps/AppCard.tsx +++ b/web/app/(commonLayout)/apps/AppCard.tsx @@ -36,6 +36,7 @@ import AccessControl from '@/app/components/app/app-access-control' import { AccessMode } from '@/models/access-control' import { useGlobalPublicStore } from '@/context/global-public-context' import { formatTime } from '@/utils/time' +import { useGetUserCanAccessApp } from '@/service/access-control' export type AppCardProps = { app: App @@ -190,6 +191,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => { }, [onRefresh, mutateApps, setShowAccessControl]) const Operations = (props: HtmlContentProps) => { + const { data: userCanAccessApp, isLoading: isGettingUserCanAccessApp } = useGetUserCanAccessApp({ appId: app?.id, enabled: (!!props?.open && systemFeatures.webapp_auth.enabled) }) const onMouseLeave = async () => { props.onClose?.() } @@ -267,10 +269,14 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => { )} - - + { + (isGettingUserCanAccessApp || !userCanAccessApp?.result) ? 
null : <> + + + + } { systemFeatures.webapp_auth.enabled && isCurrentWorkspaceEditor && <> diff --git a/web/app/(shareLayout)/layout.tsx b/web/app/(shareLayout)/layout.tsx index 78b8835009..d057ba7599 100644 --- a/web/app/(shareLayout)/layout.tsx +++ b/web/app/(shareLayout)/layout.tsx @@ -25,10 +25,13 @@ const Layout: FC<{ } let appCode: string | null = null - if (redirectUrl) - appCode = redirectUrl?.split('/').pop() || null - else + if (redirectUrl) { + const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`) + appCode = url.pathname.split('/').pop() || null + } + else { appCode = pathname.split('/').pop() || null + } if (!appCode) return diff --git a/web/app/(shareLayout)/webapp-signin/check-code/page.tsx b/web/app/(shareLayout)/webapp-signin/check-code/page.tsx index 1b8f18c98f..a2ba620ace 100644 --- a/web/app/(shareLayout)/webapp-signin/check-code/page.tsx +++ b/web/app/(shareLayout)/webapp-signin/check-code/page.tsx @@ -25,7 +25,10 @@ export default function CheckCode() { const redirectUrl = searchParams.get('redirect_url') const getAppCodeFromRedirectUrl = useCallback(() => { - const appCode = redirectUrl?.split('/').pop() + if (!redirectUrl) + return null + const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`) + const appCode = url.pathname.split('/').pop() if (!appCode) return null @@ -62,7 +65,7 @@ export default function CheckCode() { localStorage.setItem('webapp_access_token', ret.data.access_token) const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: ret.data.access_token }) await setAccessToken(appCode, tokenResp.access_token) - router.replace(redirectUrl) + router.replace(decodeURIComponent(redirectUrl)) } } catch (error) { console.error(error) } diff --git a/web/app/(shareLayout)/webapp-signin/components/external-member-sso-auth.tsx b/web/app/(shareLayout)/webapp-signin/components/external-member-sso-auth.tsx index e9b15ae331..612a9677a6 100644 --- 
a/web/app/(shareLayout)/webapp-signin/components/external-member-sso-auth.tsx +++ b/web/app/(shareLayout)/webapp-signin/components/external-member-sso-auth.tsx @@ -23,7 +23,10 @@ const ExternalMemberSSOAuth = () => { } const getAppCodeFromRedirectUrl = useCallback(() => { - const appCode = redirectUrl?.split('/').pop() + if (!redirectUrl) + return null + const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`) + const appCode = url.pathname.split('/').pop() if (!appCode) return null diff --git a/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx b/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx index d9e56af1b8..2201b28a2f 100644 --- a/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx +++ b/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx @@ -1,3 +1,4 @@ +'use client' import Link from 'next/link' import { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' @@ -33,7 +34,10 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut const redirectUrl = searchParams.get('redirect_url') const getAppCodeFromRedirectUrl = useCallback(() => { - const appCode = redirectUrl?.split('/').pop() + if (!redirectUrl) + return null + const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`) + const appCode = url.pathname.split('/').pop() if (!appCode) return null @@ -87,7 +91,7 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut localStorage.setItem('webapp_access_token', res.data.access_token) const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: res.data.access_token }) await setAccessToken(appCode, tokenResp.access_token) - router.replace(redirectUrl) + router.replace(decodeURIComponent(redirectUrl)) } else { Toast.notify({ diff --git a/web/app/(shareLayout)/webapp-signin/components/sso-auth.tsx 
b/web/app/(shareLayout)/webapp-signin/components/sso-auth.tsx index 5d649322ba..bcba572644 100644 --- a/web/app/(shareLayout)/webapp-signin/components/sso-auth.tsx +++ b/web/app/(shareLayout)/webapp-signin/components/sso-auth.tsx @@ -23,7 +23,10 @@ const SSOAuth: FC = ({ const redirectUrl = searchParams.get('redirect_url') const getAppCodeFromRedirectUrl = useCallback(() => { - const appCode = redirectUrl?.split('/').pop() + if (!redirectUrl) + return null + const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`) + const appCode = url.pathname.split('/').pop() if (!appCode) return null diff --git a/web/app/(shareLayout)/webapp-signin/page.tsx b/web/app/(shareLayout)/webapp-signin/page.tsx index 07b7c88430..967516c416 100644 --- a/web/app/(shareLayout)/webapp-signin/page.tsx +++ b/web/app/(shareLayout)/webapp-signin/page.tsx @@ -46,7 +46,10 @@ const WebSSOForm: FC = () => { } const getAppCodeFromRedirectUrl = useCallback(() => { - const appCode = redirectUrl?.split('/').pop() + if (!redirectUrl) + return null + const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`) + const appCode = url.pathname.split('/').pop() if (!appCode) return null @@ -63,20 +66,20 @@ const WebSSOForm: FC = () => { localStorage.setItem('webapp_access_token', tokenFromUrl) const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: tokenFromUrl }) await setAccessToken(appCode, tokenResp.access_token) - router.replace(redirectUrl) + router.replace(decodeURIComponent(redirectUrl)) return } if (appCode && redirectUrl && localStorage.getItem('webapp_access_token')) { const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: localStorage.getItem('webapp_access_token') }) await setAccessToken(appCode, tokenResp.access_token) - router.replace(redirectUrl) + router.replace(decodeURIComponent(redirectUrl)) } })() }, [getAppCodeFromRedirectUrl, redirectUrl, router, tokenFromUrl, message]) useEffect(() => { if (webAppAccessMode 
&& webAppAccessMode === AccessMode.PUBLIC && redirectUrl) - router.replace(redirectUrl) + router.replace(decodeURIComponent(redirectUrl)) }, [webAppAccessMode, router, redirectUrl]) if (tokenFromUrl) { diff --git a/web/app/components/app/configuration/index.tsx b/web/app/components/app/configuration/index.tsx index ac054d014b..c7f52fe3d6 100644 --- a/web/app/components/app/configuration/index.tsx +++ b/web/app/components/app/configuration/index.tsx @@ -80,6 +80,8 @@ import { import PluginDependency from '@/app/components/workflow/plugin-dependency' import { supportFunctionCall } from '@/utils/tool-call' import { MittProvider } from '@/context/mitt-context' +import { fetchAndMergeValidCompletionParams } from '@/utils/completion-params' +import Toast from '@/app/components/base/toast' type PublishConfig = { modelConfig: ModelConfig @@ -453,7 +455,21 @@ const Configuration: FC = () => { ...visionConfig, enabled: supportVision, }, true) - setCompletionParams({}) + + try { + const { params: filtered, removedDetails } = await fetchAndMergeValidCompletionParams( + provider, + modelId, + completionParams, + ) + if (Object.keys(removedDetails).length) + Toast.notify({ type: 'warning', message: `${t('common.modelProvider.parametersInvalidRemoved')}: ${Object.entries(removedDetails).map(([k, reason]) => `${k} (${reason})`).join(', ')}` }) + setCompletionParams(filtered) + } + catch (e) { + Toast.notify({ type: 'error', message: t('common.error') }) + setCompletionParams({}) + } } const isShowVisionConfig = !!currModel?.features?.includes(ModelFeatureEnum.vision) diff --git a/web/app/components/app/log/list.tsx b/web/app/components/app/log/list.tsx index dd72c6c810..47f8c09e39 100644 --- a/web/app/components/app/log/list.tsx +++ b/web/app/components/app/log/list.tsx @@ -191,6 +191,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) { const { userProfile: { timezone } } = useAppContext() const { formatTime } = useTimestamp() const { onClose, appDetail } = 
useContext(DrawerContext) + const { notify } = useContext(ToastContext) const { currentLogItem, setCurrentLogItem, showMessageLogModal, setShowMessageLogModal, showPromptLogModal, setShowPromptLogModal, currentLogModalActiveTab } = useAppStore(useShallow(state => ({ currentLogItem: state.currentLogItem, setCurrentLogItem: state.setCurrentLogItem, @@ -312,18 +313,34 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) { return item })) }, [allChatItems]) - const handleAnnotationRemoved = useCallback((index: number) => { - setAllChatItems(allChatItems.map((item, i) => { - if (i === index) { - return { - ...item, - content: item.content, - annotation: undefined, - } + const handleAnnotationRemoved = useCallback(async (index: number): Promise => { + const annotation = allChatItems[index]?.annotation + + try { + if (annotation?.id) { + const { delAnnotation } = await import('@/service/annotation') + await delAnnotation(appDetail?.id || '', annotation.id) } - return item - })) - }, [allChatItems]) + + setAllChatItems(allChatItems.map((item, i) => { + if (i === index) { + return { + ...item, + content: item.content, + annotation: undefined, + } + } + return item + })) + + notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') }) + return true + } + catch { + notify({ type: 'error', message: t('common.actionMsg.modifiedUnsuccessfully') }) + return false + } + }, [allChatItems, appDetail?.id, t]) const fetchInitiated = useRef(false) diff --git a/web/app/components/base/button/sync-button.tsx b/web/app/components/base/button/sync-button.tsx new file mode 100644 index 0000000000..013c86889a --- /dev/null +++ b/web/app/components/base/button/sync-button.tsx @@ -0,0 +1,27 @@ +'use client' +import type { FC } from 'react' +import React from 'react' +import { RiRefreshLine } from '@remixicon/react' +import cn from '@/utils/classnames' +import TooltipPlus from '@/app/components/base/tooltip' + +type Props = { + className?: string, + popupContent?: 
string, + onClick: () => void +} + +const SyncButton: FC = ({ + className, + popupContent = '', + onClick, +}) => { + return ( + +
+ +
+
+ ) +} +export default React.memo(SyncButton) diff --git a/web/app/components/base/icons/assets/public/llm/openai-teal.svg b/web/app/components/base/icons/assets/public/llm/openai-teal.svg new file mode 100644 index 0000000000..359cb532b6 --- /dev/null +++ b/web/app/components/base/icons/assets/public/llm/openai-teal.svg @@ -0,0 +1,4 @@ + + + + diff --git a/web/app/components/base/icons/assets/public/llm/openai-yellow.svg b/web/app/components/base/icons/assets/public/llm/openai-yellow.svg new file mode 100644 index 0000000000..015eb74adc --- /dev/null +++ b/web/app/components/base/icons/assets/public/llm/openai-yellow.svg @@ -0,0 +1,4 @@ + + + + diff --git a/web/app/components/base/input-number/index.spec.tsx b/web/app/components/base/input-number/index.spec.tsx index 8dfd1184b0..891cbd21e3 100644 --- a/web/app/components/base/input-number/index.spec.tsx +++ b/web/app/components/base/input-number/index.spec.tsx @@ -18,7 +18,7 @@ describe('InputNumber Component', () => { it('renders input with default values', () => { render() - const input = screen.getByRole('textbox') + const input = screen.getByRole('spinbutton') expect(input).toBeInTheDocument() }) @@ -56,7 +56,7 @@ describe('InputNumber Component', () => { it('handles direct input changes', () => { render() - const input = screen.getByRole('textbox') + const input = screen.getByRole('spinbutton') fireEvent.change(input, { target: { value: '42' } }) expect(defaultProps.onChange).toHaveBeenCalledWith(42) @@ -64,7 +64,7 @@ describe('InputNumber Component', () => { it('handles empty input', () => { render() - const input = screen.getByRole('textbox') + const input = screen.getByRole('spinbutton') fireEvent.change(input, { target: { value: '' } }) expect(defaultProps.onChange).toHaveBeenCalledWith(undefined) @@ -72,7 +72,7 @@ describe('InputNumber Component', () => { it('handles invalid input', () => { render() - const input = screen.getByRole('textbox') + const input = screen.getByRole('spinbutton') 
fireEvent.change(input, { target: { value: 'abc' } }) expect(defaultProps.onChange).not.toHaveBeenCalled() @@ -86,7 +86,7 @@ describe('InputNumber Component', () => { it('disables controls when disabled prop is true', () => { render() - const input = screen.getByRole('textbox') + const input = screen.getByRole('spinbutton') const incrementBtn = screen.getByRole('button', { name: /increment/i }) const decrementBtn = screen.getByRole('button', { name: /decrement/i }) diff --git a/web/app/components/base/input-number/index.tsx b/web/app/components/base/input-number/index.tsx index 9024575abd..4282f18800 100644 --- a/web/app/components/base/input-number/index.tsx +++ b/web/app/components/base/input-number/index.tsx @@ -84,8 +84,8 @@ export const InputNumber: FC = (props) => { return
{ const codeBlockRegex = /```[\s\S]*?```/g const codeBlocks = content.match(codeBlockRegex) || [] + const escapeReplacement = (str: string) => str.replace(/\$/g, '_TMP_REPLACE_DOLLAR_') let processedContent = content.replace(codeBlockRegex, 'CODE_BLOCK_PLACEHOLDER') processedContent = flow([ @@ -21,9 +22,11 @@ export const preprocessLaTeX = (content: string) => { ])(processedContent) codeBlocks.forEach((block) => { - processedContent = processedContent.replace('CODE_BLOCK_PLACEHOLDER', block) + processedContent = processedContent.replace('CODE_BLOCK_PLACEHOLDER', escapeReplacement(block)) }) + processedContent = processedContent.replace(/_TMP_REPLACE_DOLLAR_/g, '$') + return processedContent } diff --git a/web/app/components/base/popover/index.tsx b/web/app/components/base/popover/index.tsx index 2a831e0c24..0e7c384564 100644 --- a/web/app/components/base/popover/index.tsx +++ b/web/app/components/base/popover/index.tsx @@ -3,6 +3,7 @@ import { Fragment, cloneElement, useRef } from 'react' import cn from '@/utils/classnames' export type HtmlContentProps = { + open?: boolean onClose?: () => void onClick?: () => void } @@ -100,7 +101,8 @@ export default function CustomPopover({ } > {cloneElement(htmlContent as React.ReactElement, { - onClose: () => onMouseLeave(open), + open, + onClose: close, ...(manualClose ? 
{ onClick: close, diff --git a/web/app/components/base/select/index.tsx b/web/app/components/base/select/index.tsx index fa8730f698..77d229672f 100644 --- a/web/app/components/base/select/index.tsx +++ b/web/app/components/base/select/index.tsx @@ -1,10 +1,10 @@ 'use client' import type { FC } from 'react' -import React, { useEffect, useState } from 'react' +import React, { useEffect, useRef, useState } from 'react' import { Combobox, ComboboxButton, ComboboxInput, ComboboxOption, ComboboxOptions, Listbox, ListboxButton, ListboxOption, ListboxOptions } from '@headlessui/react' import { ChevronDownIcon, ChevronUpIcon, XMarkIcon } from '@heroicons/react/20/solid' import Badge from '../badge/index' -import { RiCheckLine } from '@remixicon/react' +import { RiCheckLine, RiLoader4Line } from '@remixicon/react' import { useTranslation } from 'react-i18next' import classNames from '@/utils/classnames' import { @@ -51,6 +51,8 @@ export type ISelectProps = { item: Item selected: boolean }) => React.ReactNode + isLoading?: boolean + onOpenChange?: (open: boolean) => void } const Select: FC = ({ className, @@ -114,7 +116,7 @@ const Select: FC = ({ if (!disabled) setOpen(!open) } - } className={classNames(`flex items-center h-9 w-full rounded-lg border-0 ${bgClassName} py-1.5 pl-3 pr-10 shadow-sm sm:text-sm sm:leading-6 focus-visible:outline-none focus-visible:bg-state-base-hover group-hover:bg-state-base-hover`, optionClassName)}> + } className={classNames(`flex h-9 w-full items-center rounded-lg border-0 ${bgClassName} py-1.5 pl-3 pr-10 shadow-sm focus-visible:bg-state-base-hover focus-visible:outline-none group-hover:bg-state-base-hover sm:text-sm sm:leading-6`, optionClassName)}>
{selectedItem?.name}
} = ({ value={item} className={({ active }: { active: boolean }) => classNames( - 'relative cursor-default select-none py-2 pl-3 pr-9 rounded-lg hover:bg-state-base-hover text-text-secondary', + 'relative cursor-default select-none rounded-lg py-2 pl-3 pr-9 text-text-secondary hover:bg-state-base-hover', active ? 'bg-state-base-hover' : '', optionClassName, ) @@ -178,17 +180,20 @@ const SimpleSelect: FC = ({ defaultValue = 1, disabled = false, onSelect, + onOpenChange, placeholder, optionWrapClassName, optionClassName, hideChecked, notClearable, renderOption, + isLoading = false, }) => { const { t } = useTranslation() const localPlaceholder = placeholder || t('common.placeholder.select') const [selectedItem, setSelectedItem] = useState(null) + useEffect(() => { let defaultSelect = null const existed = items.find((item: Item) => item.value === defaultValue) @@ -199,8 +204,10 @@ const SimpleSelect: FC = ({ // eslint-disable-next-line react-hooks/exhaustive-deps }, [defaultValue]) + const listboxRef = useRef(null) + return ( - { if (!disabled) { @@ -212,10 +219,17 @@ const SimpleSelect: FC = ({
{renderTrigger && {renderTrigger(selectedItem)}} {!renderTrigger && ( - - {selectedItem?.name ?? localPlaceholder} + { + // get data-open, use setTimeout to ensure the attribute is set + setTimeout(() => { + if (listboxRef.current) + onOpenChange?.(listboxRef.current.getAttribute('data-open') !== null) + }) + }} className={classNames(`flex h-full w-full items-center rounded-lg border-0 bg-components-input-bg-normal pl-3 pr-10 focus-visible:bg-state-base-hover-alt focus-visible:outline-none group-hover/simple-select:bg-state-base-hover-alt sm:text-sm sm:leading-6 ${disabled ? 'cursor-not-allowed' : 'cursor-pointer'}`, className)}> + {selectedItem?.name ?? localPlaceholder} - {(selectedItem && !notClearable) + {isLoading ? + : (selectedItem && !notClearable) ? ( { @@ -237,14 +251,14 @@ const SimpleSelect: FC = ({ )} - {!disabled && ( - + {(!disabled) && ( + {items.map((item: Item) => ( = ({ : (
@@ -344,7 +358,7 @@ const PortalSelect: FC = ({
{items.map((item: Item) => (
= ({ onChange(copyItems) } + const handleNewTag = useCallback((value: string) => { + const valueTrimmed = value.trim() + if (!valueTrimmed) { + notify({ type: 'error', message: t('datasetDocuments.segment.keywordEmpty') }) + return + } + + if ((items.find(item => item === valueTrimmed))) { + notify({ type: 'error', message: t('datasetDocuments.segment.keywordDuplicate') }) + return + } + + if (valueTrimmed.length > 20) { + notify({ type: 'error', message: t('datasetDocuments.segment.keywordError') }) + return + } + + onChange([...items, valueTrimmed]) + setTimeout(() => { + setValue('') + }) + }, [items, onChange, notify, t]) + const handleKeyDown = (e: KeyboardEvent) => { if (isSpecialMode && e.key === 'Enter') setValue(`${value}↵`) @@ -50,24 +72,12 @@ const TagInput: FC = ({ if (isSpecialMode) e.preventDefault() - const valueTrimmed = value.trim() - if (!valueTrimmed || (items.find(item => item === valueTrimmed))) - return - - if (valueTrimmed.length > 20) { - notify({ type: 'error', message: t('datasetDocuments.segment.keywordError') }) - return - } - - onChange([...items, valueTrimmed]) - setTimeout(() => { - setValue('') - }) + handleNewTag(value) } } const handleBlur = () => { - setValue('') + handleNewTag(value) setFocused(false) } diff --git a/web/app/components/datasets/create/step-two/index.tsx b/web/app/components/datasets/create/step-two/index.tsx index ff0862ce65..21269f4fbf 100644 --- a/web/app/components/datasets/create/step-two/index.tsx +++ b/web/app/components/datasets/create/step-two/index.tsx @@ -508,13 +508,15 @@ const StepTwo = ({ const separator = rules.segmentation.separator const max = rules.segmentation.max_tokens const overlap = rules.segmentation.chunk_overlap + const isHierarchicalDocument = documentDetail.doc_form === ChunkingMode.parentChild + || (rules.parent_mode && rules.subchunk_segmentation) setSegmentIdentifier(separator) setMaxChunkLength(max) setOverlap(overlap!) 
setRules(rules.pre_processing_rules) setDefaultConfig(rules) - if (documentDetail.dataset_process_rule.mode === 'hierarchical') { + if (isHierarchicalDocument) { setParentChildConfig({ chunkForContext: rules.parent_mode || 'paragraph', parent: { diff --git a/web/app/components/datasets/documents/index.tsx b/web/app/components/datasets/documents/index.tsx index d21ae215d7..355a20ae69 100644 --- a/web/app/components/datasets/documents/index.tsx +++ b/web/app/components/datasets/documents/index.tsx @@ -30,6 +30,7 @@ import useEditDocumentMetadata from '../metadata/hooks/use-edit-dataset-metadata import DatasetMetadataDrawer from '../metadata/metadata-dataset/dataset-metadata-drawer' import StatusWithAction from '../common/document-status-with-action/status-with-action' import { useDocLink } from '@/context/i18n' +import { useFetchDefaultProcessRule } from '@/service/knowledge/use-create-dataset' const FolderPlusIcon = ({ className }: React.SVGProps) => { return @@ -183,6 +184,8 @@ const Documents: FC = ({ datasetId }) => { router.push(`/datasets/${datasetId}/documents/create`) } + const fetchDefaultProcessRuleMutation = useFetchDefaultProcessRule() + const handleSaveNotionPageSelected = async (selectedPages: NotionPage[]) => { const workspacesMap = groupBy(selectedPages, 'workspace_id') const workspaces = Object.keys(workspacesMap).map((workspaceId) => { @@ -191,6 +194,7 @@ const Documents: FC = ({ datasetId }) => { pages: workspacesMap[workspaceId], } }) + const { rules } = await fetchDefaultProcessRuleMutation.mutateAsync('/datasets/process-rule') const params = { data_source: { type: dataset?.data_source_type, @@ -214,7 +218,7 @@ const Documents: FC = ({ datasetId }) => { }, indexing_technique: dataset?.indexing_technique, process_rule: { - rules: {}, + rules, mode: ProcessMode.general, }, } as CreateDocumentReq diff --git a/web/app/components/datasets/list/template/template.en.mdx b/web/app/components/datasets/list/template/template.en.mdx index 
91293768b7..ebb2e6a806 100644 --- a/web/app/components/datasets/list/template/template.en.mdx +++ b/web/app/components/datasets/list/template/template.en.mdx @@ -1124,6 +1124,129 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
+ + + + Get a document's detail. + ### Path + - `dataset_id` (string) Dataset ID + - `document_id` (string) Document ID + + ### Query + - `metadata` (string) Metadata filter, can be `all`, `only`, or `without`. Default is `all`. + + ### Response + Returns the document's detail. + + + ### Request Example + + ```bash {{ title: 'cURL' }} + curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \ + -H 'Authorization: Bearer {api_key}' + ``` + + + ### Response Example + + ```json {{ title: 'Response' }} + { + "id": "f46ae30c-5c11-471b-96d0-464f5f32a7b2", + "position": 1, + "data_source_type": "upload_file", + "data_source_info": { + "upload_file": { + ... + } + }, + "dataset_process_rule_id": "24b99906-845e-499f-9e3c-d5565dd6962c", + "dataset_process_rule": { + "mode": "hierarchical", + "rules": { + "pre_processing_rules": [ + { + "id": "remove_extra_spaces", + "enabled": true + }, + { + "id": "remove_urls_emails", + "enabled": false + } + ], + "segmentation": { + "separator": "**********page_ending**********", + "max_tokens": 1024, + "chunk_overlap": 0 + }, + "parent_mode": "paragraph", + "subchunk_segmentation": { + "separator": "\n", + "max_tokens": 512, + "chunk_overlap": 0 + } + } + }, + "document_process_rule": { + "id": "24b99906-845e-499f-9e3c-d5565dd6962c", + "dataset_id": "48a0db76-d1a9-46c1-ae35-2baaa919a8a9", + "mode": "hierarchical", + "rules": { + "pre_processing_rules": [ + { + "id": "remove_extra_spaces", + "enabled": true + }, + { + "id": "remove_urls_emails", + "enabled": false + } + ], + "segmentation": { + "separator": "**********page_ending**********", + "max_tokens": 1024, + "chunk_overlap": 0 + }, + "parent_mode": "paragraph", + "subchunk_segmentation": { + "separator": "\n", + "max_tokens": 512, + "chunk_overlap": 0 + } + } + }, + "name": "xxxx", + "created_from": "web", + "created_by": "17f71940-a7b5-4c77-b60f-2bd645c1ffa0", + "created_at": 1750464191, + "tokens": null, + "indexing_status": "waiting", + "completed_at": 
null, + "updated_at": 1750464191, + "indexing_latency": null, + "error": null, + "enabled": true, + "disabled_at": null, + "disabled_by": null, + "archived": false, + "segment_count": 0, + "average_segment_length": 0, + "hit_count": null, + "display_status": "queuing", + "doc_form": "hierarchical_model", + "doc_language": "Chinese Simplified" + } + ``` + + + +___ +
+ + + + + ドキュメントの詳細を取得. + ### Path + - `dataset_id` (string) ナレッジベースID + - `document_id` (string) ドキュメントID + + ### Query + - `metadata` (string) metadataのフィルター条件 `all`、`only`、または`without`。デフォルトは `all`。 + + ### Response + ナレッジベースドキュメントの詳細を返す. + + + ### Request Example + + ```bash {{ title: 'cURL' }} + curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \ + -H 'Authorization: Bearer {api_key}' + ``` + + + ### Response Example + + ```json {{ title: 'Response' }} + { + "id": "f46ae30c-5c11-471b-96d0-464f5f32a7b2", + "position": 1, + "data_source_type": "upload_file", + "data_source_info": { + "upload_file": { + ... + } + }, + "dataset_process_rule_id": "24b99906-845e-499f-9e3c-d5565dd6962c", + "dataset_process_rule": { + "mode": "hierarchical", + "rules": { + "pre_processing_rules": [ + { + "id": "remove_extra_spaces", + "enabled": true + }, + { + "id": "remove_urls_emails", + "enabled": false + } + ], + "segmentation": { + "separator": "**********page_ending**********", + "max_tokens": 1024, + "chunk_overlap": 0 + }, + "parent_mode": "paragraph", + "subchunk_segmentation": { + "separator": "\n", + "max_tokens": 512, + "chunk_overlap": 0 + } + } + }, + "document_process_rule": { + "id": "24b99906-845e-499f-9e3c-d5565dd6962c", + "dataset_id": "48a0db76-d1a9-46c1-ae35-2baaa919a8a9", + "mode": "hierarchical", + "rules": { + "pre_processing_rules": [ + { + "id": "remove_extra_spaces", + "enabled": true + }, + { + "id": "remove_urls_emails", + "enabled": false + } + ], + "segmentation": { + "separator": "**********page_ending**********", + "max_tokens": 1024, + "chunk_overlap": 0 + }, + "parent_mode": "paragraph", + "subchunk_segmentation": { + "separator": "\n", + "max_tokens": 512, + "chunk_overlap": 0 + } + } + }, + "name": "xxxx", + "created_from": "web", + "created_by": "17f71940-a7b5-4c77-b60f-2bd645c1ffa0", + "created_at": 1750464191, + "tokens": null, + "indexing_status": "waiting", + "completed_at": null, + "updated_at": 1750464191, + 
"indexing_latency": null, + "error": null, + "enabled": true, + "disabled_at": null, + "disabled_by": null, + "archived": false, + "segment_count": 0, + "average_segment_length": 0, + "hit_count": null, + "display_status": "queuing", + "doc_form": "hierarchical_model", + "doc_language": "Chinese Simplified" + } + ``` + + + +___ +
+ + + + + + 获取文档详情. + ### Path + - `dataset_id` (string) 知识库 ID + - `document_id` (string) 文档 ID + + ### Query + - `metadata` (string) metadata 过滤条件 `all`, `only`, 或者 `without`. 默认是 `all`. + + ### Response + 返回知识库文档的详情. + + + ### Request Example + + ```bash {{ title: 'cURL' }} + curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \ + -H 'Authorization: Bearer {api_key}' + ``` + + + ### Response Example + + ```json {{ title: 'Response' }} + { + "id": "f46ae30c-5c11-471b-96d0-464f5f32a7b2", + "position": 1, + "data_source_type": "upload_file", + "data_source_info": { + "upload_file": { + ... + } + }, + "dataset_process_rule_id": "24b99906-845e-499f-9e3c-d5565dd6962c", + "dataset_process_rule": { + "mode": "hierarchical", + "rules": { + "pre_processing_rules": [ + { + "id": "remove_extra_spaces", + "enabled": true + }, + { + "id": "remove_urls_emails", + "enabled": false + } + ], + "segmentation": { + "separator": "**********page_ending**********", + "max_tokens": 1024, + "chunk_overlap": 0 + }, + "parent_mode": "paragraph", + "subchunk_segmentation": { + "separator": "\n", + "max_tokens": 512, + "chunk_overlap": 0 + } + } + }, + "document_process_rule": { + "id": "24b99906-845e-499f-9e3c-d5565dd6962c", + "dataset_id": "48a0db76-d1a9-46c1-ae35-2baaa919a8a9", + "mode": "hierarchical", + "rules": { + "pre_processing_rules": [ + { + "id": "remove_extra_spaces", + "enabled": true + }, + { + "id": "remove_urls_emails", + "enabled": false + } + ], + "segmentation": { + "separator": "**********page_ending**********", + "max_tokens": 1024, + "chunk_overlap": 0 + }, + "parent_mode": "paragraph", + "subchunk_segmentation": { + "separator": "\n", + "max_tokens": 512, + "chunk_overlap": 0 + } + } + }, + "name": "xxxx", + "created_from": "web", + "created_by": "17f71940-a7b5-4c77-b60f-2bd645c1ffa0", + "created_at": 1750464191, + "tokens": null, + "indexing_status": "waiting", + "completed_at": null, + "updated_at": 1750464191, + "indexing_latency": 
null, + "error": null, + "enabled": true, + "disabled_at": null, + "disabled_by": null, + "archived": false, + "segment_count": 0, + "average_segment_length": 0, + "hit_count": null, + "display_status": "queuing", + "doc_form": "hierarchical_model", + "doc_language": "Chinese Simplified" + } + ``` + + + +___ +
+ + = ({ const { isCurrentWorkspaceManager } = useAppContext() const [canConnectNotion, setCanConnectNotion] = useState(false) const { data } = useSWR(canConnectNotion ? '/oauth/data-source/notion' : null, fetchNotionConnection) + const { t } = useTranslation() const connected = !!workspaces.length @@ -51,9 +54,19 @@ const DataSourceNotion: FC = ({ } useEffect(() => { - if (data?.data) - window.location.href = data.data - }, [data]) + if (data && 'data' in data) { + if (data.data && typeof data.data === 'string' && data.data.startsWith('http')) { + window.location.href = data.data + } + else if (data.data === 'internal') { + Toast.notify({ + type: 'info', + message: t('common.dataSource.notion.integratedAlert'), + }) + } + } + }, [data, t]) + return ( { const pathname = usePathname() const isBordered = ['/apps', '/datasets', '/datasets/create', '/tools'].includes(pathname) - // // Check if the current path is a workflow canvas & fullscreen + // Check if the current path is a workflow canvas & fullscreen const inWorkflowCanvas = pathname.endsWith('/workflow') const workflowCanvasMaximize = localStorage.getItem('workflow-canvas-maximize') === 'true' const [hideHeader, setHideHeader] = useState(workflowCanvasMaximize) @@ -25,14 +25,12 @@ const HeaderWrapper = ({ setHideHeader(v.payload) }) - if (hideHeader && inWorkflowCanvas) - return null - return (
{children} diff --git a/web/app/components/plugins/plugin-detail-panel/app-selector/app-inputs-panel.tsx b/web/app/components/plugins/plugin-detail-panel/app-selector/app-inputs-panel.tsx index b22f59fe2c..d3ac9d7d2e 100644 --- a/web/app/components/plugins/plugin-detail-panel/app-selector/app-inputs-panel.tsx +++ b/web/app/components/plugins/plugin-detail-panel/app-selector/app-inputs-panel.tsx @@ -62,7 +62,7 @@ const AppInputsPanel = ({ return [] let inputFormSchema = [] if (isBasicApp) { - inputFormSchema = currentApp.model_config.user_input_form.filter((item: any) => !item.external_data_tool).map((item: any) => { + inputFormSchema = currentApp.model_config?.user_input_form?.filter((item: any) => !item.external_data_tool).map((item: any) => { if (item.paragraph) { return { ...item.paragraph, @@ -108,10 +108,10 @@ const AppInputsPanel = ({ type: 'text-input', required: false, } - }) + }) || [] } else { - const startNode = currentWorkflow?.graph.nodes.find(node => node.data.type === BlockEnum.Start) as any + const startNode = currentWorkflow?.graph?.nodes.find(node => node.data.type === BlockEnum.Start) as any inputFormSchema = startNode?.data.variables.map((variable: any) => { if (variable.type === InputVarType.multiFiles) { return { @@ -132,7 +132,7 @@ const AppInputsPanel = ({ ...variable, required: false, } - }) + }) || [] } if ((currentApp.mode === 'completion' || currentApp.mode === 'workflow') && basicAppFileConfig.enabled) { inputFormSchema.push({ @@ -144,7 +144,7 @@ const AppInputsPanel = ({ fileUploadConfig, }) } - return inputFormSchema + return inputFormSchema || [] }, [basicAppFileConfig, currentApp, currentWorkflow, fileUploadConfig, isBasicApp]) const handleFormChange = (value: Record) => { diff --git a/web/app/components/plugins/plugin-detail-panel/endpoint-modal.tsx b/web/app/components/plugins/plugin-detail-panel/endpoint-modal.tsx index fd862720af..130773e0c2 100644 --- a/web/app/components/plugins/plugin-detail-panel/endpoint-modal.tsx +++ 
b/web/app/components/plugins/plugin-detail-panel/endpoint-modal.tsx @@ -18,6 +18,15 @@ type Props = { onSaved: (value: Record) => void } +const extractDefaultValues = (schemas: any[]) => { + const result: Record = {} + for (const field of schemas) { + if (field.default !== undefined) + result[field.name] = field.default + } + return result +} + const EndpointModal: FC = ({ formSchemas, defaultValues = {}, @@ -26,7 +35,10 @@ const EndpointModal: FC = ({ }) => { const getValueFromI18nObject = useRenderI18nObject() const { t } = useTranslation() - const [tempCredential, setTempCredential] = React.useState(defaultValues) + const initialValues = Object.keys(defaultValues).length > 0 + ? defaultValues + : extractDefaultValues(formSchemas) + const [tempCredential, setTempCredential] = React.useState(initialValues) const handleSave = () => { for (const field of formSchemas) { diff --git a/web/app/components/plugins/plugin-detail-panel/multiple-tool-selector/index.tsx b/web/app/components/plugins/plugin-detail-panel/multiple-tool-selector/index.tsx index 7f5f22896a..fef79644cd 100644 --- a/web/app/components/plugins/plugin-detail-panel/multiple-tool-selector/index.tsx +++ b/web/app/components/plugins/plugin-detail-panel/multiple-tool-selector/index.tsx @@ -117,6 +117,7 @@ const MultipleToolSelector = ({ )} {!disabled && ( { + setCollapse(false) setOpen(!open) setPanelShowState(true) }}> @@ -126,23 +127,6 @@ const MultipleToolSelector = ({
{!collapse && ( <> -
- } - panelShowState={panelShowState} - onPanelShowStateChange={setPanelShowState} - isEdit={false} - /> {value.length === 0 && (
{t('plugin.detailPanel.toolSelector.empty')}
)} @@ -164,6 +148,23 @@ const MultipleToolSelector = ({ ))} )} +
+ } + panelShowState={panelShowState} + onPanelShowStateChange={setPanelShowState} + isEdit={false} + /> ) } diff --git a/web/app/components/plugins/plugin-detail-panel/tool-selector/index.tsx b/web/app/components/plugins/plugin-detail-panel/tool-selector/index.tsx index ca802414f3..350fe50933 100644 --- a/web/app/components/plugins/plugin-detail-panel/tool-selector/index.tsx +++ b/web/app/components/plugins/plugin-detail-panel/tool-selector/index.tsx @@ -275,7 +275,7 @@ const ToolSelector: FC = ({ /> )} - +
{!isShowSettingAuth && ( <> diff --git a/web/app/components/share/text-generation/index.tsx b/web/app/components/share/text-generation/index.tsx index 9dc7ffcd79..4be6b18958 100644 --- a/web/app/components/share/text-generation/index.tsx +++ b/web/app/components/share/text-generation/index.tsx @@ -85,14 +85,6 @@ const TextGeneration: FC = ({ const router = useRouter() const pathname = usePathname() - useEffect(() => { - const params = new URLSearchParams(searchParams) - if (params.has('mode')) { - params.delete('mode') - router.replace(`${pathname}?${params.toString()}`) - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, []) // Notice this situation isCallBatchAPI but not in batch tab const [isCallBatchAPI, setIsCallBatchAPI] = useState(false) diff --git a/web/app/components/workflow-app/components/workflow-header/features-trigger.tsx b/web/app/components/workflow-app/components/workflow-header/features-trigger.tsx index eac117a8b0..be7a082c4a 100644 --- a/web/app/components/workflow-app/components/workflow-header/features-trigger.tsx +++ b/web/app/components/workflow-app/components/workflow-header/features-trigger.tsx @@ -3,7 +3,7 @@ import { useCallback, useMemo, } from 'react' -import { useNodes } from 'reactflow' +import { useStore as useReactflowStore } from 'reactflow' import { RiApps2AddLine } from '@remixicon/react' import { useTranslation } from 'react-i18next' import { @@ -22,7 +22,6 @@ import { BlockEnum, InputVarType, } from '@/app/components/workflow/types' -import type { StartNodeType } from '@/app/components/workflow/nodes/start/types' import { useToastContext } from '@/app/components/base/toast' import { usePublishWorkflow, useResetWorkflowVersionHistory } from '@/service/use-workflow' import type { PublishWorkflowParams } from '@/types/workflow' @@ -42,9 +41,9 @@ const FeaturesTrigger = () => { const publishedAt = useStore(s => s.publishedAt) const draftUpdatedAt = useStore(s => s.draftUpdatedAt) const toolPublished = useStore(s => 
s.toolPublished) - const nodes = useNodes() - const startNode = nodes.find(node => node.data.type === BlockEnum.Start) - const startVariables = startNode?.data.variables + const startVariables = useReactflowStore( + s => s.getNodes().find(node => node.data.type === BlockEnum.Start)?.data.variables, + ) const fileSettings = useFeatures(s => s.features.file) const variables = useMemo(() => { const data = startVariables || [] diff --git a/web/app/components/workflow-app/components/workflow-main.tsx b/web/app/components/workflow-app/components/workflow-main.tsx index 0803bafb29..dd4ab73df8 100644 --- a/web/app/components/workflow-app/components/workflow-main.tsx +++ b/web/app/components/workflow-app/components/workflow-main.tsx @@ -8,9 +8,12 @@ import type { WorkflowProps } from '@/app/components/workflow' import WorkflowChildren from './workflow-children' import { useAvailableNodesMetaData, + useConfigsMap, useDSL, useGetRunAndTraceUrl, + useInspectVarsCrud, useNodesSyncDraft, + useSetWorkflowVarsWithValue, useWorkflowRefreshDraft, useWorkflowRun, useWorkflowStartRun, @@ -70,6 +73,24 @@ const WorkflowMain = ({ exportCheck, handleExportDSL, } = useDSL() + const { fetchInspectVars } = useSetWorkflowVarsWithValue() + const { + hasNodeInspectVars, + hasSetInspectVar, + fetchInspectVarValue, + editInspectVarValue, + renameInspectVarName, + appendNodeInspectVars, + deleteInspectVar, + deleteNodeInspectorVars, + deleteAllInspectorVars, + isInspectVarEdited, + resetToLastRunVar, + invalidateSysVarValues, + resetConversationVar, + invalidateConversationVarValues, + } = useInspectVarsCrud() + const configsMap = useConfigsMap() const hooksStore = useMemo(() => { return { @@ -88,6 +109,22 @@ const WorkflowMain = ({ getWorkflowRunAndTraceUrl, exportCheck, handleExportDSL, + fetchInspectVars, + hasNodeInspectVars, + hasSetInspectVar, + fetchInspectVarValue, + editInspectVarValue, + renameInspectVarName, + appendNodeInspectVars, + deleteInspectVar, + deleteNodeInspectorVars, + 
deleteAllInspectorVars, + isInspectVarEdited, + resetToLastRunVar, + invalidateSysVarValues, + resetConversationVar, + invalidateConversationVarValues, + configsMap, } }, [ syncWorkflowDraftWhenPageClose, @@ -105,6 +142,22 @@ const WorkflowMain = ({ getWorkflowRunAndTraceUrl, exportCheck, handleExportDSL, + fetchInspectVars, + hasNodeInspectVars, + hasSetInspectVar, + fetchInspectVarValue, + editInspectVarValue, + renameInspectVarName, + appendNodeInspectVars, + deleteInspectVar, + deleteNodeInspectorVars, + deleteAllInspectorVars, + isInspectVarEdited, + resetToLastRunVar, + invalidateSysVarValues, + resetConversationVar, + invalidateConversationVarValues, + configsMap, ]) return ( diff --git a/web/app/components/workflow-app/hooks/index.ts b/web/app/components/workflow-app/hooks/index.ts index b17943afc5..2c2545ab2e 100644 --- a/web/app/components/workflow-app/hooks/index.ts +++ b/web/app/components/workflow-app/hooks/index.ts @@ -8,3 +8,6 @@ export * from './use-available-nodes-meta-data' export * from './use-workflow-refresh-draft' export * from './use-get-run-and-trace-url' export * from './use-DSL' +export * from './use-fetch-workflow-inspect-vars' +export * from './use-inspect-vars-crud' +export * from './use-configs-map' diff --git a/web/app/components/workflow-app/hooks/use-configs-map.ts b/web/app/components/workflow-app/hooks/use-configs-map.ts new file mode 100644 index 0000000000..0db4f77856 --- /dev/null +++ b/web/app/components/workflow-app/hooks/use-configs-map.ts @@ -0,0 +1,12 @@ +import { useMemo } from 'react' +import { useStore } from '@/app/components/workflow/store' + +export const useConfigsMap = () => { + const appId = useStore(s => s.appId) + return useMemo(() => { + return { + conversationVarsUrl: `apps/${appId}/workflows/draft/conversation-variables`, + systemVarsUrl: `apps/${appId}/workflows/draft/system-variables`, + } + }, [appId]) +} diff --git a/web/app/components/workflow-app/hooks/use-fetch-workflow-inspect-vars.ts 
b/web/app/components/workflow-app/hooks/use-fetch-workflow-inspect-vars.ts index 9d3ff84929..07580c097e 100644 --- a/web/app/components/workflow-app/hooks/use-fetch-workflow-inspect-vars.ts +++ b/web/app/components/workflow-app/hooks/use-fetch-workflow-inspect-vars.ts @@ -1,19 +1,23 @@ +import { useCallback } from 'react' import type { NodeWithVar, VarInInspect } from '@/types/workflow' -import { useWorkflowStore } from '../../workflow/store' +import { useWorkflowStore } from '@/app/components/workflow/store' import { useStoreApi } from 'reactflow' import type { Node } from '@/app/components/workflow/types' import { fetchAllInspectVars } from '@/service/workflow' import { useInvalidateConversationVarValues, useInvalidateSysVarValues } from '@/service/use-workflow' -import { useNodesInteractionsWithoutSync } from '../../workflow/hooks/use-nodes-interactions-without-sync' -const useSetWorkflowVarsWithValue = () => { +import { useNodesInteractionsWithoutSync } from '@/app/components/workflow/hooks/use-nodes-interactions-without-sync' +import { useConfigsMap } from './use-configs-map' + +export const useSetWorkflowVarsWithValue = () => { const workflowStore = useWorkflowStore() - const { setNodesWithInspectVars, appId } = workflowStore.getState() const store = useStoreApi() - const invalidateConversationVarValues = useInvalidateConversationVarValues(appId) - const invalidateSysVarValues = useInvalidateSysVarValues(appId) + const { conversationVarsUrl, systemVarsUrl } = useConfigsMap() + const invalidateConversationVarValues = useInvalidateConversationVarValues(conversationVarsUrl) + const invalidateSysVarValues = useInvalidateSysVarValues(systemVarsUrl) const { handleCancelAllNodeSuccessStatus } = useNodesInteractionsWithoutSync() - const setInspectVarsToStore = (inspectVars: VarInInspect[]) => { + const setInspectVarsToStore = useCallback((inspectVars: VarInInspect[]) => { + const { setNodesWithInspectVars } = workflowStore.getState() const { getNodes } = 
store.getState() const nodeArr = getNodes() const nodesKeyValue: Record = {} @@ -51,18 +55,17 @@ const useSetWorkflowVarsWithValue = () => { return nodeWithVar }) setNodesWithInspectVars(res) - } + }, [workflowStore, store]) - const fetchInspectVars = async () => { + const fetchInspectVars = useCallback(async () => { + const { appId } = workflowStore.getState() invalidateConversationVarValues() invalidateSysVarValues() const data = await fetchAllInspectVars(appId) setInspectVarsToStore(data) handleCancelAllNodeSuccessStatus() // to make sure clear node output show the unset status - } + }, [workflowStore, invalidateConversationVarValues, invalidateSysVarValues, setInspectVarsToStore, handleCancelAllNodeSuccessStatus]) return { fetchInspectVars, } } - -export default useSetWorkflowVarsWithValue diff --git a/web/app/components/workflow-app/hooks/use-inspect-vars-crud.ts b/web/app/components/workflow-app/hooks/use-inspect-vars-crud.ts new file mode 100644 index 0000000000..ce052b7ed4 --- /dev/null +++ b/web/app/components/workflow-app/hooks/use-inspect-vars-crud.ts @@ -0,0 +1,234 @@ +import { fetchNodeInspectVars } from '@/service/workflow' +import { useStore, useWorkflowStore } from '@/app/components/workflow/store' +import type { ValueSelector } from '@/app/components/workflow/types' +import type { VarInInspect } from '@/types/workflow' +import { VarInInspectType } from '@/types/workflow' +import { + useDeleteAllInspectorVars, + useDeleteInspectVar, + useDeleteNodeInspectorVars, + useEditInspectorVar, + useInvalidateConversationVarValues, + useInvalidateSysVarValues, + useResetConversationVar, + useResetToLastRunValue, +} from '@/service/use-workflow' +import { useCallback } from 'react' +import { isConversationVar, isENV, isSystemVar } from '@/app/components/workflow/nodes/_base/components/variable/utils' +import produce from 'immer' +import type { Node } from '@/app/components/workflow/types' +import { useNodesInteractionsWithoutSync } from 
'@/app/components/workflow/hooks/use-nodes-interactions-without-sync' +import { useEdgesInteractionsWithoutSync } from '@/app/components/workflow/hooks/use-edges-interactions-without-sync' +import { useConfigsMap } from './use-configs-map' + +export const useInspectVarsCrud = () => { + const workflowStore = useWorkflowStore() + const appId = useStore(s => s.appId) + const { conversationVarsUrl, systemVarsUrl } = useConfigsMap() + const invalidateConversationVarValues = useInvalidateConversationVarValues(conversationVarsUrl) + const { mutateAsync: doResetConversationVar } = useResetConversationVar(appId) + const { mutateAsync: doResetToLastRunValue } = useResetToLastRunValue(appId) + const invalidateSysVarValues = useInvalidateSysVarValues(systemVarsUrl) + + const { mutateAsync: doDeleteAllInspectorVars } = useDeleteAllInspectorVars(appId) + const { mutate: doDeleteNodeInspectorVars } = useDeleteNodeInspectorVars(appId) + const { mutate: doDeleteInspectVar } = useDeleteInspectVar(appId) + + const { mutateAsync: doEditInspectorVar } = useEditInspectorVar(appId) + const { handleCancelNodeSuccessStatus } = useNodesInteractionsWithoutSync() + const { handleEdgeCancelRunningStatus } = useEdgesInteractionsWithoutSync() + const getNodeInspectVars = useCallback((nodeId: string) => { + const { nodesWithInspectVars } = workflowStore.getState() + const node = nodesWithInspectVars.find(node => node.nodeId === nodeId) + return node + }, [workflowStore]) + + const getVarId = useCallback((nodeId: string, varName: string) => { + const node = getNodeInspectVars(nodeId) + if (!node) + return undefined + const varId = node.vars.find((varItem) => { + return varItem.selector[1] === varName + })?.id + return varId + }, [getNodeInspectVars]) + + const getInspectVar = useCallback((nodeId: string, name: string): VarInInspect | undefined => { + const node = getNodeInspectVars(nodeId) + if (!node) + return undefined + + const variable = node.vars.find((varItem) => { + return varItem.name === 
name + }) + return variable + }, [getNodeInspectVars]) + + const hasSetInspectVar = useCallback((nodeId: string, name: string, sysVars: VarInInspect[], conversationVars: VarInInspect[]) => { + const isEnv = isENV([nodeId]) + if (isEnv) // always have value + return true + const isSys = isSystemVar([nodeId]) + if (isSys) + return sysVars.some(varItem => varItem.selector?.[1]?.replace('sys.', '') === name) + const isChatVar = isConversationVar([nodeId]) + if (isChatVar) + return conversationVars.some(varItem => varItem.selector?.[1] === name) + return getInspectVar(nodeId, name) !== undefined + }, [getInspectVar]) + + const hasNodeInspectVars = useCallback((nodeId: string) => { + return !!getNodeInspectVars(nodeId) + }, [getNodeInspectVars]) + + const fetchInspectVarValue = useCallback(async (selector: ValueSelector) => { + const { + appId, + setNodeInspectVars, + } = workflowStore.getState() + const nodeId = selector[0] + const isSystemVar = nodeId === 'sys' + const isConversationVar = nodeId === 'conversation' + if (isSystemVar) { + invalidateSysVarValues() + return + } + if (isConversationVar) { + invalidateConversationVarValues() + return + } + const vars = await fetchNodeInspectVars(appId, nodeId) + setNodeInspectVars(nodeId, vars) + }, [workflowStore, invalidateSysVarValues, invalidateConversationVarValues]) + + // after last run would call this + const appendNodeInspectVars = useCallback((nodeId: string, payload: VarInInspect[], allNodes: Node[]) => { + const { + nodesWithInspectVars, + setNodesWithInspectVars, + } = workflowStore.getState() + const nodes = produce(nodesWithInspectVars, (draft) => { + const nodeInfo = allNodes.find(node => node.id === nodeId) + if (nodeInfo) { + const index = draft.findIndex(node => node.nodeId === nodeId) + if (index === -1) { + draft.unshift({ + nodeId, + nodeType: nodeInfo.data.type, + title: nodeInfo.data.title, + vars: payload, + nodePayload: nodeInfo.data, + }) + } + else { + draft[index].vars = payload + // put the node 
to the topAdd commentMore actions + draft.unshift(draft.splice(index, 1)[0]) + } + } + }) + setNodesWithInspectVars(nodes) + handleCancelNodeSuccessStatus(nodeId) + }, [workflowStore, handleCancelNodeSuccessStatus]) + + const hasNodeInspectVar = useCallback((nodeId: string, varId: string) => { + const { nodesWithInspectVars } = workflowStore.getState() + const targetNode = nodesWithInspectVars.find(item => item.nodeId === nodeId) + if(!targetNode || !targetNode.vars) + return false + return targetNode.vars.some(item => item.id === varId) + }, [workflowStore]) + + const deleteInspectVar = useCallback(async (nodeId: string, varId: string) => { + const { deleteInspectVar } = workflowStore.getState() + if(hasNodeInspectVar(nodeId, varId)) { + await doDeleteInspectVar(varId) + deleteInspectVar(nodeId, varId) + } + }, [doDeleteInspectVar, workflowStore, hasNodeInspectVar]) + + const resetConversationVar = useCallback(async (varId: string) => { + await doResetConversationVar(varId) + invalidateConversationVarValues() + }, [doResetConversationVar, invalidateConversationVarValues]) + + const deleteNodeInspectorVars = useCallback(async (nodeId: string) => { + const { deleteNodeInspectVars } = workflowStore.getState() + if (hasNodeInspectVars(nodeId)) { + await doDeleteNodeInspectorVars(nodeId) + deleteNodeInspectVars(nodeId) + } + }, [doDeleteNodeInspectorVars, workflowStore, hasNodeInspectVars]) + + const deleteAllInspectorVars = useCallback(async () => { + const { deleteAllInspectVars } = workflowStore.getState() + await doDeleteAllInspectorVars() + await invalidateConversationVarValues() + await invalidateSysVarValues() + deleteAllInspectVars() + handleEdgeCancelRunningStatus() + }, [doDeleteAllInspectorVars, invalidateConversationVarValues, invalidateSysVarValues, workflowStore, handleEdgeCancelRunningStatus]) + + const editInspectVarValue = useCallback(async (nodeId: string, varId: string, value: any) => { + const { setInspectVarValue } = workflowStore.getState() + 
await doEditInspectorVar({ + varId, + value, + }) + setInspectVarValue(nodeId, varId, value) + if (nodeId === VarInInspectType.conversation) + invalidateConversationVarValues() + if (nodeId === VarInInspectType.system) + invalidateSysVarValues() + }, [doEditInspectorVar, invalidateConversationVarValues, invalidateSysVarValues, workflowStore]) + + const renameInspectVarName = useCallback(async (nodeId: string, oldName: string, newName: string) => { + const { renameInspectVarName } = workflowStore.getState() + const varId = getVarId(nodeId, oldName) + if (!varId) + return + + const newSelector = [nodeId, newName] + await doEditInspectorVar({ + varId, + name: newName, + }) + renameInspectVarName(nodeId, varId, newSelector) + }, [doEditInspectorVar, getVarId, workflowStore]) + + const isInspectVarEdited = useCallback((nodeId: string, name: string) => { + const inspectVar = getInspectVar(nodeId, name) + if (!inspectVar) + return false + + return inspectVar.edited + }, [getInspectVar]) + + const resetToLastRunVar = useCallback(async (nodeId: string, varId: string) => { + const { resetToLastRunVar } = workflowStore.getState() + const isSysVar = nodeId === 'sys' + const data = await doResetToLastRunValue(varId) + + if(isSysVar) + invalidateSysVarValues() + else + resetToLastRunVar(nodeId, varId, data.value) + }, [doResetToLastRunValue, invalidateSysVarValues, workflowStore]) + + return { + hasNodeInspectVars, + hasSetInspectVar, + fetchInspectVarValue, + editInspectVarValue, + renameInspectVarName, + appendNodeInspectVars, + deleteInspectVar, + deleteNodeInspectorVars, + deleteAllInspectorVars, + isInspectVarEdited, + resetToLastRunVar, + invalidateSysVarValues, + resetConversationVar, + invalidateConversationVarValues, + } +} diff --git a/web/app/components/workflow-app/hooks/use-workflow-run.ts b/web/app/components/workflow-app/hooks/use-workflow-run.ts index 99b88238f1..4c34d2ffb1 100644 --- a/web/app/components/workflow-app/hooks/use-workflow-run.ts +++ 
b/web/app/components/workflow-app/hooks/use-workflow-run.ts @@ -20,7 +20,7 @@ import type { VersionHistory } from '@/types/workflow' import { noop } from 'lodash-es' import { useNodesSyncDraft } from './use-nodes-sync-draft' import { useInvalidAllLastRun } from '@/service/use-workflow' -import useSetWorkflowVarsWithValue from './use-fetch-workflow-inspect-vars' +import { useSetWorkflowVarsWithValue } from './use-fetch-workflow-inspect-vars' export const useWorkflowRun = () => { const store = useStoreApi() diff --git a/web/app/components/workflow/block-selector/market-place-plugin/list.tsx b/web/app/components/workflow/block-selector/market-place-plugin/list.tsx index 019b32ae25..e2b4a7acc6 100644 --- a/web/app/components/workflow/block-selector/market-place-plugin/list.tsx +++ b/web/app/components/workflow/block-selector/market-place-plugin/list.tsx @@ -6,9 +6,9 @@ import Item from './item' import type { Plugin } from '@/app/components/plugins/types.ts' import cn from '@/utils/classnames' import Link from 'next/link' -import { MARKETPLACE_URL_PREFIX } from '@/config' import { RiArrowRightUpLine, RiSearchLine } from '@remixicon/react' import { noop } from 'lodash-es' +import { getMarketplaceUrl } from '@/utils/var' export type ListProps = { wrapElemRef: React.RefObject @@ -32,7 +32,7 @@ const List = forwardRef(({ const { t } = useTranslation() const hasFilter = !searchText const hasRes = list.length > 0 - const urlWithSearchText = `${MARKETPLACE_URL_PREFIX}/?q=${searchText}&tags=${tags.join(',')}` + const urlWithSearchText = getMarketplaceUrl('', { q: searchText, tags: tags.join(',') }) const nextToStickyELemRef = useRef(null) const { handleScroll, scrollPosition } = useStickyScroll({ @@ -71,7 +71,7 @@ const List = forwardRef(({ return ( {t('plugin.findMoreInMarketplace')} diff --git a/web/app/components/workflow/block-selector/types.ts b/web/app/components/workflow/block-selector/types.ts index 75cfdf4818..74796f67e0 100644 --- 
a/web/app/components/workflow/block-selector/types.ts +++ b/web/app/components/workflow/block-selector/types.ts @@ -47,7 +47,7 @@ export type ToolValue = { provider_name: string tool_name: string tool_label: string - tool_description: string + tool_description?: string settings?: Record parameters?: Record enabled?: boolean diff --git a/web/app/components/workflow/header/header-in-normal.tsx b/web/app/components/workflow/header/header-in-normal.tsx index eea3031f5b..fa3288333a 100644 --- a/web/app/components/workflow/header/header-in-normal.tsx +++ b/web/app/components/workflow/header/header-in-normal.tsx @@ -36,6 +36,8 @@ const HeaderInNormal = ({ const setShowWorkflowVersionHistoryPanel = useStore(s => s.setShowWorkflowVersionHistoryPanel) const setShowEnvPanel = useStore(s => s.setShowEnvPanel) const setShowDebugAndPreviewPanel = useStore(s => s.setShowDebugAndPreviewPanel) + const setShowVariableInspectPanel = useStore(s => s.setShowVariableInspectPanel) + const setShowChatVariablePanel = useStore(s => s.setShowChatVariablePanel) const nodes = useNodes() const selectedNode = nodes.find(node => node.data.selected) const { handleBackupDraft } = useWorkflowRun() @@ -49,8 +51,10 @@ const HeaderInNormal = ({ setShowWorkflowVersionHistoryPanel(true) setShowEnvPanel(false) setShowDebugAndPreviewPanel(false) + setShowVariableInspectPanel(false) + setShowChatVariablePanel(false) }, [handleBackupDraft, workflowStore, handleNodeSelect, selectedNode, - setShowWorkflowVersionHistoryPanel, setShowEnvPanel, setShowDebugAndPreviewPanel]) + setShowWorkflowVersionHistoryPanel, setShowEnvPanel, setShowDebugAndPreviewPanel, setShowVariableInspectPanel]) return ( <> diff --git a/web/app/components/workflow/hooks-store/store.ts b/web/app/components/workflow/hooks-store/store.ts index 967682c7dd..d9a35cd36d 100644 --- a/web/app/components/workflow/hooks-store/store.ts +++ b/web/app/components/workflow/hooks-store/store.ts @@ -11,6 +11,12 @@ import type { BlockEnum, NodeDefault, } 
from '@/app/components/workflow/types' +import type { IOtherOptions } from '@/service/base' +import type { VarInInspect } from '@/types/workflow' +import type { + Node, + ValueSelector, +} from '@/app/components/workflow/types' export type AvailableNodesMetaData = { nodes: NodeDefault[] @@ -30,7 +36,7 @@ export type CommonHooksFnMap = { handleBackupDraft: () => void handleLoadBackupDraft: () => void handleRestoreFromPublishedWorkflow: (...args: any[]) => void - handleRun: (...args: any[]) => void + handleRun: (params: any, callback?: IOtherOptions,) => void handleStopRun: (...args: any[]) => void handleStartWorkflowRun: () => void handleWorkflowStartRunInWorkflow: () => void @@ -39,6 +45,25 @@ export type CommonHooksFnMap = { getWorkflowRunAndTraceUrl: (runId?: string) => { runUrl: string; traceUrl: string } exportCheck?: () => Promise handleExportDSL?: (include?: boolean) => Promise + fetchInspectVars: () => Promise + hasNodeInspectVars: (nodeId: string) => boolean + hasSetInspectVar: (nodeId: string, name: string, sysVars: VarInInspect[], conversationVars: VarInInspect[]) => boolean + fetchInspectVarValue: (selector: ValueSelector) => Promise + editInspectVarValue: (nodeId: string, varId: string, value: any) => Promise + renameInspectVarName: (nodeId: string, oldName: string, newName: string) => Promise + appendNodeInspectVars: (nodeId: string, payload: VarInInspect[], allNodes: Node[]) => void + deleteInspectVar: (nodeId: string, varId: string) => Promise + deleteNodeInspectorVars: (nodeId: string) => Promise + deleteAllInspectorVars: () => Promise + isInspectVarEdited: (nodeId: string, name: string) => boolean + resetToLastRunVar: (nodeId: string, varId: string) => Promise + invalidateSysVarValues: () => void + resetConversationVar: (varId: string) => Promise + invalidateConversationVarValues: () => void + configsMap?: { + conversationVarsUrl: string + systemVarsUrl: string + } } export type Shape = { @@ -66,6 +91,21 @@ export const createHooksStore = ({ }), 
exportCheck = async () => noop(), handleExportDSL = async () => noop(), + fetchInspectVars = async () => noop(), + hasNodeInspectVars = () => false, + hasSetInspectVar = () => false, + fetchInspectVarValue = async () => noop(), + editInspectVarValue = async () => noop(), + renameInspectVarName = async () => noop(), + appendNodeInspectVars = () => noop(), + deleteInspectVar = async () => noop(), + deleteNodeInspectorVars = async () => noop(), + deleteAllInspectorVars = async () => noop(), + isInspectVarEdited = () => false, + resetToLastRunVar = async () => noop(), + invalidateSysVarValues = noop, + resetConversationVar = async () => noop(), + invalidateConversationVarValues = noop, }: Partial) => { return createStore(set => ({ refreshAll: props => set(state => ({ ...state, ...props })), @@ -84,6 +124,21 @@ export const createHooksStore = ({ getWorkflowRunAndTraceUrl, exportCheck, handleExportDSL, + fetchInspectVars, + hasNodeInspectVars, + hasSetInspectVar, + fetchInspectVarValue, + editInspectVarValue, + renameInspectVarName, + appendNodeInspectVars, + deleteInspectVar, + deleteNodeInspectorVars, + deleteAllInspectorVars, + isInspectVarEdited, + resetToLastRunVar, + invalidateSysVarValues, + resetConversationVar, + invalidateConversationVarValues, })) } diff --git a/web/app/components/workflow/hooks/index.ts b/web/app/components/workflow/hooks/index.ts index 8a33643d69..725adf2a2a 100644 --- a/web/app/components/workflow/hooks/index.ts +++ b/web/app/components/workflow/hooks/index.ts @@ -20,3 +20,5 @@ export * from './use-available-blocks' export * from './use-workflow-refresh-draft' export * from './use-tool-icon' export * from './use-DSL' +export * from './use-inspect-vars-crud' +export * from './use-set-workflow-vars-with-value' diff --git a/web/app/components/workflow/hooks/use-inspect-vars-crud.ts b/web/app/components/workflow/hooks/use-inspect-vars-crud.ts index 59cc98a17b..50188185c2 100644 --- a/web/app/components/workflow/hooks/use-inspect-vars-crud.ts 
+++ b/web/app/components/workflow/hooks/use-inspect-vars-crud.ts @@ -1,221 +1,29 @@ -import { fetchNodeInspectVars } from '@/service/workflow' -import { useStore, useWorkflowStore } from '../store' -import type { ValueSelector } from '../types' -import type { VarInInspect } from '@/types/workflow' -import { VarInInspectType } from '@/types/workflow' +import { useStore } from '../store' +import { useHooksStore } from '@/app/components/workflow/hooks-store' import { useConversationVarValues, - useDeleteAllInspectorVars, - useDeleteInspectVar, - useDeleteNodeInspectorVars, - useEditInspectorVar, - useInvalidateConversationVarValues, - useInvalidateSysVarValues, - useLastRun, - useResetConversationVar, - useResetToLastRunValue, useSysVarValues, } from '@/service/use-workflow' -import { useCallback, useEffect, useState } from 'react' -import { isConversationVar, isENV, isSystemVar } from '../nodes/_base/components/variable/utils' -import produce from 'immer' -import type { Node } from '@/app/components/workflow/types' -import { useNodesInteractionsWithoutSync } from './use-nodes-interactions-without-sync' -import { useEdgesInteractionsWithoutSync } from './use-edges-interactions-without-sync' const useInspectVarsCrud = () => { - const workflowStore = useWorkflowStore() const nodesWithInspectVars = useStore(s => s.nodesWithInspectVars) - const { - appId, - setNodeInspectVars, - setInspectVarValue, - renameInspectVarName: renameInspectVarNameInStore, - deleteAllInspectVars: deleteAllInspectVarsInStore, - deleteNodeInspectVars: deleteNodeInspectVarsInStore, - deleteInspectVar: deleteInspectVarInStore, - setNodesWithInspectVars, - resetToLastRunVar: resetToLastRunVarInStore, - } = workflowStore.getState() - - const { data: conversationVars } = useConversationVarValues(appId) - const invalidateConversationVarValues = useInvalidateConversationVarValues(appId) - const { mutateAsync: doResetConversationVar } = useResetConversationVar(appId) - const { mutateAsync: 
doResetToLastRunValue } = useResetToLastRunValue(appId) - const { data: systemVars } = useSysVarValues(appId) - const invalidateSysVarValues = useInvalidateSysVarValues(appId) - - const { mutateAsync: doDeleteAllInspectorVars } = useDeleteAllInspectorVars(appId) - const { mutate: doDeleteNodeInspectorVars } = useDeleteNodeInspectorVars(appId) - const { mutate: doDeleteInspectVar } = useDeleteInspectVar(appId) - - const { mutateAsync: doEditInspectorVar } = useEditInspectorVar(appId) - const { handleCancelNodeSuccessStatus } = useNodesInteractionsWithoutSync() - const { handleEdgeCancelRunningStatus } = useEdgesInteractionsWithoutSync() - const getNodeInspectVars = useCallback((nodeId: string) => { - const node = nodesWithInspectVars.find(node => node.nodeId === nodeId) - return node - }, [nodesWithInspectVars]) - - const getVarId = useCallback((nodeId: string, varName: string) => { - const node = getNodeInspectVars(nodeId) - if (!node) - return undefined - const varId = node.vars.find((varItem) => { - return varItem.selector[1] === varName - })?.id - return varId - }, [getNodeInspectVars]) - - const getInspectVar = useCallback((nodeId: string, name: string): VarInInspect | undefined => { - const node = getNodeInspectVars(nodeId) - if (!node) - return undefined - - const variable = node.vars.find((varItem) => { - return varItem.name === name - }) - return variable - }, [getNodeInspectVars]) - - const hasSetInspectVar = useCallback((nodeId: string, name: string, sysVars: VarInInspect[], conversationVars: VarInInspect[]) => { - const isEnv = isENV([nodeId]) - if (isEnv) // always have value - return true - const isSys = isSystemVar([nodeId]) - if (isSys) - return sysVars.some(varItem => varItem.selector?.[1]?.replace('sys.', '') === name) - const isChatVar = isConversationVar([nodeId]) - if (isChatVar) - return conversationVars.some(varItem => varItem.selector?.[1] === name) - return getInspectVar(nodeId, name) !== undefined - }, [getInspectVar]) - - const 
hasNodeInspectVars = useCallback((nodeId: string) => { - return !!getNodeInspectVars(nodeId) - }, [getNodeInspectVars]) - - const fetchInspectVarValue = async (selector: ValueSelector) => { - const nodeId = selector[0] - const isSystemVar = nodeId === 'sys' - const isConversationVar = nodeId === 'conversation' - if (isSystemVar) { - invalidateSysVarValues() - return - } - if (isConversationVar) { - invalidateConversationVarValues() - return - } - const vars = await fetchNodeInspectVars(appId, nodeId) - setNodeInspectVars(nodeId, vars) - } - - // after last run would call this - const appendNodeInspectVars = (nodeId: string, payload: VarInInspect[], allNodes: Node[]) => { - const nodes = produce(nodesWithInspectVars, (draft) => { - const nodeInfo = allNodes.find(node => node.id === nodeId) - if (nodeInfo) { - const index = draft.findIndex(node => node.nodeId === nodeId) - if (index === -1) { - draft.push({ - nodeId, - nodeType: nodeInfo.data.type, - title: nodeInfo.data.title, - vars: payload, - }) - } - else { - draft[index].vars = payload - } - } - }) - setNodesWithInspectVars(nodes) - handleCancelNodeSuccessStatus(nodeId) - } - - const hasNodeInspectVar = (nodeId: string, varId: string) => { - const targetNode = nodesWithInspectVars.find(item => item.nodeId === nodeId) - if(!targetNode || !targetNode.vars) - return false - return targetNode.vars.some(item => item.id === varId) - } - - const deleteInspectVar = async (nodeId: string, varId: string) => { - if(hasNodeInspectVar(nodeId, varId)) { - await doDeleteInspectVar(varId) - deleteInspectVarInStore(nodeId, varId) - } - } - - const resetConversationVar = async (varId: string) => { - await doResetConversationVar(varId) - invalidateConversationVarValues() - } - - const deleteNodeInspectorVars = async (nodeId: string) => { - if (hasNodeInspectVars(nodeId)) { - await doDeleteNodeInspectorVars(nodeId) - deleteNodeInspectVarsInStore(nodeId) - } - } - - const deleteAllInspectorVars = async () => { - await 
doDeleteAllInspectorVars() - await invalidateConversationVarValues() - await invalidateSysVarValues() - deleteAllInspectVarsInStore() - handleEdgeCancelRunningStatus() - } - - const editInspectVarValue = useCallback(async (nodeId: string, varId: string, value: any) => { - await doEditInspectorVar({ - varId, - value, - }) - setInspectVarValue(nodeId, varId, value) - if (nodeId === VarInInspectType.conversation) - invalidateConversationVarValues() - if (nodeId === VarInInspectType.system) - invalidateSysVarValues() - }, [doEditInspectorVar, invalidateConversationVarValues, invalidateSysVarValues, setInspectVarValue]) - - const [currNodeId, setCurrNodeId] = useState(null) - const [currEditVarId, setCurrEditVarId] = useState(null) - const { data } = useLastRun(appId, currNodeId || '', !!currNodeId) - useEffect(() => { - if (data && currNodeId && currEditVarId) { - const inspectVar = getNodeInspectVars(currNodeId)?.vars?.find(item => item.id === currEditVarId) - resetToLastRunVarInStore(currNodeId, currEditVarId, data.outputs?.[inspectVar?.selector?.[1] || '']) - } - }, [data, currNodeId, currEditVarId, getNodeInspectVars, editInspectVarValue, resetToLastRunVarInStore]) - - const renameInspectVarName = async (nodeId: string, oldName: string, newName: string) => { - const varId = getVarId(nodeId, oldName) - if (!varId) - return - - const newSelector = [nodeId, newName] - await doEditInspectorVar({ - varId, - name: newName, - }) - renameInspectVarNameInStore(nodeId, varId, newSelector) - } - - const isInspectVarEdited = useCallback((nodeId: string, name: string) => { - const inspectVar = getInspectVar(nodeId, name) - if (!inspectVar) - return false - - return inspectVar.edited - }, [getInspectVar]) - - const resetToLastRunVar = async (nodeId: string, varId: string) => { - await doResetToLastRunValue(varId) - setCurrNodeId(nodeId) - setCurrEditVarId(varId) - } + const configsMap = useHooksStore(s => s.configsMap) + const { data: conversationVars } = 
useConversationVarValues(configsMap?.conversationVarsUrl) + const { data: systemVars } = useSysVarValues(configsMap?.systemVarsUrl) + const hasNodeInspectVars = useHooksStore(s => s.hasNodeInspectVars) + const hasSetInspectVar = useHooksStore(s => s.hasSetInspectVar) + const fetchInspectVarValue = useHooksStore(s => s.fetchInspectVarValue) + const editInspectVarValue = useHooksStore(s => s.editInspectVarValue) + const renameInspectVarName = useHooksStore(s => s.renameInspectVarName) + const appendNodeInspectVars = useHooksStore(s => s.appendNodeInspectVars) + const deleteInspectVar = useHooksStore(s => s.deleteInspectVar) + const deleteNodeInspectorVars = useHooksStore(s => s.deleteNodeInspectorVars) + const deleteAllInspectorVars = useHooksStore(s => s.deleteAllInspectorVars) + const isInspectVarEdited = useHooksStore(s => s.isInspectVarEdited) + const resetToLastRunVar = useHooksStore(s => s.resetToLastRunVar) + const invalidateSysVarValues = useHooksStore(s => s.invalidateSysVarValues) + const resetConversationVar = useHooksStore(s => s.resetConversationVar) + const invalidateConversationVarValues = useHooksStore(s => s.invalidateConversationVarValues) return { conversationVars: conversationVars || [], diff --git a/web/app/components/workflow/hooks/use-set-workflow-vars-with-value.ts b/web/app/components/workflow/hooks/use-set-workflow-vars-with-value.ts new file mode 100644 index 0000000000..a04c2de305 --- /dev/null +++ b/web/app/components/workflow/hooks/use-set-workflow-vars-with-value.ts @@ -0,0 +1,9 @@ +import { useHooksStore } from '@/app/components/workflow/hooks-store' + +export const useSetWorkflowVarsWithValue = () => { + const fetchInspectVars = useHooksStore(s => s.fetchInspectVars) + + return { + fetchInspectVars, + } +} diff --git a/web/app/components/workflow/index.tsx b/web/app/components/workflow/index.tsx index 48c7983e79..a0848d98fa 100644 --- a/web/app/components/workflow/index.tsx +++ b/web/app/components/workflow/index.tsx @@ -42,6 +42,7 @@ 
import { useNodesSyncDraft, usePanelInteractions, useSelectionInteractions, + useSetWorkflowVarsWithValue, useShortcuts, useWorkflow, useWorkflowReadOnly, @@ -83,7 +84,6 @@ import DatasetsDetailProvider from './datasets-detail-store/provider' import { HooksStoreContextProvider } from './hooks-store' import type { Shape as HooksStoreShape } from './hooks-store' import PluginDependency from './plugin-dependency' -import useSetWorkflowVarsWithValue from '../workflow-app/hooks/use-fetch-workflow-inspect-vars' const nodeTypes = { [CUSTOM_NODE]: CustomNode, diff --git a/web/app/components/workflow/nodes/_base/components/editor/base.tsx b/web/app/components/workflow/nodes/_base/components/editor/base.tsx index 38968b2e0d..65afb36835 100644 --- a/web/app/components/workflow/nodes/_base/components/editor/base.tsx +++ b/web/app/components/workflow/nodes/_base/components/editor/base.tsx @@ -15,6 +15,7 @@ import { import useToggleExpend from '@/app/components/workflow/nodes/_base/hooks/use-toggle-expend' import type { FileEntity } from '@/app/components/base/file-uploader/types' import FileListInLog from '@/app/components/base/file-uploader/file-list-in-log' +import ActionButton from '@/app/components/base/action-button' type Props = { className?: string @@ -88,15 +89,16 @@ const Base: FC = ({
)} - {!isCopied - ? ( - - ) - : ( - - ) - } - + + {!isCopied + ? ( + + ) + : ( + + ) + } +
diff --git a/web/app/components/workflow/nodes/_base/components/next-step/index.tsx b/web/app/components/workflow/nodes/_base/components/next-step/index.tsx index afb642955c..25d1a0aa63 100644 --- a/web/app/components/workflow/nodes/_base/components/next-step/index.tsx +++ b/web/app/components/workflow/nodes/_base/components/next-step/index.tsx @@ -1,10 +1,10 @@ import { memo, useMemo } from 'react' import { useTranslation } from 'react-i18next' +import { isEqual } from 'lodash-es' import { getConnectedEdges, getOutgoers, - useEdges, - useStoreApi, + useStore, } from 'reactflow' import { useToolIcon } from '../../../../hooks' import BlockIcon from '../../../../block-icon' @@ -26,12 +26,21 @@ const NextStep = ({ const { t } = useTranslation() const data = selectedNode.data const toolIcon = useToolIcon(data) - const store = useStoreApi() const branches = useMemo(() => { return data._targetBranches || [] }, [data]) - const edges = useEdges() - const outgoers = getOutgoers(selectedNode as Node, store.getState().getNodes(), edges) + const edges = useStore(s => s.edges.map(edge => ({ + id: edge.id, + source: edge.source, + sourceHandle: edge.sourceHandle, + target: edge.target, + targetHandle: edge.targetHandle, + })), isEqual) + const nodes = useStore(s => s.getNodes().map(node => ({ + id: node.id, + data: node.data, + })), isEqual) + const outgoers = getOutgoers(selectedNode as Node, nodes as Node[], edges) const connectedEdges = getConnectedEdges([selectedNode] as Node[], edges).filter(edge => edge.source === selectedNode!.id) const list = useMemo(() => { diff --git a/web/app/components/workflow/nodes/_base/components/node-position.tsx b/web/app/components/workflow/nodes/_base/components/node-position.tsx index 404648dfa6..e844726b4f 100644 --- a/web/app/components/workflow/nodes/_base/components/node-position.tsx +++ b/web/app/components/workflow/nodes/_base/components/node-position.tsx @@ -1,30 +1,39 @@ import { memo } from 'react' import { useTranslation } from 
'react-i18next' +import { useShallow } from 'zustand/react/shallow' import { RiCrosshairLine } from '@remixicon/react' -import type { XYPosition } from 'reactflow' -import { useReactFlow, useStoreApi } from 'reactflow' +import { useReactFlow, useStore } from 'reactflow' import TooltipPlus from '@/app/components/base/tooltip' import { useNodesSyncDraft } from '@/app/components/workflow-app/hooks' type NodePositionProps = { - nodePosition: XYPosition, - nodeWidth?: number | null, - nodeHeight?: number | null, + nodeId: string } const NodePosition = ({ - nodePosition, - nodeWidth, - nodeHeight, + nodeId, }: NodePositionProps) => { const { t } = useTranslation() const reactflow = useReactFlow() - const store = useStoreApi() const { doSyncWorkflowDraft } = useNodesSyncDraft() + const { + nodePosition, + nodeWidth, + nodeHeight, + } = useStore(useShallow((s) => { + const nodes = s.getNodes() + const currentNode = nodes.find(node => node.id === nodeId)! + + return { + nodePosition: currentNode.position, + nodeWidth: currentNode.width, + nodeHeight: currentNode.height, + } + })) + const transform = useStore(s => s.transform) if (!nodePosition || !nodeWidth || !nodeHeight) return null const workflowContainer = document.getElementById('workflow-container') - const { transform } = store.getState() const zoom = transform[2] const { clientWidth, clientHeight } = workflowContainer! 
diff --git a/web/app/components/workflow/nodes/_base/components/switch-plugin-version.tsx b/web/app/components/workflow/nodes/_base/components/switch-plugin-version.tsx index cddd529aea..94b3ce7bfc 100644 --- a/web/app/components/workflow/nodes/_base/components/switch-plugin-version.tsx +++ b/web/app/components/workflow/nodes/_base/components/switch-plugin-version.tsx @@ -15,7 +15,7 @@ import { pluginManifestToCardPluginProps } from '@/app/components/plugins/instal import { Badge as Badge2, BadgeState } from '@/app/components/base/badge/index' import Link from 'next/link' import { useTranslation } from 'react-i18next' -import { MARKETPLACE_URL_PREFIX } from '@/config' +import { getMarketplaceUrl } from '@/utils/var' export type SwitchPluginVersionProps = { uniqueIdentifier: string @@ -82,7 +82,7 @@ export const SwitchPluginVersion: FC = (props) => { modalBottomLeft={ diff --git a/web/app/components/workflow/nodes/_base/components/variable/constant-field.tsx b/web/app/components/workflow/nodes/_base/components/variable/constant-field.tsx index 0adfa3c9fb..0d965e2d22 100644 --- a/web/app/components/workflow/nodes/_base/components/variable/constant-field.tsx +++ b/web/app/components/workflow/nodes/_base/components/variable/constant-field.tsx @@ -13,6 +13,8 @@ type Props = { readonly: boolean value: string onChange: (value: string | number, varKindType: VarKindType, varInfo?: Var) => void + onOpenChange?: (open: boolean) => void + isLoading?: boolean } const DEFAULT_SCHEMA = {} as CredentialFormSchema @@ -22,6 +24,8 @@ const ConstantField: FC = ({ readonly, value, onChange, + onOpenChange, + isLoading, }) => { const language = useLanguage() const placeholder = (schema as CredentialFormSchemaSelect).placeholder @@ -36,7 +40,7 @@ const ConstantField: FC = ({ return ( <> - {schema.type === FormTypeEnum.select && ( + {(schema.type === FormTypeEnum.select || schema.type === FormTypeEnum.dynamicSelect) && ( = ({ items={(schema as 
CredentialFormSchemaSelect).options.map(option => ({ value: option.value, name: option.label[language] || option.label.en_US }))} onSelect={item => handleSelectChange(item.value)} placeholder={placeholder?.[language] || placeholder?.en_US} + onOpenChange={onOpenChange} + isLoading={isLoading} /> )} {schema.type === FormTypeEnum.textNumber && ( diff --git a/web/app/components/workflow/nodes/_base/components/variable/utils.ts b/web/app/components/workflow/nodes/_base/components/variable/utils.ts index 63f2527aa8..42cad703f1 100644 --- a/web/app/components/workflow/nodes/_base/components/variable/utils.ts +++ b/web/app/components/workflow/nodes/_base/components/variable/utils.ts @@ -1171,13 +1171,13 @@ export const getNodeUsedVarPassToServerKey = (node: Node, valueSelector: ValueSe break } case BlockEnum.Code: { - const targetVar = (data as CodeNodeType).variables?.find(v => v.value_selector.join('.') === valueSelector.join('.')) + const targetVar = (data as CodeNodeType).variables?.find(v => Array.isArray(v.value_selector) && v.value_selector && v.value_selector.join('.') === valueSelector.join('.')) if (targetVar) res = targetVar.variable break } case BlockEnum.TemplateTransform: { - const targetVar = (data as TemplateTransformNodeType).variables?.find(v => v.value_selector.join('.') === valueSelector.join('.')) + const targetVar = (data as TemplateTransformNodeType).variables?.find(v => Array.isArray(v.value_selector) && v.value_selector && v.value_selector.join('.') === valueSelector.join('.')) if (targetVar) res = targetVar.variable break diff --git a/web/app/components/workflow/nodes/_base/components/variable/var-list.tsx b/web/app/components/workflow/nodes/_base/components/variable/var-list.tsx index 181b278051..fe489a5b80 100644 --- a/web/app/components/workflow/nodes/_base/components/variable/var-list.tsx +++ b/web/app/components/workflow/nodes/_base/components/variable/var-list.tsx @@ -65,10 +65,11 @@ const VarList: FC = ({ }, [list, onVarNameChange, 
onChange]) const handleVarReferenceChange = useCallback((index: number) => { - return (value: ValueSelector | string, varKindType: VarKindType) => { + return (value: ValueSelector | string, varKindType: VarKindType, varInfo?: Var) => { const newList = produce(list, (draft) => { if (!isSupportConstantValue || varKindType === VarKindType.variable) { draft[index].value_selector = value as ValueSelector + draft[index].value_type = varInfo?.type if (isSupportConstantValue) draft[index].variable_type = VarKindType.variable diff --git a/web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx b/web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx index 3e4506b832..3026226452 100644 --- a/web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx +++ b/web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx @@ -6,6 +6,7 @@ import { RiArrowDownSLine, RiCloseLine, RiErrorWarningFill, + RiLoader4Line, RiMoreLine, } from '@remixicon/react' import produce from 'immer' @@ -20,8 +21,9 @@ import VarReferencePopup from './var-reference-popup' import { getNodeInfoById, isConversationVar, isENV, isRagVariableVar, isSystemVar, varTypeToStructType } from './utils' import ConstantField from './constant-field' import cn from '@/utils/classnames' -import type { CommonNodeType, NodeOutPutVar, ValueSelector, Var } from '@/app/components/workflow/types' -import type { CredentialFormSchema } from '@/app/components/header/account-setting/model-provider-page/declarations' +import type { CommonNodeType, Node, NodeOutPutVar, ToolWithProvider, ValueSelector, Var } from '@/app/components/workflow/types' +import type { CredentialFormSchemaSelect } from '@/app/components/header/account-setting/model-provider-page/declarations' +import { type CredentialFormSchema, type FormOption, FormTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations' import { 
BlockEnum } from '@/app/components/workflow/types' import { VarBlockIcon } from '@/app/components/workflow/block-icon' import { Line3 } from '@/app/components/base/icons/src/public/common' @@ -46,6 +48,9 @@ import VarFullPathPanel from './var-full-path-panel' import { noop } from 'lodash-es' import { InputField } from '@/app/components/base/icons/src/vender/pipeline' import { useStore as useWorkflowStore } from '@/app/components/workflow/store' +import { useFetchDynamicOptions } from '@/service/use-plugins' +import type { Tool } from '@/app/components/tools/types' + const TRIGGER_DEFAULT_WIDTH = 227 type Props = { @@ -73,6 +78,8 @@ type Props = { minWidth?: number popupFor?: 'assigned' | 'toAssigned' zIndex?: number + currentTool?: Tool + currentProvider?: ToolWithProvider } const DEFAULT_VALUE_SELECTOR: Props['value'] = [] @@ -102,6 +109,8 @@ const VarReferencePicker: FC = ({ minWidth, popupFor, zIndex, + currentTool, + currentProvider, }) => { const { t } = useTranslation() const store = useStoreApi() @@ -182,9 +191,11 @@ const VarReferencePicker: FC = ({ return startNode?.data const node = getNodeInfoById(availableNodes, outputVarNodeId)?.data - return { - ...node, - id: outputVarNodeId, + if (node) { + return { + ...node, + id: outputVarNodeId, + } } }, [value, hasValue, isConstant, isIterationVar, iterationNode, availableNodes, outputVarNodeId, startNode, isLoopVar, loopNode]) @@ -325,6 +336,41 @@ const VarReferencePicker: FC = ({ return null }, [isValidVar, isShowAPart, hasValue, t, outputVarNode?.title, outputVarNode?.type, value, type]) + const [dynamicOptions, setDynamicOptions] = useState(null) + const [isLoading, setIsLoading] = useState(false) + const { mutateAsync: fetchDynamicOptions } = useFetchDynamicOptions( + currentProvider?.plugin_id || '', currentProvider?.name || '', currentTool?.name || '', (schema as CredentialFormSchemaSelect)?.variable || '', + 'tool', + ) + const handleFetchDynamicOptions = async () => { + if (schema?.type !== 
FormTypeEnum.dynamicSelect || !currentTool || !currentProvider) + return + setIsLoading(true) + try { + const data = await fetchDynamicOptions() + setDynamicOptions(data?.options || []) + } + finally { + setIsLoading(false) + } + } + useEffect(() => { + handleFetchDynamicOptions() + }, [currentTool, currentProvider, schema]) + + const schemaWithDynamicSelect = useMemo(() => { + if (schema?.type !== FormTypeEnum.dynamicSelect) + return schema + // rewrite schema.options with dynamicOptions + if (dynamicOptions) { + return { + ...schema, + options: dynamicOptions, + } + } + return schema + }, [dynamicOptions]) + return (
= ({ void)} - schema={schema as CredentialFormSchema} + schema={schemaWithDynamicSelect as CredentialFormSchema} readonly={readonly} + isLoading={isLoading} /> ) : ( @@ -421,6 +468,7 @@ const VarReferencePicker: FC = ({ )}
{!hasValue && } + {isLoading && } {isEnv && } {isChatVar && } {isRagVar && } @@ -434,7 +482,16 @@ const VarReferencePicker: FC = ({ {!isValidVar && } ) - :
{placeholder ?? t('workflow.common.setVarValuePlaceholder')}
} + :
+ {isLoading ? ( +
+ + {placeholder ?? t('workflow.common.setVarValuePlaceholder')} +
+ ) : ( + placeholder ?? t('workflow.common.setVarValuePlaceholder') + )} +
}
diff --git a/web/app/components/workflow/nodes/_base/components/workflow-panel/index.tsx b/web/app/components/workflow/nodes/_base/components/workflow-panel/index.tsx index 148f67e347..a321c23430 100644 --- a/web/app/components/workflow/nodes/_base/components/workflow-panel/index.tsx +++ b/web/app/components/workflow/nodes/_base/components/workflow-panel/index.tsx @@ -62,15 +62,14 @@ import { Stop } from '@/app/components/base/icons/src/vender/line/mediaAndDevice type BasePanelProps = { children: ReactNode -} & Node + id: Node['id'] + data: Node['data'] +} const BasePanel: FC = ({ id, data, children, - position, - width, - height, }) => { const { t } = useTranslation() const { showMessageLogModal } = useAppStore(useShallow(state => ({ @@ -331,7 +330,7 @@ const BasePanel: FC = ({ ) } - +
diff --git a/web/app/components/workflow/nodes/_base/node.tsx b/web/app/components/workflow/nodes/_base/node.tsx index b04501dd19..425e58567c 100644 --- a/web/app/components/workflow/nodes/_base/node.tsx +++ b/web/app/components/workflow/nodes/_base/node.tsx @@ -48,7 +48,9 @@ import useInspectVarsCrud from '../../hooks/use-inspect-vars-crud' type BaseNodeProps = { children: ReactElement -} & NodeProps + id: NodeProps['id'] + data: NodeProps['data'] +} const BaseNode: FC = ({ id, diff --git a/web/app/components/workflow/nodes/code/panel.tsx b/web/app/components/workflow/nodes/code/panel.tsx index 05d6cd7957..f928c13bd6 100644 --- a/web/app/components/workflow/nodes/code/panel.tsx +++ b/web/app/components/workflow/nodes/code/panel.tsx @@ -14,6 +14,7 @@ import Split from '@/app/components/workflow/nodes/_base/components/split' import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor' import TypeSelector from '@/app/components/workflow/nodes/_base/components/selector' import type { NodePanelProps } from '@/app/components/workflow/types' +import SyncButton from '@/app/components/base/button/sync-button' const i18nPrefix = 'workflow.nodes.code' const codeLanguages = [ @@ -40,6 +41,7 @@ const Panel: FC> = ({ handleVarListChange, handleAddVariable, handleRemoveVariable, + handleSyncFunctionSignature, handleCodeChange, handleCodeLanguageChange, handleVarsChange, @@ -68,7 +70,12 @@ const Panel: FC> = ({ : undefined + !readOnly ? ( +
+ + +
+ ) : undefined } > { setInputs(newInputs) }, [allLanguageDefault, inputs, setInputs]) + const handleSyncFunctionSignature = useCallback(() => { + const generateSyncSignatureCode = (code: string) => { + let mainDefRe + let newMainDef + if (inputs.code_language === CodeLanguage.javascript) { + mainDefRe = /function\s+main\b\s*\([\s\S]*?\)/g + newMainDef = 'function main({{var_list}})' + let param_list = inputs.variables?.map(item => item.variable).join(', ') || '' + param_list = param_list ? `{${param_list}}` : '' + newMainDef = newMainDef.replace('{{var_list}}', param_list) + } + + else if (inputs.code_language === CodeLanguage.python3) { + mainDefRe = /def\s+main\b\s*\([\s\S]*?\)/g + const param_list = [] + for (const item of inputs.variables) { + let param = item.variable + let param_type = '' + switch (item.value_type) { + case VarType.string: + param_type = ': str' + break + case VarType.number: + param_type = ': float' + break + case VarType.object: + param_type = ': dict' + break + case VarType.array: + param_type = ': list' + break + case VarType.arrayNumber: + param_type = ': list[float]' + break + case VarType.arrayString: + param_type = ': list[str]' + break + case VarType.arrayObject: + param_type = ': list[dict]' + break + } + param += param_type + param_list.push(`${param}`) + } + + newMainDef = `def main(${param_list.join(', ')})` + } + else { return code } + + const newCode = code.replace(mainDefRe, newMainDef) + return newCode + } + + const newInputs = produce(inputs, (draft) => { + draft.code = generateSyncSignatureCode(draft.code) + }) + setInputs(newInputs) + }, [inputs, setInputs]) + const { handleVarsChange, handleAddVariable: handleAddOutputVariable, @@ -135,6 +194,7 @@ const useConfig = (id: string, payload: CodeNodeType) => { handleVarListChange, handleAddVariable, handleRemoveVariable, + handleSyncFunctionSignature, handleCodeChange, handleCodeLanguageChange, handleVarsChange, diff --git a/web/app/components/workflow/nodes/index.tsx 
b/web/app/components/workflow/nodes/index.tsx index d120ed8d37..8458051da2 100644 --- a/web/app/components/workflow/nodes/index.tsx +++ b/web/app/components/workflow/nodes/index.tsx @@ -14,11 +14,14 @@ import BasePanel from './_base/components/workflow-panel' const CustomNode = (props: NodeProps) => { const nodeData = props.data - const NodeComponent = NodeComponentMap[nodeData.type] + const NodeComponent = useMemo(() => NodeComponentMap[nodeData.type], [nodeData.type]) return ( <> - + @@ -26,7 +29,12 @@ const CustomNode = (props: NodeProps) => { } CustomNode.displayName = 'CustomNode' -export const Panel = memo((props: Node) => { +export type PanelProps = { + type: Node['type'] + id: Node['id'] + data: Node['data'] +} +export const Panel = memo((props: PanelProps) => { const nodeClass = props.type const nodeData = props.data const PanelComponent = useMemo(() => { @@ -38,7 +46,11 @@ export const Panel = memo((props: Node) => { if (nodeClass === CUSTOM_NODE) { return ( - + ) diff --git a/web/app/components/workflow/nodes/llm/panel.tsx b/web/app/components/workflow/nodes/llm/panel.tsx index 04acb61ef8..2a71dffa11 100644 --- a/web/app/components/workflow/nodes/llm/panel.tsx +++ b/web/app/components/workflow/nodes/llm/panel.tsx @@ -19,6 +19,8 @@ import Editor from '@/app/components/workflow/nodes/_base/components/prompt/edit import StructureOutput from './components/structure-output' import Switch from '@/app/components/base/switch' import { RiAlertFill, RiQuestionLine } from '@remixicon/react' +import { fetchAndMergeValidCompletionParams } from '@/utils/completion-params' +import Toast from '@/app/components/base/toast' const i18nPrefix = 'workflow.nodes.llm' @@ -68,10 +70,27 @@ const Panel: FC> = ({ modelId: string mode?: string }) => { - handleCompletionParamsChange({}) - handleModelChanged(model) - // eslint-disable-next-line react-hooks/exhaustive-deps - }, []) + (async () => { + try { + const { params: filtered, removedDetails } = await 
fetchAndMergeValidCompletionParams( + model.provider, + model.modelId, + inputs.model.completion_params, + ) + const keys = Object.keys(removedDetails) + if (keys.length) + Toast.notify({ type: 'warning', message: `${t('common.modelProvider.parametersInvalidRemoved')}: ${keys.map(k => `${k} (${removedDetails[k]})`).join(', ')}` }) + handleCompletionParamsChange(filtered) + } + catch (e) { + Toast.notify({ type: 'error', message: t('common.error') }) + handleCompletionParamsChange({}) + } + finally { + handleModelChanged(model) + } + })() + }, [inputs.model.completion_params]) return (
diff --git a/web/app/components/workflow/nodes/tool/components/input-var-list.tsx b/web/app/components/workflow/nodes/tool/components/input-var-list.tsx index 7f310051e2..1daf3a49e9 100644 --- a/web/app/components/workflow/nodes/tool/components/input-var-list.tsx +++ b/web/app/components/workflow/nodes/tool/components/input-var-list.tsx @@ -6,7 +6,7 @@ import { useTranslation } from 'react-i18next' import type { ToolVarInputs } from '../types' import { VarType as VarKindType } from '../types' import cn from '@/utils/classnames' -import type { ValueSelector, Var } from '@/app/components/workflow/types' +import type { ToolWithProvider, ValueSelector, Var } from '@/app/components/workflow/types' import type { CredentialFormSchema } from '@/app/components/header/account-setting/model-provider-page/declarations' import { FormTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations' import { useLanguage } from '@/app/components/header/account-setting/model-provider-page/hooks' @@ -17,6 +17,7 @@ import { VarType } from '@/app/components/workflow/types' import AppSelector from '@/app/components/plugins/plugin-detail-panel/app-selector' import ModelParameterModal from '@/app/components/plugins/plugin-detail-panel/model-selector' import { noop } from 'lodash-es' +import type { Tool } from '@/app/components/tools/types' type Props = { readOnly: boolean @@ -27,6 +28,8 @@ type Props = { onOpen?: (index: number) => void isSupportConstantValue?: boolean filterVar?: (payload: Var, valueSelector: ValueSelector) => boolean + currentTool?: Tool + currentProvider?: ToolWithProvider } const InputVarList: FC = ({ @@ -38,6 +41,8 @@ const InputVarList: FC = ({ onOpen = noop, isSupportConstantValue, filterVar, + currentTool, + currentProvider, }) => { const language = useLanguage() const { t } = useTranslation() @@ -58,6 +63,8 @@ const InputVarList: FC = ({ return 'ModelSelector' else if (type === FormTypeEnum.toolSelector) return 'ToolSelector' + else if 
(type === FormTypeEnum.dynamicSelect || type === FormTypeEnum.select) + return 'Select' else return 'String' } @@ -149,6 +156,7 @@ const InputVarList: FC = ({ const handleOpen = useCallback((index: number) => { return () => onOpen(index) }, [onOpen]) + return (
{ @@ -163,7 +171,8 @@ const InputVarList: FC = ({ } = schema const varInput = value[variable] const isNumber = type === FormTypeEnum.textNumber - const isSelect = type === FormTypeEnum.select + const isDynamicSelect = type === FormTypeEnum.dynamicSelect + const isSelect = type === FormTypeEnum.select || type === FormTypeEnum.dynamicSelect const isFile = type === FormTypeEnum.file || type === FormTypeEnum.files const isAppSelector = type === FormTypeEnum.appSelector const isModelSelector = type === FormTypeEnum.modelSelector @@ -198,11 +207,13 @@ const InputVarList: FC = ({ value={varInput?.type === VarKindType.constant ? (varInput?.value ?? '') : (varInput?.value ?? [])} onChange={handleNotMixedTypeChange(variable)} onOpen={handleOpen(index)} - defaultVarKindType={varInput?.type || (isNumber ? VarKindType.constant : VarKindType.variable)} + defaultVarKindType={varInput?.type || ((isNumber || isDynamicSelect) ? VarKindType.constant : VarKindType.variable)} isSupportConstantValue={isSupportConstantValue} filterVar={isNumber ? filterVar : undefined} availableVars={isSelect ? 
availableVars : undefined} schema={schema} + currentTool={currentTool} + currentProvider={currentProvider} /> )} {isFile && ( diff --git a/web/app/components/workflow/nodes/tool/panel.tsx b/web/app/components/workflow/nodes/tool/panel.tsx index 5dd5242c71..038159870e 100644 --- a/web/app/components/workflow/nodes/tool/panel.tsx +++ b/web/app/components/workflow/nodes/tool/panel.tsx @@ -42,6 +42,7 @@ const Panel: FC> = ({ isLoading, outputSchema, hasObjectOutput, + currTool, } = useConfig(id, data) if (isLoading) { @@ -80,6 +81,8 @@ const Panel: FC> = ({ filterVar={filterVar} isSupportConstantValue onOpen={handleOnVarOpen} + currentProvider={currCollection} + currentTool={currTool} /> )} diff --git a/web/app/components/workflow/panel/debug-and-preview/hooks.ts b/web/app/components/workflow/panel/debug-and-preview/hooks.ts index 8464d04425..26d604ef11 100644 --- a/web/app/components/workflow/panel/debug-and-preview/hooks.ts +++ b/web/app/components/workflow/panel/debug-and-preview/hooks.ts @@ -8,7 +8,10 @@ import { import { useTranslation } from 'react-i18next' import { produce, setAutoFreeze } from 'immer' import { uniqBy } from 'lodash-es' -import { useWorkflowRun } from '../../hooks' +import { + useSetWorkflowVarsWithValue, + useWorkflowRun, +} from '../../hooks' import { NodeRunningStatus, WorkflowRunningStatus } from '../../types' import { useWorkflowStore } from '../../store' import { DEFAULT_ITER_TIMES, DEFAULT_LOOP_TIMES } from '../../constants' @@ -32,7 +35,6 @@ import type { FileEntity } from '@/app/components/base/file-uploader/types' import { getThreadMessages } from '@/app/components/base/chat/utils' import { useInvalidAllLastRun } from '@/service/use-workflow' import { useParams } from 'next/navigation' -import useSetWorkflowVarsWithValue from '@/app/components/workflow-app/hooks/use-fetch-workflow-inspect-vars' type GetAbortController = (abortController: AbortController) => void type SendCallback = { @@ -499,7 +501,7 @@ export const useChat = ( }, }, ) 
- }, [threadMessages, chatTree.length, updateCurrentQAOnTree, handleResponding, formSettings?.inputsForm, handleRun, notify, t, config?.suggested_questions_after_answer?.enabled]) + }, [threadMessages, chatTree.length, updateCurrentQAOnTree, handleResponding, formSettings?.inputsForm, handleRun, notify, t, config?.suggested_questions_after_answer?.enabled, fetchInspectVars, invalidAllLastRun]) return { conversationId: conversationId.current, diff --git a/web/app/components/workflow/panel/index.tsx b/web/app/components/workflow/panel/index.tsx index 3c43e10541..5b1593e5a1 100644 --- a/web/app/components/workflow/panel/index.tsx +++ b/web/app/components/workflow/panel/index.tsx @@ -1,9 +1,9 @@ import type { FC } from 'react' -import { memo, useEffect, useRef } from 'react' -import { useNodes } from 'reactflow' +import { memo, useCallback, useEffect, useRef } from 'react' import type { VersionHistoryPanelProps } from '@/app/components/workflow/panel/version-history-panel' import VersionHistoryPanel from '@/app/components/workflow/panel/version-history-panel' -import type { CommonNodeType } from '../types' +import { useShallow } from 'zustand/react/shallow' +import { useStore as useReactflow } from 'reactflow' import { Panel as NodePanel } from '../nodes' import { useStore } from '../store' import EnvPanel from './env-panel' @@ -16,53 +16,84 @@ export type PanelProps = { } versionHistoryPanelProps?: VersionHistoryPanelProps } + +/** + * Reference MDN standard implementation:https://developer.mozilla.org/zh-CN/docs/Web/API/ResizeObserverEntry/borderBoxSize + */ +const getEntryWidth = (entry: ResizeObserverEntry, element: HTMLElement): number => { + if (entry.borderBoxSize?.length > 0) + return entry.borderBoxSize[0].inlineSize + + if (entry.contentRect.width > 0) + return entry.contentRect.width + + return element.getBoundingClientRect().width +} + +const useResizeObserver = ( + callback: (width: number) => void, + dependencies: React.DependencyList = [], +) => { + 
const elementRef = useRef(null) + + const stableCallback = useCallback(callback, [callback]) + + useEffect(() => { + const element = elementRef.current + if (!element) return + + const resizeObserver = new ResizeObserver((entries) => { + for (const entry of entries) { + const width = getEntryWidth(entry, element) + stableCallback(width) + } + }) + + resizeObserver.observe(element) + + const initialWidth = element.getBoundingClientRect().width + stableCallback(initialWidth) + + return () => { + resizeObserver.disconnect() + } + }, [stableCallback, ...dependencies]) + return elementRef +} + const Panel: FC = ({ components, versionHistoryPanelProps, }) => { - const nodes = useNodes() - const selectedNode = nodes.find(node => node.data.selected) + const selectedNode = useReactflow(useShallow((s) => { + const nodes = s.getNodes() + const currentNode = nodes.find(node => node.data.selected) + + if (currentNode) { + return { + id: currentNode.id, + type: currentNode.type, + data: currentNode.data, + } + } + })) const showEnvPanel = useStore(s => s.showEnvPanel) const isRestoring = useStore(s => s.isRestoring) const showWorkflowVersionHistoryPanel = useStore(s => s.showWorkflowVersionHistoryPanel) - const rightPanelRef = useRef(null) const setRightPanelWidth = useStore(s => s.setRightPanelWidth) - - // get right panel width - useEffect(() => { - if (rightPanelRef.current) { - const resizeRightPanelObserver = new ResizeObserver((entries) => { - for (const entry of entries) { - const { inlineSize } = entry.borderBoxSize[0] - setRightPanelWidth(inlineSize) - } - }) - resizeRightPanelObserver.observe(rightPanelRef.current) - return () => { - resizeRightPanelObserver.disconnect() - } - } - }, [setRightPanelWidth]) - - const otherPanelRef = useRef(null) const setOtherPanelWidth = useStore(s => s.setOtherPanelWidth) - // get other panel width - useEffect(() => { - if (otherPanelRef.current) { - const resizeOtherPanelObserver = new ResizeObserver((entries) => { - for (const entry 
of entries) { - const { inlineSize } = entry.borderBoxSize[0] - setOtherPanelWidth(inlineSize) - } - }) - resizeOtherPanelObserver.observe(otherPanelRef.current) - return () => { - resizeOtherPanelObserver.disconnect() - } - } - }, [setOtherPanelWidth]) + const rightPanelRef = useResizeObserver( + setRightPanelWidth, + [setRightPanelWidth, selectedNode, showEnvPanel, showWorkflowVersionHistoryPanel], + ) + + const otherPanelRef = useResizeObserver( + setOtherPanelWidth, + [setOtherPanelWidth, showEnvPanel, showWorkflowVersionHistoryPanel], + ) + return (
= ({ className={cn('absolute bottom-1 right-0 top-14 z-10 flex outline-none')} key={`${isRestoring}`} > - { - components?.left - } - { - !!selectedNode && ( - - ) - } + {components?.left} + {!!selectedNode && }
{ diff --git a/web/app/components/workflow/run/node.tsx b/web/app/components/workflow/run/node.tsx index 9555cbd087..a4df5f4c74 100644 --- a/web/app/components/workflow/run/node.tsx +++ b/web/app/components/workflow/run/node.tsx @@ -29,6 +29,7 @@ import type { import ErrorHandleTip from '@/app/components/workflow/nodes/_base/components/error-handle/error-handle-tip' import { hasRetryNode } from '@/app/components/workflow/utils' import { useDocLink } from '@/context/i18n' +import Tooltip from '@/app/components/base/tooltip' type Props = { className?: string @@ -129,10 +130,16 @@ const NodePanel: FC = ({ /> )} -
{nodeInfo.title}
+ {nodeInfo.title}
+ } + > +
{nodeInfo.title}
+ {nodeInfo.status !== 'running' && !hideInfo && (
{nodeInfo.execution_metadata?.total_tokens ? `${getTokenCount(nodeInfo.execution_metadata?.total_tokens || 0)} tokens · ` : ''}{`${getTime(nodeInfo.elapsed_time || 0)}`}
)} diff --git a/web/app/components/workflow/types.ts b/web/app/components/workflow/types.ts index a4d7497053..625ef49b9f 100644 --- a/web/app/components/workflow/types.ts +++ b/web/app/components/workflow/types.ts @@ -144,6 +144,7 @@ export type Variable = { variable: string } value_selector: ValueSelector + value_type?: VarType variable_type?: VarKindType value?: string options?: string[] diff --git a/web/app/components/workflow/variable-inspect/right.tsx b/web/app/components/workflow/variable-inspect/right.tsx index 851078d972..6ddd0d47d3 100644 --- a/web/app/components/workflow/variable-inspect/right.tsx +++ b/web/app/components/workflow/variable-inspect/right.tsx @@ -63,6 +63,17 @@ const Right = ({ resetConversationVar(currentNodeVar.var.id) } + const getCopyContent = () => { + const value = currentNodeVar?.var.value + if (value === null || value === undefined) + return '' + + if (typeof value === 'object') + return JSON.stringify(value) + + return String(value) + } + return (
{/* header */} @@ -124,7 +135,7 @@ const Right = ({ )} {currentNodeVar.var.value_type !== 'secret' && ( - + )} )} diff --git a/web/app/components/workflow/variable-inspect/trigger.tsx b/web/app/components/workflow/variable-inspect/trigger.tsx index 0107b0b3e3..a45c589ab9 100644 --- a/web/app/components/workflow/variable-inspect/trigger.tsx +++ b/web/app/components/workflow/variable-inspect/trigger.tsx @@ -12,6 +12,7 @@ import type { CommonNodeType } from '@/app/components/workflow/types' import { useEventEmitterContextContext } from '@/context/event-emitter' import { EVENT_WORKFLOW_STOP } from '@/app/components/workflow/variable-inspect/types' import cn from '@/utils/classnames' +import { useNodesReadOnly } from '../hooks/use-workflow' const VariableInspectTrigger: FC = () => { const { t } = useTranslation() @@ -32,7 +33,10 @@ const VariableInspectTrigger: FC = () => { const allVars = [...environmentVariables, ...conversationVars, ...systemVars, ...nodesWithInspectVars] return allVars }, [environmentVariables, conversationVars, systemVars, nodesWithInspectVars]) - + const { + nodesReadOnly, + getNodesReadOnly, + } = useNodesReadOnly() const workflowRunningData = useStore(s => s.workflowRunningData) const nodes = useNodes() const isStepRunning = useMemo(() => nodes.some(node => node.data._singleRunningStatus === NodeRunningStatus.Running), [nodes]) @@ -61,8 +65,14 @@ const VariableInspectTrigger: FC = () => {
{!isRunning && !currentVars.length && (
setShowVariableInspectPanel(true)} + className={cn('system-2xs-semibold-uppercase flex h-5 cursor-pointer items-center gap-1 rounded-md border-[0.5px] border-effects-highlight bg-components-actionbar-bg px-2 text-text-tertiary shadow-lg backdrop-blur-sm hover:bg-background-default-hover', + nodesReadOnly && 'cursor-not-allowed text-text-disabled hover:bg-transparent hover:text-text-disabled', + )} + onClick={() => { + if (getNodesReadOnly()) + return + setShowVariableInspectPanel(true) + }} > {t('workflow.debug.variableInspect.trigger.normal')}
@@ -70,13 +80,21 @@ const VariableInspectTrigger: FC = () => { {!isRunning && currentVars.length > 0 && ( <>
setShowVariableInspectPanel(true)} + className={cn('system-xs-medium flex h-6 cursor-pointer items-center gap-1 rounded-md border-[0.5px] border-effects-highlight bg-components-actionbar-bg px-2 text-text-accent shadow-lg backdrop-blur-sm hover:bg-components-actionbar-bg-accent', + nodesReadOnly && 'cursor-not-allowed text-text-disabled hover:bg-transparent hover:text-text-disabled', + )} + onClick={() => { + if (getNodesReadOnly()) + return + setShowVariableInspectPanel(true) + }} > {t('workflow.debug.variableInspect.trigger.cached')}
{t('workflow.debug.variableInspect.trigger.clear')} diff --git a/web/app/styles/globals.css b/web/app/styles/globals.css index 52e36a2767..353cfa2fff 100644 --- a/web/app/styles/globals.css +++ b/web/app/styles/globals.css @@ -697,4 +697,15 @@ button:focus-within { -ms-overflow-style: none; scrollbar-width: none; } + + /* Hide arrows from number input */ + .no-spinner::-webkit-outer-spin-button, + .no-spinner::-webkit-inner-spin-button { + -webkit-appearance: none; + margin: 0; + } + + .no-spinner { + -moz-appearance: textfield; + } } diff --git a/web/hooks/use-tab-searchparams.ts b/web/hooks/use-tab-searchparams.ts index 0c0e3b7773..444944f812 100644 --- a/web/hooks/use-tab-searchparams.ts +++ b/web/hooks/use-tab-searchparams.ts @@ -29,9 +29,10 @@ export const useTabSearchParams = ({ const router = useRouter() const pathName = pathnameFromHook || window?.location?.pathname const searchParams = useSearchParams() + const searchParamValue = searchParams.has(searchParamName) ? decodeURIComponent(searchParams.get(searchParamName)!) : defaultTab const [activeTab, setTab] = useState( !disableSearchParams - ? (searchParams.get(searchParamName) || defaultTab) + ? 
searchParamValue : defaultTab, ) @@ -39,7 +40,7 @@ export const useTabSearchParams = ({ setTab(newActiveTab) if (disableSearchParams) return - router[`${routingBehavior}`](`${pathName}?${searchParamName}=${newActiveTab}`) + router[`${routingBehavior}`](`${pathName}?${searchParamName}=${encodeURIComponent(newActiveTab)}`) } return [activeTab, setActiveTab] as const diff --git a/web/i18n/de-DE/dataset-documents.ts b/web/i18n/de-DE/dataset-documents.ts index 22018f9da4..f52220a669 100644 --- a/web/i18n/de-DE/dataset-documents.ts +++ b/web/i18n/de-DE/dataset-documents.ts @@ -390,6 +390,8 @@ const translation = { addChildChunk: 'Untergeordneten Block hinzufügen', regenerationConfirmTitle: 'Möchten Sie untergeordnete Chunks regenerieren?', searchResults_one: 'ERGEBNIS', + keywordEmpty: 'Das Schlüsselwort darf nicht leer sein.', + keywordDuplicate: 'Das Schlüsselwort existiert bereits', }, } diff --git a/web/i18n/en-US/app.ts b/web/i18n/en-US/app.ts index 9b49dbd4f6..1220f5be1d 100644 --- a/web/i18n/en-US/app.ts +++ b/web/i18n/en-US/app.ts @@ -200,9 +200,9 @@ const translation = { accessControl: 'Web App Access Control', accessItemsDescription: { anyone: 'Anyone can access the web app (no login required)', - specific: 'Only specific members within the platform can access the Web application', - organization: 'All members within the platform can access the Web application', - external: 'Only authenticated external users can access the Web application', + specific: 'Only specific members within the platform can access the web app', + organization: 'All members within the platform can access the web app', + external: 'Only authenticated external users can access the web app', }, accessControlDialog: { title: 'Web App Access Control', @@ -219,7 +219,7 @@ const translation = { members_one: '{{count}} MEMBER', members_other: '{{count}} MEMBERS', noGroupsOrMembers: 'No groups or members selected', - webAppSSONotEnabledTip: 'Please contact your organization administrator to 
configure external authentication for the Web application.', + webAppSSONotEnabledTip: 'Please contact your organization administrator to configure external authentication for the web app.', operateGroupAndMember: { searchPlaceholder: 'Search groups and members', allMembers: 'All members', diff --git a/web/i18n/en-US/common.ts b/web/i18n/en-US/common.ts index 6dcf8b55c5..7f5fec0a5d 100644 --- a/web/i18n/en-US/common.ts +++ b/web/i18n/en-US/common.ts @@ -459,6 +459,7 @@ const translation = { connected: 'Connected', disconnected: 'Disconnected', changeAuthorizedPages: 'Change authorized pages', + integratedAlert: 'Notion is integrated via internal credential, no need to re-authorize.', pagesAuthorized: 'Pages authorized', sync: 'Sync', remove: 'Remove', diff --git a/web/i18n/en-US/dataset-documents.ts b/web/i18n/en-US/dataset-documents.ts index 97948d9797..48264b8b94 100644 --- a/web/i18n/en-US/dataset-documents.ts +++ b/web/i18n/en-US/dataset-documents.ts @@ -358,7 +358,9 @@ const translation = { newChildChunk: 'New Child Chunk', keywords: 'KEYWORDS', addKeyWord: 'Add keyword', + keywordEmpty: 'The keyword cannot be empty', keywordError: 'The maximum length of keyword is 20', + keywordDuplicate: 'The keyword already exists', characters_one: 'character', characters_other: 'characters', hitCount: 'Retrieval count', diff --git a/web/i18n/en-US/workflow.ts b/web/i18n/en-US/workflow.ts index 532a358097..260c78af50 100644 --- a/web/i18n/en-US/workflow.ts +++ b/web/i18n/en-US/workflow.ts @@ -557,6 +557,7 @@ const translation = { advancedDependencies: 'Advanced Dependencies', advancedDependenciesTip: 'Add some preloaded dependencies that take more time to consume or are not default built-in here', searchDependencies: 'Search Dependencies', + syncFunctionSignature: 'Sync function signature to code', }, templateTransform: { inputVars: 'Input Variables', diff --git a/web/i18n/es-ES/dataset-documents.ts b/web/i18n/es-ES/dataset-documents.ts index cd5bb36197..53a6663847 100644 
--- a/web/i18n/es-ES/dataset-documents.ts +++ b/web/i18n/es-ES/dataset-documents.ts @@ -389,6 +389,8 @@ const translation = { characters_one: 'carácter', regenerationSuccessMessage: 'Puede cerrar esta ventana.', regenerationConfirmTitle: '¿Desea regenerar fragmentos secundarios?', + keywordEmpty: 'La palabra clave no puede estar vacía', + keywordDuplicate: 'La palabra clave ya existe', }, } diff --git a/web/i18n/fa-IR/dataset-documents.ts b/web/i18n/fa-IR/dataset-documents.ts index 85e1e0a4aa..048cb5163f 100644 --- a/web/i18n/fa-IR/dataset-documents.ts +++ b/web/i18n/fa-IR/dataset-documents.ts @@ -388,6 +388,8 @@ const translation = { regeneratingMessage: 'این ممکن است یک لحظه طول بکشد، لطفا صبر کنید...', regenerationConfirmTitle: 'آیا می خواهید تکه های کودک را بازسازی کنید؟', regenerationSuccessMessage: 'می توانید این پنجره را ببندید.', + keywordEmpty: 'کلمه کلیدی نمی‌تواند خالی باشد', + keywordDuplicate: 'این کلیدواژه قبلاً وجود دارد', }, } diff --git a/web/i18n/fr-FR/dataset-documents.ts b/web/i18n/fr-FR/dataset-documents.ts index 7a795202ed..d8c0fe4af7 100644 --- a/web/i18n/fr-FR/dataset-documents.ts +++ b/web/i18n/fr-FR/dataset-documents.ts @@ -389,6 +389,8 @@ const translation = { searchResults_zero: 'RÉSULTAT', empty: 'Aucun Chunk trouvé', editChildChunk: 'Modifier le morceau enfant', + keywordDuplicate: 'Le mot-clé existe déjà', + keywordEmpty: 'Le mot-clé ne peut pas être vide.', }, } diff --git a/web/i18n/hi-IN/dataset-documents.ts b/web/i18n/hi-IN/dataset-documents.ts index 35bcb0aad2..1fcdd49449 100644 --- a/web/i18n/hi-IN/dataset-documents.ts +++ b/web/i18n/hi-IN/dataset-documents.ts @@ -390,6 +390,8 @@ const translation = { chunkAdded: '1 हिस्सा जोड़ा गया', chunkDetail: 'चंक विवरण', regenerationConfirmMessage: 'चाइल्ड चंक्स को रीजनरेट करने से वर्तमान चाइल्ड चंक्स ओवरराइट हो जाएंगे, जिसमें संपादित चंक्स और नए जोड़े गए चंक्स शामिल हैं। पुनरुत्थान को पूर्ववत नहीं किया जा सकता है।', + keywordDuplicate: 'कीवर्ड पहले से मौजूद है', + keywordEmpty: 'कीवर्ड 
ख़ाली नहीं हो सकता', }, } diff --git a/web/i18n/it-IT/dataset-documents.ts b/web/i18n/it-IT/dataset-documents.ts index b9afb1ea75..2881e1fcee 100644 --- a/web/i18n/it-IT/dataset-documents.ts +++ b/web/i18n/it-IT/dataset-documents.ts @@ -391,6 +391,8 @@ const translation = { regenerationSuccessMessage: 'È possibile chiudere questa finestra.', childChunkAdded: '1 blocco figlio aggiunto', childChunks_other: 'BLOCCHI FIGLIO', + keywordEmpty: 'La parola chiave non può essere vuota', + keywordDuplicate: 'La parola chiave esiste già', }, } diff --git a/web/i18n/ja-JP/dataset-documents.ts b/web/i18n/ja-JP/dataset-documents.ts index ecdbbf512c..e28425dc8f 100644 --- a/web/i18n/ja-JP/dataset-documents.ts +++ b/web/i18n/ja-JP/dataset-documents.ts @@ -388,6 +388,8 @@ const translation = { editedAt: '編集日時', expandChunks: 'チャンクを展開', collapseChunks: 'チャンクを折りたたむ', + keywordDuplicate: 'そのキーワードは既に存在しています', + keywordEmpty: 'キーワードは空であってはいけません', }, } diff --git a/web/i18n/ja-JP/workflow.ts b/web/i18n/ja-JP/workflow.ts index 5700d9c9b8..3c959669bf 100644 --- a/web/i18n/ja-JP/workflow.ts +++ b/web/i18n/ja-JP/workflow.ts @@ -550,6 +550,7 @@ const translation = { advancedDependencies: '高度な依存関係', advancedDependenciesTip: '消費に時間がかかる、またはデフォルトで組み込まれていない事前ロードされた依存関係を追加します', searchDependencies: '依存関係を検索', + syncFunctionSignature: 'コードの関数署名を同期', }, templateTransform: { inputVars: '入力変数', diff --git a/web/i18n/ko-KR/dataset-documents.ts b/web/i18n/ko-KR/dataset-documents.ts index a379318959..ee94a880a0 100644 --- a/web/i18n/ko-KR/dataset-documents.ts +++ b/web/i18n/ko-KR/dataset-documents.ts @@ -388,6 +388,8 @@ const translation = { addChunk: '청크 추가 (Add Chunk)', characters_other: '문자', regeneratingMessage: '시간이 걸릴 수 있으니 잠시만 기다려 주십시오...', + keywordDuplicate: '키워드가 이미 존재합니다.', + keywordEmpty: '키워드는 비워둘 수 없습니다.', }, } diff --git a/web/i18n/pl-PL/dataset-documents.ts b/web/i18n/pl-PL/dataset-documents.ts index 37f373ac93..78e427ba95 100644 --- a/web/i18n/pl-PL/dataset-documents.ts +++ 
b/web/i18n/pl-PL/dataset-documents.ts @@ -390,6 +390,8 @@ const translation = { newChildChunk: 'Nowy fragment podrzędny', clearFilter: 'Wyczyść filtr', childChunks_one: 'FRAGMENT POTOMNY', + keywordDuplicate: 'Słowo kluczowe już istnieje', + keywordEmpty: 'Słowo kluczowe nie może być puste', }, } diff --git a/web/i18n/pt-BR/dataset-documents.ts b/web/i18n/pt-BR/dataset-documents.ts index 9a3d13bcab..b8c06c1769 100644 --- a/web/i18n/pt-BR/dataset-documents.ts +++ b/web/i18n/pt-BR/dataset-documents.ts @@ -389,6 +389,8 @@ const translation = { newChildChunk: 'Novo pedaço filho', characters_one: 'personagem', parentChunk: 'Pedaço pai', + keywordEmpty: 'A palavra-chave não pode estar vazia', + keywordDuplicate: 'A palavra-chave já existe', }, } diff --git a/web/i18n/ro-RO/dataset-documents.ts b/web/i18n/ro-RO/dataset-documents.ts index e42be87502..e0f3e8b476 100644 --- a/web/i18n/ro-RO/dataset-documents.ts +++ b/web/i18n/ro-RO/dataset-documents.ts @@ -389,6 +389,8 @@ const translation = { regeneratingTitle: 'Regenerarea bucăților secundare', addChildChunk: 'Adăugați o bucată copil', searchResults_other: 'REZULTATELE', + keywordDuplicate: 'Cuvântul cheie există deja', + keywordEmpty: 'Cuvântul cheie nu poate fi gol', }, } diff --git a/web/i18n/ru-RU/dataset-documents.ts b/web/i18n/ru-RU/dataset-documents.ts index 735266c087..6fadee3b26 100644 --- a/web/i18n/ru-RU/dataset-documents.ts +++ b/web/i18n/ru-RU/dataset-documents.ts @@ -389,6 +389,8 @@ const translation = { characters_one: 'характер', addChildChunk: 'Добавить дочерний чанк', newChildChunk: 'Новый дочерний чанк', + keywordEmpty: 'Ключевое слово не может быть пустым', + keywordDuplicate: 'Ключевое слово уже существует', }, } diff --git a/web/i18n/sl-SI/dataset-documents.ts b/web/i18n/sl-SI/dataset-documents.ts index 78d63c9e29..1a83335ad5 100644 --- a/web/i18n/sl-SI/dataset-documents.ts +++ b/web/i18n/sl-SI/dataset-documents.ts @@ -389,6 +389,8 @@ const translation = { chunk: 'Kos', addChunk: 'Dodajanje kosa', 
childChunkAdded: 'Dodan je 1 kos otroka', + keywordDuplicate: 'Ključna beseda že obstaja', + keywordEmpty: 'Ključna beseda ne more biti prazna', }, } diff --git a/web/i18n/th-TH/dataset-documents.ts b/web/i18n/th-TH/dataset-documents.ts index 91d04d6bc1..87b43f31e2 100644 --- a/web/i18n/th-TH/dataset-documents.ts +++ b/web/i18n/th-TH/dataset-documents.ts @@ -388,6 +388,8 @@ const translation = { searchResults_other: 'ผลลัพธ์', regenerationSuccessMessage: 'คุณสามารถปิดหน้าต่างนี้ได้', childChunks_one: 'ก้อนเด็ก', + keywordDuplicate: 'คำสำคัญมีอยู่แล้ว', + keywordEmpty: 'คีย์เวิร์ดไม่สามารถว่างเปล่าได้', }, } diff --git a/web/i18n/tr-TR/dataset-documents.ts b/web/i18n/tr-TR/dataset-documents.ts index f643375334..2e00975178 100644 --- a/web/i18n/tr-TR/dataset-documents.ts +++ b/web/i18n/tr-TR/dataset-documents.ts @@ -388,6 +388,8 @@ const translation = { chunks_other: 'Parçalar', editedAt: 'Şurada düzenlendi:', addChildChunk: 'Alt Parça Ekle', + keywordDuplicate: 'Anahtar kelime zaten var', + keywordEmpty: 'Anahtar kelime boş olamaz', }, } diff --git a/web/i18n/uk-UA/dataset-documents.ts b/web/i18n/uk-UA/dataset-documents.ts index da012cbb57..e8464e5661 100644 --- a/web/i18n/uk-UA/dataset-documents.ts +++ b/web/i18n/uk-UA/dataset-documents.ts @@ -389,6 +389,8 @@ const translation = { regenerationSuccessMessage: 'Ви можете закрити це вікно.', expandChunks: 'Розгортання фрагментів', regenerationConfirmTitle: 'Хочете регенерувати дитячі шматки?', + keywordEmpty: 'Ключове слово не може бути порожнім', + keywordDuplicate: 'Ключове слово вже існує', }, } diff --git a/web/i18n/vi-VN/dataset-documents.ts b/web/i18n/vi-VN/dataset-documents.ts index 6e13c1185f..1cc050b804 100644 --- a/web/i18n/vi-VN/dataset-documents.ts +++ b/web/i18n/vi-VN/dataset-documents.ts @@ -388,6 +388,8 @@ const translation = { clearFilter: 'Bộ lọc rõ ràng', chunk: 'Khúc', edited: 'EDITED', + keywordDuplicate: 'Từ khóa đã tồn tại', + keywordEmpty: 'Từ khóa không được để trống', }, } diff --git 
a/web/i18n/zh-Hans/dataset-documents.ts b/web/i18n/zh-Hans/dataset-documents.ts index be922f9540..8bc3ec3fe0 100644 --- a/web/i18n/zh-Hans/dataset-documents.ts +++ b/web/i18n/zh-Hans/dataset-documents.ts @@ -390,6 +390,8 @@ const translation = { editedAt: '编辑于', expandChunks: '展开分段', collapseChunks: '折叠分段', + keywordEmpty: '关键词不能为空', + keywordDuplicate: '关键词已经存在', }, } diff --git a/web/i18n/zh-Hans/workflow.ts b/web/i18n/zh-Hans/workflow.ts index 22eceabb3f..d03ad6d2c5 100644 --- a/web/i18n/zh-Hans/workflow.ts +++ b/web/i18n/zh-Hans/workflow.ts @@ -558,6 +558,7 @@ const translation = { advancedDependencies: '高级依赖', advancedDependenciesTip: '在这里添加一些预加载需要消耗较多时间或非默认内置的依赖包', searchDependencies: '搜索依赖', + syncFunctionSignature: '同步函数签名至代码', }, templateTransform: { inputVars: '输入变量', diff --git a/web/i18n/zh-Hant/dataset-documents.ts b/web/i18n/zh-Hant/dataset-documents.ts index 60b1df80f3..a79f3993e0 100644 --- a/web/i18n/zh-Hant/dataset-documents.ts +++ b/web/i18n/zh-Hant/dataset-documents.ts @@ -388,6 +388,8 @@ const translation = { searchResults_zero: '結果', parentChunks_other: '父塊', newChildChunk: '新兒童塊', + keywordEmpty: '關鍵字不能為空', + keywordDuplicate: '關鍵字已經存在', }, } diff --git a/web/i18n/zh-Hant/workflow.ts b/web/i18n/zh-Hant/workflow.ts index 2ee345eccc..8262e60351 100644 --- a/web/i18n/zh-Hant/workflow.ts +++ b/web/i18n/zh-Hant/workflow.ts @@ -544,6 +544,7 @@ const translation = { advancedDependencies: '高級依賴', advancedDependenciesTip: '在這裡添加一些預加載需要消耗較多時間或非默認內置的依賴包', searchDependencies: '搜索依賴', + syncFunctionSignature: '同步函數簽名至代碼', }, templateTransform: { inputVars: '輸入變量', diff --git a/web/package.json b/web/package.json index 0ece7fac27..0862ddbb07 100644 --- a/web/package.json +++ b/web/package.json @@ -1,6 +1,6 @@ { "name": "dify-web", - "version": "1.5.0", + "version": "1.5.1", "private": true, "engines": { "node": ">=v22.11.0" @@ -103,7 +103,7 @@ "mime": "^4.0.4", "mitt": "^3.0.1", "negotiator": "^0.6.3", - "next": "15.2.3", + "next": "15.2.4", "next-themes": 
"^0.4.3", "pinyin-pro": "^3.25.0", "qrcode.react": "^4.2.0", @@ -235,7 +235,11 @@ }, "pnpm": { "overrides": { - "esbuild@<0.25.0": "0.25.0" + "esbuild@<0.25.0": "0.25.0", + "pbkdf2@<3.1.3": "3.1.3", + "vite@<6.2.7": "6.2.7", + "prismjs@<1.30.0": "1.30.0", + "brace-expansion@<2.0.2": "2.0.2" } } } diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index fce3b6581b..ef945dfc54 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -9,6 +9,10 @@ overrides: '@types/react-dom': ~18.2.0 string-width: 4.2.3 esbuild@<0.25.0: 0.25.0 + pbkdf2@<3.1.3: 3.1.3 + vite@<6.2.7: 6.2.7 + prismjs@<1.30.0: 1.30.0 + brace-expansion@<2.0.2: 2.0.2 importers: @@ -207,8 +211,8 @@ importers: specifier: ^0.6.3 version: 0.6.4 next: - specifier: 15.2.3 - version: 15.2.3(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3) + specifier: 15.2.4 + version: 15.2.4(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3) next-themes: specifier: ^0.4.3 version: 0.4.6(react-dom@19.0.0(react@19.0.0))(react@19.0.0) @@ -392,7 +396,7 @@ importers: version: 8.5.0(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(storybook@8.5.0) '@storybook/nextjs': specifier: 8.5.0 - version: 8.5.0(esbuild@0.25.0)(next@15.2.3(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3)(storybook@8.5.0)(type-fest@4.39.1)(typescript@4.9.5)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.99.5(esbuild@0.25.0)(uglify-js@3.19.3)) + version: 8.5.0(esbuild@0.25.0)(next@15.2.4(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3)(storybook@8.5.0)(type-fest@4.39.1)(typescript@4.9.5)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.99.5(esbuild@0.25.0)(uglify-js@3.19.3)) '@storybook/react': specifier: 8.5.0 version: 
8.5.0(@storybook/test@8.5.0(storybook@8.5.0))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(storybook@8.5.0)(typescript@4.9.5) @@ -2086,8 +2090,8 @@ packages: '@napi-rs/wasm-runtime@0.2.8': resolution: {integrity: sha512-OBlgKdX7gin7OIq4fadsjpg+cp2ZphvAIKucHsNfTdJiqdOmOEwQd/bHi0VwNrcw5xpBJyUw6cK/QilCqy1BSg==} - '@next/env@15.2.3': - resolution: {integrity: sha512-a26KnbW9DFEUsSxAxKBORR/uD9THoYoKbkpFywMN/AFvboTt94b8+g/07T8J6ACsdLag8/PDU60ov4rPxRAixw==} + '@next/env@15.2.4': + resolution: {integrity: sha512-+SFtMgoiYP3WoSswuNmxJOCwi06TdWE733D+WPjpXIe4LXGULwEaofiiAy6kbS0+XjM5xF5n3lKuBwN2SnqD9g==} '@next/eslint-plugin-next@15.3.0': resolution: {integrity: sha512-511UUcpWw5GWTyKfzW58U2F/bYJyjLE9e3SlnGK/zSXq7RqLlqFO8B9bitJjumLpj317fycC96KZ2RZsjGNfBw==} @@ -2103,50 +2107,50 @@ packages: '@mdx-js/react': optional: true - '@next/swc-darwin-arm64@15.2.3': - resolution: {integrity: sha512-uaBhA8aLbXLqwjnsHSkxs353WrRgQgiFjduDpc7YXEU0B54IKx3vU+cxQlYwPCyC8uYEEX7THhtQQsfHnvv8dw==} + '@next/swc-darwin-arm64@15.2.4': + resolution: {integrity: sha512-1AnMfs655ipJEDC/FHkSr0r3lXBgpqKo4K1kiwfUf3iE68rDFXZ1TtHdMvf7D0hMItgDZ7Vuq3JgNMbt/+3bYw==} engines: {node: '>= 10'} cpu: [arm64] os: [darwin] - '@next/swc-darwin-x64@15.2.3': - resolution: {integrity: sha512-pVwKvJ4Zk7h+4hwhqOUuMx7Ib02u3gDX3HXPKIShBi9JlYllI0nU6TWLbPT94dt7FSi6mSBhfc2JrHViwqbOdw==} + '@next/swc-darwin-x64@15.2.4': + resolution: {integrity: sha512-3qK2zb5EwCwxnO2HeO+TRqCubeI/NgCe+kL5dTJlPldV/uwCnUgC7VbEzgmxbfrkbjehL4H9BPztWOEtsoMwew==} engines: {node: '>= 10'} cpu: [x64] os: [darwin] - '@next/swc-linux-arm64-gnu@15.2.3': - resolution: {integrity: sha512-50ibWdn2RuFFkOEUmo9NCcQbbV9ViQOrUfG48zHBCONciHjaUKtHcYFiCwBVuzD08fzvzkWuuZkd4AqbvKO7UQ==} + '@next/swc-linux-arm64-gnu@15.2.4': + resolution: {integrity: sha512-HFN6GKUcrTWvem8AZN7tT95zPb0GUGv9v0d0iyuTb303vbXkkbHDp/DxufB04jNVD+IN9yHy7y/6Mqq0h0YVaQ==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@next/swc-linux-arm64-musl@15.2.3': - resolution: {integrity: 
sha512-2gAPA7P652D3HzR4cLyAuVYwYqjG0mt/3pHSWTCyKZq/N/dJcUAEoNQMyUmwTZWCJRKofB+JPuDVP2aD8w2J6Q==} + '@next/swc-linux-arm64-musl@15.2.4': + resolution: {integrity: sha512-Oioa0SORWLwi35/kVB8aCk5Uq+5/ZIumMK1kJV+jSdazFm2NzPDztsefzdmzzpx5oGCJ6FkUC7vkaUseNTStNA==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@next/swc-linux-x64-gnu@15.2.3': - resolution: {integrity: sha512-ODSKvrdMgAJOVU4qElflYy1KSZRM3M45JVbeZu42TINCMG3anp7YCBn80RkISV6bhzKwcUqLBAmOiWkaGtBA9w==} + '@next/swc-linux-x64-gnu@15.2.4': + resolution: {integrity: sha512-yb5WTRaHdkgOqFOZiu6rHV1fAEK0flVpaIN2HB6kxHVSy/dIajWbThS7qON3W9/SNOH2JWkVCyulgGYekMePuw==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@next/swc-linux-x64-musl@15.2.3': - resolution: {integrity: sha512-ZR9kLwCWrlYxwEoytqPi1jhPd1TlsSJWAc+H/CJHmHkf2nD92MQpSRIURR1iNgA/kuFSdxB8xIPt4p/T78kwsg==} + '@next/swc-linux-x64-musl@15.2.4': + resolution: {integrity: sha512-Dcdv/ix6srhkM25fgXiyOieFUkz+fOYkHlydWCtB0xMST6X9XYI3yPDKBZt1xuhOytONsIFJFB08xXYsxUwJLw==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@next/swc-win32-arm64-msvc@15.2.3': - resolution: {integrity: sha512-+G2FrDcfm2YDbhDiObDU/qPriWeiz/9cRR0yMWJeTLGGX6/x8oryO3tt7HhodA1vZ8r2ddJPCjtLcpaVl7TE2Q==} + '@next/swc-win32-arm64-msvc@15.2.4': + resolution: {integrity: sha512-dW0i7eukvDxtIhCYkMrZNQfNicPDExt2jPb9AZPpL7cfyUo7QSNl1DjsHjmmKp6qNAqUESyT8YFl/Aw91cNJJg==} engines: {node: '>= 10'} cpu: [arm64] os: [win32] - '@next/swc-win32-x64-msvc@15.2.3': - resolution: {integrity: sha512-gHYS9tc+G2W0ZC8rBL+H6RdtXIyk40uLiaos0yj5US85FNhbFEndMA2nW3z47nzOWiSvXTZ5kBClc3rD0zJg0w==} + '@next/swc-win32-x64-msvc@15.2.4': + resolution: {integrity: sha512-SbnWkJmkS7Xl3kre8SdMF6F/XDh1DTFEhp0jRTj/uB8iPKoU2bb2NDfcu+iifv1+mxQEd1g2vvSxcZbXSKyWiQ==} engines: {node: '>= 10'} cpu: [x64] os: [win32] @@ -3311,7 +3315,7 @@ packages: resolution: {integrity: sha512-bmpJJm7Y7i9BBELlLuuM1J1Q6EQ6K5Ye4wcyOpOMXMcePYKSIYlpcrCm4l/O6ja4VJA5G2aMJiuZkZdnxlC3SA==} peerDependencies: msw: ^2.4.9 - vite: ^5.0.0 || ^6.0.0 + 
vite: 6.2.7 peerDependenciesMeta: msw: optional: true @@ -3726,11 +3730,8 @@ packages: boolbase@1.0.0: resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} - brace-expansion@1.1.11: - resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} - - brace-expansion@2.0.1: - resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} braces@3.0.3: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} @@ -4076,9 +4077,6 @@ packages: compare-versions@6.1.1: resolution: {integrity: sha512-4hm4VPpIecmlg59CHXnRDnqGplJFrbLG4aFEl5vl6cK1u76ws3LLvX7ikFnTDl5vo39sjWD6AaDPYodJp/NNHg==} - concat-map@0.0.1: - resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} - confbox@0.1.8: resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==} @@ -4134,6 +4132,9 @@ packages: create-ecdh@4.0.4: resolution: {integrity: sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==} + create-hash@1.1.3: + resolution: {integrity: sha512-snRpch/kwQhcdlnZKYanNF1m0RDlrCdSKQaH87w1FCFPVPNCQ/Il9QJKAX2jVBZddRdaHBMC+zXa9Gw9tmkNUA==} + create-hash@1.2.0: resolution: {integrity: sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==} @@ -5383,6 +5384,9 @@ packages: has-unicode@2.0.1: resolution: {integrity: sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==} + hash-base@2.0.2: + resolution: {integrity: 
sha512-0TROgQ1/SxE6KmxWSvXHvRj90/Xo1JvZShofnYF+f6ZsGtR4eES7WfrQzPalmyagfKZCXpVnitiRebZulWsbiw==} + hash-base@3.0.5: resolution: {integrity: sha512-vXm0l45VbcHEVlTCzs8M+s0VeYsB2lnlAaThoLKGXr3bE/VWDOelNUnycUPEhKEaXARL2TEFjBOyUiM6+55KBg==} engines: {node: '>= 0.10'} @@ -6565,8 +6569,8 @@ packages: react: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc react-dom: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc - next@15.2.3: - resolution: {integrity: sha512-x6eDkZxk2rPpu46E1ZVUWIBhYCLszmUY6fvHBFcbzJ9dD+qRX6vcHusaqqDlnY+VngKzKbAiG2iRCkPbmi8f7w==} + next@15.2.4: + resolution: {integrity: sha512-VwL+LAaPSxEkd3lU2xWbgEOtrM8oedmyhBqaVNmgKB+GvZlCy9rgaEc+y2on0wv+l0oSFqLtYD6dcC1eAedUaQ==} engines: {node: ^18.18.0 || ^19.8.0 || >= 20.0.0} hasBin: true peerDependencies: @@ -6859,8 +6863,8 @@ packages: resolution: {integrity: sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==} engines: {node: '>= 14.16'} - pbkdf2@3.1.2: - resolution: {integrity: sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==} + pbkdf2@3.1.3: + resolution: {integrity: sha512-wfRLBZ0feWRhCIkoMB6ete7czJcnNnqRpcoWQBLqatqXXmelSRqfdDK4F3u9T2s2cXas/hQJcryI/4lAL+XTlA==} engines: {node: '>=0.12'} pdfjs-dist@4.4.168: @@ -7042,10 +7046,6 @@ packages: resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - prismjs@1.27.0: - resolution: {integrity: sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA==} - engines: {node: '>=6'} - prismjs@1.30.0: resolution: {integrity: sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw==} engines: {node: '>=6'} @@ -7527,6 +7527,9 @@ packages: deprecated: Rimraf versions prior to v4 are no longer supported hasBin: true + ripemd160@2.0.1: + resolution: {integrity: 
sha512-J7f4wutN8mdbV08MJnXibYpCOPHR+yzy+iQ/AsjMv2j8cLavQ8VGagDFUwwTAdF8FmRKVeNpbTTEwNHCW1g94w==} + ripemd160@2.0.2: resolution: {integrity: sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==} @@ -8042,6 +8045,10 @@ packages: tmpl@1.0.5: resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} + to-buffer@1.2.1: + resolution: {integrity: sha512-tB82LpAIWjhLYbqjx3X4zEeHN6M8CiuOEy2JY8SEQVdYRe3CCHOFaqrBW1doLDrfpWhplcW7BL+bO3/6S3pcDQ==} + engines: {node: '>= 0.4'} + to-regex-range@5.0.1: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} @@ -8367,8 +8374,8 @@ packages: engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} hasBin: true - vite@6.2.6: - resolution: {integrity: sha512-9xpjNl3kR4rVDZgPNdTL0/c6ao4km69a/2ihNQbcANz8RuCOK3hQBmLSJf3bRKVQjVMda+YvizNE8AwvogcPbw==} + vite@6.2.7: + resolution: {integrity: sha512-qg3LkeuinTrZoJHHF94coSaTfIPyBYoywp+ys4qu20oSJFbKMYoIJo0FWJT9q6Vp49l6z9IsJRbHdcGtiKbGoQ==} engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} hasBin: true peerDependencies: @@ -10565,7 +10572,7 @@ snapshots: '@tybys/wasm-util': 0.9.0 optional: true - '@next/env@15.2.3': {} + '@next/env@15.2.4': {} '@next/eslint-plugin-next@15.3.0': dependencies: @@ -10578,28 +10585,28 @@ snapshots: '@mdx-js/loader': 3.1.0(acorn@8.14.1)(webpack@5.99.5(esbuild@0.25.0)(uglify-js@3.19.3)) '@mdx-js/react': 3.1.0(@types/react@18.2.79)(react@19.0.0) - '@next/swc-darwin-arm64@15.2.3': + '@next/swc-darwin-arm64@15.2.4': optional: true - '@next/swc-darwin-x64@15.2.3': + '@next/swc-darwin-x64@15.2.4': optional: true - '@next/swc-linux-arm64-gnu@15.2.3': + '@next/swc-linux-arm64-gnu@15.2.4': optional: true - '@next/swc-linux-arm64-musl@15.2.3': + '@next/swc-linux-arm64-musl@15.2.4': optional: true - '@next/swc-linux-x64-gnu@15.2.3': + '@next/swc-linux-x64-gnu@15.2.4': optional: true - 
'@next/swc-linux-x64-musl@15.2.3': + '@next/swc-linux-x64-musl@15.2.4': optional: true - '@next/swc-win32-arm64-msvc@15.2.3': + '@next/swc-win32-arm64-msvc@15.2.4': optional: true - '@next/swc-win32-x64-msvc@15.2.3': + '@next/swc-win32-x64-msvc@15.2.4': optional: true '@nodelib/fs.scandir@2.1.5': @@ -11211,7 +11218,7 @@ snapshots: dependencies: storybook: 8.5.0 - '@storybook/nextjs@8.5.0(esbuild@0.25.0)(next@15.2.3(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3)(storybook@8.5.0)(type-fest@4.39.1)(typescript@4.9.5)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.99.5(esbuild@0.25.0)(uglify-js@3.19.3))': + '@storybook/nextjs@8.5.0(esbuild@0.25.0)(next@15.2.4(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3)(storybook@8.5.0)(type-fest@4.39.1)(typescript@4.9.5)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.99.5(esbuild@0.25.0)(uglify-js@3.19.3))': dependencies: '@babel/core': 7.26.10 '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.26.10) @@ -11237,7 +11244,7 @@ snapshots: find-up: 5.0.0 image-size: 1.2.1 loader-utils: 3.3.1 - next: 15.2.3(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3) + next: 15.2.4(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3) node-polyfill-webpack-plugin: 2.0.1(webpack@5.99.5(esbuild@0.25.0)(uglify-js@3.19.3)) pnp-webpack-plugin: 1.7.0(typescript@4.9.5) postcss: 8.5.3 @@ -11956,13 +11963,13 @@ snapshots: chai: 5.2.0 tinyrainbow: 2.0.0 - '@vitest/mocker@3.1.1(vite@6.2.6(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1))': + '@vitest/mocker@3.1.1(vite@6.2.7(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1))': dependencies: '@vitest/spy': 3.1.1 estree-walker: 3.0.3 magic-string: 0.30.17 optionalDependencies: - vite: 
6.2.6(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1) + vite: 6.2.7(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1) '@vitest/pretty-format@2.0.5': dependencies: @@ -12488,12 +12495,7 @@ snapshots: boolbase@1.0.0: {} - brace-expansion@1.1.11: - dependencies: - balanced-match: 1.0.2 - concat-map: 0.0.1 - - brace-expansion@2.0.1: + brace-expansion@2.0.2: dependencies: balanced-match: 1.0.2 @@ -12846,8 +12848,6 @@ snapshots: compare-versions@6.1.1: {} - concat-map@0.0.1: {} - confbox@0.1.8: {} confbox@0.2.2: {} @@ -12905,6 +12905,13 @@ snapshots: bn.js: 4.12.1 elliptic: 6.6.1 + create-hash@1.1.3: + dependencies: + cipher-base: 1.0.6 + inherits: 2.0.4 + ripemd160: 2.0.2 + sha.js: 2.4.11 + create-hash@1.2.0: dependencies: cipher-base: 1.0.6 @@ -12959,7 +12966,7 @@ snapshots: diffie-hellman: 5.0.3 hash-base: 3.0.5 inherits: 2.0.4 - pbkdf2: 3.1.2 + pbkdf2: 3.1.3 public-encrypt: 4.0.3 randombytes: 2.1.0 randomfill: 1.0.4 @@ -14577,6 +14584,10 @@ snapshots: has-unicode@2.0.1: optional: true + hash-base@2.0.2: + dependencies: + inherits: 2.0.4 + hash-base@3.0.5: dependencies: inherits: 2.0.4 @@ -16239,15 +16250,15 @@ snapshots: minimatch@10.0.1: dependencies: - brace-expansion: 2.0.1 + brace-expansion: 2.0.2 minimatch@3.1.2: dependencies: - brace-expansion: 1.1.11 + brace-expansion: 2.0.2 minimatch@9.0.5: dependencies: - brace-expansion: 2.0.1 + brace-expansion: 2.0.2 minimist@1.2.8: {} @@ -16307,9 +16318,9 @@ snapshots: react: 19.0.0 react-dom: 19.0.0(react@19.0.0) - next@15.2.3(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3): + next@15.2.4(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3): dependencies: - '@next/env': 15.2.3 + '@next/env': 15.2.4 '@swc/counter': 0.1.3 '@swc/helpers': 0.5.15 busboy: 1.6.0 @@ -16319,14 +16330,14 @@ snapshots: react-dom: 19.0.0(react@19.0.0) styled-jsx: 5.1.6(@babel/core@7.26.10)(react@19.0.0) optionalDependencies: - 
'@next/swc-darwin-arm64': 15.2.3 - '@next/swc-darwin-x64': 15.2.3 - '@next/swc-linux-arm64-gnu': 15.2.3 - '@next/swc-linux-arm64-musl': 15.2.3 - '@next/swc-linux-x64-gnu': 15.2.3 - '@next/swc-linux-x64-musl': 15.2.3 - '@next/swc-win32-arm64-msvc': 15.2.3 - '@next/swc-win32-x64-msvc': 15.2.3 + '@next/swc-darwin-arm64': 15.2.4 + '@next/swc-darwin-x64': 15.2.4 + '@next/swc-linux-arm64-gnu': 15.2.4 + '@next/swc-linux-arm64-musl': 15.2.4 + '@next/swc-linux-x64-gnu': 15.2.4 + '@next/swc-linux-x64-musl': 15.2.4 + '@next/swc-win32-arm64-msvc': 15.2.4 + '@next/swc-win32-x64-msvc': 15.2.4 sass: 1.86.3 sharp: 0.33.5 transitivePeerDependencies: @@ -16563,7 +16574,7 @@ snapshots: browserify-aes: 1.2.0 evp_bytestokey: 1.0.3 hash-base: 3.0.5 - pbkdf2: 3.1.2 + pbkdf2: 3.1.3 safe-buffer: 5.2.1 parse-entities@2.0.0: @@ -16644,13 +16655,14 @@ snapshots: pathval@2.0.0: {} - pbkdf2@3.1.2: + pbkdf2@3.1.3: dependencies: - create-hash: 1.2.0 + create-hash: 1.1.3 create-hmac: 1.1.7 - ripemd160: 2.0.2 + ripemd160: 2.0.1 safe-buffer: 5.2.1 sha.js: 2.4.11 + to-buffer: 1.2.1 pdfjs-dist@4.4.168: optionalDependencies: @@ -16831,8 +16843,6 @@ snapshots: ansi-styles: 5.2.0 react-is: 18.3.1 - prismjs@1.27.0: {} - prismjs@1.30.0: {} process-nextick-args@2.0.1: {} @@ -17247,7 +17257,7 @@ snapshots: dependencies: hastscript: 6.0.0 parse-entities: 2.0.0 - prismjs: 1.27.0 + prismjs: 1.30.0 regenerate-unicode-properties@10.2.0: dependencies: @@ -17441,6 +17451,11 @@ snapshots: dependencies: glob: 7.2.3 + ripemd160@2.0.1: + dependencies: + hash-base: 2.0.2 + inherits: 2.0.4 + ripemd160@2.0.2: dependencies: hash-base: 3.0.5 @@ -18041,6 +18056,12 @@ snapshots: tmpl@1.0.5: {} + to-buffer@1.2.1: + dependencies: + isarray: 2.0.5 + safe-buffer: 5.2.1 + typed-array-buffer: 1.0.3 + to-regex-range@5.0.1: dependencies: is-number: 7.0.0 @@ -18389,7 +18410,7 @@ snapshots: debug: 4.4.0 es-module-lexer: 1.6.0 pathe: 2.0.3 - vite: 6.2.6(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1) + vite: 
6.2.7(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1) transitivePeerDependencies: - '@types/node' - jiti @@ -18404,7 +18425,7 @@ snapshots: - tsx - yaml - vite@6.2.6(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1): + vite@6.2.7(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1): dependencies: esbuild: 0.25.2 postcss: 8.5.3 @@ -18420,7 +18441,7 @@ snapshots: vitest@3.1.1(@types/debug@4.1.12)(@types/node@18.15.0)(happy-dom@17.4.4)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1): dependencies: '@vitest/expect': 3.1.1 - '@vitest/mocker': 3.1.1(vite@6.2.6(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1)) + '@vitest/mocker': 3.1.1(vite@6.2.7(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1)) '@vitest/pretty-format': 3.1.1 '@vitest/runner': 3.1.1 '@vitest/snapshot': 3.1.1 @@ -18436,7 +18457,7 @@ snapshots: tinyexec: 0.3.2 tinypool: 1.0.2 tinyrainbow: 2.0.0 - vite: 6.2.6(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1) + vite: 6.2.7(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1) vite-node: 3.1.1(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1) why-is-node-running: 2.3.0 optionalDependencies: diff --git a/web/service/access-control.ts b/web/service/access-control.ts index 865909d2f9..36999bf8f3 100644 --- a/web/service/access-control.ts +++ b/web/service/access-control.ts @@ -86,5 +86,8 @@ export const useGetUserCanAccessApp = ({ appId, isInstalledApp = true, enabled } enabled: !!appId && enabled, staleTime: 0, gcTime: 0, + initialData: { + result: !enabled, + }, }) } diff --git a/web/service/base.ts b/web/service/base.ts index 80ce3b801d..6d7452dfcc 100644 --- a/web/service/base.ts +++ b/web/service/base.ts @@ -121,7 +121,7 @@ function unicodeToChar(text: string) { function requiredWebSSOLogin(message?: string, code?: number) { const params = new URLSearchParams() - 
params.append('redirect_url', globalThis.location.pathname) + params.append('redirect_url', encodeURIComponent(`${globalThis.location.pathname}${globalThis.location.search}`)) if (message) params.append('message', message) if (code) diff --git a/web/service/use-plugins.ts b/web/service/use-plugins.ts index ecfdbcf993..ff092bb037 100644 --- a/web/service/use-plugins.ts +++ b/web/service/use-plugins.ts @@ -1,5 +1,6 @@ import { useCallback, useEffect } from 'react' import type { + FormOption, ModelProvider, } from '@/app/components/header/account-setting/model-provider-page/declarations' import { fetchModelProviderModelList } from '@/service/common' @@ -477,7 +478,7 @@ export const usePluginTaskList = (category?: PluginType) => { refreshPluginList(category ? { category } as any : undefined, !category) } } - // eslint-disable-next-line react-hooks/exhaustive-deps + // eslint-disable-next-line react-hooks/exhaustive-deps }, [isRefetching]) const handleRefetch = useCallback(() => { @@ -571,3 +572,17 @@ export const usePluginInfo = (providerName?: string) => { enabled: !!providerName, }) } + +export const useFetchDynamicOptions = (plugin_id: string, provider: string, action: string, parameter: string, provider_type: 'tool') => { + return useMutation({ + mutationFn: () => get<{ options: FormOption[] }>('/workspaces/current/plugin/parameters/dynamic-options', { + params: { + plugin_id, + provider, + action, + parameter, + provider_type, + }, + }), + }) +} diff --git a/web/service/use-workflow.ts b/web/service/use-workflow.ts index ad616403fa..4629ffb146 100644 --- a/web/service/use-workflow.ts +++ b/web/service/use-workflow.ts @@ -116,18 +116,19 @@ export const useInvalidAllLastRun = (appId: string) => { const useConversationVarValuesKey = [NAME_SPACE, 'conversation-variable'] -export const useConversationVarValues = (appId: string) => { +export const useConversationVarValues = (url?: string) => { return useQuery({ - queryKey: [...useConversationVarValuesKey, appId], + 
enabled: !!url, + queryKey: [...useConversationVarValuesKey, url], queryFn: async () => { - const { items } = (await get(`apps/${appId}/workflows/draft/conversation-variables`)) as { items: VarInInspect[] } + const { items } = (await get(url || '')) as { items: VarInInspect[] } return items }, }) } -export const useInvalidateConversationVarValues = (appId: string) => { - return useInvalid([...useConversationVarValuesKey, appId]) +export const useInvalidateConversationVarValues = (url: string) => { + return useInvalid([...useConversationVarValuesKey, url]) } export const useResetConversationVar = (appId: string) => { @@ -142,25 +143,26 @@ export const useResetConversationVar = (appId: string) => { export const useResetToLastRunValue = (appId: string) => { return useMutation({ mutationKey: [NAME_SPACE, 'reset to last run value', appId], - mutationFn: async (varId: string) => { + mutationFn: async (varId: string): Promise<{ value: any }> => { return put(`apps/${appId}/workflows/draft/variables/${varId}/reset`) }, }) } export const useSysVarValuesKey = [NAME_SPACE, 'sys-variable'] -export const useSysVarValues = (appId: string) => { +export const useSysVarValues = (url?: string) => { return useQuery({ - queryKey: [...useSysVarValuesKey, appId], + enabled: !!url, + queryKey: [...useSysVarValuesKey, url], queryFn: async () => { - const { items } = (await get(`apps/${appId}/workflows/draft/system-variables`)) as { items: VarInInspect[] } + const { items } = (await get(url || '')) as { items: VarInInspect[] } return items }, }) } -export const useInvalidateSysVarValues = (appId: string) => { - return useInvalid([...useSysVarValuesKey, appId]) +export const useInvalidateSysVarValues = (url: string) => { + return useInvalid([...useSysVarValuesKey, url]) } export const useDeleteAllInspectorVars = (appId: string) => { diff --git a/web/utils/completion-params.ts b/web/utils/completion-params.ts new file mode 100644 index 0000000000..b46c3ab720 --- /dev/null +++ 
b/web/utils/completion-params.ts @@ -0,0 +1,88 @@ +import type { FormValue, ModelParameterRule } from '@/app/components/header/account-setting/model-provider-page/declarations' + +export const mergeValidCompletionParams = ( + oldParams: FormValue | undefined, + rules: ModelParameterRule[], +): { params: FormValue; removedDetails: Record } => { + if (!oldParams || Object.keys(oldParams).length === 0) + return { params: {}, removedDetails: {} } + + const acceptedKeys = new Set(rules.map(r => r.name)) + const ruleMap: Record = {} + rules.forEach((r) => { + ruleMap[r.name] = r + }) + + const nextParams: FormValue = {} + const removedDetails: Record = {} + + Object.entries(oldParams).forEach(([key, value]) => { + if (!acceptedKeys.has(key)) { + removedDetails[key] = 'unsupported' + return + } + + const rule = ruleMap[key] + if (!rule) { + removedDetails[key] = 'unsupported' + return + } + + switch (rule.type) { + case 'int': + case 'float': { + if (typeof value !== 'number') { + removedDetails[key] = 'invalid type' + return + } + const min = rule.min ?? Number.NEGATIVE_INFINITY + const max = rule.max ?? Number.POSITIVE_INFINITY + if (value < min || value > max) { + removedDetails[key] = `out of range (${min}-${max})` + return + } + nextParams[key] = value + return + } + case 'boolean': { + if (typeof value !== 'boolean') { + removedDetails[key] = 'invalid type' + return + } + nextParams[key] = value + return + } + case 'string': + case 'text': { + if (typeof value !== 'string') { + removedDetails[key] = 'invalid type' + return + } + if (Array.isArray(rule.options) && rule.options.length) { + if (!(rule.options as string[]).includes(value)) { + removedDetails[key] = 'unsupported option' + return + } + } + nextParams[key] = value + return + } + default: { + removedDetails[key] = `unsupported rule type: ${(rule as any)?.type ?? 
'unknown'}` + } + } + }) + + return { params: nextParams, removedDetails } +} + +export const fetchAndMergeValidCompletionParams = async ( + provider: string, + modelId: string, + oldParams: FormValue | undefined, +): Promise<{ params: FormValue; removedDetails: Record }> => { + const { fetchModelParameterRules } = await import('@/service/common') + const url = `/workspaces/current/model-providers/${provider}/models/parameter-rules?model=${modelId}` + const { data: parameterRules } = await fetchModelParameterRules(url) + return mergeValidCompletionParams(oldParams, parameterRules ?? []) +}