Merge branch 'feat/rag-2' into feat/rag-pipeline-service-api

This commit is contained in:
jyong 2025-09-10 15:09:51 +08:00
commit 914ae3c5d2
84 changed files with 1129 additions and 401 deletions

View File

@ -11,11 +11,7 @@ from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from configs import dify_config
from controllers.console import api
from controllers.console.app.error import (
ConversationCompletedError,
DraftWorkflowNotExist,
DraftWorkflowNotSync,
)
from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError

View File

@ -284,7 +284,7 @@ class DataSourceNotionDatasetSyncApi(Resource):
documents = DocumentService.get_document_by_dataset_id(dataset_id_str)
for document in documents:
document_indexing_sync_task.delay(dataset_id_str, document.id)
return 200
return {"result": "success"}, 200
class DataSourceNotionDocumentSyncApi(Resource):
@ -302,7 +302,7 @@ class DataSourceNotionDocumentSyncApi(Resource):
if document is None:
raise NotFound("Document not found.")
document_indexing_sync_task.delay(dataset_id_str, document_id_str)
return 200
return {"result": "success"}, 200
api.add_resource(DataSourceApi, "/data-source/integrates", "/data-source/integrates/<uuid:binding_id>/<string:action>")

View File

@ -113,7 +113,7 @@ class DatasetMetadataBuiltInFieldActionApi(Resource):
MetadataService.enable_built_in_field(dataset)
elif action == "disable":
MetadataService.disable_built_in_field(dataset)
return 200
return {"result": "success"}, 200
class DocumentMetadataEditApi(Resource):
@ -135,7 +135,7 @@ class DocumentMetadataEditApi(Resource):
MetadataService.update_documents_metadata(dataset, metadata_args)
return 200
return {"result": "success"}, 200
api.add_resource(DatasetMetadataCreateApi, "/datasets/<uuid:dataset_id>/metadata")

View File

@ -111,7 +111,7 @@ class TagBindingCreateApi(Resource):
args = parser.parse_args()
TagService.save_tag_binding(args)
return 200
return {"result": "success"}, 200
class TagBindingDeleteApi(Resource):
@ -132,7 +132,7 @@ class TagBindingDeleteApi(Resource):
args = parser.parse_args()
TagService.delete_tag_binding(args)
return 200
return {"result": "success"}, 200
api.add_resource(TagListApi, "/tags")

View File

@ -174,7 +174,7 @@ class DatasetMetadataBuiltInFieldActionServiceApi(DatasetApiResource):
MetadataService.enable_built_in_field(dataset)
elif action == "disable":
MetadataService.disable_built_in_field(dataset)
return 200
return {"result": "success"}, 200
@service_api_ns.route("/datasets/<uuid:dataset_id>/documents/metadata")
@ -204,4 +204,4 @@ class DocumentMetadataEditServiceApi(DatasetApiResource):
MetadataService.update_documents_metadata(dataset, metadata_args)
return 200
return {"result": "success"}, 200

View File

@ -105,6 +105,7 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
graph_runtime_state=graph_runtime_state,
workflow_id=self._workflow.id,
tenant_id=self._workflow.tenant_id,
user_id=self.application_generate_entity.user_id,
)
# RUN WORKFLOW

View File

@ -42,6 +42,7 @@ from models.provider import (
TenantPreferredModelProvider,
)
from models.provider_ids import ModelProviderID
from services.enterprise.plugin_manager_service import PluginCredentialType
logger = logging.getLogger(__name__)
@ -129,14 +130,38 @@ class ProviderConfiguration(BaseModel):
return copy_credentials
else:
credentials = None
current_credential_id = None
if self.custom_configuration.models:
for model_configuration in self.custom_configuration.models:
if model_configuration.model_type == model_type and model_configuration.model == model:
credentials = model_configuration.credentials
current_credential_id = model_configuration.current_credential_id
break
if not credentials and self.custom_configuration.provider:
credentials = self.custom_configuration.provider.credentials
current_credential_id = self.custom_configuration.provider.current_credential_id
if current_credential_id:
from core.helper.credential_utils import check_credential_policy_compliance
check_credential_policy_compliance(
credential_id=current_credential_id,
provider=self.provider.provider,
credential_type=PluginCredentialType.MODEL,
)
else:
# no current credential id, check all available credentials
if self.custom_configuration.provider:
for credential_configuration in self.custom_configuration.provider.available_credentials:
from core.helper.credential_utils import check_credential_policy_compliance
check_credential_policy_compliance(
credential_id=credential_configuration.credential_id,
provider=self.provider.provider,
credential_type=PluginCredentialType.MODEL,
)
return credentials
@ -266,7 +291,6 @@ class ProviderConfiguration(BaseModel):
:param credential_id: if provided, return the specified credential
:return:
"""
if credential_id:
return self._get_specific_provider_credential(credential_id)
@ -739,6 +763,7 @@ class ProviderConfiguration(BaseModel):
current_credential_id = credential_record.id
current_credential_name = credential_record.credential_name
credentials = self.obfuscated_credentials(
credentials=credentials,
credential_form_schemas=self.provider.model_credential_schema.credential_form_schemas
@ -793,6 +818,7 @@ class ProviderConfiguration(BaseModel):
):
current_credential_id = model_configuration.current_credential_id
current_credential_name = model_configuration.current_credential_name
credentials = self.obfuscated_credentials(
credentials=model_configuration.credentials,
credential_form_schemas=self.provider.model_credential_schema.credential_form_schemas

View File

@ -145,6 +145,7 @@ class ModelLoadBalancingConfiguration(BaseModel):
name: str
credentials: dict
credential_source_type: str | None = None
credential_id: str | None = None
class ModelSettings(BaseModel):

View File

@ -0,0 +1,75 @@
"""
Credential utility functions for checking credential existence and policy compliance.
"""
from services.enterprise.plugin_manager_service import PluginCredentialType
def is_credential_exists(credential_id: str, credential_type: "PluginCredentialType") -> bool:
    """
    Check whether a credential record is still present in the database.

    :param credential_id: The credential ID to look up
    :param credential_type: The kind of credential (MODEL or TOOL)
    :return: True if a matching record exists, False otherwise
    """
    from sqlalchemy import select
    from sqlalchemy.orm import Session

    from extensions.ext_database import db
    from models.provider import ProviderCredential, ProviderModelCredential
    from models.tools import BuiltinToolProvider

    with Session(db.engine) as session:
        if credential_type == PluginCredentialType.MODEL:
            # A model credential may live in either the provider-level or the
            # model-level table; probe both with a single UNION query.
            query = select(ProviderCredential.id).where(ProviderCredential.id == credential_id)
            query = query.union(
                select(ProviderModelCredential.id).where(ProviderModelCredential.id == credential_id)
            )
            return session.scalar(query) is not None

        if credential_type == PluginCredentialType.TOOL:
            found = session.scalar(select(BuiltinToolProvider.id).where(BuiltinToolProvider.id == credential_id))
            return found is not None

    # Unknown credential types are treated as non-existent.
    return False
def check_credential_policy_compliance(
    credential_id: str, provider: str, credential_type: "PluginCredentialType", check_existence: bool = True
) -> None:
    """
    Verify that a credential complies with the enterprise credential policy.

    :param credential_id: The credential ID to verify
    :param provider: The provider name the credential belongs to
    :param credential_type: The kind of credential (MODEL or TOOL)
    :param check_existence: When True, verify the credential still exists before the policy call
    :raises ValueError: If the credential can no longer be found in the database
    """
    from services.enterprise.plugin_manager_service import (
        CheckCredentialPolicyComplianceRequest,
        PluginManagerService,
    )
    from services.feature_service import FeatureService

    # Nothing to verify when the plugin manager feature is off, or when no
    # credential id was supplied at all.
    if not FeatureService.get_system_features().plugin_manager.enabled or not credential_id:
        return

    # Optionally make sure the credential record has not been deleted first.
    if check_existence and not is_credential_exists(credential_id, credential_type):
        raise ValueError(f"Credential with id {credential_id} for provider {provider} not found.")

    # Delegate the actual policy decision to the plugin manager service.
    PluginManagerService.check_credential_policy_compliance(
        CheckCredentialPolicyComplianceRequest(
            dify_credential_id=credential_id,
            provider=provider,
            credential_type=credential_type,
        )
    )

View File

@ -23,6 +23,7 @@ from core.model_runtime.model_providers.__base.tts_model import TTSModel
from core.provider_manager import ProviderManager
from extensions.ext_redis import redis_client
from models.provider import ProviderType
from services.enterprise.plugin_manager_service import PluginCredentialType
logger = logging.getLogger(__name__)
@ -362,6 +363,23 @@ class ModelInstance:
else:
raise last_exception
# Additional policy compliance check as fallback (in case fetch_next didn't catch it)
try:
from core.helper.credential_utils import check_credential_policy_compliance
if lb_config.credential_id:
check_credential_policy_compliance(
credential_id=lb_config.credential_id,
provider=self.provider,
credential_type=PluginCredentialType.MODEL,
)
except Exception as e:
logger.warning(
"Load balancing config %s failed policy compliance check in round-robin: %s", lb_config.id, str(e)
)
self.load_balancing_manager.cooldown(lb_config, expire=60)
continue
try:
if "credentials" in kwargs:
del kwargs["credentials"]
@ -515,6 +533,24 @@ class LBModelManager:
continue
# Check policy compliance for the selected configuration
try:
from core.helper.credential_utils import check_credential_policy_compliance
if config.credential_id:
check_credential_policy_compliance(
credential_id=config.credential_id,
provider=self._provider,
credential_type=PluginCredentialType.MODEL,
)
except Exception as e:
logger.warning("Load balancing config %s failed policy compliance check: %s", config.id, str(e))
cooldown_load_balancing_configs.append(config)
if len(cooldown_load_balancing_configs) >= len(self._load_balancing_configs):
# all configs are in cooldown or failed policy compliance
return None
continue
if dify_config.DEBUG:
logger.info(
"""Model LB

View File

@ -1129,6 +1129,7 @@ class ProviderManager:
name=load_balancing_model_config.name,
credentials=provider_model_credentials,
credential_source_type=load_balancing_model_config.credential_source_type,
credential_id=load_balancing_model_config.credential_id,
)
)

View File

@ -29,6 +29,10 @@ class ToolApiSchemaError(ValueError):
pass
class ToolCredentialPolicyViolationError(ValueError):
pass
class ToolEngineInvokeError(Exception):
meta: ToolInvokeMeta

View File

@ -14,16 +14,31 @@ from sqlalchemy.orm import Session
from yarl import URL
import contexts
from core.helper.provider_cache import ToolProviderCredentialsCache
from core.plugin.impl.tool import PluginToolManager
from core.tools.__base.tool_provider import ToolProviderController
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.mcp_tool.provider import MCPToolProviderController
from core.tools.mcp_tool.tool import MCPTool
from core.tools.plugin_tool.provider import PluginToolProviderController
from core.tools.plugin_tool.tool import PluginTool
from core.tools.utils.uuid_utils import is_valid_uuid
from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
from core.workflow.entities.variable_pool import VariablePool
from models.provider_ids import ToolProviderID
from services.enterprise.plugin_manager_service import PluginCredentialType
from services.tools.mcp_tools_manage_service import MCPToolManageService
if TYPE_CHECKING:
from core.workflow.nodes.tool.entities import ToolEntity
from configs import dify_config
from core.agent.entities import AgentToolEntity
from core.app.entities.app_invoke_entities import InvokeFrom
from core.helper.module_import_helper import load_single_subclass_from_source
from core.helper.position_helper import is_filtered
from core.helper.provider_cache import ToolProviderCredentialsCache
from core.model_runtime.utils.encoders import jsonable_encoder
from core.tools.__base.tool import Tool
from core.tools.__base.tool_provider import ToolProviderController
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.builtin_tool.provider import BuiltinToolProviderController
from core.tools.builtin_tool.providers._positions import BuiltinToolProviderSort
from core.tools.builtin_tool.tool import BuiltinTool
@ -39,22 +54,12 @@ from core.tools.entities.tool_entities import (
ToolProviderType,
)
from core.tools.errors import ToolProviderNotFoundError
from core.tools.mcp_tool.provider import MCPToolProviderController
from core.tools.mcp_tool.tool import MCPTool
from core.tools.plugin_tool.provider import PluginToolProviderController
from core.tools.plugin_tool.tool import PluginTool
from core.tools.tool_label_manager import ToolLabelManager
from core.tools.utils.configuration import (
ToolParameterConfigurationManager,
)
from core.tools.utils.configuration import ToolParameterConfigurationManager
from core.tools.utils.encryption import create_provider_encrypter, create_tool_provider_encrypter
from core.tools.utils.uuid_utils import is_valid_uuid
from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
from core.tools.workflow_as_tool.tool import WorkflowTool
from extensions.ext_database import db
from models.provider_ids import ToolProviderID
from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider
from services.tools.mcp_tools_manage_service import MCPToolManageService
from services.tools.tools_transform_service import ToolTransformService
if TYPE_CHECKING:
@ -115,7 +120,6 @@ class ToolManager:
get the plugin provider
"""
# check if context is set
from core.plugin.impl.tool import PluginToolManager
try:
contexts.plugin_tool_providers.get()
@ -237,6 +241,16 @@ class ToolManager:
if builtin_provider is None:
raise ToolProviderNotFoundError(f"builtin provider {provider_id} not found")
# check if the credential is allowed to be used
from core.helper.credential_utils import check_credential_policy_compliance
check_credential_policy_compliance(
credential_id=builtin_provider.id,
provider=provider_id,
credential_type=PluginCredentialType.TOOL,
check_existence=False,
)
encrypter, cache = create_provider_encrypter(
tenant_id=tenant_id,
config=[
@ -509,7 +523,6 @@ class ToolManager:
"""
list all the plugin providers
"""
from core.plugin.impl.tool import PluginToolManager
manager = PluginToolManager()
provider_entities = manager.fetch_tool_providers(tenant_id)

View File

@ -86,9 +86,7 @@ def load_user_from_request(request_from_flask_login):
if not app_mcp_server:
raise NotFound("App MCP server not found.")
end_user = (
db.session.query(EndUser)
.where(EndUser.external_user_id == app_mcp_server.id, EndUser.type == "mcp")
.first()
db.session.query(EndUser).where(EndUser.session_id == app_mcp_server.id, EndUser.type == "mcp").first()
)
if not end_user:
raise NotFound("End user not found.")

View File

@ -12,7 +12,7 @@ from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'b35c3db83d09'
down_revision = 'b95962a3885c'
down_revision = 'c20211f18133'
branch_labels = None
depends_on = None

View File

@ -368,7 +368,7 @@ class Workflow(Base):
if not tenant_id:
return []
environment_variables_dict: dict[str, Any] = json.loads(self._environment_variables)
environment_variables_dict: dict[str, Any] = json.loads(self._environment_variables or "{}")
results = [
variable_factory.build_environment_variable_from_mapping(v) for v in environment_variables_dict.values()
]

View File

@ -223,7 +223,7 @@ class DatasetService:
and retrieval_model.reranking_model.reranking_model_name
):
# check if reranking model setting is valid
DatasetService.check_embedding_model_setting(
DatasetService.check_reranking_model_setting(
tenant_id,
retrieval_model.reranking_model.reranking_provider_name,
retrieval_model.reranking_model.reranking_model_name,

View File

@ -3,18 +3,30 @@ import os
import requests
class EnterpriseRequest:
base_url = os.environ.get("ENTERPRISE_API_URL", "ENTERPRISE_API_URL")
secret_key = os.environ.get("ENTERPRISE_API_SECRET_KEY", "ENTERPRISE_API_SECRET_KEY")
class BaseRequest:
    """
    Base HTTP client for enterprise inner APIs.

    Subclasses configure ``base_url``, ``secret_key`` and ``secret_key_header``
    for their target service.
    """

    # Explicitly disable proxies for internal service-to-service calls.
    proxies = {
        "http": "",
        "https": "",
    }
    base_url = ""
    secret_key = ""
    secret_key_header = ""
    # (connect, read) timeout in seconds — without one, a stalled enterprise
    # API would hang callers indefinitely.
    timeout = (10, 60)

    @classmethod
    def send_request(cls, method, endpoint, json=None, params=None):
        """
        Send an HTTP request to the configured service.

        :param method: HTTP method name (e.g. "GET", "POST")
        :param endpoint: Path appended to ``base_url``
        :param json: Optional JSON-serializable request body
        :param params: Optional query parameters
        :return: The decoded JSON response body
        """
        headers = {"Content-Type": "application/json", cls.secret_key_header: cls.secret_key}
        url = f"{cls.base_url}{endpoint}"
        response = requests.request(
            method, url, json=json, params=params, headers=headers, proxies=cls.proxies, timeout=cls.timeout
        )
        return response.json()
class EnterpriseRequest(BaseRequest):
    """Client for the enterprise console inner API."""

    base_url = os.environ.get("ENTERPRISE_API_URL", "ENTERPRISE_API_URL")
    secret_key = os.environ.get("ENTERPRISE_API_SECRET_KEY", "ENTERPRISE_API_SECRET_KEY")
    secret_key_header = "Enterprise-Api-Secret-Key"
class EnterprisePluginManagerRequest(BaseRequest):
    """Client for the enterprise plugin manager inner API."""

    base_url = os.environ.get("ENTERPRISE_PLUGIN_MANAGER_API_URL", "ENTERPRISE_PLUGIN_MANAGER_API_URL")
    secret_key = os.environ.get("ENTERPRISE_PLUGIN_MANAGER_API_SECRET_KEY", "ENTERPRISE_PLUGIN_MANAGER_API_SECRET_KEY")
    secret_key_header = "Plugin-Manager-Inner-Api-Secret-Key"

View File

@ -0,0 +1,53 @@
import enum
import logging
from pydantic import BaseModel
from services.enterprise.base import EnterprisePluginManagerRequest
from services.errors.base import BaseServiceError
class PluginCredentialType(enum.Enum):
    """Kinds of credentials managed by the enterprise plugin manager."""

    MODEL = 0
    TOOL = 1

    def to_number(self) -> int:
        """Return the wire-format numeric code for this credential type."""
        return self.value
class CheckCredentialPolicyComplianceRequest(BaseModel):
    """Payload for the plugin manager credential policy compliance check."""

    dify_credential_id: str
    provider: str
    credential_type: PluginCredentialType

    def model_dump(self, **kwargs):
        """Serialize the model, encoding the credential type as its numeric code."""
        payload = super().model_dump(**kwargs)
        payload["credential_type"] = self.credential_type.to_number()
        return payload
class CredentialPolicyViolationError(BaseServiceError):
    """Raised when a credential fails the enterprise credential policy check."""
class PluginManagerService:
    """Facade over the enterprise plugin manager inner API."""

    @classmethod
    def check_credential_policy_compliance(cls, body: CheckCredentialPolicyComplianceRequest):
        """
        Ask the plugin manager whether the given credential may be used.

        :param body: The compliance check request payload
        :raises CredentialPolicyViolationError: If the API call fails, returns an
            unexpected payload, or reports the credential as non-compliant
        """
        try:
            response = EnterprisePluginManagerRequest.send_request(
                "POST", "/check-credential-policy-compliance", json=body.model_dump()
            )
            if not isinstance(response, dict) or "result" not in response:
                raise ValueError("Invalid response format from plugin manager API")
        except Exception as e:
            raise CredentialPolicyViolationError(
                f"error occurred while checking credential policy compliance: {e}"
            ) from e

        if not response.get("result", False):
            raise CredentialPolicyViolationError("Credentials not available: Please use ENTERPRISE global credentials")

        logging.debug(
            "Credential policy compliance checked for %s with credential %s, result: %s",
            body.provider,
            body.dify_credential_id,
            response.get("result", False),
        )

View File

@ -139,6 +139,10 @@ class KnowledgeRateLimitModel(BaseModel):
subscription_plan: str = ""
class PluginManagerModel(BaseModel):
enabled: bool = False
class SystemFeatureModel(BaseModel):
sso_enforced_for_signin: bool = False
sso_enforced_for_signin_protocol: str = ""
@ -155,6 +159,7 @@ class SystemFeatureModel(BaseModel):
webapp_auth: WebAppAuthModel = WebAppAuthModel()
plugin_installation_permission: PluginInstallationPermissionModel = PluginInstallationPermissionModel()
enable_change_email: bool = True
plugin_manager: PluginManagerModel = PluginManagerModel()
class FeatureService:
@ -193,6 +198,7 @@ class FeatureService:
system_features.branding.enabled = True
system_features.webapp_auth.enabled = True
system_features.enable_change_email = False
system_features.plugin_manager.enabled = True
cls._fulfill_params_from_enterprise(system_features)
if dify_config.MARKETPLACE_ENABLED:

View File

@ -33,22 +33,14 @@ from libs.datetime_utils import naive_utc_now
from models.account import Account
from models.model import App, AppMode
from models.tools import WorkflowToolProvider
from models.workflow import (
Workflow,
WorkflowNodeExecutionModel,
WorkflowNodeExecutionTriggeredFrom,
WorkflowType,
)
from models.workflow import Workflow, WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom, WorkflowType
from repositories.factory import DifyAPIRepositoryFactory
from services.enterprise.plugin_manager_service import PluginCredentialType
from services.errors.app import IsDraftWorkflowError, WorkflowHashNotEqualError
from services.workflow.workflow_converter import WorkflowConverter
from .errors.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError
from .workflow_draft_variable_service import (
DraftVariableSaver,
DraftVarLoader,
WorkflowDraftVariableService,
)
from .workflow_draft_variable_service import DraftVariableSaver, DraftVarLoader, WorkflowDraftVariableService
class WorkflowService:
@ -268,6 +260,12 @@ class WorkflowService:
if not draft_workflow:
raise ValueError("No valid workflow found.")
# Validate credentials before publishing, for credential policy check
from services.feature_service import FeatureService
if FeatureService.get_system_features().plugin_manager.enabled:
self._validate_workflow_credentials(draft_workflow)
# create new workflow
workflow = Workflow.new(
tenant_id=app_model.tenant_id,
@ -293,6 +291,260 @@ class WorkflowService:
# return new workflow
return workflow
def _validate_workflow_credentials(self, workflow: Workflow) -> None:
    """
    Validate all credentials in workflow nodes before publishing.

    :param workflow: The workflow to validate
    :raises ValueError: If any credentials violate policy compliance
    """
    # Hoisted out of the per-node loop: the original re-imported this helper
    # inside every branch.
    from core.helper.credential_utils import check_credential_policy_compliance

    graph_dict = workflow.graph_dict
    nodes = graph_dict.get("nodes", [])

    for node in nodes:
        node_data = node.get("data", {})
        node_type = node_data.get("type")
        node_id = node.get("id", "unknown")

        try:
            # Extract and validate credentials based on node type
            if node_type == "tool":
                credential_id = node_data.get("credential_id")
                provider = node_data.get("provider_id")
                if provider:
                    if credential_id:
                        # Check the specific credential bound to the node
                        check_credential_policy_compliance(
                            credential_id=credential_id,
                            provider=provider,
                            credential_type=PluginCredentialType.TOOL,
                        )
                    else:
                        # Fall back to the workspace default credential for this provider
                        self._check_default_tool_credential(workflow.tenant_id, provider)

            elif node_type == "agent":
                agent_params = node_data.get("agent_parameters", {})
                model_config = agent_params.get("model", {}).get("value", {})
                if model_config.get("provider") and model_config.get("model"):
                    self._validate_llm_model_config(
                        workflow.tenant_id, model_config["provider"], model_config["model"]
                    )
                    # Validate load balancing credentials for the agent model if enabled
                    agent_model_node_data = {"model": model_config}
                    self._validate_load_balancing_credentials(workflow, agent_model_node_data, node_id)

                # Validate agent tools; agent tools store provider in provider_name
                tools = agent_params.get("tools", {}).get("value", [])
                for tool in tools:
                    provider = tool.get("provider_name")
                    credential_id = tool.get("credential_id")
                    if provider:
                        if credential_id:
                            check_credential_policy_compliance(credential_id, provider, PluginCredentialType.TOOL)
                        else:
                            self._check_default_tool_credential(workflow.tenant_id, provider)

            elif node_type in ["llm", "knowledge_retrieval", "parameter_extractor", "question_classifier"]:
                model_config = node_data.get("model", {})
                provider = model_config.get("provider")
                model_name = model_config.get("name")
                if provider and model_name:
                    # Validate that the provider+model combination can fetch valid credentials
                    self._validate_llm_model_config(workflow.tenant_id, provider, model_name)
                    # Validate load balancing credentials if load balancing is enabled
                    self._validate_load_balancing_credentials(workflow, node_data, node_id)
                else:
                    raise ValueError(f"Node {node_id} ({node_type}): Missing provider or model configuration")
        except ValueError:
            # Already carries node context — propagate unchanged.
            raise
        except Exception as e:
            # Wrap unexpected errors with node context; chain the cause so the
            # original traceback is not lost.
            raise ValueError(f"Node {node_id} ({node_type}): {str(e)}") from e
def _validate_llm_model_config(self, tenant_id: str, provider: str, model_name: str) -> None:
    """
    Validate that an LLM model configuration can fetch valid credentials.

    This method attempts to get the model instance and validates that:
    1. The provider exists and is configured
    2. The model exists in the provider
    3. Credentials can be fetched for the model
    4. The credentials pass policy compliance checks

    :param tenant_id: The tenant ID
    :param provider: The provider name
    :param model_name: The model name
    :raises ValueError: If the model configuration is invalid or credentials fail policy checks
    """
    try:
        from core.model_manager import ModelManager
        from core.model_runtime.entities.model_entities import ModelType

        # Get model instance to validate provider+model combination.
        # The ModelInstance constructor automatically checks credential policy
        # compliance via ProviderConfiguration.get_current_credentials(); if it
        # fails, an exception is raised here.
        model_manager = ModelManager()
        model_manager.get_model_instance(
            tenant_id=tenant_id, provider=provider, model_type=ModelType.LLM, model=model_name
        )
    except Exception as e:
        # Chain the cause so the underlying provider/credential error is preserved.
        raise ValueError(
            f"Failed to validate LLM model configuration (provider: {provider}, model: {model_name}): {str(e)}"
        ) from e
def _check_default_tool_credential(self, tenant_id: str, provider: str) -> None:
    """
    Check credential policy compliance for the default workspace credential of a tool provider.

    Finds the default credential for the given provider and validates it, using
    the same fallback ordering as runtime to handle deauthorized credentials.

    :param tenant_id: The tenant ID
    :param provider: The tool provider name
    :raises ValueError: If no default credential exists or if it fails policy compliance
    """
    try:
        from core.helper.credential_utils import check_credential_policy_compliance
        from models.tools import BuiltinToolProvider

        # Same fallback logic as runtime: first available credential ordered by
        # is_default DESC, created_at ASC (mirrors tool_manager.py).
        default_provider = (
            db.session.query(BuiltinToolProvider)
            .where(
                BuiltinToolProvider.tenant_id == tenant_id,
                BuiltinToolProvider.provider == provider,
            )
            .order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
            .first()
        )

        if not default_provider:
            raise ValueError("No default credential found")

        # existence check is skipped: we just loaded the record above
        check_credential_policy_compliance(
            credential_id=default_provider.id,
            provider=provider,
            credential_type=PluginCredentialType.TOOL,
            check_existence=False,
        )
    except Exception as e:
        # Chain the cause so the underlying failure is preserved for debugging.
        raise ValueError(f"Failed to validate default credential for tool provider {provider}: {str(e)}") from e
def _validate_load_balancing_credentials(self, workflow: Workflow, node_data: dict, node_id: str) -> None:
    """
    Validate load balancing credentials for a workflow node.

    :param workflow: The workflow being validated
    :param node_data: The node data containing model configuration
    :param node_id: The node ID for error reporting
    :raises ValueError: If load balancing credentials violate policy compliance
    """
    # Extract model configuration
    model_config = node_data.get("model", {})
    provider = model_config.get("provider")
    model_name = model_config.get("name")

    if not provider or not model_name:
        return  # No model config to validate

    # Only validate when load balancing is actually enabled for this model
    if not self._is_load_balancing_enabled(workflow.tenant_id, provider, model_name):
        return

    load_balancing_configs = self._get_load_balancing_configs(workflow.tenant_id, provider, model_name)

    try:
        # Hoisted out of the loop: the original re-imported per iteration.
        from core.helper.credential_utils import check_credential_policy_compliance

        for config in load_balancing_configs:
            if config.get("credential_id"):
                check_credential_policy_compliance(config["credential_id"], provider, PluginCredentialType.MODEL)
    except Exception as e:
        # Chain the cause so the failing credential's error is preserved.
        raise ValueError(f"Invalid load balancing credentials for {provider}/{model_name}: {str(e)}") from e
def _is_load_balancing_enabled(self, tenant_id: str, provider: str, model_name: str) -> bool:
    """
    Determine whether load balancing is enabled for a specific model.

    :param tenant_id: The tenant ID
    :param provider: The provider name
    :param model_name: The model name
    :return: True when the provider model setting exists and has load balancing on
    """
    try:
        from core.model_runtime.entities.model_entities import ModelType
        from core.provider_manager import ProviderManager

        # Look up this tenant's configuration for the provider.
        configurations = ProviderManager().get_configurations(tenant_id)
        configuration = configurations.get(provider)
        if configuration is None:
            return False

        # Fetch the per-model setting and report its load balancing flag.
        setting = configuration.get_provider_model_setting(
            model_type=ModelType.LLM,
            model=model_name,
        )
        return setting is not None and setting.load_balancing_enabled
    except Exception:
        # Best-effort: if the status cannot be determined, assume disabled.
        return False
def _get_load_balancing_configs(self, tenant_id: str, provider: str, model_name: str) -> list[dict]:
    """
    Collect all load balancing configurations for a model.

    Queries both the predefined-model and custom-model configuration sources
    and keeps only entries that carry a credential_id.

    :param tenant_id: The tenant ID
    :param provider: The provider name
    :param model_name: The model name
    :return: List of load balancing configuration dictionaries
    """
    try:
        from services.model_load_balancing_service import ModelLoadBalancingService

        service = ModelLoadBalancingService()
        # Load balancing is primarily used for LLM models, so both queries use
        # the "llm" model type.
        _, predefined = service.get_load_balancing_configs(
            tenant_id=tenant_id,
            provider=provider,
            model=model_name,
            model_type="llm",
            config_from="predefined-model",
        )
        _, custom = service.get_load_balancing_configs(
            tenant_id=tenant_id, provider=provider, model=model_name, model_type="llm", config_from="custom-model"
        )
        return [cfg for cfg in predefined + custom if cfg.get("credential_id")]
    except Exception:
        # Swallow lookup failures and return no configs so a broken lookup
        # cannot block workflow publishing.
        return []
def get_default_block_configs(self) -> list[dict]:
"""
Get default block configs

View File

@ -296,7 +296,12 @@ class TestBatchCreateSegmentToIndexTask:
from extensions.ext_database import db
# Check that segments were created
segments = db.session.query(DocumentSegment).filter_by(document_id=document.id).all()
segments = (
db.session.query(DocumentSegment)
.filter_by(document_id=document.id)
.order_by(DocumentSegment.position)
.all()
)
assert len(segments) == 3
# Verify segment content and metadata

View File

@ -43,9 +43,9 @@ const AvatarWithEdit = ({ onSave, ...props }: AvatarWithEditProps) => {
const handleSaveAvatar = useCallback(async (uploadedFileId: string) => {
try {
await updateUserProfile({ url: 'account/avatar', body: { avatar: uploadedFileId } })
notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
setIsShowAvatarPicker(false)
onSave?.()
notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
}
catch (e) {
notify({ type: 'error', message: (e as Error).message })

View File

@ -144,9 +144,11 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
})
const a = document.createElement('a')
const file = new Blob([data], { type: 'application/yaml' })
a.href = URL.createObjectURL(file)
const url = URL.createObjectURL(file)
a.href = url
a.download = `${appDetail.name}.yml`
a.click()
URL.revokeObjectURL(url)
}
catch {
notify({ type: 'error', message: t('app.exportFailed') })

View File

@ -7,7 +7,7 @@ import Menu from './menu'
import { useSelector as useAppContextWithSelector } from '@/context/app-context'
import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
import type { DataSet } from '@/models/datasets'
import { datasetDetailQueryKeyPrefix, useResetDatasetList } from '@/service/knowledge/use-dataset'
import { datasetDetailQueryKeyPrefix, useInvalidDatasetList } from '@/service/knowledge/use-dataset'
import { useInvalid } from '@/service/use-base'
import { useExportPipelineDSL } from '@/service/use-pipeline'
import Toast from '../../base/toast'
@ -38,13 +38,13 @@ const DropDown = ({
setOpen(prev => !prev)
}, [])
const resetDatasetList = useResetDatasetList()
const invalidDatasetList = useInvalidDatasetList()
const invalidDatasetDetail = useInvalid([...datasetDetailQueryKeyPrefix, dataset.id])
const refreshDataset = useCallback(() => {
resetDatasetList()
invalidDatasetList()
invalidDatasetDetail()
}, [invalidDatasetDetail, resetDatasetList])
}, [invalidDatasetDetail, invalidDatasetList])
const openRenameModal = useCallback(() => {
setShowRenameModal(true)
@ -65,9 +65,11 @@ const DropDown = ({
})
const a = document.createElement('a')
const file = new Blob([data], { type: 'application/yaml' })
a.href = URL.createObjectURL(file)
const url = URL.createObjectURL(file)
a.href = url
a.download = `${name}.pipeline`
a.click()
URL.revokeObjectURL(url)
}
catch {
Toast.notify({ type: 'error', message: t('app.exportFailed') })
@ -93,13 +95,13 @@ const DropDown = ({
try {
await deleteDataset(dataset.id)
Toast.notify({ type: 'success', message: t('dataset.datasetDeleted') })
resetDatasetList()
invalidDatasetList()
replace('/datasets')
}
finally {
setShowConfirmDelete(false)
}
}, [dataset.id, replace, resetDatasetList, t])
}, [dataset.id, replace, invalidDatasetList, t])
return (
<PortalToFollowElem

View File

@ -35,35 +35,47 @@ const DatasetInfo: FC<DatasetInfoProps> = ({
return (
<div className={cn('relative flex flex-col', expand ? '' : 'p-1')}>
{expand && (
<>
<Effect className='-left-5 top-[-22px] opacity-15' />
<div className='flex flex-col gap-y-2 p-2'>
<div className='flex items-center justify-between'>
<AppIcon
size='medium'
iconType={iconInfo.icon_type}
icon={iconInfo.icon}
background={iconInfo.icon_background}
imageUrl={iconInfo.icon_url}
/>
<Effect className='-left-5 top-[-22px] opacity-15' />
)}
<div className='flex flex-col gap-2 p-2'>
<div className='flex items-center gap-1'>
<div className={cn(!expand && '-ml-1')}>
<AppIcon
size={expand ? 'large' : 'small'}
iconType={iconInfo.icon_type}
icon={iconInfo.icon}
background={iconInfo.icon_background}
imageUrl={iconInfo.icon_url}
/>
</div>
{expand && (
<div className='ml-auto'>
<Dropdown expand />
</div>
<div className='flex flex-col gap-y-1 pb-0.5'>
<div
className='system-md-semibold truncate text-text-secondary'
title={dataset.name}
>
{dataset.name}
</div>
<div className='system-2xs-medium-uppercase text-text-tertiary'>
{isExternalProvider && t('dataset.externalTag')}
{!isExternalProvider && isPipelinePublished && dataset.doc_form && dataset.indexing_technique && (
<div className='flex items-center gap-x-2'>
<span>{t(`dataset.chunkingMode.${DOC_FORM_TEXT[dataset.doc_form]}`)}</span>
<span>{formatIndexingTechniqueAndMethod(dataset.indexing_technique, dataset.retrieval_model_dict?.search_method)}</span>
</div>
)}
</div>
)}
</div>
{!expand && (
<div className='-mb-2 -mt-1 flex items-center justify-center'>
<Dropdown expand={false} />
</div>
)}
{expand && (
<div className='flex flex-col gap-y-1 pb-0.5'>
<div
className='system-md-semibold truncate text-text-secondary'
title={dataset.name}
>
{dataset.name}
</div>
<div className='system-2xs-medium-uppercase text-text-tertiary'>
{isExternalProvider && t('dataset.externalTag')}
{!isExternalProvider && isPipelinePublished && dataset.doc_form && dataset.indexing_technique && (
<div className='flex items-center gap-x-2'>
<span>{t(`dataset.chunkingMode.${DOC_FORM_TEXT[dataset.doc_form]}`)}</span>
<span>{formatIndexingTechniqueAndMethod(dataset.indexing_technique, dataset.retrieval_model_dict?.search_method)}</span>
</div>
)}
</div>
{!!dataset.description && (
<p className='system-xs-regular line-clamp-3 text-text-tertiary first-letter:capitalize'>
@ -71,20 +83,8 @@ const DatasetInfo: FC<DatasetInfoProps> = ({
</p>
)}
</div>
</>
)}
{!expand && (
<div className='flex flex-col items-center gap-y-1'>
<AppIcon
size='medium'
iconType={iconInfo.icon_type}
icon={iconInfo.icon}
background={iconInfo.icon_background}
imageUrl={iconInfo.icon_url}
/>
<Dropdown expand={false} />
</div>
)}
)}
</div>
</div>
)
}

View File

@ -60,9 +60,11 @@ const HeaderOptions: FC<Props> = ({
const a = document.createElement('a')
const content = listTransformer(list).join('\n')
const file = new Blob([content], { type: 'application/jsonl' })
a.href = URL.createObjectURL(file)
const url = URL.createObjectURL(file)
a.href = url
a.download = `annotations-${locale}.jsonl`
a.click()
URL.revokeObjectURL(url)
}
const fetchList = async () => {

View File

@ -159,9 +159,11 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
})
const a = document.createElement('a')
const file = new Blob([data], { type: 'application/yaml' })
a.href = URL.createObjectURL(file)
const url = URL.createObjectURL(file)
a.href = url
a.download = `${app.name}.yml`
a.click()
URL.revokeObjectURL(url)
}
catch {
notify({ type: 'error', message: t('app.exportFailed') })

View File

@ -1,5 +1,5 @@
'use client'
import { useState } from 'react'
import { useEffect, useState } from 'react'
import cn from '@/utils/classnames'
export type AvatarProps = {
@ -27,6 +27,12 @@ const Avatar = ({
onError?.(true)
}
// after uploaded, api would first return error imgs url: '.../files//file-preview/...'. Then return the right url, Which caused not show the avatar
useEffect(() => {
if(avatar && imgError)
setImgError(false)
}, [avatar])
if (avatar && !imgError) {
return (
<img

View File

@ -36,7 +36,6 @@ const Textarea = React.forwardRef<HTMLTextAreaElement, TextareaProps>(
ref={ref}
onFocus={onFocus}
onBlur={onBlur}
ref={ref}
style={styleCss}
className={cn(
'min-h-20 w-full appearance-none border border-transparent bg-components-input-bg-normal p-2 text-components-input-text-filled caret-primary-600 outline-none placeholder:text-components-input-text-placeholder hover:border-components-input-border-hover hover:bg-components-input-bg-hover focus:border-components-input-border-active focus:bg-components-input-bg-active focus:shadow-xs',

View File

@ -1,5 +1,5 @@
import cn from '@/utils/classnames'
import React, { useMemo } from 'react'
import React, { useCallback, useMemo, useState } from 'react'
type CredentialIconProps = {
avatar_url?: string
@ -21,10 +21,15 @@ export const CredentialIcon: React.FC<CredentialIconProps> = ({
size = 20,
className = '',
}) => {
const [showAvatar, setShowAvatar] = useState(!!avatar_url && avatar_url !== 'default')
const firstLetter = useMemo(() => name.charAt(0).toUpperCase(), [name])
const bgColor = useMemo(() => ICON_BG_COLORS[firstLetter.charCodeAt(0) % ICON_BG_COLORS.length], [firstLetter])
if (avatar_url && avatar_url !== 'default') {
const onImgLoadError = useCallback(() => {
setShowAvatar(false)
}, [])
if (avatar_url && avatar_url !== 'default' && showAvatar) {
return (
<div
className='flex shrink-0 items-center justify-center overflow-hidden rounded-md border border-divider-regular'
@ -35,6 +40,7 @@ export const CredentialIcon: React.FC<CredentialIconProps> = ({
width={size}
height={size}
className={cn('shrink-0 object-contain', className)}
onError={onImgLoadError}
/>
</div>
)

View File

@ -4,7 +4,7 @@ import Divider from '../../base/divider'
import { useTranslation } from 'react-i18next'
import CreateFromDSLModal, { CreateFromDSLModalTab } from './create-options/create-from-dsl-modal'
import { useRouter, useSearchParams } from 'next/navigation'
import { useResetDatasetList } from '@/service/knowledge/use-dataset'
import { useInvalidDatasetList } from '@/service/knowledge/use-dataset'
const Footer = () => {
const { t } = useTranslation()
@ -14,7 +14,7 @@ const Footer = () => {
const searchParams = useSearchParams()
const { replace } = useRouter()
const dslUrl = searchParams.get('remoteInstallUrl') || undefined
const resetDatasetList = useResetDatasetList()
const invalidDatasetList = useInvalidDatasetList()
const activeTab = useMemo(() => {
if (dslUrl)
@ -34,8 +34,8 @@ const Footer = () => {
}, [dslUrl, replace])
const onImportFromDSLSuccess = useCallback(() => {
resetDatasetList()
}, [resetDatasetList])
invalidDatasetList()
}, [invalidDatasetList])
return (
<div className='absolute bottom-0 left-0 right-0 z-10 flex flex-col gap-y-4 bg-knowledge-pipeline-creation-footer-bg px-16 pb-6 backdrop-blur-[6px]'>

View File

@ -2,7 +2,7 @@ import React, { useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import { RiAddCircleLine } from '@remixicon/react'
import { useCreatePipelineDataset } from '@/service/knowledge/use-create-dataset'
import { useResetDatasetList } from '@/service/knowledge/use-dataset'
import { useInvalidDatasetList } from '@/service/knowledge/use-dataset'
import Toast from '@/app/components/base/toast'
import { useRouter } from 'next/navigation'
@ -11,7 +11,7 @@ const CreateCard = () => {
const { push } = useRouter()
const { mutateAsync: createEmptyDataset } = useCreatePipelineDataset()
const resetDatasetList = useResetDatasetList()
const invalidDatasetList = useInvalidDatasetList()
const handleCreate = useCallback(async () => {
await createEmptyDataset(undefined, {
@ -22,7 +22,7 @@ const CreateCard = () => {
type: 'success',
message: t('datasetPipeline.creation.successTip'),
})
resetDatasetList()
invalidDatasetList()
push(`/datasets/${id}/pipeline`)
}
},
@ -33,7 +33,7 @@ const CreateCard = () => {
})
},
})
}, [createEmptyDataset, push, resetDatasetList, t])
}, [createEmptyDataset, push, invalidDatasetList, t])
return (
<div

View File

@ -19,7 +19,7 @@ import Content from './content'
import Actions from './actions'
import { useCreatePipelineDatasetFromCustomized } from '@/service/knowledge/use-create-dataset'
import { useInvalid } from '@/service/use-base'
import { useResetDatasetList } from '@/service/knowledge/use-dataset'
import { useInvalidDatasetList } from '@/service/knowledge/use-dataset'
type TemplateCardProps = {
pipeline: PipelineTemplate
@ -44,7 +44,7 @@ const TemplateCard = ({
}, false)
const { mutateAsync: createDataset } = useCreatePipelineDatasetFromCustomized()
const { handleCheckPluginDependencies } = usePluginDependencies()
const resetDatasetList = useResetDatasetList()
const invalidDatasetList = useInvalidDatasetList()
const handleUseTemplate = useCallback(async () => {
const { data: pipelineTemplateInfo } = await getPipelineTemplateInfo()
@ -64,7 +64,7 @@ const TemplateCard = ({
type: 'success',
message: t('datasetPipeline.creation.successTip'),
})
resetDatasetList()
invalidDatasetList()
if (newDataset.pipeline_id)
await handleCheckPluginDependencies(newDataset.pipeline_id, true)
push(`/datasets/${newDataset.dataset_id}/pipeline`)
@ -76,7 +76,7 @@ const TemplateCard = ({
})
},
})
}, [getPipelineTemplateInfo, createDataset, t, handleCheckPluginDependencies, push, resetDatasetList])
}, [getPipelineTemplateInfo, createDataset, t, handleCheckPluginDependencies, push, invalidDatasetList])
const handleShowTemplateDetails = useCallback(() => {
setShowDetailModal(true)

View File

@ -11,7 +11,7 @@ import Button from '@/app/components/base/button'
import { ToastContext } from '@/app/components/base/toast'
import { createEmptyDataset } from '@/service/datasets'
import { useResetDatasetList } from '@/service/knowledge/use-dataset'
import { useInvalidDatasetList } from '@/service/knowledge/use-dataset'
type IProps = {
show: boolean
@ -26,7 +26,7 @@ const EmptyDatasetCreationModal = ({
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const router = useRouter()
const resetDatasetList = useResetDatasetList()
const invalidDatasetList = useInvalidDatasetList()
const submit = async () => {
if (!inputValue) {
@ -39,7 +39,7 @@ const EmptyDatasetCreationModal = ({
}
try {
const dataset = await createEmptyDataset({ name: inputValue })
resetDatasetList()
invalidDatasetList()
onHide()
router.push(`/datasets/${dataset.id}/documents`)
}

View File

@ -62,7 +62,7 @@ import CustomDialog from '@/app/components/base/dialog'
import { AlertTriangle } from '@/app/components/base/icons/src/vender/solid/alertsAndFeedback'
import { noop } from 'lodash-es'
import { useDocLink } from '@/context/i18n'
import { useResetDatasetList } from '@/service/knowledge/use-dataset'
import { useInvalidDatasetList } from '@/service/knowledge/use-dataset'
const TextLabel: FC<PropsWithChildren> = (props) => {
return <label className='system-sm-semibold text-text-secondary'>{props.children}</label>
@ -556,7 +556,7 @@ const StepTwo = ({
})
const isCreating = createFirstDocumentMutation.isPending || createDocumentMutation.isPending
const resetDatasetList = useResetDatasetList()
const invalidDatasetList = useInvalidDatasetList()
const createHandle = async () => {
const params = getCreationParams()
@ -586,7 +586,7 @@ const StepTwo = ({
}
if (mutateDatasetRes)
mutateDatasetRes()
resetDatasetList()
invalidDatasetList()
onStepChange && onStepChange(+1)
isSetting && onSave && onSave()
}

View File

@ -257,7 +257,7 @@ const DocumentList: FC<IDocumentListProps> = ({
}, [])
const isCreateFromRAGPipeline = useCallback((createdFrom: string) => {
return createdFrom === 'rag_pipeline'
return createdFrom === 'rag-pipeline'
}, [])
/**

View File

@ -114,9 +114,11 @@ const DatasetCard = ({
})
const a = document.createElement('a')
const file = new Blob([data], { type: 'application/yaml' })
a.href = URL.createObjectURL(file)
const url = URL.createObjectURL(file)
a.href = url
a.download = `${name}.pipeline`
a.click()
URL.revokeObjectURL(url)
}
catch {
Toast.notify({ type: 'error', message: t('app.exportFailed') })
@ -274,6 +276,7 @@ const DatasetCard = ({
htmlContent={
<Operations
showDelete={!isCurrentWorkspaceDatasetOperator}
showExportPipeline={dataset.runtime_mode === 'rag_pipeline'}
openRenameModal={openRenameModal}
handleExportPipeline={handleExportPipeline}
detectIsUsedByApp={detectIsUsedByApp}

View File

@ -6,6 +6,7 @@ import OperationItem from './operation-item'
type OperationsProps = {
showDelete: boolean
showExportPipeline: boolean
openRenameModal: () => void
handleExportPipeline: () => void
detectIsUsedByApp: () => void
@ -13,6 +14,7 @@ type OperationsProps = {
const Operations = ({
showDelete,
showExportPipeline,
openRenameModal,
handleExportPipeline,
detectIsUsedByApp,
@ -27,11 +29,13 @@ const Operations = ({
name={t('common.operation.edit')}
handleClick={openRenameModal}
/>
<OperationItem
Icon={RiFileDownloadLine}
name={t('datasetPipeline.operations.exportPipeline')}
handleClick={handleExportPipeline}
/>
{showExportPipeline && (
<OperationItem
Icon={RiFileDownloadLine}
name={t('datasetPipeline.operations.exportPipeline')}
handleClick={handleExportPipeline}
/>
)}
</div>
{showDelete && (
<>

View File

@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next'
import NewDatasetCard from './new-dataset-card'
import DatasetCard from './dataset-card'
import { useSelector as useAppContextWithSelector } from '@/context/app-context'
import { useDatasetList, useResetDatasetList } from '@/service/knowledge/use-dataset'
import { useDatasetList, useInvalidDatasetList } from '@/service/knowledge/use-dataset'
type Props = {
tags: string[]
@ -32,7 +32,7 @@ const Datasets = ({
include_all: includeAll,
keyword: keywords,
})
const resetDatasetList = useResetDatasetList()
const invalidDatasetList = useInvalidDatasetList()
const anchorRef = useRef<HTMLDivElement>(null)
const observerRef = useRef<IntersectionObserver>(null)
@ -58,7 +58,7 @@ const Datasets = ({
<nav className='grid grow grid-cols-1 content-start gap-3 px-12 pt-2 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4'>
{isCurrentWorkspaceEditor && <NewDatasetCard />}
{datasetList?.pages.map(({ data: datasets }) => datasets.map(dataset => (
<DatasetCard key={dataset.id} dataset={dataset} onSuccess={resetDatasetList} />),
<DatasetCard key={dataset.id} dataset={dataset} onSuccess={invalidDatasetList} />),
))}
<div ref={anchorRef} className='h-0' />
</nav>

View File

@ -36,7 +36,7 @@ import ChunkStructure from '../chunk-structure'
import Toast from '@/app/components/base/toast'
import { RiAlertFill } from '@remixicon/react'
import { useDocLink } from '@/context/i18n'
import { useResetDatasetList } from '@/service/knowledge/use-dataset'
import { useInvalidDatasetList } from '@/service/knowledge/use-dataset'
const rowClass = 'flex gap-x-1'
const labelClass = 'flex items-center shrink-0 w-[180px] h-7 pt-1'
@ -127,7 +127,7 @@ const Form = () => {
getMembers()
})
const resetDatasetList = useResetDatasetList()
const invalidDatasetList = useInvalidDatasetList()
const handleSave = async () => {
if (loading)
return
@ -190,7 +190,7 @@ const Form = () => {
Toast.notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
if (mutateDatasets) {
await mutateDatasets()
resetDatasetList()
invalidDatasetList()
}
}
catch {

View File

@ -196,7 +196,7 @@ const ModelLoadBalancingConfigs = ({
)
: (
<Tooltip popupContent={t('common.modelProvider.apiKeyStatusNormal')}>
<Indicator color='green' />
<Indicator color={credential?.not_allowed_to_use ? 'gray' : 'green'} />
</Tooltip>
)}
</div>
@ -232,7 +232,7 @@ const ModelLoadBalancingConfigs = ({
<>
<span className='mr-2 h-3 border-r border-r-divider-subtle' />
<Switch
defaultValue={Boolean(config.enabled)}
defaultValue={credential?.not_allowed_to_use ? false : Boolean(config.enabled)}
size='md'
className='justify-self-end'
onChange={value => toggleConfigEntryEnabled(index, value)}

View File

@ -1,6 +1,6 @@
'use client'
import { useCallback } from 'react'
import { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { useParams, useRouter } from 'next/navigation'
import {
@ -12,6 +12,7 @@ import Nav from '../nav'
import type { NavItem } from '../nav/nav-selector'
import { basePath } from '@/utils/var'
import { useDatasetDetail, useDatasetList } from '@/service/knowledge/use-dataset'
import type { DataSet } from '@/models/datasets'
const DatasetNav = () => {
const { t } = useTranslation()
@ -28,6 +29,51 @@ const DatasetNav = () => {
})
const datasetItems = flatten(datasetList?.pages.map(datasetData => datasetData.data))
const curNav = useMemo(() => {
if (!currentDataset) return
return {
id: currentDataset.id,
name: currentDataset.name,
icon: currentDataset.icon_info.icon,
icon_type: currentDataset.icon_info.icon_type,
icon_background: currentDataset.icon_info.icon_background,
icon_url: currentDataset.icon_info.icon_url,
} as Omit<NavItem, 'link'>
}, [currentDataset?.id, currentDataset?.name, currentDataset?.icon_info])
const getDatasetLink = useCallback((dataset: DataSet) => {
const isPipelineUnpublished = dataset.runtime_mode === 'rag_pipeline' && !dataset.is_published
const link = isPipelineUnpublished
? `/datasets/${dataset.id}/pipeline`
: `/datasets/${dataset.id}/documents`
return dataset.provider === 'external'
? `/datasets/${dataset.id}/hitTesting`
: link
}, [])
const navigationItems = useMemo(() => {
return datasetItems.map((dataset) => {
const link = getDatasetLink(dataset)
return {
id: dataset.id,
name: dataset.name,
link,
icon: dataset.icon_info.icon,
icon_type: dataset.icon_info.icon_type,
icon_background: dataset.icon_info.icon_background,
icon_url: dataset.icon_info.icon_url,
}
}) as NavItem[]
}, [datasetItems, getDatasetLink])
const createRoute = useMemo(() => {
const runtimeMode = currentDataset?.runtime_mode
if (runtimeMode === 'rag_pipeline')
return `${basePath}/datasets/create-from-pipeline`
else
return `${basePath}/datasets/create`
}, [currentDataset?.runtime_mode])
const handleLoadMore = useCallback(() => {
if (hasNextPage)
fetchNextPage()
@ -41,25 +87,10 @@ const DatasetNav = () => {
text={t('common.menus.datasets')}
activeSegment='datasets'
link='/datasets'
curNav={currentDataset && {
id: currentDataset.id,
name: currentDataset.name,
icon: currentDataset.icon_info.icon,
icon_type: currentDataset.icon_info.icon_type,
icon_background: currentDataset.icon_info.icon_background,
icon_url: currentDataset.icon_info.icon_url,
} as Omit<NavItem, 'link'>}
navigationItems={datasetItems.map(dataset => ({
id: dataset.id,
name: dataset.name,
link: dataset.provider === 'external' ? `/datasets/${dataset.id}/hitTesting` : `/datasets/${dataset.id}/documents`,
icon: dataset.icon_info.icon,
icon_type: dataset.icon_info.icon_type,
icon_background: dataset.icon_info.icon_background,
icon_url: dataset.icon_info.icon_url,
})) as NavItem[]}
curNav={curNav}
navigationItems={navigationItems}
createText={t('common.menus.newDataset')}
onCreate={() => router.push(`${basePath}/datasets/create`)}
onCreate={() => router.push(createRoute)}
onLoadMore={handleLoadMore}
/>
)

View File

@ -42,7 +42,6 @@ const Nav = ({
useEffect(() => {
if (pathname === link)
setLinkLastSearchParams(searchParams.toString())
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [pathname, searchParams])
return (

View File

@ -38,7 +38,7 @@ import {
import Confirm from '@/app/components/base/confirm'
import PublishAsKnowledgePipelineModal from '../../publish-as-knowledge-pipeline-modal'
import type { IconInfo } from '@/models/datasets'
import { useResetDatasetList } from '@/service/knowledge/use-dataset'
import { useInvalidDatasetList } from '@/service/knowledge/use-dataset'
import { useProviderContext } from '@/context/provider-context'
import classNames from '@/utils/classnames'
import PremiumBadge from '@/app/components/base/premium-badge'
@ -86,7 +86,7 @@ const Popup = () => {
}] = useBoolean(false)
const invalidPublishedPipelineInfo = useInvalid([...publishedPipelineInfoQueryKeyPrefix, pipelineId])
const resetDatasetList = useResetDatasetList()
const invalidDatasetList = useInvalidDatasetList()
const handlePublish = useCallback(async (params?: PublishWorkflowParams) => {
if (publishing)
@ -130,7 +130,7 @@ const Popup = () => {
workflowStore.getState().setPublishedAt(res.created_at)
mutateDatasetRes?.()
invalidPublishedPipelineInfo()
resetDatasetList()
invalidDatasetList()
}
}
}
@ -150,9 +150,7 @@ const Popup = () => {
if (published)
return
handlePublish()
},
{ exactMatch: true, useCapture: true },
)
}, { exactMatch: true, useCapture: true })
const goToAddDocuments = useCallback(() => {
push(`/datasets/${datasetId}/documents/create-from-pipeline`)

View File

@ -62,9 +62,9 @@ const CandidateNode = () => {
})
setNodes(newNodes)
if (candidateNode.type === CUSTOM_NOTE_NODE)
saveStateToHistory(WorkflowHistoryEvent.NoteAdd)
saveStateToHistory(WorkflowHistoryEvent.NoteAdd, { nodeId: candidateNode.id })
else
saveStateToHistory(WorkflowHistoryEvent.NodeAdd)
saveStateToHistory(WorkflowHistoryEvent.NodeAdd, { nodeId: candidateNode.id })
workflowStore.setState({ candidateNode: undefined })

View File

@ -89,10 +89,19 @@ const ViewWorkflowHistory = () => {
const calculateChangeList: ChangeHistoryList = useMemo(() => {
const filterList = (list: any, startIndex = 0, reverse = false) => list.map((state: Partial<WorkflowHistoryState>, index: number) => {
const nodes = (state.nodes || store.getState().nodes) || []
const nodeId = state?.workflowHistoryEventMeta?.nodeId
const targetTitle = nodes.find(n => n.id === nodeId)?.data?.title ?? ''
return {
label: state.workflowHistoryEvent && getHistoryLabel(state.workflowHistoryEvent),
index: reverse ? list.length - 1 - index - startIndex : index - startIndex,
state,
state: {
...state,
workflowHistoryEventMeta: state.workflowHistoryEventMeta ? {
...state.workflowHistoryEventMeta,
nodeTitle: state.workflowHistoryEventMeta.nodeTitle || targetTitle,
} : undefined,
},
}
}).filter(Boolean)
@ -110,6 +119,12 @@ const ViewWorkflowHistory = () => {
}
}, [futureStates, getHistoryLabel, pastStates, store])
const composeHistoryItemLabel = useCallback((nodeTitle: string | undefined, baseLabel: string) => {
if (!nodeTitle)
return baseLabel
return `${nodeTitle} ${baseLabel}`
}, [])
return (
(
<PortalToFollowElem
@ -197,7 +212,10 @@ const ViewWorkflowHistory = () => {
'flex items-center text-[13px] font-medium leading-[18px] text-text-secondary',
)}
>
{item?.label || t('workflow.changeHistory.sessionStart')} ({calculateStepLabel(item?.index)}{item?.index === currentHistoryStateIndex && t('workflow.changeHistory.currentState')})
{composeHistoryItemLabel(
item?.state?.workflowHistoryEventMeta?.nodeTitle,
item?.label || t('workflow.changeHistory.sessionStart'),
)} ({calculateStepLabel(item?.index)}{item?.index === currentHistoryStateIndex && t('workflow.changeHistory.currentState')})
</div>
</div>
</div>
@ -222,7 +240,10 @@ const ViewWorkflowHistory = () => {
'flex items-center text-[13px] font-medium leading-[18px] text-text-secondary',
)}
>
{item?.label || t('workflow.changeHistory.sessionStart')} ({calculateStepLabel(item?.index)})
{composeHistoryItemLabel(
item?.state?.workflowHistoryEventMeta?.nodeTitle,
item?.label || t('workflow.changeHistory.sessionStart'),
)} ({calculateStepLabel(item?.index)})
</div>
</div>
</div>

View File

@ -175,7 +175,7 @@ export const useNodesInteractions = () => {
if (x !== 0 && y !== 0) {
// selecting a note will trigger a drag stop event with x and y as 0
saveStateToHistory(WorkflowHistoryEvent.NodeDragStop)
saveStateToHistory(WorkflowHistoryEvent.NodeDragStop, { nodeId: node.id })
}
}
}, [workflowStore, getNodesReadOnly, saveStateToHistory, handleSyncWorkflowDraft])
@ -275,7 +275,7 @@ export const useNodesInteractions = () => {
}, [store, workflowStore, getNodesReadOnly])
const handleNodeSelect = useCallback((nodeId: string, cancelSelection?: boolean, initShowLastRunTab?: boolean) => {
if(initShowLastRunTab)
if (initShowLastRunTab)
workflowStore.setState({ initShowLastRunTab: true })
const {
getNodes,
@ -408,7 +408,7 @@ export const useNodesInteractions = () => {
setEdges(newEdges)
handleSyncWorkflowDraft()
saveStateToHistory(WorkflowHistoryEvent.NodeConnect)
saveStateToHistory(WorkflowHistoryEvent.NodeConnect, { nodeId: targetNode?.id })
}
else {
const {
@ -657,10 +657,10 @@ export const useNodesInteractions = () => {
handleSyncWorkflowDraft()
if (currentNode.type === CUSTOM_NOTE_NODE)
saveStateToHistory(WorkflowHistoryEvent.NoteDelete)
saveStateToHistory(WorkflowHistoryEvent.NoteDelete, { nodeId: currentNode.id })
else
saveStateToHistory(WorkflowHistoryEvent.NodeDelete)
saveStateToHistory(WorkflowHistoryEvent.NodeDelete, { nodeId: currentNode.id })
}, [getNodesReadOnly, store, handleSyncWorkflowDraft, saveStateToHistory, workflowStore, t, nodesMetaDataMap, deleteNodeInspectorVars])
const handleNodeAdd = useCallback<OnNodeAdd>((
@ -1112,7 +1112,7 @@ export const useNodesInteractions = () => {
setEdges(newEdges)
}
handleSyncWorkflowDraft()
saveStateToHistory(WorkflowHistoryEvent.NodeAdd)
saveStateToHistory(WorkflowHistoryEvent.NodeAdd, { nodeId: newNode.id })
}, [getNodesReadOnly, store, handleSyncWorkflowDraft, saveStateToHistory, workflowStore, getAfterNodesInSameBranch, checkNestedParallelLimit, nodesMetaDataMap])
const handleNodeChange = useCallback((
@ -1197,7 +1197,7 @@ export const useNodesInteractions = () => {
setEdges(newEdges)
handleSyncWorkflowDraft()
saveStateToHistory(WorkflowHistoryEvent.NodeChange)
saveStateToHistory(WorkflowHistoryEvent.NodeChange, { nodeId: currentNodeId })
}, [getNodesReadOnly, store, handleSyncWorkflowDraft, saveStateToHistory, nodesMetaDataMap])
const handleNodesCancelSelected = useCallback(() => {
@ -1419,7 +1419,7 @@ export const useNodesInteractions = () => {
setNodes([...nodes, ...nodesToPaste])
setEdges([...edges, ...edgesToPaste])
saveStateToHistory(WorkflowHistoryEvent.NodePaste)
saveStateToHistory(WorkflowHistoryEvent.NodePaste, { nodeId: nodesToPaste?.[0]?.id })
handleSyncWorkflowDraft()
}
}, [getNodesReadOnly, workflowStore, store, reactflow, saveStateToHistory, handleSyncWorkflowDraft, handleNodeIterationChildrenCopy, handleNodeLoopChildrenCopy, nodesMetaDataMap])
@ -1516,7 +1516,7 @@ export const useNodesInteractions = () => {
})
setNodes(newNodes)
handleSyncWorkflowDraft()
saveStateToHistory(WorkflowHistoryEvent.NodeResize)
saveStateToHistory(WorkflowHistoryEvent.NodeResize, { nodeId })
}, [getNodesReadOnly, store, handleSyncWorkflowDraft, saveStateToHistory])
const handleNodeDisconnect = useCallback((nodeId: string) => {

View File

@ -8,6 +8,7 @@ import {
} from 'reactflow'
import { useTranslation } from 'react-i18next'
import { useWorkflowHistoryStore } from '../workflow-history-store'
import type { WorkflowHistoryEventMeta } from '../workflow-history-store'
/**
* All supported Events that create a new history state.
@ -64,20 +65,21 @@ export const useWorkflowHistory = () => {
// Some events may be triggered multiple times in a short period of time.
// We debounce the history state update to avoid creating multiple history states
// with minimal changes.
const saveStateToHistoryRef = useRef(debounce((event: WorkflowHistoryEvent) => {
const saveStateToHistoryRef = useRef(debounce((event: WorkflowHistoryEvent, meta?: WorkflowHistoryEventMeta) => {
workflowHistoryStore.setState({
workflowHistoryEvent: event,
workflowHistoryEventMeta: meta,
nodes: store.getState().getNodes(),
edges: store.getState().edges,
})
}, 500))
const saveStateToHistory = useCallback((event: WorkflowHistoryEvent) => {
const saveStateToHistory = useCallback((event: WorkflowHistoryEvent, meta?: WorkflowHistoryEventMeta) => {
switch (event) {
case WorkflowHistoryEvent.NoteChange:
// Hint: Note change does not trigger when note text changes,
// because the note editors have their own history states.
saveStateToHistoryRef.current(event)
saveStateToHistoryRef.current(event, meta)
break
case WorkflowHistoryEvent.NodeTitleChange:
case WorkflowHistoryEvent.NodeDescriptionChange:
@ -93,7 +95,7 @@ export const useWorkflowHistory = () => {
case WorkflowHistoryEvent.NoteAdd:
case WorkflowHistoryEvent.LayoutOrganize:
case WorkflowHistoryEvent.NoteDelete:
saveStateToHistoryRef.current(event)
saveStateToHistoryRef.current(event, meta)
break
default:
// We do not create a history state for every event.

View File

@ -173,11 +173,11 @@ const BasePanel: FC<BasePanelProps> = ({
const handleTitleBlur = useCallback((title: string) => {
handleNodeDataUpdateWithSyncDraft({ id, data: { title } })
saveStateToHistory(WorkflowHistoryEvent.NodeTitleChange)
saveStateToHistory(WorkflowHistoryEvent.NodeTitleChange, { nodeId: id })
}, [handleNodeDataUpdateWithSyncDraft, id, saveStateToHistory])
const handleDescriptionChange = useCallback((desc: string) => {
handleNodeDataUpdateWithSyncDraft({ id, data: { desc } })
saveStateToHistory(WorkflowHistoryEvent.NodeDescriptionChange)
saveStateToHistory(WorkflowHistoryEvent.NodeDescriptionChange, { nodeId: id })
}, [handleNodeDataUpdateWithSyncDraft, id, saveStateToHistory])
const isChildNode = !!(data.isInIteration || data.isInLoop)

View File

@ -2,6 +2,7 @@ import type { FC } from 'react'
import {
memo,
useCallback,
useMemo,
} from 'react'
import { useTranslation } from 'react-i18next'
import type { KnowledgeBaseNodeType } from './types'
@ -60,6 +61,25 @@ const Panel: FC<NodePanelProps<KnowledgeBaseNodeType>> = ({
}
}, [data.chunk_structure])
const chunkTypePlaceHolder = useMemo(() => {
if (!data.chunk_structure) return ''
let placeholder = ''
switch (data.chunk_structure) {
case ChunkStructureEnum.general:
placeholder = 'general_structure'
break
case ChunkStructureEnum.parent_child:
placeholder = 'parent_child_structure'
break
case ChunkStructureEnum.question_answer:
placeholder = 'qa_structure'
break
default:
return ''
}
return placeholder.charAt(0).toUpperCase() + placeholder.slice(1)
}, [data.chunk_structure])
return (
<div>
<Group
@ -81,7 +101,8 @@ const Panel: FC<NodePanelProps<KnowledgeBaseNodeType>> = ({
}}
fieldProps={{
fieldTitleProps: {
title: t('workflow.nodes.common.inputVars'),
title: t('workflow.nodes.knowledgeBase.chunksInput'),
tooltip: t('workflow.nodes.knowledgeBase.chunksInputTip'),
},
}}
>
@ -95,6 +116,7 @@ const Panel: FC<NodePanelProps<KnowledgeBaseNodeType>> = ({
isFilterFileVar
isSupportFileVar={false}
preferSchemaType
typePlaceHolder={chunkTypePlaceHolder}
/>
</BoxGroupField>
<BoxGroup>

View File

@ -17,7 +17,7 @@ const ErrorMessage: FC<ErrorMessageProps> = ({
className,
)}>
<RiErrorWarningFill className='h-4 w-4 shrink-0 text-text-destructive' />
<div className='system-xs-medium max-h-12 grow overflow-y-auto break-words text-text-primary'>
<div className='system-xs-medium max-h-12 grow overflow-y-auto whitespace-pre-line break-words text-text-primary'>
{message}
</div>
</div>

View File

@ -1,9 +1,8 @@
import { z } from 'zod'
import { ArrayType, Type } from './types'
import type { ArrayItems, Field, LLMNodeType } from './types'
import type { Schema, ValidationError } from 'jsonschema'
import { Validator } from 'jsonschema'
import produce from 'immer'
import { z } from 'zod'
import { draft07Validator, forbidBooleanProperties } from '@/utils/validators'
import type { ValidationError } from 'jsonschema'
export const checkNodeValid = (_payload: LLMNodeType) => {
return true
@ -116,191 +115,22 @@ export const findPropertyWithPath = (target: any, path: string[]) => {
return current
}
const draft07MetaSchema = {
$schema: 'http://json-schema.org/draft-07/schema#',
$id: 'http://json-schema.org/draft-07/schema#',
title: 'Core schema meta-schema',
definitions: {
schemaArray: {
type: 'array',
minItems: 1,
items: { $ref: '#' },
},
nonNegativeInteger: {
type: 'integer',
minimum: 0,
},
nonNegativeIntegerDefault0: {
allOf: [
{ $ref: '#/definitions/nonNegativeInteger' },
{ default: 0 },
],
},
simpleTypes: {
enum: [
'array',
'boolean',
'integer',
'null',
'number',
'object',
'string',
],
},
stringArray: {
type: 'array',
items: { type: 'string' },
uniqueItems: true,
default: [],
},
},
type: ['object', 'boolean'],
properties: {
$id: {
type: 'string',
format: 'uri-reference',
},
$schema: {
type: 'string',
format: 'uri',
},
$ref: {
type: 'string',
format: 'uri-reference',
},
title: {
type: 'string',
},
description: {
type: 'string',
},
default: true,
readOnly: {
type: 'boolean',
default: false,
},
examples: {
type: 'array',
items: true,
},
multipleOf: {
type: 'number',
exclusiveMinimum: 0,
},
maximum: {
type: 'number',
},
exclusiveMaximum: {
type: 'number',
},
minimum: {
type: 'number',
},
exclusiveMinimum: {
type: 'number',
},
maxLength: { $ref: '#/definitions/nonNegativeInteger' },
minLength: { $ref: '#/definitions/nonNegativeIntegerDefault0' },
pattern: {
type: 'string',
format: 'regex',
},
additionalItems: { $ref: '#' },
items: {
anyOf: [
{ $ref: '#' },
{ $ref: '#/definitions/schemaArray' },
],
default: true,
},
maxItems: { $ref: '#/definitions/nonNegativeInteger' },
minItems: { $ref: '#/definitions/nonNegativeIntegerDefault0' },
uniqueItems: {
type: 'boolean',
default: false,
},
contains: { $ref: '#' },
maxProperties: { $ref: '#/definitions/nonNegativeInteger' },
minProperties: { $ref: '#/definitions/nonNegativeIntegerDefault0' },
required: { $ref: '#/definitions/stringArray' },
additionalProperties: { $ref: '#' },
definitions: {
type: 'object',
additionalProperties: { $ref: '#' },
default: {},
},
properties: {
type: 'object',
additionalProperties: { $ref: '#' },
default: {},
},
patternProperties: {
type: 'object',
additionalProperties: { $ref: '#' },
propertyNames: { format: 'regex' },
default: {},
},
dependencies: {
type: 'object',
additionalProperties: {
anyOf: [
{ $ref: '#' },
{ $ref: '#/definitions/stringArray' },
],
},
},
propertyNames: { $ref: '#' },
const: true,
enum: {
type: 'array',
items: true,
minItems: 1,
uniqueItems: true,
},
type: {
anyOf: [
{ $ref: '#/definitions/simpleTypes' },
{
type: 'array',
items: { $ref: '#/definitions/simpleTypes' },
minItems: 1,
uniqueItems: true,
},
],
},
format: { type: 'string' },
allOf: { $ref: '#/definitions/schemaArray' },
anyOf: { $ref: '#/definitions/schemaArray' },
oneOf: { $ref: '#/definitions/schemaArray' },
not: { $ref: '#' },
},
default: true,
} as unknown as Schema
const validator = new Validator()
export const validateSchemaAgainstDraft7 = (schemaToValidate: any) => {
const schema = produce(schemaToValidate, (draft: any) => {
// Make sure the schema has the $schema property for draft-07
if (!draft.$schema)
draft.$schema = 'http://json-schema.org/draft-07/schema#'
})
// First check against Draft-07
const result = draft07Validator(schemaToValidate)
// Then apply custom rule
const customErrors = forbidBooleanProperties(schemaToValidate)
const result = validator.validate(schema, draft07MetaSchema, {
nestedErrors: true,
throwError: false,
})
// Access errors from the validation result
const errors = result.valid ? [] : result.errors || []
return errors
return [...result.errors, ...customErrors]
}
export const getValidationErrorMessage = (errors: ValidationError[]) => {
export const getValidationErrorMessage = (errors: Array<ValidationError | string>) => {
const message = errors.map((error) => {
return `Error: ${error.path.join('.')} ${error.message} Details: ${JSON.stringify(error.stack)}`
}).join('; ')
if (typeof error === 'string')
return error
else
return `Error: ${error.stack}\n`
}).join('')
return message
}

View File

@ -9,7 +9,7 @@ export const useNote = (id: string) => {
const handleThemeChange = useCallback((theme: NoteTheme) => {
handleNodeDataUpdateWithSyncDraft({ id, data: { theme } })
saveStateToHistory(WorkflowHistoryEvent.NoteChange)
saveStateToHistory(WorkflowHistoryEvent.NoteChange, { nodeId: id })
}, [handleNodeDataUpdateWithSyncDraft, id, saveStateToHistory])
const handleEditorChange = useCallback((editorState: EditorState) => {
@ -21,7 +21,7 @@ export const useNote = (id: string) => {
const handleShowAuthorChange = useCallback((showAuthor: boolean) => {
handleNodeDataUpdateWithSyncDraft({ id, data: { showAuthor } })
saveStateToHistory(WorkflowHistoryEvent.NoteChange)
saveStateToHistory(WorkflowHistoryEvent.NoteChange, { nodeId: id })
}, [handleNodeDataUpdateWithSyncDraft, id, saveStateToHistory])
return {

View File

@ -56,12 +56,6 @@ const Right = ({
const toolIcon = useToolIcon(currentNodeVar?.nodeData)
const isTruncated = currentNodeVar?.var.is_truncated
const fullContent = currentNodeVar?.var.full_content
console.log(currentNodeVar?.var)
// const isTruncated = true
// const fullContent = {
// size_bytes: 11289600,
// download_url: 'https://upload.dify.ai/files/222bc6e7-40bd-4433-9ba8-4b9ecda88b14/file-preview?timestamp=1754976824&nonce=d970eb39b119f76ec94a9b026f2825b3&sign=ltJO4vS0jrwxuBl4GU74E1Sg_Tia2Y4g2LoBoPh3970=&as_attachment=true',
// }
const {
resetConversationVar,

View File

@ -1,4 +1,4 @@
import { useEffect, useMemo, useRef, useState } from 'react'
import React, { useEffect, useMemo, useRef, useState } from 'react'
import { useDebounceFn } from 'ahooks'
import Textarea from '@/app/components/base/textarea'
import SchemaEditor from '@/app/components/workflow/nodes/llm/components/json-schema-config-modal/schema-editor'
@ -72,18 +72,22 @@ const ValueContent = ({
const [fileValue, setFileValue] = useState<any>(formatFileValue(currentVar))
const { run: debounceValueChange } = useDebounceFn(handleValueChange, { wait: 500 })
if (showTextEditor) {
if (currentVar.value_type === 'number')
setValue(JSON.stringify(currentVar.value))
if (!currentVar.value)
setValue('')
setValue(currentVar.value)
}
if (showJSONEditor)
setJson(currentVar.value ? JSON.stringify(currentVar.value, null, 2) : '')
if (showFileEditor)
setFileValue(formatFileValue(currentVar))
// update default value when id changed
useEffect(() => {
if (showTextEditor) {
if (currentVar.value_type === 'number')
return setValue(JSON.stringify(currentVar.value))
if (!currentVar.value)
return setValue('')
setValue(currentVar.value)
}
if (showJSONEditor)
setJson(currentVar.value ? JSON.stringify(currentVar.value, null, 2) : '')
if (showFileEditor)
setFileValue(formatFileValue(currentVar))
}, [currentVar.id, currentVar.value])
const handleTextChange = (value: string) => {
if (isTruncated)
@ -299,4 +303,4 @@ const ValueContent = ({
)
}
export default ValueContent
export default React.memo(ValueContent)

View File

@ -51,6 +51,7 @@ export function useWorkflowHistoryStore() {
setState: (state: WorkflowHistoryState) => {
store.setState({
workflowHistoryEvent: state.workflowHistoryEvent,
workflowHistoryEventMeta: state.workflowHistoryEventMeta,
nodes: state.nodes.map((node: Node) => ({ ...node, data: { ...node.data, selected: false } })),
edges: state.edges.map((edge: Edge) => ({ ...edge, selected: false }) as Edge),
})
@ -76,6 +77,7 @@ function createStore({
(set, get) => {
return {
workflowHistoryEvent: undefined,
workflowHistoryEventMeta: undefined,
nodes: storeNodes,
edges: storeEdges,
getNodes: () => get().nodes,
@ -97,6 +99,7 @@ export type WorkflowHistoryStore = {
nodes: Node[]
edges: Edge[]
workflowHistoryEvent: WorkflowHistoryEvent | undefined
workflowHistoryEventMeta?: WorkflowHistoryEventMeta
}
export type WorkflowHistoryActions = {
@ -119,3 +122,8 @@ export type WorkflowWithHistoryProviderProps = {
edges: Edge[]
children: ReactNode
}
export type WorkflowHistoryEventMeta = {
nodeId?: string
nodeTitle?: string
}

View File

@ -942,6 +942,8 @@ const translation = {
aboutRetrieval: 'Über die Abrufmethode.',
chooseChunkStructure: 'Auswählen einer Chunk-Struktur',
chunkIsRequired: 'Chunk-Struktur ist erforderlich',
chunksInput: 'Stücke',
chunksInputTip: 'Die Eingangsvariable des Wissensbasis-Knotens sind Chunks. Der Variablentyp ist ein Objekt mit einem spezifischen JSON-Schema, das konsistent mit der ausgewählten Chunk-Struktur sein muss.',
},
},
tracing: {

View File

@ -950,6 +950,8 @@ const translation = {
learnMore: 'Learn more',
},
changeChunkStructure: 'Change Chunk Structure',
chunksInput: 'Chunks',
chunksInputTip: 'The input variable of the knowledge base node is Chunks. The variable type is an object with a specific JSON Schema which must be consistent with the selected chunk structure.',
aboutRetrieval: 'about retrieval method.',
chunkIsRequired: 'Chunk structure is required',
indexMethodIsRequired: 'Index method is required',

View File

@ -942,6 +942,8 @@ const translation = {
retrievalSettingIsRequired: 'Se requiere configuración de recuperación',
chunkStructure: 'Estructura de fragmentos',
chunkIsRequired: 'Se requiere una estructura de fragmentos',
chunksInput: 'Trozo',
chunksInputTip: 'La variable de entrada del nodo de la base de conocimientos es Chunks. El tipo de variable es un objeto con un esquema JSON específico que debe ser consistente con la estructura del fragmento seleccionado.',
},
},
tracing: {

View File

@ -942,6 +942,8 @@ const translation = {
aboutRetrieval: 'درباره روش بازیابی.',
chunkIsRequired: 'ساختار تکه ای مورد نیاز است',
chooseChunkStructure: 'یک ساختار تکه ای را انتخاب کنید',
chunksInput: 'تکه‌ها',
chunksInputTip: 'متغیر ورودی گره پایگاه دانش چانک‌ها است. نوع متغیر یک شیء با یک طرح JSON خاص است که باید با ساختار چانک انتخاب شده سازگار باشد.',
},
},
tracing: {

View File

@ -942,6 +942,8 @@ const translation = {
changeChunkStructure: 'Modifier la structure des morceaux',
indexMethodIsRequired: 'La méthode dindexation est requise',
retrievalSettingIsRequired: 'Le paramètre de récupération est requis',
chunksInput: 'Morceaux',
chunksInputTip: 'La variable d\'entrée du nœud de la base de connaissances est Chunks. Le type de variable est un objet avec un schéma JSON spécifique qui doit être cohérent avec la structure de morceau sélectionnée.',
},
},
tracing: {

View File

@ -962,6 +962,8 @@ const translation = {
changeChunkStructure: 'चंक संरचना बदलें',
aboutRetrieval: 'पुनर्प्राप्ति विधि के बारे में।',
chooseChunkStructure: 'एक चंक संरचना चुनें',
chunksInput: 'टुकड़े',
chunksInputTip: 'ज्ञान आधार नोड का इनपुट वेरिएबल चंक्स है। वेरिएबल प्रकार एक ऑब्जेक्ट है जिसमें एक विशेष JSON स्कीमा है जो चयनित चंक संरचना के साथ सुसंगत होना चाहिए।',
},
},
tracing: {

View File

@ -917,6 +917,8 @@ const translation = {
retrievalSettingIsRequired: 'Pengaturan pengambilan diperlukan',
indexMethodIsRequired: 'Metode indeks diperlukan',
chunkStructure: 'Struktur Potongan',
chunksInput: 'Potongan',
chunksInputTip: 'Variabel input dari node basis pengetahuan adalah Chunks. Tipe variabel adalah objek dengan Skema JSON tertentu yang harus konsisten dengan struktur chunk yang dipilih.',
},
},
tracing: {},

View File

@ -968,6 +968,8 @@ const translation = {
aboutRetrieval: 'Informazioni sul metodo di recupero.',
chunkIsRequired: 'È necessaria una struttura a blocchi',
retrievalSettingIsRequired: 'È richiesta l\'impostazione di recupero',
chunksInputTip: 'La variabile di input del nodo della base di conoscenza è Chunks. Il tipo di variabile è un oggetto con uno specifico schema JSON che deve essere coerente con la struttura del chunk selezionato.',
chunksInput: 'Pezzetti',
},
},
tracing: {

View File

@ -953,6 +953,8 @@ const translation = {
retrievalSettingIsRequired: 'リトリーバル設定が必要です',
changeChunkStructure: 'チャンク構造を変更する',
indexMethodIsRequired: 'インデックスメソッドが必要です',
chunksInput: 'チャンク',
chunksInputTip: '知識ベースードの入力変数はチャンクです。変数のタイプは、選択されたチャンク構造と一貫性のある特定のJSONスキーマを持つオブジェクトです。',
},
},
tracing: {

View File

@ -990,6 +990,8 @@ const translation = {
changeChunkStructure: '청크 구조 변경',
indexMethodIsRequired: '인덱스 메서드가 필요합니다.',
retrievalSettingIsRequired: '검색 설정이 필요합니다.',
chunksInput: '청크',
chunksInputTip: '지식 기반 노드의 입력 변수는 Chunks입니다. 변수 유형은 선택된 청크 구조와 일치해야 하는 특정 JSON 스키마를 가진 객체입니다.',
},
},
tracing: {

View File

@ -942,6 +942,8 @@ const translation = {
chooseChunkStructure: 'Wybieranie struktury fragmentów',
indexMethodIsRequired: 'Metoda indeksowa jest wymagana',
chunkIsRequired: 'Wymagana jest struktura porcji',
chunksInput: 'Kawałki',
chunksInputTip: 'Zmienna wejściowa węzła bazy wiedzy to Chunks. Typ zmiennej to obiekt z określonym schematem JSON, który musi być zgodny z wybraną strukturą chunk.',
},
},
tracing: {

View File

@ -942,6 +942,8 @@ const translation = {
aboutRetrieval: 'sobre o método de recuperação.',
chooseChunkStructure: 'Escolha uma estrutura de blocos',
indexMethodIsRequired: 'O método de índice é necessário',
chunksInput: 'Pedaços',
chunksInputTip: 'A variável de entrada do nó da base de conhecimento é Chunks. O tipo da variável é um objeto com um esquema JSON específico que deve ser consistente com a estrutura de chunk selecionada.',
},
},
tracing: {

View File

@ -942,6 +942,8 @@ const translation = {
aboutRetrieval: 'despre metoda de recuperare.',
chooseChunkStructure: 'Alegeți o structură de bucăți',
changeChunkStructure: 'Modificați structura bucății',
chunksInput: 'Bucăți',
chunksInputTip: 'Variabila de intrare a nodului bazei de cunoștințe este Chunks. Tipul variabilei este un obiect cu un Șchema JSON specific care trebuie să fie coerent cu structura de chunk selectată.',
},
},
tracing: {

View File

@ -942,6 +942,8 @@ const translation = {
chunkIsRequired: 'Требуется структура чанка',
changeChunkStructure: 'Изменение структуры чанка',
retrievalSettingIsRequired: 'Настройка извлечения обязательна',
chunksInput: 'Куски',
chunksInputTip: 'Входная переменная узла базы знаний - это Чанки. Тип переменной является объектом с определенной схемой JSON, которая должна соответствовать выбранной структуре чанка.',
},
},
tracing: {

View File

@ -949,6 +949,8 @@ const translation = {
chunkStructure: 'Struktura kosov',
changeChunkStructure: 'Spreminjanje strukture kosov',
aboutRetrieval: 'o metodi iskanja.',
chunksInput: 'Kosi',
chunksInputTip: 'Vhodna spremenljivka vozlišča podatkovne baze je Chunks. Tip spremenljivke je objekt s specifično JSON shemo, ki mora biti skladna z izbrano strukturo kosov.',
},
},
tracing: {

View File

@ -27,6 +27,7 @@ const translation = {
resultPreview: {
viewDetails: 'ดูรายละเอียด',
error: 'เกิดข้อผิดพลาดระหว่างการดําเนินการ',
loading: 'กำลังประมวลผล...กรุณารอ',
},
},
ragToolSuggestions: {

View File

@ -942,6 +942,8 @@ const translation = {
indexMethodIsRequired: 'ต้องใช้วิธีการจัดทําดัชนี',
retrievalSettingIsRequired: 'จําเป็นต้องมีการตั้งค่าการดึงข้อมูล',
chunkIsRequired: 'จําเป็นต้องมีโครงสร้างก้อน',
chunksInput: 'ชิ้นส่วน',
chunksInputTip: 'ตัวแปรนำเข้าของโหนดฐานความรู้คือ Chunks ตัวแปรประเภทเป็นอ็อบเจ็กต์ที่มี JSON Schema เฉพาะซึ่งต้องสอดคล้องกับโครงสร้างชิ้นส่วนที่เลือกไว้.',
},
},
tracing: {

View File

@ -943,6 +943,8 @@ const translation = {
aboutRetrieval: 'geri alma yöntemi hakkında.',
retrievalSettingIsRequired: 'Alma ayarı gereklidir',
changeChunkStructure: 'Yığın Yapısını Değiştir',
chunksInput: 'Parçalar',
chunksInputTip: 'Bilgi tabanı düğümünün girdi değişkeni \'Chunks\'tır. Değişkenin tipi, seçilen parça yapısıyla tutarlı olması gereken belirli bir JSON Şemasına sahip bir nesnedir.',
},
},
tracing: {

View File

@ -942,6 +942,8 @@ const translation = {
indexMethodIsRequired: 'Обов\'язковий індексний метод',
chooseChunkStructure: 'Виберіть структуру шматків',
retrievalSettingIsRequired: 'Потрібне налаштування для отримання',
chunksInput: 'Частини',
chunksInputTip: 'Вхідна змінна вузла бази знань - це Частини. Тип змінної - об\'єкт з певною JSON-схемою, яка повинна відповідати вибраній структурі частин.',
},
},
tracing: {

View File

@ -942,6 +942,8 @@ const translation = {
changeChunkStructure: 'Thay đổi cấu trúc chunk',
chooseChunkStructure: 'Chọn cấu trúc chunk',
indexMethodIsRequired: 'Phương pháp chỉ mục là bắt buộc',
chunksInput: 'Mảnh',
chunksInputTip: 'Biến đầu vào của nút cơ sở tri thức là Chunks. Loại biến là một đối tượng với một JSON Schema cụ thể mà phải nhất quán với cấu trúc chunk đã chọn.',
},
},
tracing: {

View File

@ -950,6 +950,8 @@ const translation = {
learnMore: '了解更多',
},
changeChunkStructure: '更改分段结构',
chunksInput: '分块',
chunksInputTip: '知识库节点的输入变量为 Chunks。该变量类型是符合特定 JSON Schema 的对象,必须与所选块结构一致。',
aboutRetrieval: '关于知识检索。',
chunkIsRequired: '分段结构是必需的',
indexMethodIsRequired: '索引方法是必需的',

View File

@ -942,6 +942,8 @@ const translation = {
chunkStructure: '區塊結構',
changeChunkStructure: '變更區塊結構',
retrievalSettingIsRequired: '需要檢索設定',
chunksInput: '區塊',
chunksInputTip: '知識庫節點的輸入變數是 Chunks。該變數類型是一個物件具有特定的 JSON Schema必須與所選的塊結構一致。',
},
},
tracing: {

View File

@ -377,7 +377,7 @@ export type InitialDocumentDetail = {
data_source_info: DataSourceInfo
dataset_process_rule_id: string
name: string
created_from: 'api' | 'web'
created_from: 'rag-pipeline' | 'api' | 'web'
created_by: string
created_at: number
indexing_status: DocumentIndexingStatus
@ -403,7 +403,6 @@ export type SimpleDocumentDetail = InitialDocumentDetail & {
}
}
doc_metadata?: MetadataItemWithValue[]
created_from: string
}
export type DocumentListResponse = {

View File

@ -12603,7 +12603,7 @@ snapshots:
'@vue/compiler-sfc@3.5.17':
dependencies:
'@babel/parser': 7.28.0
'@babel/parser': 7.28.3
'@vue/compiler-core': 3.5.17
'@vue/compiler-dom': 3.5.17
'@vue/compiler-ssr': 3.5.17

View File

@ -10,7 +10,7 @@ import type {
RelatedAppResponse,
} from '@/models/datasets'
import { get } from '../base'
import { useReset } from '../use-base'
import { useInvalid } from '../use-base'
import qs from 'qs'
const NAME_SPACE = 'dataset'
@ -36,8 +36,8 @@ export const useDatasetList = (params: DatasetListRequest) => {
})
}
export const useResetDatasetList = () => {
return useReset([...DatasetListKey])
export const useInvalidDatasetList = () => {
return useInvalid([...DatasetListKey])
}
export const datasetDetailQueryKeyPrefix = [NAME_SPACE, 'detail']

245
web/utils/draft-07.json Normal file
View File

@ -0,0 +1,245 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "http://json-schema.org/draft-07/schema#",
"title": "Core schema meta-schema",
"definitions": {
"schemaArray": {
"type": "array",
"minItems": 1,
"items": {
"$ref": "#"
}
},
"nonNegativeInteger": {
"type": "integer",
"minimum": 0
},
"nonNegativeIntegerDefault0": {
"allOf": [
{
"$ref": "#/definitions/nonNegativeInteger"
},
{
"default": 0
}
]
},
"simpleTypes": {
"enum": [
"array",
"boolean",
"integer",
"null",
"number",
"object",
"string"
]
},
"stringArray": {
"type": "array",
"items": {
"type": "string"
},
"uniqueItems": true,
"default": []
}
},
"type": [
"object",
"boolean"
],
"properties": {
"$id": {
"type": "string",
"format": "uri-reference"
},
"$schema": {
"type": "string",
"format": "uri"
},
"$ref": {
"type": "string",
"format": "uri-reference"
},
"$comment": {
"type": "string"
},
"title": {
"type": "string"
},
"description": {
"type": "string"
},
"default": true,
"readOnly": {
"type": "boolean",
"default": false
},
"writeOnly": {
"type": "boolean",
"default": false
},
"examples": {
"type": "array",
"items": true
},
"multipleOf": {
"type": "number",
"exclusiveMinimum": 0
},
"maximum": {
"type": "number"
},
"exclusiveMaximum": {
"type": "number"
},
"minimum": {
"type": "number"
},
"exclusiveMinimum": {
"type": "number"
},
"maxLength": {
"$ref": "#/definitions/nonNegativeInteger"
},
"minLength": {
"$ref": "#/definitions/nonNegativeIntegerDefault0"
},
"pattern": {
"type": "string",
"format": "regex"
},
"additionalItems": {
"$ref": "#"
},
"items": {
"anyOf": [
{
"$ref": "#"
},
{
"$ref": "#/definitions/schemaArray"
}
],
"default": true
},
"maxItems": {
"$ref": "#/definitions/nonNegativeInteger"
},
"minItems": {
"$ref": "#/definitions/nonNegativeIntegerDefault0"
},
"uniqueItems": {
"type": "boolean",
"default": false
},
"contains": {
"$ref": "#"
},
"maxProperties": {
"$ref": "#/definitions/nonNegativeInteger"
},
"minProperties": {
"$ref": "#/definitions/nonNegativeIntegerDefault0"
},
"required": {
"$ref": "#/definitions/stringArray"
},
"additionalProperties": {
"$ref": "#"
},
"definitions": {
"type": "object",
"additionalProperties": {
"$ref": "#"
},
"default": {}
},
"properties": {
"type": "object",
"additionalProperties": {
"$ref": "#"
},
"default": {}
},
"patternProperties": {
"type": "object",
"additionalProperties": {
"$ref": "#"
},
"propertyNames": {
"format": "regex"
},
"default": {}
},
"dependencies": {
"type": "object",
"additionalProperties": {
"anyOf": [
{
"$ref": "#"
},
{
"$ref": "#/definitions/stringArray"
}
]
}
},
"propertyNames": {
"$ref": "#"
},
"const": true,
"enum": {
"type": "array",
"items": true,
"minItems": 1,
"uniqueItems": true
},
"type": {
"anyOf": [
{
"$ref": "#/definitions/simpleTypes"
},
{
"type": "array",
"items": {
"$ref": "#/definitions/simpleTypes"
},
"minItems": 1,
"uniqueItems": true
}
]
},
"format": {
"type": "string"
},
"contentMediaType": {
"type": "string"
},
"contentEncoding": {
"type": "string"
},
"if": {
"$ref": "#"
},
"then": {
"$ref": "#"
},
"else": {
"$ref": "#"
},
"allOf": {
"$ref": "#/definitions/schemaArray"
},
"anyOf": {
"$ref": "#/definitions/schemaArray"
},
"oneOf": {
"$ref": "#/definitions/schemaArray"
},
"not": {
"$ref": "#"
}
},
"default": true
}

27
web/utils/validators.ts Normal file
View File

@ -0,0 +1,27 @@
import type { Schema } from 'jsonschema'
import { Validator } from 'jsonschema'
import draft07Schema from './draft-07.json'
const validator = new Validator()
/**
 * Validate an arbitrary schema object against the JSON Schema Draft-07
 * meta-schema and return the full jsonschema ValidatorResult.
 */
export const draft07Validator = (schema: any) => {
  const metaSchema = draft07Schema as unknown as Schema
  return validator.validate(schema, metaSchema)
}
/**
 * Recursively scan a JSON Schema's `properties` maps and collect an error
 * string for every property whose schema is a bare boolean (`true`/`false`),
 * which this project forbids.
 *
 * @param schema - candidate schema (any value; non-objects are ignored)
 * @param path   - ancestor property names, used to build dotted error paths
 * @returns one error message per offending property, depth-first order
 */
export const forbidBooleanProperties = (schema: any, path: string[] = []): string[] => {
  // Guard: `typeof null === 'object'`, so the truthiness check is required,
  // and anything without a `properties` map contributes no errors.
  if (!schema || typeof schema !== 'object' || !schema.properties)
    return []

  const collected: string[] = []
  Object.entries(schema.properties).forEach(([name, child]) => {
    if (typeof child === 'boolean') {
      const dotted = [...path, name].join('.')
      collected.push(`Error: Property '${dotted}' must not be a boolean schema`)
    }
    else if (typeof child === 'object') {
      // Recurse into nested object schemas, extending the path.
      collected.push(...forbidBooleanProperties(child, [...path, name]))
    }
  })
  return collected
}