mirror of https://github.com/langgenius/dify.git
Merge branch 'main' into fix/value-content-rerender-error
commit 045d07885d
@@ -579,3 +579,7 @@ QUEUE_MONITOR_INTERVAL=30
 # Swagger UI configuration
 SWAGGER_UI_ENABLED=true
 SWAGGER_UI_PATH=/swagger-ui.html
+
+# Whether to encrypt dataset IDs when exporting DSL files (default: true)
+# Set to false to export dataset IDs as plain text for easier cross-environment import
+DSL_EXPORT_ENCRYPT_DATASET_ID=true
@@ -834,6 +834,11 @@ class DataSetConfig(BaseSettings):
         default=30,
     )
 
+    DSL_EXPORT_ENCRYPT_DATASET_ID: bool = Field(
+        description="Enable or disable dataset ID encryption when exporting DSL files",
+        default=True,
+    )
+
 
 class WorkspaceConfig(BaseSettings):
     """
@@ -11,11 +11,7 @@ from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
 import services
 from configs import dify_config
 from controllers.console import api
-from controllers.console.app.error import (
-    ConversationCompletedError,
-    DraftWorkflowNotExist,
-    DraftWorkflowNotSync,
-)
+from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
 from controllers.console.app.wraps import get_app_model
 from controllers.console.wraps import account_initialization_required, setup_required
 from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
@@ -284,7 +284,7 @@ class DataSourceNotionDatasetSyncApi(Resource):
         documents = DocumentService.get_document_by_dataset_id(dataset_id_str)
         for document in documents:
             document_indexing_sync_task.delay(dataset_id_str, document.id)
-        return 200
+        return {"result": "success"}, 200
 
 
 class DataSourceNotionDocumentSyncApi(Resource):
@@ -302,7 +302,7 @@ class DataSourceNotionDocumentSyncApi(Resource):
         if document is None:
             raise NotFound("Document not found.")
         document_indexing_sync_task.delay(dataset_id_str, document_id_str)
-        return 200
+        return {"result": "success"}, 200
 
 
 api.add_resource(DataSourceApi, "/data-source/integrates", "/data-source/integrates/<uuid:binding_id>/<string:action>")
@@ -113,7 +113,7 @@ class DatasetMetadataBuiltInFieldActionApi(Resource):
             MetadataService.enable_built_in_field(dataset)
         elif action == "disable":
             MetadataService.disable_built_in_field(dataset)
-        return 200
+        return {"result": "success"}, 200
 
 
 class DocumentMetadataEditApi(Resource):
@@ -135,7 +135,7 @@ class DocumentMetadataEditApi(Resource):
 
         MetadataService.update_documents_metadata(dataset, metadata_args)
 
-        return 200
+        return {"result": "success"}, 200
 
 
 api.add_resource(DatasetMetadataCreateApi, "/datasets/<uuid:dataset_id>/metadata")
@@ -111,7 +111,7 @@ class TagBindingCreateApi(Resource):
         args = parser.parse_args()
         TagService.save_tag_binding(args)
 
-        return 200
+        return {"result": "success"}, 200
 
 
 class TagBindingDeleteApi(Resource):
@@ -132,7 +132,7 @@ class TagBindingDeleteApi(Resource):
         args = parser.parse_args()
         TagService.delete_tag_binding(args)
 
-        return 200
+        return {"result": "success"}, 200
 
 
 api.add_resource(TagListApi, "/tags")
@@ -865,6 +865,7 @@ class ToolProviderMCPApi(Resource):
         parser.add_argument(
             "sse_read_timeout", type=float, required=False, nullable=False, location="json", default=300
         )
+        parser.add_argument("headers", type=dict, required=False, nullable=True, location="json", default={})
         args = parser.parse_args()
         user = current_user
         if not is_valid_url(args["server_url"]):
@@ -881,6 +882,7 @@
                 server_identifier=args["server_identifier"],
                 timeout=args["timeout"],
                 sse_read_timeout=args["sse_read_timeout"],
+                headers=args["headers"],
             )
         )
 
@@ -898,6 +900,7 @@
         parser.add_argument("server_identifier", type=str, required=True, nullable=False, location="json")
         parser.add_argument("timeout", type=float, required=False, nullable=True, location="json")
         parser.add_argument("sse_read_timeout", type=float, required=False, nullable=True, location="json")
+        parser.add_argument("headers", type=dict, required=False, nullable=True, location="json")
         args = parser.parse_args()
         if not is_valid_url(args["server_url"]):
             if "[__HIDDEN__]" in args["server_url"]:
@@ -915,6 +918,7 @@
             server_identifier=args["server_identifier"],
             timeout=args.get("timeout"),
             sse_read_timeout=args.get("sse_read_timeout"),
+            headers=args.get("headers"),
         )
         return {"result": "success"}
 
@@ -951,6 +955,9 @@ class ToolMCPAuthApi(Resource):
                 authed=False,
                 authorization_code=args["authorization_code"],
                 for_list=True,
+                headers=provider.decrypted_headers,
+                timeout=provider.timeout,
+                sse_read_timeout=provider.sse_read_timeout,
             ):
                 MCPToolManageService.update_mcp_provider_credentials(
                     mcp_provider=provider,
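For reference, a sketch of how a client might exercise the extended MCP provider endpoint with the new `headers` field. The endpoint path, tokens, and field values here are illustrative assumptions, not taken from this diff:

```python
import requests

payload = {
    "server_url": "https://mcp.example.com/sse",
    "name": "my-mcp-server",
    "server_identifier": "my-mcp-server",
    "timeout": 30,
    "sse_read_timeout": 300,
    # New in this commit: custom headers forwarded to the MCP server,
    # stored encrypted server-side.
    "headers": {"Authorization": "Bearer <token>"},
}
resp = requests.post(
    "https://dify.example.com/console/api/workspaces/current/tool-provider/mcp",
    json=payload,
    headers={"Authorization": "Bearer <console-token>"},
)
print(resp.json())  # expected: {"result": "success", ...}
```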
@@ -8,37 +8,44 @@ from flask_restx import reqparse
 from pydantic import BaseModel
 from sqlalchemy.orm import Session
 
+from core.file.constants import DEFAULT_SERVICE_API_USER_ID
 from extensions.ext_database import db
 from libs.login import _get_user
-from models.account import Account, Tenant
+from models.account import Tenant
 from models.model import EndUser
-from services.account_service import AccountService
 
 
-def get_user(tenant_id: str, user_id: str | None) -> Account | EndUser:
+def get_user(tenant_id: str, user_id: str | None) -> EndUser:
     """
     Get current user
+
+    NOTE: user_id is not trusted, it could be maliciously set to any value.
+    As a result, it could only be considered as an end user id.
     """
     try:
         with Session(db.engine) as session:
             if not user_id:
-                user_id = "DEFAULT-USER"
+                user_id = DEFAULT_SERVICE_API_USER_ID
+
+            user_model = (
+                session.query(EndUser)
+                .where(
+                    EndUser.session_id == user_id,
+                    EndUser.tenant_id == tenant_id,
+                )
+                .first()
+            )
+            if not user_model:
+                user_model = EndUser(
+                    tenant_id=tenant_id,
+                    type="service_api",
+                    is_anonymous=user_id == DEFAULT_SERVICE_API_USER_ID,
+                    session_id=user_id,
+                )
+                session.add(user_model)
+                session.commit()
+                session.refresh(user_model)
 
-            if user_id == "DEFAULT-USER":
-                user_model = session.query(EndUser).where(EndUser.session_id == "DEFAULT-USER").first()
-                if not user_model:
-                    user_model = EndUser(
-                        tenant_id=tenant_id,
-                        type="service_api",
-                        is_anonymous=True if user_id == "DEFAULT-USER" else False,
-                        session_id=user_id,
-                    )
-                    session.add(user_model)
-                    session.commit()
-                    session.refresh(user_model)
-            else:
-                user_model = AccountService.load_user(user_id)
-                if not user_model:
-                    user_model = session.query(EndUser).where(EndUser.id == user_id).first()
-            if not user_model:
-                raise ValueError("user not found")
     except Exception:
         raise ValueError("user not found")
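The security-relevant effect of this rewrite is easiest to see in a usage sketch (tenant and user IDs are hypothetical): any caller-supplied `user_id` now resolves to a tenant-scoped `EndUser` and can never be laundered into a workspace `Account` lookup.

```python
# Untrusted external ID: always mapped to a service_api EndUser in this tenant.
user = get_user(tenant_id="tenant-uuid", user_id="external-user-42")
assert isinstance(user, EndUser)
assert user.tenant_id == "tenant-uuid"
assert user.is_anonymous is False

# Missing user_id falls back to the shared anonymous end user.
anonymous = get_user(tenant_id="tenant-uuid", user_id=None)
assert anonymous.session_id == DEFAULT_SERVICE_API_USER_ID
assert anonymous.is_anonymous is True
```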
@@ -63,7 +70,7 @@ def get_user_tenant(view: Optional[Callable] = None):
             raise ValueError("tenant_id is required")
 
         if not user_id:
-            user_id = "DEFAULT-USER"
+            user_id = DEFAULT_SERVICE_API_USER_ID
 
         del kwargs["tenant_id"]
         del kwargs["user_id"]
@@ -174,7 +174,7 @@ class DatasetMetadataBuiltInFieldActionServiceApi(DatasetApiResource):
             MetadataService.enable_built_in_field(dataset)
         elif action == "disable":
             MetadataService.disable_built_in_field(dataset)
-        return 200
+        return {"result": "success"}, 200
 
 
 @service_api_ns.route("/datasets/<uuid:dataset_id>/documents/metadata")
@@ -204,4 +204,4 @@ class DocumentMetadataEditServiceApi(DatasetApiResource):
 
         MetadataService.update_documents_metadata(dataset, metadata_args)
 
-        return 200
+        return {"result": "success"}, 200
@@ -13,6 +13,7 @@ from sqlalchemy import select, update
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import Forbidden, NotFound, Unauthorized
 
+from core.file.constants import DEFAULT_SERVICE_API_USER_ID
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
 from libs.datetime_utils import naive_utc_now
@@ -271,7 +272,7 @@ def create_or_update_end_user_for_user_id(app_model: App, user_id: Optional[str]
     Create or update session terminal based on user ID.
     """
     if not user_id:
-        user_id = "DEFAULT-USER"
+        user_id = DEFAULT_SERVICE_API_USER_ID
 
     with Session(db.engine, expire_on_commit=False) as session:
         end_user = (
@@ -290,7 +291,7 @@
                 tenant_id=app_model.tenant_id,
                 app_id=app_model.id,
                 type="service_api",
-                is_anonymous=user_id == "DEFAULT-USER",
+                is_anonymous=user_id == DEFAULT_SERVICE_API_USER_ID,
                 session_id=user_id,
             )
             session.add(end_user)
@@ -42,6 +42,7 @@ from models.provider import (
     TenantPreferredModelProvider,
 )
 from models.provider_ids import ModelProviderID
+from services.enterprise.plugin_manager_service import PluginCredentialType
 
 logger = logging.getLogger(__name__)
 
@@ -129,14 +130,38 @@ class ProviderConfiguration(BaseModel):
             return copy_credentials
         else:
             credentials = None
+            current_credential_id = None
 
             if self.custom_configuration.models:
                 for model_configuration in self.custom_configuration.models:
                     if model_configuration.model_type == model_type and model_configuration.model == model:
                         credentials = model_configuration.credentials
+                        current_credential_id = model_configuration.current_credential_id
                         break
 
             if not credentials and self.custom_configuration.provider:
                 credentials = self.custom_configuration.provider.credentials
+                current_credential_id = self.custom_configuration.provider.current_credential_id
+
+            if current_credential_id:
+                from core.helper.credential_utils import check_credential_policy_compliance
+
+                check_credential_policy_compliance(
+                    credential_id=current_credential_id,
+                    provider=self.provider.provider,
+                    credential_type=PluginCredentialType.MODEL,
+                )
+            else:
+                # no current credential id, check all available credentials
+                if self.custom_configuration.provider:
+                    for credential_configuration in self.custom_configuration.provider.available_credentials:
+                        from core.helper.credential_utils import check_credential_policy_compliance
+
+                        check_credential_policy_compliance(
+                            credential_id=credential_configuration.credential_id,
+                            provider=self.provider.provider,
+                            credential_type=PluginCredentialType.MODEL,
+                        )
 
             return credentials
@@ -266,7 +291,6 @@
         :param credential_id: if provided, return the specified credential
         :return:
         """
-
         if credential_id:
             return self._get_specific_provider_credential(credential_id)
 
@@ -739,6 +763,7 @@
 
             current_credential_id = credential_record.id
             current_credential_name = credential_record.credential_name
+
             credentials = self.obfuscated_credentials(
                 credentials=credentials,
                 credential_form_schemas=self.provider.model_credential_schema.credential_form_schemas
@@ -793,6 +818,7 @@
                 ):
                     current_credential_id = model_configuration.current_credential_id
                     current_credential_name = model_configuration.current_credential_name
+
                     credentials = self.obfuscated_credentials(
                         credentials=model_configuration.credentials,
                         credential_form_schemas=self.provider.model_credential_schema.credential_form_schemas
@@ -145,6 +145,7 @@ class ModelLoadBalancingConfiguration(BaseModel):
     name: str
     credentials: dict
     credential_source_type: str | None = None
+    credential_id: str | None = None
 
 
 class ModelSettings(BaseModel):
|||
|
|
@ -9,3 +9,7 @@ FILE_MODEL_IDENTITY = "__dify__file__"
|
|||
|
||||
def maybe_file_object(o: Any) -> bool:
|
||||
return isinstance(o, dict) and o.get("dify_model_identity") == FILE_MODEL_IDENTITY
|
||||
|
||||
|
||||
# The default user ID for service API calls.
|
||||
DEFAULT_SERVICE_API_USER_ID = "DEFAULT-USER"
|
||||
|
|
|
|||
|
|
@@ -6,6 +6,7 @@ import time
 import urllib.parse
 
 from configs import dify_config
+from core.file.constants import DEFAULT_SERVICE_API_USER_ID
 
 
 def get_signed_file_url(upload_file_id: str, as_attachment=False) -> str:
@@ -31,7 +32,7 @@ def get_signed_file_url_for_plugin(filename: str, mimetype: str, tenant_id: str,
     url = f"{base_url}/files/upload/for-plugin"
 
     if user_id is None:
-        user_id = "DEFAULT-USER"
+        user_id = DEFAULT_SERVICE_API_USER_ID
 
     timestamp = str(int(time.time()))
     nonce = os.urandom(16).hex()
@@ -47,7 +48,7 @@ def verify_plugin_file_signature(
     *, filename: str, mimetype: str, tenant_id: str, user_id: str | None, timestamp: str, nonce: str, sign: str
 ) -> bool:
     if user_id is None:
-        user_id = "DEFAULT-USER"
+        user_id = DEFAULT_SERVICE_API_USER_ID
 
     data_to_sign = f"upload|{filename}|{mimetype}|{tenant_id}|{user_id}|{timestamp}|{nonce}"
     secret_key = dify_config.SECRET_KEY.encode()
@@ -0,0 +1,75 @@
+"""
+Credential utility functions for checking credential existence and policy compliance.
+"""
+
+from services.enterprise.plugin_manager_service import PluginCredentialType
+
+
+def is_credential_exists(credential_id: str, credential_type: "PluginCredentialType") -> bool:
+    """
+    Check if the credential still exists in the database.
+
+    :param credential_id: The credential ID to check
+    :param credential_type: The type of credential (MODEL or TOOL)
+    :return: True if credential exists, False otherwise
+    """
+    from sqlalchemy import select
+    from sqlalchemy.orm import Session
+
+    from extensions.ext_database import db
+    from models.provider import ProviderCredential, ProviderModelCredential
+    from models.tools import BuiltinToolProvider
+
+    with Session(db.engine) as session:
+        if credential_type == PluginCredentialType.MODEL:
+            # Check both pre-defined and custom model credentials using a single UNION query
+            stmt = (
+                select(ProviderCredential.id)
+                .where(ProviderCredential.id == credential_id)
+                .union(select(ProviderModelCredential.id).where(ProviderModelCredential.id == credential_id))
+            )
+            return session.scalar(stmt) is not None
+
+        if credential_type == PluginCredentialType.TOOL:
+            return (
+                session.scalar(select(BuiltinToolProvider.id).where(BuiltinToolProvider.id == credential_id))
+                is not None
+            )
+
+    return False
+
+
+def check_credential_policy_compliance(
+    credential_id: str, provider: str, credential_type: "PluginCredentialType", check_existence: bool = True
+) -> None:
+    """
+    Check credential policy compliance for the given credential ID.
+
+    :param credential_id: The credential ID to check
+    :param provider: The provider name
+    :param credential_type: The type of credential (MODEL or TOOL)
+    :param check_existence: Whether to check if credential exists in database first
+    :raises ValueError: If credential policy compliance check fails
+    """
+    from services.enterprise.plugin_manager_service import (
+        CheckCredentialPolicyComplianceRequest,
+        PluginManagerService,
+    )
+    from services.feature_service import FeatureService
+
+    if not FeatureService.get_system_features().plugin_manager.enabled or not credential_id:
+        return
+
+    # Check if credential exists in database first (if requested)
+    if check_existence:
+        if not is_credential_exists(credential_id, credential_type):
+            raise ValueError(f"Credential with id {credential_id} for provider {provider} not found.")
+
+    # Check policy compliance
+    PluginManagerService.check_credential_policy_compliance(
+        CheckCredentialPolicyComplianceRequest(
+            dify_credential_id=credential_id,
+            provider=provider,
+            credential_type=credential_type,
+        )
+    )
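A minimal usage sketch of the new helper (IDs are hypothetical; per the guard at the top of the function, the check is a no-op unless the enterprise plugin manager feature is enabled):

```python
from core.helper.credential_utils import check_credential_policy_compliance
from services.enterprise.plugin_manager_service import PluginCredentialType

# Raises ValueError if the credential no longer exists or violates the
# workspace's enterprise credential policy; otherwise returns None.
try:
    check_credential_policy_compliance(
        credential_id="hypothetical-credential-uuid",
        provider="openai",
        credential_type=PluginCredentialType.MODEL,
    )
except ValueError as e:
    print(f"credential rejected: {e}")
```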
@@ -23,6 +23,7 @@ from core.model_runtime.model_providers.__base.tts_model import TTSModel
 from core.provider_manager import ProviderManager
 from extensions.ext_redis import redis_client
 from models.provider import ProviderType
+from services.enterprise.plugin_manager_service import PluginCredentialType
 
 logger = logging.getLogger(__name__)
 
@@ -362,6 +363,23 @@ class ModelInstance:
                 else:
                     raise last_exception
 
+            # Additional policy compliance check as fallback (in case fetch_next didn't catch it)
+            try:
+                from core.helper.credential_utils import check_credential_policy_compliance
+
+                if lb_config.credential_id:
+                    check_credential_policy_compliance(
+                        credential_id=lb_config.credential_id,
+                        provider=self.provider,
+                        credential_type=PluginCredentialType.MODEL,
+                    )
+            except Exception as e:
+                logger.warning(
+                    "Load balancing config %s failed policy compliance check in round-robin: %s", lb_config.id, str(e)
+                )
+                self.load_balancing_manager.cooldown(lb_config, expire=60)
+                continue
+
             try:
                 if "credentials" in kwargs:
                     del kwargs["credentials"]
@@ -515,6 +533,24 @@ class LBModelManager:
 
                 continue
 
+            # Check policy compliance for the selected configuration
+            try:
+                from core.helper.credential_utils import check_credential_policy_compliance
+
+                if config.credential_id:
+                    check_credential_policy_compliance(
+                        credential_id=config.credential_id,
+                        provider=self._provider,
+                        credential_type=PluginCredentialType.MODEL,
+                    )
+            except Exception as e:
+                logger.warning("Load balancing config %s failed policy compliance check: %s", config.id, str(e))
+                cooldown_load_balancing_configs.append(config)
+                if len(cooldown_load_balancing_configs) >= len(self._load_balancing_configs):
+                    # all configs are in cooldown or failed policy compliance
+                    return None
+                continue
+
             if dify_config.DEBUG:
                 logger.info(
                     """Model LB
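A behavior sketch of what the two guards above give the caller, assuming an `LBModelManager` instance named `lb_model_manager` (the snippet itself is not from the diff): a config that fails the enterprise policy check is treated like an errored config, put into cooldown, and skipped.

```python
# fetch_next() now returns None once every load-balancing config is either
# in cooldown or rejected by the policy check, so the round-robin caller
# stops retrying instead of looping forever.
config = lb_model_manager.fetch_next()
if config is None:
    # all load-balancing configs are cooled down or non-compliant;
    # surface the last error, as the surrounding round-robin loop does
    raise last_exception
```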
@@ -325,14 +325,11 @@ class OpsTraceManager:
         :return:
         """
         # auth check
-        if enabled:
-            try:
+        try:
+            if enabled or tracing_provider is not None:
                 provider_config_map[tracing_provider]
-            except KeyError:
-                raise ValueError(f"Invalid tracing provider: {tracing_provider}")
-        else:
-            if tracing_provider is None:
-                raise ValueError(f"Invalid tracing provider: {tracing_provider}")
+        except KeyError:
+            raise ValueError(f"Invalid tracing provider: {tracing_provider}")
 
         app_config: Optional[App] = db.session.query(App).where(App.id == app_id).first()
         if not app_config:
@@ -1129,6 +1129,7 @@ class ProviderManager:
                     name=load_balancing_model_config.name,
                     credentials=provider_model_credentials,
                     credential_source_type=load_balancing_model_config.credential_source_type,
+                    credential_id=load_balancing_model_config.credential_id,
                 )
             )
 
@@ -43,6 +43,10 @@ class ToolProviderApiEntity(BaseModel):
     server_url: Optional[str] = Field(default="", description="The server url of the tool")
     updated_at: int = Field(default_factory=lambda: int(datetime.now().timestamp()))
     server_identifier: Optional[str] = Field(default="", description="The server identifier of the MCP tool")
+    timeout: Optional[float] = Field(default=30.0, description="The timeout of the MCP tool")
+    sse_read_timeout: Optional[float] = Field(default=300.0, description="The SSE read timeout of the MCP tool")
+    masked_headers: Optional[dict[str, str]] = Field(default=None, description="The masked headers of the MCP tool")
+    original_headers: Optional[dict[str, str]] = Field(default=None, description="The original headers of the MCP tool")
 
     @field_validator("tools", mode="before")
     @classmethod
@@ -65,6 +69,10 @@
         if self.type == ToolProviderType.MCP:
             optional_fields.update(self.optional_field("updated_at", self.updated_at))
             optional_fields.update(self.optional_field("server_identifier", self.server_identifier))
+            optional_fields.update(self.optional_field("timeout", self.timeout))
+            optional_fields.update(self.optional_field("sse_read_timeout", self.sse_read_timeout))
+            optional_fields.update(self.optional_field("masked_headers", self.masked_headers))
+            optional_fields.update(self.optional_field("original_headers", self.original_headers))
         return {
             "id": self.id,
             "author": self.author,
@@ -29,6 +29,10 @@ class ToolApiSchemaError(ValueError):
     pass
 
 
+class ToolCredentialPolicyViolationError(ValueError):
+    pass
+
+
 class ToolEngineInvokeError(Exception):
     meta: ToolInvokeMeta
 
@@ -94,7 +94,7 @@ class MCPToolProviderController(ToolProviderController):
             provider_id=db_provider.server_identifier or "",
             tenant_id=db_provider.tenant_id or "",
             server_url=db_provider.decrypted_server_url,
-            headers={},  # TODO: get headers from db provider
+            headers=db_provider.decrypted_headers or {},
             timeout=db_provider.timeout,
             sse_read_timeout=db_provider.sse_read_timeout,
         )
@@ -14,16 +14,31 @@ from sqlalchemy.orm import Session
 from yarl import URL
 
 import contexts
+from core.helper.provider_cache import ToolProviderCredentialsCache
+from core.plugin.entities.plugin import ToolProviderID
+from core.plugin.impl.tool import PluginToolManager
+from core.tools.__base.tool_provider import ToolProviderController
+from core.tools.__base.tool_runtime import ToolRuntime
+from core.tools.mcp_tool.provider import MCPToolProviderController
+from core.tools.mcp_tool.tool import MCPTool
+from core.tools.plugin_tool.provider import PluginToolProviderController
+from core.tools.plugin_tool.tool import PluginTool
+from core.tools.utils.uuid_utils import is_valid_uuid
+from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
+from core.workflow.entities.variable_pool import VariablePool
+from services.enterprise.plugin_manager_service import PluginCredentialType
+from services.tools.mcp_tools_manage_service import MCPToolManageService
+
+if TYPE_CHECKING:
+    from core.workflow.nodes.tool.entities import ToolEntity
+
 from configs import dify_config
 from core.agent.entities import AgentToolEntity
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.helper.module_import_helper import load_single_subclass_from_source
 from core.helper.position_helper import is_filtered
-from core.helper.provider_cache import ToolProviderCredentialsCache
 from core.model_runtime.utils.encoders import jsonable_encoder
 from core.tools.__base.tool import Tool
-from core.tools.__base.tool_provider import ToolProviderController
-from core.tools.__base.tool_runtime import ToolRuntime
 from core.tools.builtin_tool.provider import BuiltinToolProviderController
 from core.tools.builtin_tool.providers._positions import BuiltinToolProviderSort
 from core.tools.builtin_tool.tool import BuiltinTool
@@ -39,22 +54,12 @@ from core.tools.entities.tool_entities import (
     ToolProviderType,
 )
 from core.tools.errors import ToolProviderNotFoundError
-from core.tools.mcp_tool.provider import MCPToolProviderController
-from core.tools.mcp_tool.tool import MCPTool
-from core.tools.plugin_tool.provider import PluginToolProviderController
-from core.tools.plugin_tool.tool import PluginTool
 from core.tools.tool_label_manager import ToolLabelManager
-from core.tools.utils.configuration import (
-    ToolParameterConfigurationManager,
-)
+from core.tools.utils.configuration import ToolParameterConfigurationManager
 from core.tools.utils.encryption import create_provider_encrypter, create_tool_provider_encrypter
-from core.tools.utils.uuid_utils import is_valid_uuid
-from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
 from core.tools.workflow_as_tool.tool import WorkflowTool
 from extensions.ext_database import db
 from models.provider_ids import ToolProviderID
 from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider
-from services.tools.mcp_tools_manage_service import MCPToolManageService
 from services.tools.tools_transform_service import ToolTransformService
 
-if TYPE_CHECKING:
@@ -115,7 +120,6 @@ class ToolManager:
         get the plugin provider
         """
         # check if context is set
-        from core.plugin.impl.tool import PluginToolManager
 
         try:
             contexts.plugin_tool_providers.get()
@@ -237,6 +241,16 @@ class ToolManager:
             if builtin_provider is None:
                 raise ToolProviderNotFoundError(f"builtin provider {provider_id} not found")
 
+            # check if the credential is allowed to be used
+            from core.helper.credential_utils import check_credential_policy_compliance
+
+            check_credential_policy_compliance(
+                credential_id=builtin_provider.id,
+                provider=provider_id,
+                credential_type=PluginCredentialType.TOOL,
+                check_existence=False,
+            )
+
         encrypter, cache = create_provider_encrypter(
             tenant_id=tenant_id,
             config=[
@@ -509,7 +523,6 @@ class ToolManager:
         """
         list all the plugin providers
         """
-        from core.plugin.impl.tool import PluginToolManager
 
         manager = PluginToolManager()
         provider_entities = manager.fetch_tool_providers(tenant_id)
@@ -86,9 +86,7 @@ def load_user_from_request(request_from_flask_login):
         if not app_mcp_server:
             raise NotFound("App MCP server not found.")
         end_user = (
-            db.session.query(EndUser)
-            .where(EndUser.external_user_id == app_mcp_server.id, EndUser.type == "mcp")
-            .first()
+            db.session.query(EndUser).where(EndUser.session_id == app_mcp_server.id, EndUser.type == "mcp").first()
         )
         if not end_user:
             raise NotFound("End user not found.")
@@ -0,0 +1,27 @@
+"""add_headers_to_mcp_provider
+
+Revision ID: c20211f18133
+Revises: 8d289573e1da
+Create Date: 2025-08-29 10:07:54.163626
+
+"""
+from alembic import op
+import models as models
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'c20211f18133'
+down_revision = 'b95962a3885c'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # Add encrypted_headers column to tool_mcp_providers table
+    op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', sa.Text(), nullable=True))
+
+
+def downgrade():
+    # Remove encrypted_headers column from tool_mcp_providers table
+    op.drop_column('tool_mcp_providers', 'encrypted_headers')
@@ -49,7 +49,7 @@ class Dataset(Base):
     INDEXING_TECHNIQUE_LIST = ["high_quality", "economy", None]
     PROVIDER_LIST = ["vendor", "external", None]
 
-    id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
+    id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
     tenant_id: Mapped[str] = mapped_column(StringUUID)
     name: Mapped[str] = mapped_column(String(255))
     description = mapped_column(sa.Text, nullable=True)
@@ -290,6 +290,8 @@ class MCPToolProvider(Base):
     )
     timeout: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("30"))
     sse_read_timeout: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("300"))
+    # encrypted headers for MCP server requests
+    encrypted_headers: Mapped[str | None] = mapped_column(sa.Text, nullable=True)
 
     def load_user(self) -> Account | None:
         return db.session.query(Account).where(Account.id == self.user_id).first()
@@ -324,6 +326,62 @@ class MCPToolProvider(Base):
     def decrypted_server_url(self) -> str:
         return encrypter.decrypt_token(self.tenant_id, self.server_url)
 
+    @property
+    def decrypted_headers(self) -> dict[str, Any]:
+        """Get decrypted headers for MCP server requests."""
+        from core.entities.provider_entities import BasicProviderConfig
+        from core.helper.provider_cache import NoOpProviderCredentialCache
+        from core.tools.utils.encryption import create_provider_encrypter
+
+        try:
+            if not self.encrypted_headers:
+                return {}
+
+            headers_data = json.loads(self.encrypted_headers)
+
+            # Create dynamic config for all headers as SECRET_INPUT
+            config = [BasicProviderConfig(type=BasicProviderConfig.Type.SECRET_INPUT, name=key) for key in headers_data]
+
+            encrypter_instance, _ = create_provider_encrypter(
+                tenant_id=self.tenant_id,
+                config=config,
+                cache=NoOpProviderCredentialCache(),
+            )
+
+            result = encrypter_instance.decrypt(headers_data)
+            return result
+        except Exception:
+            return {}
+
+    @property
+    def masked_headers(self) -> dict[str, Any]:
+        """Get masked headers for frontend display."""
+        from core.entities.provider_entities import BasicProviderConfig
+        from core.helper.provider_cache import NoOpProviderCredentialCache
+        from core.tools.utils.encryption import create_provider_encrypter
+
+        try:
+            if not self.encrypted_headers:
+                return {}
+
+            headers_data = json.loads(self.encrypted_headers)
+
+            # Create dynamic config for all headers as SECRET_INPUT
+            config = [BasicProviderConfig(type=BasicProviderConfig.Type.SECRET_INPUT, name=key) for key in headers_data]
+
+            encrypter_instance, _ = create_provider_encrypter(
+                tenant_id=self.tenant_id,
+                config=config,
+                cache=NoOpProviderCredentialCache(),
+            )
+
+            # First decrypt, then mask
+            decrypted_headers = encrypter_instance.decrypt(headers_data)
+            result = encrypter_instance.mask_tool_credentials(decrypted_headers)
+            return result
+        except Exception:
+            return {}
+
     @property
     def masked_server_url(self) -> str:
         def mask_url(url: str, mask_char: str = "*") -> str:
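A sketch of what the two properties are intended to return. The values, including the masking style, are illustrative rather than taken from the encrypter implementation:

```python
# Hypothetical provider row with one encrypted header stored in
# encrypted_headers (JSON text, one ciphertext per header value).
provider = db.session.query(MCPToolProvider).first()

provider.decrypted_headers
# -> {"Authorization": "Bearer sk-real-token"}  # sent to the MCP server

provider.masked_headers
# -> {"Authorization": "Be**********en"}  # safe for frontend display
```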
@@ -1,11 +1,7 @@
 {
-    "include": [
-        "."
-    ],
-    "exclude": [
-        "tests/",
-        "migrations/",
-        ".venv/",
-    ],
+    "include": ["models", "configs"],
+    "exclude": [".venv", "tests/", "migrations/"],
     "ignore": [
         "core/",
         "controllers/",
         "tasks/",
@@ -25,4 +21,4 @@
     "typeCheckingMode": "strict",
     "pythonVersion": "3.11",
     "pythonPlatform": "All"
-}
+}
@@ -17,6 +17,7 @@ from pydantic import BaseModel, Field
 from sqlalchemy import select
 from sqlalchemy.orm import Session
 
+from configs import dify_config
 from core.helper import ssrf_proxy
 from core.model_runtime.utils.encoders import jsonable_encoder
 from core.plugin.entities.plugin import PluginDependency
@@ -786,7 +787,10 @@ class AppDslService:
 
     @classmethod
     def encrypt_dataset_id(cls, dataset_id: str, tenant_id: str) -> str:
-        """Encrypt dataset_id using AES-CBC mode"""
+        """Encrypt dataset_id using AES-CBC mode or return plain text based on configuration"""
+        if not dify_config.DSL_EXPORT_ENCRYPT_DATASET_ID:
+            return dataset_id
+
         key = cls._generate_aes_key(tenant_id)
         iv = key[:16]
         cipher = AES.new(key, AES.MODE_CBC, iv)
@@ -795,12 +799,34 @@
 
     @classmethod
     def decrypt_dataset_id(cls, encrypted_data: str, tenant_id: str) -> str | None:
-        """AES decryption"""
+        """AES decryption with fallback to plain text UUID"""
+        # First, check if it's already a plain UUID (not encrypted)
+        if cls._is_valid_uuid(encrypted_data):
+            return encrypted_data
+
+        # If it's not a UUID, try to decrypt it
         try:
             key = cls._generate_aes_key(tenant_id)
             iv = key[:16]
             cipher = AES.new(key, AES.MODE_CBC, iv)
             pt = unpad(cipher.decrypt(base64.b64decode(encrypted_data)), AES.block_size)
-            return pt.decode()
+            decrypted_text = pt.decode()
+
+            # Validate that the decrypted result is a valid UUID
+            if cls._is_valid_uuid(decrypted_text):
+                return decrypted_text
+            else:
+                # If decrypted result is not a valid UUID, it's probably not our encrypted data
+                return None
         except Exception:
+            # If decryption fails completely, return None
             return None
+
+    @staticmethod
+    def _is_valid_uuid(value: str) -> bool:
+        """Check if string is a valid UUID format"""
+        try:
+            uuid.UUID(value)
+            return True
+        except (ValueError, TypeError):
+            return False
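The export/import round trip this enables, sketched with a hypothetical dataset ID:

```python
# With DSL_EXPORT_ENCRYPT_DATASET_ID=false, export emits the raw UUID and
# import recognizes it without attempting decryption; with encryption on,
# the ciphertext only decrypts within the originating tenant.
dataset_id = "3c90c3cc-0d44-4b50-8888-8dd25736052a"

exported = AppDslService.encrypt_dataset_id(dataset_id, tenant_id="tenant-a")
# -> the plain UUID when encryption is disabled, ciphertext otherwise

restored = AppDslService.decrypt_dataset_id(exported, tenant_id="tenant-b")
# -> plain UUIDs survive cross-tenant import; undecryptable values yield None
assert restored == dataset_id or restored is None
```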
@@ -223,7 +223,7 @@ class DatasetService:
                 and retrieval_model.reranking_model.reranking_model_name
             ):
                 # check if reranking model setting is valid
-                DatasetService.check_embedding_model_setting(
+                DatasetService.check_reranking_model_setting(
                     tenant_id,
                     retrieval_model.reranking_model.reranking_provider_name,
                     retrieval_model.reranking_model.reranking_model_name,
@@ -3,18 +3,30 @@ import os
 import requests
 
 
-class EnterpriseRequest:
-    base_url = os.environ.get("ENTERPRISE_API_URL", "ENTERPRISE_API_URL")
-    secret_key = os.environ.get("ENTERPRISE_API_SECRET_KEY", "ENTERPRISE_API_SECRET_KEY")
-
+class BaseRequest:
     proxies = {
         "http": "",
         "https": "",
    }
+    base_url = ""
+    secret_key = ""
+    secret_key_header = ""
 
     @classmethod
     def send_request(cls, method, endpoint, json=None, params=None):
-        headers = {"Content-Type": "application/json", "Enterprise-Api-Secret-Key": cls.secret_key}
+        headers = {"Content-Type": "application/json", cls.secret_key_header: cls.secret_key}
         url = f"{cls.base_url}{endpoint}"
         response = requests.request(method, url, json=json, params=params, headers=headers, proxies=cls.proxies)
         return response.json()
+
+
+class EnterpriseRequest(BaseRequest):
+    base_url = os.environ.get("ENTERPRISE_API_URL", "ENTERPRISE_API_URL")
+    secret_key = os.environ.get("ENTERPRISE_API_SECRET_KEY", "ENTERPRISE_API_SECRET_KEY")
+    secret_key_header = "Enterprise-Api-Secret-Key"
+
+
+class EnterprisePluginManagerRequest(BaseRequest):
+    base_url = os.environ.get("ENTERPRISE_PLUGIN_MANAGER_API_URL", "ENTERPRISE_PLUGIN_MANAGER_API_URL")
+    secret_key = os.environ.get("ENTERPRISE_PLUGIN_MANAGER_API_SECRET_KEY", "ENTERPRISE_PLUGIN_MANAGER_API_SECRET_KEY")
+    secret_key_header = "Plugin-Manager-Inner-Api-Secret-Key"
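With the shared base class, adding a client for another enterprise endpoint reduces to declaring three attributes. A hypothetical example (the audit service and its environment variables do not exist in this diff):

```python
class EnterpriseAuditLogRequest(BaseRequest):
    base_url = os.environ.get("ENTERPRISE_AUDIT_API_URL", "")
    secret_key = os.environ.get("ENTERPRISE_AUDIT_API_SECRET_KEY", "")
    secret_key_header = "Audit-Api-Secret-Key"

# send_request() is inherited unchanged and picks up the subclass's
# base URL, secret, and header name:
# EnterpriseAuditLogRequest.send_request("GET", "/logs", params={"page": 1})
```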
@@ -0,0 +1,52 @@
+import enum
+import logging
+
+from pydantic import BaseModel
+
+from services.enterprise.base import EnterprisePluginManagerRequest
+from services.errors.base import BaseServiceError
+
+
+class PluginCredentialType(enum.Enum):
+    MODEL = 0
+    TOOL = 1
+
+    def to_number(self):
+        return self.value
+
+
+class CheckCredentialPolicyComplianceRequest(BaseModel):
+    dify_credential_id: str
+    provider: str
+    credential_type: PluginCredentialType
+
+    def model_dump(self, **kwargs):
+        data = super().model_dump(**kwargs)
+        data["credential_type"] = self.credential_type.to_number()
+        return data
+
+
+class CredentialPolicyViolationError(BaseServiceError):
+    pass
+
+
+class PluginManagerService:
+    @classmethod
+    def check_credential_policy_compliance(cls, body: CheckCredentialPolicyComplianceRequest):
+        try:
+            ret = EnterprisePluginManagerRequest.send_request(
+                "POST", "/check-credential-policy-compliance", json=body.model_dump()
+            )
+            if not isinstance(ret, dict) or "result" not in ret:
+                raise ValueError("Invalid response format from plugin manager API")
+        except Exception as e:
+            raise CredentialPolicyViolationError(
+                f"error occurred while checking credential policy compliance: {e}"
+            ) from e
+
+        if not ret.get("result", False):
+            raise CredentialPolicyViolationError("Credentials not available: Please use ENTERPRISE global credentials")
+
+        logging.debug(
+            f"Credential policy compliance checked for {body.provider} with credential {body.dify_credential_id}, result: {ret.get('result', False)}"
+        )
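The `model_dump` override exists so the enum crosses the wire as a number rather than pydantic's default enum representation; a quick sketch (the credential ID is hypothetical):

```python
req = CheckCredentialPolicyComplianceRequest(
    dify_credential_id="hypothetical-credential-uuid",
    provider="openai",
    credential_type=PluginCredentialType.MODEL,
)
req.model_dump()
# -> {"dify_credential_id": "hypothetical-credential-uuid",
#     "provider": "openai",
#     "credential_type": 0}   # enum serialized to its numeric value
```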
@@ -139,6 +139,10 @@ class KnowledgeRateLimitModel(BaseModel):
     subscription_plan: str = ""
 
 
+class PluginManagerModel(BaseModel):
+    enabled: bool = False
+
+
 class SystemFeatureModel(BaseModel):
     sso_enforced_for_signin: bool = False
     sso_enforced_for_signin_protocol: str = ""
@@ -155,6 +159,7 @@ class SystemFeatureModel(BaseModel):
     webapp_auth: WebAppAuthModel = WebAppAuthModel()
     plugin_installation_permission: PluginInstallationPermissionModel = PluginInstallationPermissionModel()
     enable_change_email: bool = True
+    plugin_manager: PluginManagerModel = PluginManagerModel()
 
 
 class FeatureService:
@@ -193,6 +198,7 @@ class FeatureService:
             system_features.branding.enabled = True
             system_features.webapp_auth.enabled = True
             system_features.enable_change_email = False
+            system_features.plugin_manager.enabled = True
 
             cls._fulfill_params_from_enterprise(system_features)
 
         if dify_config.MARKETPLACE_ENABLED:
@@ -1,7 +1,7 @@
 import hashlib
 import json
 from datetime import datetime
-from typing import Any
+from typing import Any, cast
 
 from sqlalchemy import or_
 from sqlalchemy.exc import IntegrityError
@@ -27,6 +27,36 @@ class MCPToolManageService:
     Service class for managing mcp tools.
     """
 
+    @staticmethod
+    def _encrypt_headers(headers: dict[str, str], tenant_id: str) -> dict[str, str]:
+        """
+        Encrypt headers using ProviderConfigEncrypter with all headers as SECRET_INPUT.
+
+        Args:
+            headers: Dictionary of headers to encrypt
+            tenant_id: Tenant ID for encryption
+
+        Returns:
+            Dictionary with all headers encrypted
+        """
+        if not headers:
+            return {}
+
+        from core.entities.provider_entities import BasicProviderConfig
+        from core.helper.provider_cache import NoOpProviderCredentialCache
+        from core.tools.utils.encryption import create_provider_encrypter
+
+        # Create dynamic config for all headers as SECRET_INPUT
+        config = [BasicProviderConfig(type=BasicProviderConfig.Type.SECRET_INPUT, name=key) for key in headers]
+
+        encrypter_instance, _ = create_provider_encrypter(
+            tenant_id=tenant_id,
+            config=config,
+            cache=NoOpProviderCredentialCache(),
+        )
+
+        return cast(dict[str, str], encrypter_instance.encrypt(headers))
+
     @staticmethod
     def get_mcp_provider_by_provider_id(provider_id: str, tenant_id: str) -> MCPToolProvider:
         res = (
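End to end, the headers land in the new `encrypted_headers` column roughly like this (values are hypothetical):

```python
import json

headers = {"Authorization": "Bearer sk-real-token", "X-Custom": "value"}

# Each header value is encrypted as a SECRET_INPUT field...
encrypted = MCPToolManageService._encrypt_headers(headers, tenant_id="tenant-uuid")
# -> {"Authorization": "<ciphertext>", "X-Custom": "<ciphertext>"}

# ...and the whole dict is stored as JSON text in
# tool_mcp_providers.encrypted_headers (see the migration above).
encrypted_headers_column = json.dumps(encrypted)
```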
@@ -61,6 +91,7 @@
         server_identifier: str,
         timeout: float,
         sse_read_timeout: float,
+        headers: dict[str, str] | None = None,
     ) -> ToolProviderApiEntity:
         server_url_hash = hashlib.sha256(server_url.encode()).hexdigest()
         existing_provider = (
@@ -83,6 +114,12 @@
             if existing_provider.server_identifier == server_identifier:
                 raise ValueError(f"MCP tool {server_identifier} already exists")
         encrypted_server_url = encrypter.encrypt_token(tenant_id, server_url)
+        # Encrypt headers
+        encrypted_headers = None
+        if headers:
+            encrypted_headers_dict = MCPToolManageService._encrypt_headers(headers, tenant_id)
+            encrypted_headers = json.dumps(encrypted_headers_dict)
+
         mcp_tool = MCPToolProvider(
             tenant_id=tenant_id,
             name=name,
@@ -95,6 +132,7 @@
             server_identifier=server_identifier,
             timeout=timeout,
             sse_read_timeout=sse_read_timeout,
+            encrypted_headers=encrypted_headers,
         )
         db.session.add(mcp_tool)
         db.session.commit()
@@ -118,9 +156,21 @@
         mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id)
         server_url = mcp_provider.decrypted_server_url
         authed = mcp_provider.authed
+        headers = mcp_provider.decrypted_headers
+        timeout = mcp_provider.timeout
+        sse_read_timeout = mcp_provider.sse_read_timeout
 
         try:
-            with MCPClient(server_url, provider_id, tenant_id, authed=authed, for_list=True) as mcp_client:
+            with MCPClient(
+                server_url,
+                provider_id,
+                tenant_id,
+                authed=authed,
+                for_list=True,
+                headers=headers,
+                timeout=timeout,
+                sse_read_timeout=sse_read_timeout,
+            ) as mcp_client:
                 tools = mcp_client.list_tools()
         except MCPAuthError:
             raise ValueError("Please auth the tool first")
@@ -172,6 +222,7 @@
         server_identifier: str,
         timeout: float | None = None,
         sse_read_timeout: float | None = None,
+        headers: dict[str, str] | None = None,
     ):
         mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id)
 
@@ -207,6 +258,13 @@
                 mcp_provider.timeout = timeout
             if sse_read_timeout is not None:
                 mcp_provider.sse_read_timeout = sse_read_timeout
+            if headers is not None:
+                # Encrypt headers
+                if headers:
+                    encrypted_headers_dict = MCPToolManageService._encrypt_headers(headers, tenant_id)
+                    mcp_provider.encrypted_headers = json.dumps(encrypted_headers_dict)
+                else:
+                    mcp_provider.encrypted_headers = None
             db.session.commit()
         except IntegrityError as e:
             db.session.rollback()
@@ -242,6 +300,12 @@
 
     @classmethod
     def _re_connect_mcp_provider(cls, server_url: str, provider_id: str, tenant_id: str):
+        # Get the existing provider to access headers and timeout settings
+        mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id)
+        headers = mcp_provider.decrypted_headers
+        timeout = mcp_provider.timeout
+        sse_read_timeout = mcp_provider.sse_read_timeout
+
         try:
             with MCPClient(
                 server_url,
@@ -249,6 +313,9 @@
                 tenant_id,
                 authed=False,
                 for_list=True,
+                headers=headers,
+                timeout=timeout,
+                sse_read_timeout=sse_read_timeout,
             ) as mcp_client:
                 tools = mcp_client.list_tools()
                 return {
@@ -250,6 +250,10 @@ class ToolTransformService:
             label=I18nObject(en_US=db_provider.name, zh_Hans=db_provider.name),
             description=I18nObject(en_US="", zh_Hans=""),
             server_identifier=db_provider.server_identifier,
+            timeout=db_provider.timeout,
+            sse_read_timeout=db_provider.sse_read_timeout,
+            masked_headers=db_provider.masked_headers,
+            original_headers=db_provider.decrypted_headers,
         )
 
     @staticmethod
@@ -33,22 +33,14 @@ from libs.datetime_utils import naive_utc_now
 from models.account import Account
 from models.model import App, AppMode
 from models.tools import WorkflowToolProvider
-from models.workflow import (
-    Workflow,
-    WorkflowNodeExecutionModel,
-    WorkflowNodeExecutionTriggeredFrom,
-    WorkflowType,
-)
+from models.workflow import Workflow, WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom, WorkflowType
 from repositories.factory import DifyAPIRepositoryFactory
+from services.enterprise.plugin_manager_service import PluginCredentialType
 from services.errors.app import IsDraftWorkflowError, WorkflowHashNotEqualError
 from services.workflow.workflow_converter import WorkflowConverter
 
 from .errors.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError
-from .workflow_draft_variable_service import (
-    DraftVariableSaver,
-    DraftVarLoader,
-    WorkflowDraftVariableService,
-)
+from .workflow_draft_variable_service import DraftVariableSaver, DraftVarLoader, WorkflowDraftVariableService
 
 
 class WorkflowService:
@@ -268,6 +260,12 @@ class WorkflowService:
         if not draft_workflow:
             raise ValueError("No valid workflow found.")
 
+        # Validate credentials before publishing, for credential policy check
+        from services.feature_service import FeatureService
+
+        if FeatureService.get_system_features().plugin_manager.enabled:
+            self._validate_workflow_credentials(draft_workflow)
+
         # create new workflow
         workflow = Workflow.new(
             tenant_id=app_model.tenant_id,
@ -293,6 +291,260 @@ class WorkflowService:
|
|||
# return new workflow
|
||||
return workflow
|
||||
|
||||
def _validate_workflow_credentials(self, workflow: Workflow) -> None:
|
||||
"""
|
||||
Validate all credentials in workflow nodes before publishing.
|
||||
|
||||
:param workflow: The workflow to validate
|
||||
:raises ValueError: If any credentials violate policy compliance
|
||||
"""
|
||||
graph_dict = workflow.graph_dict
|
||||
nodes = graph_dict.get("nodes", [])
|
||||
|
||||
for node in nodes:
|
||||
node_data = node.get("data", {})
|
||||
node_type = node_data.get("type")
|
||||
node_id = node.get("id", "unknown")
|
||||
|
||||
try:
|
||||
# Extract and validate credentials based on node type
|
||||
if node_type == "tool":
|
||||
credential_id = node_data.get("credential_id")
|
||||
provider = node_data.get("provider_id")
|
||||
if provider:
|
||||
if credential_id:
|
||||
# Check specific credential
|
||||
from core.helper.credential_utils import check_credential_policy_compliance
|
||||
|
||||
check_credential_policy_compliance(
|
||||
credential_id=credential_id,
|
||||
provider=provider,
|
||||
credential_type=PluginCredentialType.TOOL,
|
||||
)
|
||||
else:
|
||||
# Check default workspace credential for this provider
|
||||
self._check_default_tool_credential(workflow.tenant_id, provider)
|
||||
|
||||
elif node_type == "agent":
|
||||
agent_params = node_data.get("agent_parameters", {})
|
||||
|
||||
model_config = agent_params.get("model", {}).get("value", {})
|
||||
if model_config.get("provider") and model_config.get("model"):
|
||||
self._validate_llm_model_config(
|
||||
workflow.tenant_id, model_config["provider"], model_config["model"]
|
||||
)
|
||||
|
||||
# Validate load balancing credentials for agent model if load balancing is enabled
|
||||
agent_model_node_data = {"model": model_config}
|
||||
self._validate_load_balancing_credentials(workflow, agent_model_node_data, node_id)
|
||||
|
||||
# Validate agent tools
|
||||
tools = agent_params.get("tools", {}).get("value", [])
|
||||
for tool in tools:
|
||||
# Agent tools store provider in provider_name field
|
||||
provider = tool.get("provider_name")
|
||||
credential_id = tool.get("credential_id")
|
||||
if provider:
|
||||
if credential_id:
|
||||
from core.helper.credential_utils import check_credential_policy_compliance
|
||||
|
||||
check_credential_policy_compliance(credential_id, provider, PluginCredentialType.TOOL)
|
||||
else:
|
||||
self._check_default_tool_credential(workflow.tenant_id, provider)
|
||||
|
||||
elif node_type in ["llm", "knowledge_retrieval", "parameter_extractor", "question_classifier"]:
|
||||
model_config = node_data.get("model", {})
|
||||
provider = model_config.get("provider")
|
||||
model_name = model_config.get("name")
|
||||
|
||||
if provider and model_name:
|
||||
# Validate that the provider+model combination can fetch valid credentials
|
||||
self._validate_llm_model_config(workflow.tenant_id, provider, model_name)
|
||||
# Validate load balancing credentials if load balancing is enabled
|
||||
self._validate_load_balancing_credentials(workflow, node_data, node_id)
|
||||
else:
|
||||
raise ValueError(f"Node {node_id} ({node_type}): Missing provider or model configuration")
|
||||
|
||||
except Exception as e:
|
||||
if isinstance(e, ValueError):
|
||||
raise e
|
||||
else:
|
||||
raise ValueError(f"Node {node_id} ({node_type}): {str(e)}")
|
||||
|
||||
def _validate_llm_model_config(self, tenant_id: str, provider: str, model_name: str) -> None:
|
||||
"""
|
||||
Validate that an LLM model configuration can fetch valid credentials.
|
||||
|
||||
This method attempts to get the model instance and validates that:
|
||||
1. The provider exists and is configured
|
||||
2. The model exists in the provider
|
||||
3. Credentials can be fetched for the model
|
||||
4. The credentials pass policy compliance checks
|
||||
|
||||
:param tenant_id: The tenant ID
|
||||
:param provider: The provider name
|
||||
:param model_name: The model name
|
||||
:raises ValueError: If the model configuration is invalid or credentials fail policy checks
|
||||
"""
|
||||
try:
|
||||
from core.model_manager import ModelManager
|
||||
from core.model_runtime.entities.model_entities import ModelType
|
||||
|
||||
# Get model instance to validate provider+model combination
|
||||
model_manager = ModelManager()
|
||||
model_manager.get_model_instance(
|
||||
tenant_id=tenant_id, provider=provider, model_type=ModelType.LLM, model=model_name
|
||||
)
|
||||
|
||||
# The ModelInstance constructor will automatically check credential policy compliance
|
||||
# via ProviderConfiguration.get_current_credentials() -> _check_credential_policy_compliance()
|
||||
# If it fails, an exception will be raised
|
||||
|
||||
except Exception as e:
|
||||
raise ValueError(
|
||||
f"Failed to validate LLM model configuration (provider: {provider}, model: {model_name}): {str(e)}"
|
||||
)
|
||||
|
||||
def _check_default_tool_credential(self, tenant_id: str, provider: str) -> None:
|
||||
"""
|
||||
Check credential policy compliance for the default workspace credential of a tool provider.
|
||||
|
||||
This method finds the default credential for the given provider and validates it.
|
||||
Uses the same fallback logic as runtime to handle deauthorized credentials.
|
||||
|
||||
:param tenant_id: The tenant ID
|
        :param provider: The tool provider name
        :raises ValueError: If no default credential exists or if it fails policy compliance
        """
        try:
            from models.tools import BuiltinToolProvider

            # Use the same fallback logic as runtime: get the first available credential
            # ordered by is_default DESC, created_at ASC (same as tool_manager.py)
            default_provider = (
                db.session.query(BuiltinToolProvider)
                .where(
                    BuiltinToolProvider.tenant_id == tenant_id,
                    BuiltinToolProvider.provider == provider,
                )
                .order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
                .first()
            )

            if not default_provider:
                raise ValueError("No default credential found")

            # Check credential policy compliance using the default credential ID
            from core.helper.credential_utils import check_credential_policy_compliance

            check_credential_policy_compliance(
                credential_id=default_provider.id,
                provider=provider,
                credential_type=PluginCredentialType.TOOL,
                check_existence=False,
            )

        except Exception as e:
            raise ValueError(f"Failed to validate default credential for tool provider {provider}: {str(e)}")

    def _validate_load_balancing_credentials(self, workflow: Workflow, node_data: dict, node_id: str) -> None:
        """
        Validate load balancing credentials for a workflow node.

        :param workflow: The workflow being validated
        :param node_data: The node data containing model configuration
        :param node_id: The node ID for error reporting
        :raises ValueError: If load balancing credentials violate policy compliance
        """
        # Extract model configuration
        model_config = node_data.get("model", {})
        provider = model_config.get("provider")
        model_name = model_config.get("name")

        if not provider or not model_name:
            return  # No model config to validate

        # Check if this model has load balancing enabled
        if self._is_load_balancing_enabled(workflow.tenant_id, provider, model_name):
            # Get all load balancing configurations for this model
            load_balancing_configs = self._get_load_balancing_configs(workflow.tenant_id, provider, model_name)
            # Validate each load balancing configuration
            try:
                for config in load_balancing_configs:
                    if config.get("credential_id"):
                        from core.helper.credential_utils import check_credential_policy_compliance

                        check_credential_policy_compliance(
                            config["credential_id"], provider, PluginCredentialType.MODEL
                        )
            except Exception as e:
                raise ValueError(f"Invalid load balancing credentials for {provider}/{model_name}: {str(e)}")

    def _is_load_balancing_enabled(self, tenant_id: str, provider: str, model_name: str) -> bool:
        """
        Check if load balancing is enabled for a specific model.

        :param tenant_id: The tenant ID
        :param provider: The provider name
        :param model_name: The model name
        :return: True if load balancing is enabled, False otherwise
        """
        try:
            from core.model_runtime.entities.model_entities import ModelType
            from core.provider_manager import ProviderManager

            # Get provider configurations
            provider_manager = ProviderManager()
            provider_configurations = provider_manager.get_configurations(tenant_id)
            provider_configuration = provider_configurations.get(provider)

            if not provider_configuration:
                return False

            # Get provider model setting
            provider_model_setting = provider_configuration.get_provider_model_setting(
                model_type=ModelType.LLM,
                model=model_name,
            )
            return provider_model_setting is not None and provider_model_setting.load_balancing_enabled

        except Exception:
            # If we can't determine the status, assume load balancing is not enabled
            return False

    def _get_load_balancing_configs(self, tenant_id: str, provider: str, model_name: str) -> list[dict]:
        """
        Get all load balancing configurations for a model.

        :param tenant_id: The tenant ID
        :param provider: The provider name
        :param model_name: The model name
        :return: List of load balancing configuration dictionaries
        """
        try:
            from services.model_load_balancing_service import ModelLoadBalancingService

            model_load_balancing_service = ModelLoadBalancingService()
            _, configs = model_load_balancing_service.get_load_balancing_configs(
                tenant_id=tenant_id,
                provider=provider,
                model=model_name,
                model_type="llm",  # Load balancing is primarily used for LLM models
                config_from="predefined-model",  # predefined credentials; custom models are checked below
            )

            _, custom_configs = model_load_balancing_service.get_load_balancing_configs(
                tenant_id=tenant_id, provider=provider, model=model_name, model_type="llm", config_from="custom-model"
            )
            all_configs = configs + custom_configs

            return [config for config in all_configs if config.get("credential_id")]

        except Exception:
            # If we can't get the configurations, return an empty list.
            # This prevents validation errors from breaking the workflow.
            return []

    def get_default_block_configs(self) -> list[dict]:
        """
        Get default block configs
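Note: a minimal sketch of how a caller might drive the two validators above for every LLM node in a workflow graph. The `validate_workflow_credentials` wrapper and the `graph_dict["nodes"]` layout are assumptions for illustration, not part of this diff.

# Hypothetical wiring, assuming a service instance exposing the helpers above
# and a workflow whose parsed graph stores nodes as {"id": ..., "data": {...}}.
def validate_workflow_credentials(service, workflow) -> None:
    graph = workflow.graph_dict or {}
    for node in graph.get("nodes", []):
        node_data = node.get("data", {})
        if node_data.get("type") != "llm":
            continue  # only LLM nodes carry a model config in this sketch
        service._validate_load_balancing_credentials(
            workflow=workflow,
            node_data=node_data,
            node_id=node.get("id", ""),
        )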
@@ -706,7 +706,14 @@ class TestMCPToolManageService:

        # Verify mock interactions
        mock_mcp_client.assert_called_once_with(
-           "https://example.com/mcp", mcp_provider.id, tenant.id, authed=False, for_list=True
+           "https://example.com/mcp",
+           mcp_provider.id,
+           tenant.id,
+           authed=False,
+           for_list=True,
+           headers={},
+           timeout=30.0,
+           sse_read_timeout=300.0,
        )

    def test_list_mcp_tool_from_remote_server_auth_error(

@@ -1181,6 +1188,11 @@ class TestMCPToolManageService:
            db_session_with_containers, mock_external_service_dependencies
        )

+       # Create MCP provider first
+       mcp_provider = self._create_test_mcp_provider(
+           db_session_with_containers, mock_external_service_dependencies, tenant.id, account.id
+       )
+
        # Mock MCPClient and its context manager
        mock_tools = [
            type("MockTool", (), {"model_dump": lambda self: {"name": "test_tool_1", "description": "Test tool 1"}})(),

@@ -1194,7 +1206,7 @@ class TestMCPToolManageService:

        # Act: Execute the method under test
        result = MCPToolManageService._re_connect_mcp_provider(
-           "https://example.com/mcp", "test_provider_id", tenant.id
+           "https://example.com/mcp", mcp_provider.id, tenant.id
        )

        # Assert: Verify the expected outcomes

@@ -1213,7 +1225,14 @@ class TestMCPToolManageService:

        # Verify mock interactions
        mock_mcp_client.assert_called_once_with(
-           "https://example.com/mcp", "test_provider_id", tenant.id, authed=False, for_list=True
+           "https://example.com/mcp",
+           mcp_provider.id,
+           tenant.id,
+           authed=False,
+           for_list=True,
+           headers={},
+           timeout=30.0,
+           sse_read_timeout=300.0,
        )

    def test_re_connect_mcp_provider_auth_error(self, db_session_with_containers, mock_external_service_dependencies):

@@ -1231,6 +1250,11 @@ class TestMCPToolManageService:
            db_session_with_containers, mock_external_service_dependencies
        )

+       # Create MCP provider first
+       mcp_provider = self._create_test_mcp_provider(
+           db_session_with_containers, mock_external_service_dependencies, tenant.id, account.id
+       )
+
        # Mock MCPClient to raise authentication error
        with patch("services.tools.mcp_tools_manage_service.MCPClient") as mock_mcp_client:
            from core.mcp.error import MCPAuthError

@@ -1240,7 +1264,7 @@ class TestMCPToolManageService:

        # Act: Execute the method under test
        result = MCPToolManageService._re_connect_mcp_provider(
-           "https://example.com/mcp", "test_provider_id", tenant.id
+           "https://example.com/mcp", mcp_provider.id, tenant.id
        )

        # Assert: Verify the expected outcomes

@@ -1265,6 +1289,11 @@ class TestMCPToolManageService:
            db_session_with_containers, mock_external_service_dependencies
        )

+       # Create MCP provider first
+       mcp_provider = self._create_test_mcp_provider(
+           db_session_with_containers, mock_external_service_dependencies, tenant.id, account.id
+       )
+
        # Mock MCPClient to raise connection error
        with patch("services.tools.mcp_tools_manage_service.MCPClient") as mock_mcp_client:
            from core.mcp.error import MCPError

@@ -1274,4 +1303,4 @@ class TestMCPToolManageService:

        # Act & Assert: Verify proper error handling
        with pytest.raises(ValueError, match="Failed to re-connect MCP server: Connection failed"):
-           MCPToolManageService._re_connect_mcp_provider("https://example.com/mcp", "test_provider_id", tenant.id)
+           MCPToolManageService._re_connect_mcp_provider("https://example.com/mcp", mcp_provider.id, tenant.id)
@@ -0,0 +1,739 @@
"""
Integration tests for batch_create_segment_to_index_task using testcontainers.

This module provides comprehensive integration tests for the batch segment creation
and indexing task using TestContainers infrastructure. The tests ensure that the
task properly processes CSV files, creates document segments, and establishes
vector indexes in a real database environment.

All tests use the testcontainers infrastructure to ensure proper database isolation
and realistic testing scenarios with actual PostgreSQL and Redis instances.
"""

import uuid
from datetime import datetime
from unittest.mock import MagicMock, patch

import pytest
from faker import Faker

from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset, Document, DocumentSegment
from models.enums import CreatorUserRole
from models.model import UploadFile
from tasks.batch_create_segment_to_index_task import batch_create_segment_to_index_task


class TestBatchCreateSegmentToIndexTask:
    """Integration tests for batch_create_segment_to_index_task using testcontainers."""

    @pytest.fixture(autouse=True)
    def cleanup_database(self, db_session_with_containers):
        """Clean up database before each test to ensure isolation."""
        from extensions.ext_database import db
        from extensions.ext_redis import redis_client

        # Clear all test data
        db.session.query(DocumentSegment).delete()
        db.session.query(Document).delete()
        db.session.query(Dataset).delete()
        db.session.query(UploadFile).delete()
        db.session.query(TenantAccountJoin).delete()
        db.session.query(Tenant).delete()
        db.session.query(Account).delete()
        db.session.commit()

        # Clear Redis cache
        redis_client.flushdb()

    @pytest.fixture
    def mock_external_service_dependencies(self):
        """Mock setup for external service dependencies."""
        with (
            patch("tasks.batch_create_segment_to_index_task.storage") as mock_storage,
            patch("tasks.batch_create_segment_to_index_task.ModelManager") as mock_model_manager,
            patch("tasks.batch_create_segment_to_index_task.VectorService") as mock_vector_service,
        ):
            # Setup default mock returns
            mock_storage.download.return_value = None

            # Mock embedding model for high quality indexing
            mock_embedding_model = MagicMock()
            mock_embedding_model.get_text_embedding_num_tokens.return_value = [10, 15, 20]
            mock_model_manager_instance = MagicMock()
            mock_model_manager_instance.get_model_instance.return_value = mock_embedding_model
            mock_model_manager.return_value = mock_model_manager_instance

            # Mock vector service
            mock_vector_service.create_segments_vector.return_value = None

            yield {
                "storage": mock_storage,
                "model_manager": mock_model_manager,
                "vector_service": mock_vector_service,
                "embedding_model": mock_embedding_model,
            }

    def _create_test_account_and_tenant(self, db_session_with_containers):
        """
        Helper method to create a test account and tenant for testing.

        Args:
            db_session_with_containers: Database session from testcontainers infrastructure

        Returns:
            tuple: (Account, Tenant) created instances
        """
        fake = Faker()

        # Create account
        account = Account(
            email=fake.email(),
            name=fake.name(),
            interface_language="en-US",
            status="active",
        )

        from extensions.ext_database import db

        db.session.add(account)
        db.session.commit()

        # Create tenant for the account
        tenant = Tenant(
            name=fake.company(),
            status="normal",
        )
        db.session.add(tenant)
        db.session.commit()

        # Create tenant-account join
        join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role=TenantAccountRole.OWNER.value,
            current=True,
        )
        db.session.add(join)
        db.session.commit()

        # Set current tenant for account
        account.current_tenant = tenant

        return account, tenant

    def _create_test_dataset(self, db_session_with_containers, account, tenant):
        """
        Helper method to create a test dataset for testing.

        Args:
            db_session_with_containers: Database session from testcontainers infrastructure
            account: Account instance
            tenant: Tenant instance

        Returns:
            Dataset: Created dataset instance
        """
        fake = Faker()

        dataset = Dataset(
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(),
            data_source_type="upload_file",
            indexing_technique="high_quality",
            embedding_model="text-embedding-ada-002",
            embedding_model_provider="openai",
            created_by=account.id,
        )

        from extensions.ext_database import db

        db.session.add(dataset)
        db.session.commit()

        return dataset

    def _create_test_document(self, db_session_with_containers, account, tenant, dataset):
        """
        Helper method to create a test document for testing.

        Args:
            db_session_with_containers: Database session from testcontainers infrastructure
            account: Account instance
            tenant: Tenant instance
            dataset: Dataset instance

        Returns:
            Document: Created document instance
        """
        fake = Faker()

        document = Document(
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            position=1,
            data_source_type="upload_file",
            batch="test_batch",
            name=fake.file_name(),
            created_from="upload_file",
            created_by=account.id,
            indexing_status="completed",
            enabled=True,
            archived=False,
            doc_form="text_model",
            word_count=0,
        )

        from extensions.ext_database import db

        db.session.add(document)
        db.session.commit()

        return document

    def _create_test_upload_file(self, db_session_with_containers, account, tenant):
        """
        Helper method to create a test upload file for testing.

        Args:
            db_session_with_containers: Database session from testcontainers infrastructure
            account: Account instance
            tenant: Tenant instance

        Returns:
            UploadFile: Created upload file instance
        """
        fake = Faker()

        upload_file = UploadFile(
            tenant_id=tenant.id,
            storage_type="local",
            key=f"test_files/{fake.file_name()}",
            name=fake.file_name(),
            size=1024,
            extension=".csv",
            mime_type="text/csv",
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=account.id,
            created_at=datetime.now(),
            used=False,
        )

        from extensions.ext_database import db

        db.session.add(upload_file)
        db.session.commit()

        return upload_file

    def _create_test_csv_content(self, content_type="text_model"):
        """
        Helper method to create test CSV content.

        Args:
            content_type: Type of content to create ("text_model" or "qa_model")

        Returns:
            str: CSV content as string
        """
        if content_type == "qa_model":
            csv_content = "content,answer\n"
            csv_content += "This is the first segment content,This is the first answer\n"
            csv_content += "This is the second segment content,This is the second answer\n"
            csv_content += "This is the third segment content,This is the third answer\n"
        else:
            csv_content = "content\n"
            csv_content += "This is the first segment content\n"
            csv_content += "This is the second segment content\n"
            csv_content += "This is the third segment content\n"

        return csv_content

    def test_batch_create_segment_to_index_task_success_text_model(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test successful batch creation of segments for text model documents.

        This test verifies that the task can successfully:
        1. Process a CSV file with text content
        2. Create document segments with proper metadata
        3. Update document word count
        4. Create vector indexes
        5. Set Redis cache status
        """
        # Create test data
        account, tenant = self._create_test_account_and_tenant(db_session_with_containers)
        dataset = self._create_test_dataset(db_session_with_containers, account, tenant)
        document = self._create_test_document(db_session_with_containers, account, tenant, dataset)
        upload_file = self._create_test_upload_file(db_session_with_containers, account, tenant)

        # Create CSV content
        csv_content = self._create_test_csv_content("text_model")

        # Mock storage to return our CSV content
        mock_storage = mock_external_service_dependencies["storage"]

        def mock_download(key, file_path):
            with open(file_path, "w", encoding="utf-8") as f:
                f.write(csv_content)

        mock_storage.download.side_effect = mock_download

        # Execute the task
        job_id = str(uuid.uuid4())
        batch_create_segment_to_index_task(
            job_id=job_id,
            upload_file_id=upload_file.id,
            dataset_id=dataset.id,
            document_id=document.id,
            tenant_id=tenant.id,
            user_id=account.id,
        )

        # Verify results
        from extensions.ext_database import db

        # Check that segments were created
        segments = (
            db.session.query(DocumentSegment)
            .filter_by(document_id=document.id)
            .order_by(DocumentSegment.position)
            .all()
        )
        assert len(segments) == 3

        # Verify segment content and metadata
        for i, segment in enumerate(segments):
            assert segment.tenant_id == tenant.id
            assert segment.dataset_id == dataset.id
            assert segment.document_id == document.id
            assert segment.position == i + 1
            assert segment.status == "completed"
            assert segment.indexing_at is not None
            assert segment.completed_at is not None
            assert segment.answer is None  # text_model doesn't have answers

        # Check that document word count was updated
        db.session.refresh(document)
        assert document.word_count > 0

        # Verify vector service was called
        mock_vector_service = mock_external_service_dependencies["vector_service"]
        mock_vector_service.create_segments_vector.assert_called_once()

        # Check Redis cache was set
        from extensions.ext_redis import redis_client

        cache_key = f"segment_batch_import_{job_id}"
        cache_value = redis_client.get(cache_key)
        assert cache_value == b"completed"

    def test_batch_create_segment_to_index_task_dataset_not_found(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test task failure when dataset does not exist.

        This test verifies that the task properly handles error cases:
        1. Fails gracefully when dataset is not found
        2. Sets appropriate Redis cache status
        3. Logs error information
        4. Maintains database integrity
        """
        # Create test data
        account, tenant = self._create_test_account_and_tenant(db_session_with_containers)
        upload_file = self._create_test_upload_file(db_session_with_containers, account, tenant)

        # Use non-existent IDs
        non_existent_dataset_id = str(uuid.uuid4())
        non_existent_document_id = str(uuid.uuid4())

        # Execute the task with non-existent dataset
        job_id = str(uuid.uuid4())
        batch_create_segment_to_index_task(
            job_id=job_id,
            upload_file_id=upload_file.id,
            dataset_id=non_existent_dataset_id,
            document_id=non_existent_document_id,
            tenant_id=tenant.id,
            user_id=account.id,
        )

        # Verify error handling
        # Check Redis cache was set to error status
        from extensions.ext_redis import redis_client

        cache_key = f"segment_batch_import_{job_id}"
        cache_value = redis_client.get(cache_key)
        assert cache_value == b"error"

        # Verify no segments were created (since dataset doesn't exist)
        from extensions.ext_database import db

        segments = db.session.query(DocumentSegment).all()
        assert len(segments) == 0

        # Verify no documents were modified
        documents = db.session.query(Document).all()
        assert len(documents) == 0

    def test_batch_create_segment_to_index_task_document_not_found(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test task failure when document does not exist.

        This test verifies that the task properly handles error cases:
        1. Fails gracefully when document is not found
        2. Sets appropriate Redis cache status
        3. Maintains database integrity
        4. Logs appropriate error information
        """
        # Create test data
        account, tenant = self._create_test_account_and_tenant(db_session_with_containers)
        dataset = self._create_test_dataset(db_session_with_containers, account, tenant)
        upload_file = self._create_test_upload_file(db_session_with_containers, account, tenant)

        # Use non-existent document ID
        non_existent_document_id = str(uuid.uuid4())

        # Execute the task with non-existent document
        job_id = str(uuid.uuid4())
        batch_create_segment_to_index_task(
            job_id=job_id,
            upload_file_id=upload_file.id,
            dataset_id=dataset.id,
            document_id=non_existent_document_id,
            tenant_id=tenant.id,
            user_id=account.id,
        )

        # Verify error handling
        # Check Redis cache was set to error status
        from extensions.ext_redis import redis_client

        cache_key = f"segment_batch_import_{job_id}"
        cache_value = redis_client.get(cache_key)
        assert cache_value == b"error"

        # Verify no segments were created
        from extensions.ext_database import db

        segments = db.session.query(DocumentSegment).all()
        assert len(segments) == 0

        # Verify dataset remains unchanged (no segments were added to the dataset)
        db.session.refresh(dataset)
        segments_for_dataset = db.session.query(DocumentSegment).filter_by(dataset_id=dataset.id).all()
        assert len(segments_for_dataset) == 0

    def test_batch_create_segment_to_index_task_document_not_available(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test task failure when document is not available for indexing.

        This test verifies that the task properly handles error cases:
        1. Fails when document is disabled
        2. Fails when document is archived
        3. Fails when document indexing status is not completed
        4. Sets appropriate Redis cache status
        """
        # Create test data
        account, tenant = self._create_test_account_and_tenant(db_session_with_containers)
        dataset = self._create_test_dataset(db_session_with_containers, account, tenant)
        upload_file = self._create_test_upload_file(db_session_with_containers, account, tenant)

        # Create document with various unavailable states
        test_cases = [
            # Disabled document
            Document(
                tenant_id=tenant.id,
                dataset_id=dataset.id,
                position=1,
                data_source_type="upload_file",
                batch="test_batch",
                name="disabled_document",
                created_from="upload_file",
                created_by=account.id,
                indexing_status="completed",
                enabled=False,  # Document is disabled
                archived=False,
                doc_form="text_model",
                word_count=0,
            ),
            # Archived document
            Document(
                tenant_id=tenant.id,
                dataset_id=dataset.id,
                position=2,
                data_source_type="upload_file",
                batch="test_batch",
                name="archived_document",
                created_from="upload_file",
                created_by=account.id,
                indexing_status="completed",
                enabled=True,
                archived=True,  # Document is archived
                doc_form="text_model",
                word_count=0,
            ),
            # Document with incomplete indexing
            Document(
                tenant_id=tenant.id,
                dataset_id=dataset.id,
                position=3,
                data_source_type="upload_file",
                batch="test_batch",
                name="incomplete_document",
                created_from="upload_file",
                created_by=account.id,
                indexing_status="indexing",  # Not completed
                enabled=True,
                archived=False,
                doc_form="text_model",
                word_count=0,
            ),
        ]

        from extensions.ext_database import db

        for document in test_cases:
            db.session.add(document)
        db.session.commit()

        # Test each unavailable document
        for i, document in enumerate(test_cases):
            job_id = str(uuid.uuid4())
            batch_create_segment_to_index_task(
                job_id=job_id,
                upload_file_id=upload_file.id,
                dataset_id=dataset.id,
                document_id=document.id,
                tenant_id=tenant.id,
                user_id=account.id,
            )

            # Verify error handling for each case
            from extensions.ext_redis import redis_client

            cache_key = f"segment_batch_import_{job_id}"
            cache_value = redis_client.get(cache_key)
            assert cache_value == b"error"

            # Verify no segments were created
            segments = db.session.query(DocumentSegment).filter_by(document_id=document.id).all()
            assert len(segments) == 0

    def test_batch_create_segment_to_index_task_upload_file_not_found(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test task failure when upload file does not exist.

        This test verifies that the task properly handles error cases:
        1. Fails gracefully when upload file is not found
        2. Sets appropriate Redis cache status
        3. Maintains database integrity
        4. Logs appropriate error information
        """
        # Create test data
        account, tenant = self._create_test_account_and_tenant(db_session_with_containers)
        dataset = self._create_test_dataset(db_session_with_containers, account, tenant)
        document = self._create_test_document(db_session_with_containers, account, tenant, dataset)

        # Use non-existent upload file ID
        non_existent_upload_file_id = str(uuid.uuid4())

        # Execute the task with non-existent upload file
        job_id = str(uuid.uuid4())
        batch_create_segment_to_index_task(
            job_id=job_id,
            upload_file_id=non_existent_upload_file_id,
            dataset_id=dataset.id,
            document_id=document.id,
            tenant_id=tenant.id,
            user_id=account.id,
        )

        # Verify error handling
        # Check Redis cache was set to error status
        from extensions.ext_redis import redis_client

        cache_key = f"segment_batch_import_{job_id}"
        cache_value = redis_client.get(cache_key)
        assert cache_value == b"error"

        # Verify no segments were created
        from extensions.ext_database import db

        segments = db.session.query(DocumentSegment).all()
        assert len(segments) == 0

        # Verify document remains unchanged
        db.session.refresh(document)
        assert document.word_count == 0

    def test_batch_create_segment_to_index_task_empty_csv_file(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test task failure when CSV file is empty.

        This test verifies that the task properly handles error cases:
        1. Fails when CSV file contains no data
        2. Sets appropriate Redis cache status
        3. Maintains database integrity
        4. Logs appropriate error information
        """
        # Create test data
        account, tenant = self._create_test_account_and_tenant(db_session_with_containers)
        dataset = self._create_test_dataset(db_session_with_containers, account, tenant)
        document = self._create_test_document(db_session_with_containers, account, tenant, dataset)
        upload_file = self._create_test_upload_file(db_session_with_containers, account, tenant)

        # Create empty CSV content
        empty_csv_content = "content\n"  # Only header, no data rows

        # Mock storage to return empty CSV content
        mock_storage = mock_external_service_dependencies["storage"]

        def mock_download(key, file_path):
            with open(file_path, "w", encoding="utf-8") as f:
                f.write(empty_csv_content)

        mock_storage.download.side_effect = mock_download

        # Execute the task
        job_id = str(uuid.uuid4())
        batch_create_segment_to_index_task(
            job_id=job_id,
            upload_file_id=upload_file.id,
            dataset_id=dataset.id,
            document_id=document.id,
            tenant_id=tenant.id,
            user_id=account.id,
        )

        # Verify error handling
        # Check Redis cache was set to error status
        from extensions.ext_redis import redis_client

        cache_key = f"segment_batch_import_{job_id}"
        cache_value = redis_client.get(cache_key)
        assert cache_value == b"error"

        # Verify no segments were created
        from extensions.ext_database import db

        segments = db.session.query(DocumentSegment).all()
        assert len(segments) == 0

        # Verify document remains unchanged
        db.session.refresh(document)
        assert document.word_count == 0

    def test_batch_create_segment_to_index_task_position_calculation(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test proper position calculation for segments when existing segments exist.

        This test verifies that the task correctly:
        1. Calculates positions for new segments based on existing ones
        2. Handles position increment logic properly
        3. Maintains proper segment ordering
        4. Works with existing segment data
        """
        # Create test data
        account, tenant = self._create_test_account_and_tenant(db_session_with_containers)
        dataset = self._create_test_dataset(db_session_with_containers, account, tenant)
        document = self._create_test_document(db_session_with_containers, account, tenant, dataset)
        upload_file = self._create_test_upload_file(db_session_with_containers, account, tenant)

        # Create existing segments to test position calculation
        existing_segments = []
        for i in range(3):
            segment = DocumentSegment(
                tenant_id=tenant.id,
                dataset_id=dataset.id,
                document_id=document.id,
                position=i + 1,
                content=f"Existing segment {i + 1}",
                word_count=len(f"Existing segment {i + 1}"),
                tokens=10,
                created_by=account.id,
                status="completed",
                index_node_id=str(uuid.uuid4()),
                index_node_hash=f"hash_{i}",
            )
            existing_segments.append(segment)

        from extensions.ext_database import db

        for segment in existing_segments:
            db.session.add(segment)
        db.session.commit()

        # Create CSV content
        csv_content = self._create_test_csv_content("text_model")

        # Mock storage to return our CSV content
        mock_storage = mock_external_service_dependencies["storage"]

        def mock_download(key, file_path):
            with open(file_path, "w", encoding="utf-8") as f:
                f.write(csv_content)

        mock_storage.download.side_effect = mock_download

        # Execute the task
        job_id = str(uuid.uuid4())
        batch_create_segment_to_index_task(
            job_id=job_id,
            upload_file_id=upload_file.id,
            dataset_id=dataset.id,
            document_id=document.id,
            tenant_id=tenant.id,
            user_id=account.id,
        )

        # Verify results
        # Check that new segments were created with correct positions
        all_segments = (
            db.session.query(DocumentSegment)
            .filter_by(document_id=document.id)
            .order_by(DocumentSegment.position)
            .all()
        )
        assert len(all_segments) == 6  # 3 existing + 3 new

        # Verify position ordering
        for i, segment in enumerate(all_segments):
            assert segment.position == i + 1

        # Verify new segments have correct positions (4, 5, 6)
        new_segments = all_segments[3:]
        for i, segment in enumerate(new_segments):
            expected_position = 4 + i  # Should start at position 4
            assert segment.position == expected_position
            assert segment.status == "completed"
            assert segment.indexing_at is not None
            assert segment.completed_at is not None

        # Check that document word count was updated
        db.session.refresh(document)
        assert document.word_count > 0

        # Verify vector service was called
        mock_vector_service = mock_external_service_dependencies["vector_service"]
        mock_vector_service.create_segments_vector.assert_called_once()

        # Check Redis cache was set
        from extensions.ext_redis import redis_client

        cache_key = f"segment_batch_import_{job_id}"
        cache_value = redis_client.get(cache_key)
        assert cache_value == b"completed"
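Note: the tests above assert on the `segment_batch_import_{job_id}` Redis key, which is the only progress signal the task exposes. A minimal polling sketch, assuming the same `redis_client` extension; the `get_batch_import_status` helper is hypothetical, not part of this diff.

# Hypothetical status helper for an API layer that reports batch-import progress.
from extensions.ext_redis import redis_client

def get_batch_import_status(job_id: str) -> str:
    """Return 'completed', 'error', or 'processing' for a batch import job."""
    value = redis_client.get(f"segment_batch_import_{job_id}")
    if value is None:
        return "processing"  # key not written yet (or already expired)
    return value.decode("utf-8")  # redis returns bytes, e.g. b"completed"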
[File diff suppressed because it is too large]
@@ -917,6 +917,12 @@ WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100

HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
HTTP_REQUEST_NODE_SSL_VERIFY=True
+# Base64 encoded CA certificate data for custom certificate verification (PEM format, optional)
+# HTTP_REQUEST_NODE_SSL_CERT_DATA=LS0tLS1CRUdJTi...
+# Base64 encoded client certificate data for mutual TLS authentication (PEM format, optional)
+# HTTP_REQUEST_NODE_SSL_CLIENT_CERT_DATA=LS0tLS1CRUdJTi...
+# Base64 encoded client private key data for mutual TLS authentication (PEM format, optional)
+# HTTP_REQUEST_NODE_SSL_CLIENT_KEY_DATA=LS0tLS1CRUdJTi...

# Respect X-* headers to redirect clients
RESPECT_XFORWARD_HEADERS_ENABLED=false
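Note: the three `HTTP_REQUEST_NODE_SSL_*_DATA` variables above take base64-encoded PEM data, not file paths. A minimal sketch for producing such a value; the `ca.pem` filename is illustrative only.

# Encode a PEM file into the single-line base64 form these env vars expect.
import base64
from pathlib import Path

pem_bytes = Path("ca.pem").read_bytes()  # illustrative filename
print("HTTP_REQUEST_NODE_SSL_CERT_DATA=" + base64.b64encode(pem_bytes).decode("ascii"))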
@@ -1270,6 +1276,10 @@ QUEUE_MONITOR_INTERVAL=30

SWAGGER_UI_ENABLED=true
SWAGGER_UI_PATH=/swagger-ui.html

+# Whether to encrypt dataset IDs when exporting DSL files (default: true)
+# Set to false to export dataset IDs as plain text for easier cross-environment import
+DSL_EXPORT_ENCRYPT_DATASET_ID=true
+
# Celery schedule tasks configuration
ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false
ENABLE_CLEAN_UNUSED_DATASETS_TASK=false
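Note: a hypothetical sketch of the kind of branch this flag could gate during DSL export; the `encrypt` callable is a placeholder, not Dify's actual encrypter API. Only the `dify_config.DSL_EXPORT_ENCRYPT_DATASET_ID` field itself is confirmed by this commit.

# Placeholder illustration of gating dataset-ID encryption on export.
from configs import dify_config

def export_dataset_id(dataset_id: str, encrypt) -> str:
    if dify_config.DSL_EXPORT_ENCRYPT_DATASET_ID:
        return encrypt(dataset_id)  # opaque token in the exported YAML
    return dataset_id  # plain UUID, importable across environments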
@@ -574,6 +574,7 @@ x-shared-env: &shared-api-worker-env

  QUEUE_MONITOR_INTERVAL: ${QUEUE_MONITOR_INTERVAL:-30}
  SWAGGER_UI_ENABLED: ${SWAGGER_UI_ENABLED:-true}
  SWAGGER_UI_PATH: ${SWAGGER_UI_PATH:-/swagger-ui.html}
+ DSL_EXPORT_ENCRYPT_DATASET_ID: ${DSL_EXPORT_ENCRYPT_DATASET_ID:-true}
  ENABLE_CLEAN_EMBEDDING_CACHE_TASK: ${ENABLE_CLEAN_EMBEDDING_CACHE_TASK:-false}
  ENABLE_CLEAN_UNUSED_DATASETS_TASK: ${ENABLE_CLEAN_UNUSED_DATASETS_TASK:-false}
  ENABLE_CREATE_TIDB_SERVERLESS_TASK: ${ENABLE_CREATE_TIDB_SERVERLESS_TASK:-false}
@@ -0,0 +1,144 @@
{
  "plugins": [
    "unicorn",
    "typescript",
    "oxc"
  ],
  "categories": {},
  "rules": {
    "for-direction": "error",
    "no-async-promise-executor": "error",
    "no-caller": "error",
    "no-class-assign": "error",
    "no-compare-neg-zero": "error",
    "no-cond-assign": "warn",
    "no-const-assign": "warn",
    "no-constant-binary-expression": "error",
    "no-constant-condition": "warn",
    "no-control-regex": "warn",
    "no-debugger": "warn",
    "no-delete-var": "warn",
    "no-dupe-class-members": "warn",
    "no-dupe-else-if": "warn",
    "no-dupe-keys": "warn",
    "no-duplicate-case": "warn",
    "no-empty-character-class": "warn",
    "no-empty-pattern": "warn",
    "no-empty-static-block": "warn",
    "no-eval": "warn",
    "no-ex-assign": "warn",
    "no-extra-boolean-cast": "warn",
    "no-func-assign": "warn",
    "no-global-assign": "warn",
    "no-import-assign": "warn",
    "no-invalid-regexp": "warn",
    "no-irregular-whitespace": "warn",
    "no-loss-of-precision": "warn",
    "no-new-native-nonconstructor": "warn",
    "no-nonoctal-decimal-escape": "warn",
    "no-obj-calls": "warn",
    "no-self-assign": "warn",
    "no-setter-return": "warn",
    "no-shadow-restricted-names": "warn",
    "no-sparse-arrays": "warn",
    "no-this-before-super": "warn",
    "no-unassigned-vars": "warn",
    "no-unsafe-finally": "warn",
    "no-unsafe-negation": "warn",
    "no-unsafe-optional-chaining": "warn",
    "no-unused-labels": "warn",
    "no-unused-private-class-members": "warn",
    "no-unused-vars": "warn",
    "no-useless-backreference": "warn",
    "no-useless-catch": "error",
    "no-useless-escape": "warn",
    "no-useless-rename": "warn",
    "no-with": "warn",
    "require-yield": "warn",
    "use-isnan": "warn",
    "valid-typeof": "warn",
    "oxc/bad-array-method-on-arguments": "warn",
    "oxc/bad-char-at-comparison": "warn",
    "oxc/bad-comparison-sequence": "warn",
    "oxc/bad-min-max-func": "warn",
    "oxc/bad-object-literal-comparison": "warn",
    "oxc/bad-replace-all-arg": "warn",
    "oxc/const-comparisons": "warn",
    "oxc/double-comparisons": "warn",
    "oxc/erasing-op": "warn",
    "oxc/missing-throw": "warn",
    "oxc/number-arg-out-of-range": "warn",
    "oxc/only-used-in-recursion": "warn",
    "oxc/uninvoked-array-callback": "warn",
    "typescript/await-thenable": "warn",
    "typescript/no-array-delete": "warn",
    "typescript/no-base-to-string": "warn",
    "typescript/no-confusing-void-expression": "warn",
    "typescript/no-duplicate-enum-values": "warn",
    "typescript/no-duplicate-type-constituents": "warn",
    "typescript/no-extra-non-null-assertion": "warn",
    "typescript/no-floating-promises": "warn",
    "typescript/no-for-in-array": "warn",
    "typescript/no-implied-eval": "warn",
    "typescript/no-meaningless-void-operator": "warn",
    "typescript/no-misused-new": "warn",
    "typescript/no-misused-spread": "warn",
    "typescript/no-non-null-asserted-optional-chain": "warn",
    "typescript/no-redundant-type-constituents": "warn",
    "typescript/no-this-alias": "warn",
    "typescript/no-unnecessary-parameter-property-assignment": "warn",
    "typescript/no-unsafe-declaration-merging": "warn",
    "typescript/no-unsafe-unary-minus": "warn",
    "typescript/no-useless-empty-export": "warn",
    "typescript/no-wrapper-object-types": "warn",
    "typescript/prefer-as-const": "warn",
    "typescript/require-array-sort-compare": "warn",
    "typescript/restrict-template-expressions": "warn",
    "typescript/triple-slash-reference": "warn",
    "typescript/unbound-method": "warn",
    "unicorn/no-await-in-promise-methods": "warn",
    "unicorn/no-empty-file": "warn",
    "unicorn/no-invalid-fetch-options": "warn",
    "unicorn/no-invalid-remove-event-listener": "warn",
    "unicorn/no-new-array": "warn",
    "unicorn/no-single-promise-in-promise-methods": "warn",
    "unicorn/no-thenable": "warn",
    "unicorn/no-unnecessary-await": "warn",
    "unicorn/no-useless-fallback-in-spread": "warn",
    "unicorn/no-useless-length-check": "warn",
    "unicorn/no-useless-spread": "warn",
    "unicorn/prefer-set-size": "warn",
    "unicorn/prefer-string-starts-ends-with": "warn"
  },
  "settings": {
    "jsx-a11y": {
      "polymorphicPropName": null,
      "components": {},
      "attributes": {}
    },
    "next": {
      "rootDir": []
    },
    "react": {
      "formComponents": [],
      "linkComponents": []
    },
    "jsdoc": {
      "ignorePrivate": false,
      "ignoreInternal": false,
      "ignoreReplacesDocs": true,
      "overrideReplacesDocs": true,
      "augmentsExtendsReplacesDocs": false,
      "implementsReplacesDocs": false,
      "exemptDestructuredRootsFromChecks": false,
      "tagNamePreference": {}
    }
  },
  "env": {
    "builtin": true
  },
  "globals": {},
  "ignorePatterns": [
    "**/*.js"
  ]
}
@@ -82,7 +82,7 @@ export default function CheckCode() {

      <form action="">
        <input type='text' className='hidden' />
        <label htmlFor="code" className='system-md-semibold mb-1 text-text-secondary'>{t('login.checkCode.verificationCode')}</label>
-       <Input value={code} onChange={e => setVerifyCode(e.target.value)} max-length={6} className='mt-1' placeholder={t('login.checkCode.verificationCodePlaceholder') as string} />
+       <Input value={code} onChange={e => setVerifyCode(e.target.value)} maxLength={6} className='mt-1' placeholder={t('login.checkCode.verificationCodePlaceholder') || ''} />
        <Button loading={loading} disabled={loading} className='my-3 w-full' variant='primary' onClick={verify}>{t('login.checkCode.verify')}</Button>
        <Countdown onResend={resendCode} />
      </form>
@@ -104,7 +104,7 @@ export default function CheckCode() {

      <form action="">
        <label htmlFor="code" className='system-md-semibold mb-1 text-text-secondary'>{t('login.checkCode.verificationCode')}</label>
-       <Input value={code} onChange={e => setVerifyCode(e.target.value)} max-length={6} className='mt-1' placeholder={t('login.checkCode.verificationCodePlaceholder') as string} />
+       <Input value={code} onChange={e => setVerifyCode(e.target.value)} maxLength={6} className='mt-1' placeholder={t('login.checkCode.verificationCodePlaceholder') || ''} />
        <Button loading={loading} disabled={loading} className='my-3 w-full' variant='primary' onClick={verify}>{t('login.checkCode.verify')}</Button>
        <Countdown onResend={resendCode} />
      </form>
@@ -43,9 +43,9 @@ const AvatarWithEdit = ({ onSave, ...props }: AvatarWithEditProps) => {
  const handleSaveAvatar = useCallback(async (uploadedFileId: string) => {
    try {
      await updateUserProfile({ url: 'account/avatar', body: { avatar: uploadedFileId } })
-     notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
      setIsShowAvatarPicker(false)
      onSave?.()
+     notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
    }
    catch (e) {
      notify({ type: 'error', message: (e as Error).message })
@@ -1,5 +1,5 @@
'use client'
-import { useState } from 'react'
+import { useEffect, useState } from 'react'
import cn from '@/utils/classnames'

export type AvatarProps = {

@@ -27,6 +27,12 @@ const Avatar = ({
    onError?.(true)
  }

+  // After upload, the API first returns a broken image URL ('.../files//file-preview/...')
+  // and only later the correct one, which left the avatar hidden; reset the error flag
+  // when the avatar URL changes.
+  useEffect(() => {
+    if (avatar && imgError)
+      setImgError(false)
+  }, [avatar])
+
  if (avatar && !imgError) {
    return (
      <img
@@ -215,7 +215,7 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => {
      }
    }
    if (item.number) {
-     const convertedNumber = Number(initInputs[item.number.variable]) ?? undefined
+     const convertedNumber = Number(initInputs[item.number.variable])
      return {
        ...item.number,
        default: convertedNumber || item.default || item.number.default,
@@ -188,7 +188,7 @@ export const useEmbeddedChatbot = () => {
      }
    }
    if (item.number) {
-     const convertedNumber = Number(initInputs[item.number.variable]) ?? undefined
+     const convertedNumber = Number(initInputs[item.number.variable])
      return {
        ...item.number,
        default: convertedNumber || item.default || item.number.default,
@@ -43,6 +43,16 @@ async function getProcessedInputsFromUrlParams(): Promise<Record<string, any>> {

async function getProcessedSystemVariablesFromUrlParams(): Promise<Record<string, any>> {
  const urlParams = new URLSearchParams(window.location.search)
+ const redirectUrl = urlParams.get('redirect_url')
+ if (redirectUrl) {
+   const decodedRedirectUrl = decodeURIComponent(redirectUrl)
+   const queryString = decodedRedirectUrl.split('?')[1]
+   if (queryString) {
+     const redirectParams = new URLSearchParams(queryString)
+     for (const [key, value] of redirectParams.entries())
+       urlParams.set(key, value)
+   }
+ }
  const systemVariables: Record<string, any> = {}
  const entriesArray = Array.from(urlParams.entries())
  await Promise.all(
@@ -9,17 +9,34 @@ import { isValidUrl } from './utils'

const Link = ({ node, children, ...props }: any) => {
  const { onSend } = useChatContext()
+ const commonClassName = 'cursor-pointer underline !decoration-primary-700 decoration-dashed'
  if (node.properties?.href && node.properties.href?.toString().startsWith('abbr')) {
    const hidden_text = decodeURIComponent(node.properties.href.toString().split('abbr:')[1])

-   return <abbr className="cursor-pointer underline !decoration-primary-700 decoration-dashed" onClick={() => onSend?.(hidden_text)} title={node.children[0]?.value || ''}>{node.children[0]?.value || ''}</abbr>
+   return <abbr className={commonClassName} onClick={() => onSend?.(hidden_text)} title={node.children[0]?.value || ''}>{node.children[0]?.value || ''}</abbr>
  }
  else {
    const href = props.href || node.properties?.href
-   if(!href || !isValidUrl(href))
+   if (href && /^#[a-zA-Z0-9_\-]+$/.test(href.toString())) {
+     const handleClick = (e: React.MouseEvent<HTMLAnchorElement>) => {
+       e.preventDefault()
+       // scroll to the target element if it exists within the answer container
+       const answerContainer = e.currentTarget.closest('.chat-answer-container')
+
+       if (answerContainer) {
+         const targetId = CSS.escape(href.toString().substring(1))
+         const targetElement = answerContainer.querySelector(`[id="${targetId}"]`)
+         if (targetElement)
+           targetElement.scrollIntoView({ behavior: 'smooth' })
+       }
+     }
+     return <a href={href} onClick={handleClick} className={commonClassName}>{children || 'ScrollView'}</a>
+   }
+
+   if (!href || !isValidUrl(href))
      return <span>{children}</span>

-   return <a href={href} target="_blank" className="cursor-pointer underline !decoration-primary-700 decoration-dashed">{children || 'Download'}</a>
+   return <a href={href} target="_blank" rel="noopener noreferrer" className={commonClassName}>{children || 'Download'}</a>
  }
}
@@ -186,12 +186,12 @@ const ParameterItem: FC<ParameterItemProps> = ({
  if (parameterRule.type === 'boolean') {
    return (
      <Radio.Group
-       className='flex w-[178px] items-center'
+       className='flex w-[150px] items-center'
        value={renderValue as boolean}
        onChange={handleRadioChange}
      >
-       <Radio value={true} className='w-[83px]'>True</Radio>
-       <Radio value={false} className='w-[83px]'>False</Radio>
+       <Radio value={true} className='w-[70px] px-[18px]'>True</Radio>
+       <Radio value={false} className='w-[70px] px-[18px]'>False</Radio>
      </Radio.Group>
    )
  }

@@ -199,7 +199,7 @@ const ParameterItem: FC<ParameterItemProps> = ({
  if (parameterRule.type === 'string' && !parameterRule.options?.length) {
    return (
      <input
-       className={cn(isInWorkflow ? 'w-[178px]' : 'w-full', 'system-sm-regular ml-4 flex h-8 appearance-none items-center rounded-lg bg-components-input-bg-normal px-3 text-components-input-text-filled outline-none')}
+       className={cn(isInWorkflow ? 'w-[150px]' : 'w-full', 'system-sm-regular ml-4 flex h-8 appearance-none items-center rounded-lg bg-components-input-bg-normal px-3 text-components-input-text-filled outline-none')}
        value={renderValue as string}
        onChange={handleStringInputChange}
      />

@@ -270,7 +270,7 @@ const ParameterItem: FC<ParameterItemProps> = ({
    parameterRule.help && (
      <Tooltip
        popupContent={(
-         <div className='w-[178px] whitespace-pre-wrap'>{parameterRule.help[language] || parameterRule.help.en_US}</div>
+         <div className='w-[150px] whitespace-pre-wrap'>{parameterRule.help[language] || parameterRule.help.en_US}</div>
        )}
        popupClassName='mr-1'
        triggerClassName='mr-1 w-4 h-4 shrink-0'

@@ -280,7 +280,7 @@ const ParameterItem: FC<ParameterItemProps> = ({
  </div>
  {
    parameterRule.type === 'tag' && (
-     <div className={cn(!isInWorkflow && 'w-[178px]', 'system-xs-regular text-text-tertiary')}>
+     <div className={cn(!isInWorkflow && 'w-[150px]', 'system-xs-regular text-text-tertiary')}>
        {parameterRule?.tagPlaceholder?.[language]}
      </div>
    )
@@ -196,7 +196,7 @@ const ModelLoadBalancingConfigs = ({
      )
      : (
        <Tooltip popupContent={t('common.modelProvider.apiKeyStatusNormal')}>
-         <Indicator color='green' />
+         <Indicator color={credential?.not_allowed_to_use ? 'gray' : 'green'} />
        </Tooltip>
      )}
  </div>

@@ -232,7 +232,7 @@ const ModelLoadBalancingConfigs = ({
  <>
    <span className='mr-2 h-3 border-r border-r-divider-subtle' />
    <Switch
-     defaultValue={Boolean(config.enabled)}
+     defaultValue={credential?.not_allowed_to_use ? false : Boolean(config.enabled)}
      size='md'
      className='justify-self-end'
      onChange={value => toggleConfigEntryEnabled(index, value)}
@@ -0,0 +1,143 @@
'use client'
import React, { useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import { RiAddLine, RiDeleteBinLine } from '@remixicon/react'
import Input from '@/app/components/base/input'
import Button from '@/app/components/base/button'
import ActionButton from '@/app/components/base/action-button'
import cn from '@/utils/classnames'

export type HeaderItem = {
  key: string
  value: string
}

type Props = {
  headers: Record<string, string>
  onChange: (headers: Record<string, string>) => void
  readonly?: boolean
  isMasked?: boolean
}

const HeadersInput = ({
  headers,
  onChange,
  readonly = false,
  isMasked = false,
}: Props) => {
  const { t } = useTranslation()

  const headerItems = Object.entries(headers).map(([key, value]) => ({ key, value }))

  const handleItemChange = useCallback((index: number, field: 'key' | 'value', value: string) => {
    const newItems = [...headerItems]
    newItems[index] = { ...newItems[index], [field]: value }

    const newHeaders = newItems.reduce((acc, item) => {
      if (item.key.trim())
        acc[item.key.trim()] = item.value
      return acc
    }, {} as Record<string, string>)

    onChange(newHeaders)
  }, [headerItems, onChange])

  const handleRemoveItem = useCallback((index: number) => {
    const newItems = headerItems.filter((_, i) => i !== index)
    const newHeaders = newItems.reduce((acc, item) => {
      if (item.key.trim())
        acc[item.key.trim()] = item.value

      return acc
    }, {} as Record<string, string>)
    onChange(newHeaders)
  }, [headerItems, onChange])

  const handleAddItem = useCallback(() => {
    const newHeaders = { ...headers, '': '' }
    onChange(newHeaders)
  }, [headers, onChange])

  if (headerItems.length === 0) {
    return (
      <div className='space-y-2'>
        <div className='body-xs-regular text-text-tertiary'>
          {t('tools.mcp.modal.noHeaders')}
        </div>
        {!readonly && (
          <Button
            variant='secondary'
            size='small'
            onClick={handleAddItem}
            className='w-full'
          >
            <RiAddLine className='mr-1 h-4 w-4' />
            {t('tools.mcp.modal.addHeader')}
          </Button>
        )}
      </div>
    )
  }

  return (
    <div className='space-y-2'>
      {isMasked && (
        <div className='body-xs-regular text-text-tertiary'>
          {t('tools.mcp.modal.maskedHeadersTip')}
        </div>
      )}
      <div className='overflow-hidden rounded-lg border border-divider-regular'>
        <div className='system-xs-medium-uppercase bg-background-secondary flex h-7 items-center leading-7 text-text-tertiary'>
          <div className='h-full w-1/2 border-r border-divider-regular pl-3'>{t('tools.mcp.modal.headerKey')}</div>
          <div className='h-full w-1/2 pl-3 pr-1'>{t('tools.mcp.modal.headerValue')}</div>
        </div>
        {headerItems.map((item, index) => (
          <div key={index} className={cn(
            'flex items-center border-divider-regular',
            index < headerItems.length - 1 && 'border-b',
          )}>
            <div className='w-1/2 border-r border-divider-regular'>
              <Input
                value={item.key}
                onChange={e => handleItemChange(index, 'key', e.target.value)}
                placeholder={t('tools.mcp.modal.headerKeyPlaceholder')}
                className='rounded-none border-0'
                readOnly={readonly}
              />
            </div>
            <div className='flex w-1/2 items-center'>
              <Input
                value={item.value}
                onChange={e => handleItemChange(index, 'value', e.target.value)}
                placeholder={t('tools.mcp.modal.headerValuePlaceholder')}
                className='flex-1 rounded-none border-0'
                readOnly={readonly}
              />
              {!readonly && headerItems.length > 1 && (
                <ActionButton
                  onClick={() => handleRemoveItem(index)}
                  className='mr-2'
                >
                  <RiDeleteBinLine className='h-4 w-4 text-text-destructive' />
                </ActionButton>
              )}
            </div>
          </div>
        ))}
      </div>
      {!readonly && (
        <Button
          variant='secondary'
          size='small'
          onClick={handleAddItem}
          className='w-full'
        >
          <RiAddLine className='mr-1 h-4 w-4' />
          {t('tools.mcp.modal.addHeader')}
        </Button>
      )}
    </div>
  )
}

export default React.memo(HeadersInput)
@@ -9,6 +9,7 @@ import AppIcon from '@/app/components/base/app-icon'
import Modal from '@/app/components/base/modal'
import Button from '@/app/components/base/button'
import Input from '@/app/components/base/input'
+import HeadersInput from './headers-input'
import type { AppIconType } from '@/types/app'
import type { ToolWithProvider } from '@/app/components/workflow/types'
import { noop } from 'lodash-es'

@@ -29,6 +30,7 @@ export type DuplicateAppModalProps = {
    server_identifier: string
    timeout: number
    sse_read_timeout: number
+   headers?: Record<string, string>
  }) => void
  onHide: () => void
}

@@ -66,12 +68,38 @@ const MCPModal = ({
  const [appIcon, setAppIcon] = useState<AppIconSelection>(getIcon(data))
  const [showAppIconPicker, setShowAppIconPicker] = useState(false)
  const [serverIdentifier, setServerIdentifier] = React.useState(data?.server_identifier || '')
- const [timeout, setMcpTimeout] = React.useState(30)
- const [sseReadTimeout, setSseReadTimeout] = React.useState(300)
+ const [timeout, setMcpTimeout] = React.useState(data?.timeout || 30)
+ const [sseReadTimeout, setSseReadTimeout] = React.useState(data?.sse_read_timeout || 300)
+ const [headers, setHeaders] = React.useState<Record<string, string>>(
+   data?.masked_headers || {},
+ )
  const [isFetchingIcon, setIsFetchingIcon] = useState(false)
  const appIconRef = useRef<HTMLDivElement>(null)
  const isHovering = useHover(appIconRef)

+ // Update states when data changes (for edit mode)
+ React.useEffect(() => {
+   if (data) {
+     setUrl(data.server_url || '')
+     setName(data.name || '')
+     setServerIdentifier(data.server_identifier || '')
+     setMcpTimeout(data.timeout || 30)
+     setSseReadTimeout(data.sse_read_timeout || 300)
+     setHeaders(data.masked_headers || {})
+     setAppIcon(getIcon(data))
+   }
+   else {
+     // Reset for create mode
+     setUrl('')
+     setName('')
+     setServerIdentifier('')
+     setMcpTimeout(30)
+     setSseReadTimeout(300)
+     setHeaders({})
+     setAppIcon(DEFAULT_ICON as AppIconSelection)
+   }
+ }, [data])
+
  const isValidUrl = (string: string) => {
    try {
      const urlPattern = /^(https?:\/\/)((([a-z\d]([a-z\d-]*[a-z\d])*)\.)+[a-z]{2,}|((\d{1,3}\.){3}\d{1,3})|localhost)(\:\d+)?(\/[-a-z\d%_.~+]*)*(\?[;&a-z\d%_.~+=-]*)?/i

@@ -129,6 +157,7 @@ const MCPModal = ({
      server_identifier: serverIdentifier.trim(),
      timeout: timeout || 30,
      sse_read_timeout: sseReadTimeout || 300,
+     headers: Object.keys(headers).length > 0 ? headers : undefined,
    })
    if (isCreate)
      onHide()

@@ -231,6 +260,18 @@ const MCPModal = ({
          placeholder={t('tools.mcp.modal.timeoutPlaceholder')}
        />
      </div>
+     <div>
+       <div className='mb-1 flex h-6 items-center'>
+         <span className='system-sm-medium text-text-secondary'>{t('tools.mcp.modal.headers')}</span>
+       </div>
+       <div className='body-xs-regular mb-2 text-text-tertiary'>{t('tools.mcp.modal.headersTip')}</div>
+       <HeadersInput
+         headers={headers}
+         onChange={setHeaders}
+         readonly={false}
+         isMasked={!isCreate && Object.keys(headers).length > 0}
+       />
+     </div>
    </div>
    <div className='flex flex-row-reverse pt-5'>
      <Button disabled={!name || !url || !serverIdentifier || isFetchingIcon} className='ml-2' variant='primary' onClick={submit}>{data ? t('tools.mcp.modal.save') : t('tools.mcp.modal.confirm')}</Button>
@@ -60,6 +60,8 @@ export type Collection = {
  server_identifier?: string
  timeout?: number
  sse_read_timeout?: number
  headers?: Record<string, string>
  masked_headers?: Record<string, string>
  is_authorized?: boolean
  provider?: string
}

@@ -187,4 +189,5 @@ export type MCPServerDetail = {
  description: string
  status: string
  parameters?: Record<string, string>
  headers?: Record<string, string>
}

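The Collection type now carries both headers and masked_headers. A plausible reading of the pair (an assumption; this diff alone does not spell it out): the backend returns masked values for display, and the client should only send back entries the user actually re-typed. A minimal sketch under that assumption:

// Sketch only: the masked-vs-real split implied by the two fields above.
type HeaderMap = Record<string, string>

// Hypothetical helper, not in the diff: keep masked values untouched, submit only edited entries.
const diffEditedHeaders = (masked: HeaderMap, edited: HeaderMap): HeaderMap | undefined => {
  const changed: HeaderMap = {}
  for (const [key, value] of Object.entries(edited)) {
    if (masked[key] !== value) // differs from the masked placeholder => user re-typed it
      changed[key] = value
  }
  return Object.keys(changed).length > 0 ? changed : undefined
}
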
@@ -62,9 +62,9 @@ const CandidateNode = () => {
    })
    setNodes(newNodes)
    if (candidateNode.type === CUSTOM_NOTE_NODE)
      saveStateToHistory(WorkflowHistoryEvent.NoteAdd)
      saveStateToHistory(WorkflowHistoryEvent.NoteAdd, { nodeId: candidateNode.id })
    else
      saveStateToHistory(WorkflowHistoryEvent.NodeAdd)
      saveStateToHistory(WorkflowHistoryEvent.NodeAdd, { nodeId: candidateNode.id })

    workflowStore.setState({ candidateNode: undefined })

@@ -89,10 +89,19 @@ const ViewWorkflowHistory = () => {

  const calculateChangeList: ChangeHistoryList = useMemo(() => {
    const filterList = (list: any, startIndex = 0, reverse = false) => list.map((state: Partial<WorkflowHistoryState>, index: number) => {
      const nodes = (state.nodes || store.getState().nodes) || []
      const nodeId = state?.workflowHistoryEventMeta?.nodeId
      const targetTitle = nodes.find(n => n.id === nodeId)?.data?.title ?? ''
      return {
        label: state.workflowHistoryEvent && getHistoryLabel(state.workflowHistoryEvent),
        index: reverse ? list.length - 1 - index - startIndex : index - startIndex,
        state,
        state: {
          ...state,
          workflowHistoryEventMeta: state.workflowHistoryEventMeta ? {
            ...state.workflowHistoryEventMeta,
            nodeTitle: state.workflowHistoryEventMeta.nodeTitle || targetTitle,
          } : undefined,
        },
      }
    }).filter(Boolean)

@@ -110,6 +119,12 @@ const ViewWorkflowHistory = () => {
    }
  }, [futureStates, getHistoryLabel, pastStates, store])

  const composeHistoryItemLabel = useCallback((nodeTitle: string | undefined, baseLabel: string) => {
    if (!nodeTitle)
      return baseLabel
    return `${nodeTitle} ${baseLabel}`
  }, [])

  return (
    <PortalToFollowElem

@@ -197,7 +212,10 @@ const ViewWorkflowHistory = () => {
          'flex items-center text-[13px] font-medium leading-[18px] text-text-secondary',
        )}
      >
        {item?.label || t('workflow.changeHistory.sessionStart')} ({calculateStepLabel(item?.index)}{item?.index === currentHistoryStateIndex && t('workflow.changeHistory.currentState')})
        {composeHistoryItemLabel(
          item?.state?.workflowHistoryEventMeta?.nodeTitle,
          item?.label || t('workflow.changeHistory.sessionStart'),
        )} ({calculateStepLabel(item?.index)}{item?.index === currentHistoryStateIndex && t('workflow.changeHistory.currentState')})
      </div>
    </div>
  </div>

@@ -222,7 +240,10 @@ const ViewWorkflowHistory = () => {
          'flex items-center text-[13px] font-medium leading-[18px] text-text-secondary',
        )}
      >
        {item?.label || t('workflow.changeHistory.sessionStart')} ({calculateStepLabel(item?.index)})
        {composeHistoryItemLabel(
          item?.state?.workflowHistoryEventMeta?.nodeTitle,
          item?.label || t('workflow.changeHistory.sessionStart'),
        )} ({calculateStepLabel(item?.index)})
      </div>
    </div>
  </div>

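composeHistoryItemLabel simply prefixes the event label with the node title when one is known, so an undo-history entry reads 'LLM Node Added' rather than a bare 'Node Added'. Illustrative calls (actual titles and labels depend on the workflow):

composeHistoryItemLabel(undefined, 'Node Added') // => 'Node Added'
composeHistoryItemLabel('LLM', 'Node Added')     // => 'LLM Node Added'
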
@@ -175,7 +175,7 @@ export const useNodesInteractions = () => {

      if (x !== 0 && y !== 0) {
        // selecting a note will trigger a drag stop event with x and y as 0
        saveStateToHistory(WorkflowHistoryEvent.NodeDragStop)
        saveStateToHistory(WorkflowHistoryEvent.NodeDragStop, { nodeId: node.id })
      }
    }
  }, [workflowStore, getNodesReadOnly, saveStateToHistory, handleSyncWorkflowDraft])

@@ -275,7 +275,7 @@ export const useNodesInteractions = () => {
  }, [store, workflowStore, getNodesReadOnly])

  const handleNodeSelect = useCallback((nodeId: string, cancelSelection?: boolean, initShowLastRunTab?: boolean) => {
    if(initShowLastRunTab)
    if (initShowLastRunTab)
      workflowStore.setState({ initShowLastRunTab: true })
    const {
      getNodes,

@@ -408,7 +408,7 @@ export const useNodesInteractions = () => {
      setEdges(newEdges)

      handleSyncWorkflowDraft()
      saveStateToHistory(WorkflowHistoryEvent.NodeConnect)
      saveStateToHistory(WorkflowHistoryEvent.NodeConnect, { nodeId: targetNode?.id })
    }
    else {
      const {

@@ -657,10 +657,10 @@ export const useNodesInteractions = () => {
    handleSyncWorkflowDraft()

    if (currentNode.type === CUSTOM_NOTE_NODE)
      saveStateToHistory(WorkflowHistoryEvent.NoteDelete)
      saveStateToHistory(WorkflowHistoryEvent.NoteDelete, { nodeId: currentNode.id })

    else
      saveStateToHistory(WorkflowHistoryEvent.NodeDelete)
      saveStateToHistory(WorkflowHistoryEvent.NodeDelete, { nodeId: currentNode.id })
  }, [getNodesReadOnly, store, handleSyncWorkflowDraft, saveStateToHistory, workflowStore, t, nodesMetaDataMap, deleteNodeInspectorVars])

  const handleNodeAdd = useCallback<OnNodeAdd>((

@@ -1112,7 +1112,7 @@ export const useNodesInteractions = () => {
      setEdges(newEdges)
    }
    handleSyncWorkflowDraft()
    saveStateToHistory(WorkflowHistoryEvent.NodeAdd)
    saveStateToHistory(WorkflowHistoryEvent.NodeAdd, { nodeId: newNode.id })
  }, [getNodesReadOnly, store, handleSyncWorkflowDraft, saveStateToHistory, workflowStore, getAfterNodesInSameBranch, checkNestedParallelLimit, nodesMetaDataMap])

  const handleNodeChange = useCallback((

@@ -1197,7 +1197,7 @@ export const useNodesInteractions = () => {
    setEdges(newEdges)
    handleSyncWorkflowDraft()

    saveStateToHistory(WorkflowHistoryEvent.NodeChange)
    saveStateToHistory(WorkflowHistoryEvent.NodeChange, { nodeId: currentNodeId })
  }, [getNodesReadOnly, store, handleSyncWorkflowDraft, saveStateToHistory, nodesMetaDataMap])

  const handleNodesCancelSelected = useCallback(() => {

@@ -1419,7 +1419,7 @@ export const useNodesInteractions = () => {

      setNodes([...nodes, ...nodesToPaste])
      setEdges([...edges, ...edgesToPaste])
      saveStateToHistory(WorkflowHistoryEvent.NodePaste)
      saveStateToHistory(WorkflowHistoryEvent.NodePaste, { nodeId: nodesToPaste?.[0]?.id })
      handleSyncWorkflowDraft()
    }
  }, [getNodesReadOnly, workflowStore, store, reactflow, saveStateToHistory, handleSyncWorkflowDraft, handleNodeIterationChildrenCopy, handleNodeLoopChildrenCopy, nodesMetaDataMap])

@@ -1516,7 +1516,7 @@ export const useNodesInteractions = () => {
    })
    setNodes(newNodes)
    handleSyncWorkflowDraft()
    saveStateToHistory(WorkflowHistoryEvent.NodeResize)
    saveStateToHistory(WorkflowHistoryEvent.NodeResize, { nodeId })
  }, [getNodesReadOnly, store, handleSyncWorkflowDraft, saveStateToHistory])

  const handleNodeDisconnect = useCallback((nodeId: string) => {

@@ -8,6 +8,7 @@ import {
} from 'reactflow'
import { useTranslation } from 'react-i18next'
import { useWorkflowHistoryStore } from '../workflow-history-store'
import type { WorkflowHistoryEventMeta } from '../workflow-history-store'

/**
 * All supported Events that create a new history state.

@@ -64,20 +65,21 @@ export const useWorkflowHistory = () => {
  // Some events may be triggered multiple times in a short period of time.
  // We debounce the history state update to avoid creating multiple history states
  // with minimal changes.
  const saveStateToHistoryRef = useRef(debounce((event: WorkflowHistoryEvent) => {
  const saveStateToHistoryRef = useRef(debounce((event: WorkflowHistoryEvent, meta?: WorkflowHistoryEventMeta) => {
    workflowHistoryStore.setState({
      workflowHistoryEvent: event,
      workflowHistoryEventMeta: meta,
      nodes: store.getState().getNodes(),
      edges: store.getState().edges,
    })
  }, 500))

  const saveStateToHistory = useCallback((event: WorkflowHistoryEvent) => {
  const saveStateToHistory = useCallback((event: WorkflowHistoryEvent, meta?: WorkflowHistoryEventMeta) => {
    switch (event) {
      case WorkflowHistoryEvent.NoteChange:
        // Hint: Note change does not trigger when note text changes,
        // because the note editors have their own history states.
        saveStateToHistoryRef.current(event)
        saveStateToHistoryRef.current(event, meta)
        break
      case WorkflowHistoryEvent.NodeTitleChange:
      case WorkflowHistoryEvent.NodeDescriptionChange:

@@ -93,7 +95,7 @@ export const useWorkflowHistory = () => {
      case WorkflowHistoryEvent.NoteAdd:
      case WorkflowHistoryEvent.LayoutOrganize:
      case WorkflowHistoryEvent.NoteDelete:
        saveStateToHistoryRef.current(event)
        saveStateToHistoryRef.current(event, meta)
        break
      default:
        // We do not create a history state for every event.

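Because lodash's debounce fires on the trailing edge with the arguments of the most recent call, forwarding meta through the debounced ref means the nodeId recorded after the 500 ms window is the latest one, matching the node snapshot taken at the same moment. A standalone sketch of that behavior (names are illustrative):

import { debounce } from 'lodash-es'

// The trailing call wins: only the last event/meta pair is recorded.
const record = debounce((event: string, meta?: { nodeId?: string }) => {
  console.log(event, meta?.nodeId)
}, 500)

record('NodeDragStop', { nodeId: 'a' })
record('NodeDragStop', { nodeId: 'b' }) // after 500 ms, logs: NodeDragStop b
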
@@ -173,11 +173,11 @@ const BasePanel: FC<BasePanelProps> = ({

  const handleTitleBlur = useCallback((title: string) => {
    handleNodeDataUpdateWithSyncDraft({ id, data: { title } })
    saveStateToHistory(WorkflowHistoryEvent.NodeTitleChange)
    saveStateToHistory(WorkflowHistoryEvent.NodeTitleChange, { nodeId: id })
  }, [handleNodeDataUpdateWithSyncDraft, id, saveStateToHistory])
  const handleDescriptionChange = useCallback((desc: string) => {
    handleNodeDataUpdateWithSyncDraft({ id, data: { desc } })
    saveStateToHistory(WorkflowHistoryEvent.NodeDescriptionChange)
    saveStateToHistory(WorkflowHistoryEvent.NodeDescriptionChange, { nodeId: id })
  }, [handleNodeDataUpdateWithSyncDraft, id, saveStateToHistory])

  const isChildNode = !!(data.isInIteration || data.isInLoop)

@@ -48,7 +48,7 @@ const nodeDefault: NodeDefault<ListFilterNodeType> = {
    if (!errorMessages && !filter_by.conditions[0]?.comparison_operator)
      errorMessages = t(`${i18nPrefix}.fieldRequired`, { field: t('workflow.nodes.listFilter.filterConditionComparisonOperator') })

    if (!errorMessages && !comparisonOperatorNotRequireValue(filter_by.conditions[0]?.comparison_operator) && (item_var_type === VarType.boolean ? !filter_by.conditions[0]?.value === undefined : !filter_by.conditions[0]?.value))
    if (!errorMessages && !comparisonOperatorNotRequireValue(filter_by.conditions[0]?.comparison_operator) && (item_var_type === VarType.boolean ? filter_by.conditions[0]?.value === undefined : !filter_by.conditions[0]?.value))
      errorMessages = t(`${i18nPrefix}.fieldRequired`, { field: t('workflow.nodes.listFilter.filterConditionComparisonValue') })
  }

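The one-character fix above matters more than it looks: in the old condition, `!filter_by.conditions[0]?.value === undefined` negates the value first and then compares a boolean against undefined, which is always false, so boolean list filters never reported a missing comparison value. In isolation:

// Why the old check could never fire:
const value: boolean | undefined = undefined
console.log(!value === undefined) // false: !undefined is true, and true !== undefined
console.log(value === undefined)  // true: the intended check
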
@@ -17,7 +17,7 @@ const ErrorMessage: FC<ErrorMessageProps> = ({
    className,
  )}>
    <RiErrorWarningFill className='h-4 w-4 shrink-0 text-text-destructive' />
    <div className='system-xs-medium max-h-12 grow overflow-y-auto break-words text-text-primary'>
    <div className='system-xs-medium max-h-12 grow overflow-y-auto whitespace-pre-line break-words text-text-primary'>
      {message}
    </div>
  </div>

@@ -1,9 +1,8 @@
import { z } from 'zod'
import { ArrayType, Type } from './types'
import type { ArrayItems, Field, LLMNodeType } from './types'
import type { Schema, ValidationError } from 'jsonschema'
import { Validator } from 'jsonschema'
import produce from 'immer'
import { z } from 'zod'
import { draft07Validator, forbidBooleanProperties } from '@/utils/validators'
import type { ValidationError } from 'jsonschema'

export const checkNodeValid = (_payload: LLMNodeType) => {
  return true

@@ -116,191 +115,22 @@ export const findPropertyWithPath = (target: any, path: string[]) => {
  return current
}

const draft07MetaSchema = {
  $schema: 'http://json-schema.org/draft-07/schema#',
  $id: 'http://json-schema.org/draft-07/schema#',
  title: 'Core schema meta-schema',
  definitions: {
    schemaArray: {
      type: 'array',
      minItems: 1,
      items: { $ref: '#' },
    },
    nonNegativeInteger: {
      type: 'integer',
      minimum: 0,
    },
    nonNegativeIntegerDefault0: {
      allOf: [
        { $ref: '#/definitions/nonNegativeInteger' },
        { default: 0 },
      ],
    },
    simpleTypes: {
      enum: [
        'array',
        'boolean',
        'integer',
        'null',
        'number',
        'object',
        'string',
      ],
    },
    stringArray: {
      type: 'array',
      items: { type: 'string' },
      uniqueItems: true,
      default: [],
    },
  },
  type: ['object', 'boolean'],
  properties: {
    $id: {
      type: 'string',
      format: 'uri-reference',
    },
    $schema: {
      type: 'string',
      format: 'uri',
    },
    $ref: {
      type: 'string',
      format: 'uri-reference',
    },
    title: {
      type: 'string',
    },
    description: {
      type: 'string',
    },
    default: true,
    readOnly: {
      type: 'boolean',
      default: false,
    },
    examples: {
      type: 'array',
      items: true,
    },
    multipleOf: {
      type: 'number',
      exclusiveMinimum: 0,
    },
    maximum: {
      type: 'number',
    },
    exclusiveMaximum: {
      type: 'number',
    },
    minimum: {
      type: 'number',
    },
    exclusiveMinimum: {
      type: 'number',
    },
    maxLength: { $ref: '#/definitions/nonNegativeInteger' },
    minLength: { $ref: '#/definitions/nonNegativeIntegerDefault0' },
    pattern: {
      type: 'string',
      format: 'regex',
    },
    additionalItems: { $ref: '#' },
    items: {
      anyOf: [
        { $ref: '#' },
        { $ref: '#/definitions/schemaArray' },
      ],
      default: true,
    },
    maxItems: { $ref: '#/definitions/nonNegativeInteger' },
    minItems: { $ref: '#/definitions/nonNegativeIntegerDefault0' },
    uniqueItems: {
      type: 'boolean',
      default: false,
    },
    contains: { $ref: '#' },
    maxProperties: { $ref: '#/definitions/nonNegativeInteger' },
    minProperties: { $ref: '#/definitions/nonNegativeIntegerDefault0' },
    required: { $ref: '#/definitions/stringArray' },
    additionalProperties: { $ref: '#' },
    definitions: {
      type: 'object',
      additionalProperties: { $ref: '#' },
      default: {},
    },
    properties: {
      type: 'object',
      additionalProperties: { $ref: '#' },
      default: {},
    },
    patternProperties: {
      type: 'object',
      additionalProperties: { $ref: '#' },
      propertyNames: { format: 'regex' },
      default: {},
    },
    dependencies: {
      type: 'object',
      additionalProperties: {
        anyOf: [
          { $ref: '#' },
          { $ref: '#/definitions/stringArray' },
        ],
      },
    },
    propertyNames: { $ref: '#' },
    const: true,
    enum: {
      type: 'array',
      items: true,
      minItems: 1,
      uniqueItems: true,
    },
    type: {
      anyOf: [
        { $ref: '#/definitions/simpleTypes' },
        {
          type: 'array',
          items: { $ref: '#/definitions/simpleTypes' },
          minItems: 1,
          uniqueItems: true,
        },
      ],
    },
    format: { type: 'string' },
    allOf: { $ref: '#/definitions/schemaArray' },
    anyOf: { $ref: '#/definitions/schemaArray' },
    oneOf: { $ref: '#/definitions/schemaArray' },
    not: { $ref: '#' },
  },
  default: true,
} as unknown as Schema

const validator = new Validator()

export const validateSchemaAgainstDraft7 = (schemaToValidate: any) => {
  const schema = produce(schemaToValidate, (draft: any) => {
    // Make sure the schema has the $schema property for draft-07
    if (!draft.$schema)
      draft.$schema = 'http://json-schema.org/draft-07/schema#'
  })
  // First check against Draft-07
  const result = draft07Validator(schemaToValidate)
  // Then apply custom rule
  const customErrors = forbidBooleanProperties(schemaToValidate)

  const result = validator.validate(schema, draft07MetaSchema, {
    nestedErrors: true,
    throwError: false,
  })

  // Access errors from the validation result
  const errors = result.valid ? [] : result.errors || []

  return errors
  return [...result.errors, ...customErrors]
}

export const getValidationErrorMessage = (errors: ValidationError[]) => {
export const getValidationErrorMessage = (errors: Array<ValidationError | string>) => {
  const message = errors.map((error) => {
    return `Error: ${error.path.join('.')} ${error.message} Details: ${JSON.stringify(error.stack)}`
  }).join('; ')
    if (typeof error === 'string')
      return error
    else
      return `Error: ${error.stack}\n`
  }).join('')
  return message
}

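After this refactor, validateSchemaAgainstDraft7 returns a mixed array: jsonschema ValidationError objects from the Draft-07 check plus plain strings from the custom rule, which is exactly why getValidationErrorMessage now branches on typeof. A usage sketch (the sample schema is made up):

// Sketch of the intended call flow.
const sampleSchema = {
  type: 'object',
  properties: { name: true }, // boolean property schema: rejected by the custom rule
}
const errors = validateSchemaAgainstDraft7(sampleSchema)
if (errors.length > 0)
  console.warn(getValidationErrorMessage(errors))
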
@@ -9,7 +9,7 @@ export const useNote = (id: string) => {

  const handleThemeChange = useCallback((theme: NoteTheme) => {
    handleNodeDataUpdateWithSyncDraft({ id, data: { theme } })
    saveStateToHistory(WorkflowHistoryEvent.NoteChange)
    saveStateToHistory(WorkflowHistoryEvent.NoteChange, { nodeId: id })
  }, [handleNodeDataUpdateWithSyncDraft, id, saveStateToHistory])

  const handleEditorChange = useCallback((editorState: EditorState) => {

@@ -21,7 +21,7 @@ export const useNote = (id: string) => {

  const handleShowAuthorChange = useCallback((showAuthor: boolean) => {
    handleNodeDataUpdateWithSyncDraft({ id, data: { showAuthor } })
    saveStateToHistory(WorkflowHistoryEvent.NoteChange)
    saveStateToHistory(WorkflowHistoryEvent.NoteChange, { nodeId: id })
  }, [handleNodeDataUpdateWithSyncDraft, id, saveStateToHistory])

  return {

@@ -72,18 +72,22 @@ const ValueContent = ({
  const [fileValue, setFileValue] = useState<any>(formatFileValue(currentVar))

  const { run: debounceValueChange } = useDebounceFn(handleValueChange, { wait: 500 })
  if (showTextEditor) {
    if (currentVar.value_type === 'number')
      setValue(JSON.stringify(currentVar.value))
    if (!currentVar.value)
      setValue('')
    setValue(currentVar.value)
  }
  if (showJSONEditor)
    setJson(currentVar.value ? JSON.stringify(currentVar.value, null, 2) : '')

  if (showFileEditor)
    setFileValue(formatFileValue(currentVar))
  // update default value when id changed
  useEffect(() => {
    if (showTextEditor) {
      if (currentVar.value_type === 'number')
        return setValue(JSON.stringify(currentVar.value))
      if (!currentVar.value)
        return setValue('')
      setValue(currentVar.value)
    }
    if (showJSONEditor)
      setJson(currentVar.value ? JSON.stringify(currentVar.value, null, 2) : '')

    if (showFileEditor)
      setFileValue(formatFileValue(currentVar))
  }, [currentVar.id, currentVar.value])

  const handleTextChange = (value: string) => {
    if (isTruncated)

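This hunk is the heart of the value-content re-render fix: the old code called setValue/setJson/setFileValue directly in the render body, which schedules another state update on every render pass and can loop; wrapping the synchronisation in useEffect keyed on currentVar.id and currentVar.value runs it only when the inspected variable actually changes. The pattern, reduced to a minimal sketch with assumed names:

import { useEffect, useState } from 'react'

// Minimal sketch of the fix pattern; not the component itself.
const useSyncedDraft = (id: string, value: string) => {
  const [draft, setDraft] = useState(value)
  // Bad: calling setDraft(value) here, during render, re-queues an update every pass.
  useEffect(() => {
    setDraft(value) // Good: runs after render, only when id or value changes.
  }, [id, value])
  return [draft, setDraft] as const
}
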
@@ -51,6 +51,7 @@ export function useWorkflowHistoryStore() {
    setState: (state: WorkflowHistoryState) => {
      store.setState({
        workflowHistoryEvent: state.workflowHistoryEvent,
        workflowHistoryEventMeta: state.workflowHistoryEventMeta,
        nodes: state.nodes.map((node: Node) => ({ ...node, data: { ...node.data, selected: false } })),
        edges: state.edges.map((edge: Edge) => ({ ...edge, selected: false }) as Edge),
      })

@@ -76,6 +77,7 @@ function createStore({
    (set, get) => {
      return {
        workflowHistoryEvent: undefined,
        workflowHistoryEventMeta: undefined,
        nodes: storeNodes,
        edges: storeEdges,
        getNodes: () => get().nodes,

@@ -97,6 +99,7 @@ export type WorkflowHistoryStore = {
  nodes: Node[]
  edges: Edge[]
  workflowHistoryEvent: WorkflowHistoryEvent | undefined
  workflowHistoryEventMeta?: WorkflowHistoryEventMeta
}

export type WorkflowHistoryActions = {

@@ -119,3 +122,8 @@ export type WorkflowWithHistoryProviderProps = {
  edges: Edge[]
  children: ReactNode
}

export type WorkflowHistoryEventMeta = {
  nodeId?: string
  nodeTitle?: string
}

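Taken together, the hunks above move the meta in one direction only: the interaction hands over a nodeId, the store snapshots it beside the nodes and edges, and the history panel resolves the title from that snapshot. A compressed sketch of the pipeline, using the names added in this diff:

type WorkflowHistoryEventMeta = { nodeId?: string; nodeTitle?: string }

// 1. Interaction: saveStateToHistory(WorkflowHistoryEvent.NodeAdd, { nodeId: newNode.id })
// 2. Store snapshot: { workflowHistoryEvent, workflowHistoryEventMeta, nodes, edges }
// 3. History panel: nodes.find(n => n.id === meta.nodeId)?.data?.title fills in nodeTitle
// 4. Label: composeHistoryItemLabel(nodeTitle, label) => `${nodeTitle} ${label}`
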
@@ -82,7 +82,7 @@ export default function CheckCode() {
        <form action="">
          <input type='text' className='hidden' />
          <label htmlFor="code" className='system-md-semibold mb-1 text-text-secondary'>{t('login.checkCode.verificationCode')}</label>
          <Input value={code} onChange={e => setVerifyCode(e.target.value)} max-length={6} className='mt-1' placeholder={t('login.checkCode.verificationCodePlaceholder') as string} />
          <Input value={code} onChange={e => setVerifyCode(e.target.value)} maxLength={6} className='mt-1' placeholder={t('login.checkCode.verificationCodePlaceholder') as string} />
          <Button loading={loading} disabled={loading} className='my-3 w-full' variant='primary' onClick={verify}>{t('login.checkCode.verify')}</Button>
          <Countdown onResend={resendCode} />
        </form>

@@ -89,7 +89,7 @@ export default function CheckCode() {

        <form action="">
          <label htmlFor="code" className='system-md-semibold mb-1 text-text-secondary'>{t('login.checkCode.verificationCode')}</label>
          <Input value={code} onChange={e => setVerifyCode(e.target.value)} max-length={6} className='mt-1' placeholder={t('login.checkCode.verificationCodePlaceholder') as string} />
          <Input value={code} onChange={e => setVerifyCode(e.target.value)} maxLength={6} className='mt-1' placeholder={t('login.checkCode.verificationCodePlaceholder') as string} />
          <Button loading={loading} disabled={loading} className='my-3 w-full' variant='primary' onClick={verify}>{t('login.checkCode.verify')}</Button>
          <Countdown onResend={resendCode} />
        </form>

@@ -193,6 +193,16 @@ const translation = {
      confirm: 'Hinzufügen & Autorisieren',
      sseReadTimeout: 'SSE-Lesezeitüberschreitung',
      timeout: 'Zeitüberschreitung',
      headers: 'Kopfzeilen',
      timeoutPlaceholder: '30',
      headerKeyPlaceholder: 'z.B., Authorization',
      addHeader: 'Kopfzeile hinzufügen',
      headerValuePlaceholder: 'z.B., Bearer token123',
      headerValue: 'Header-Wert',
      headerKey: 'Kopfzeilenname',
      noHeaders: 'Keine benutzerdefinierten Header konfiguriert',
      maskedHeadersTip: 'Headerwerte sind zum Schutz maskiert. Änderungen werden die tatsächlichen Werte aktualisieren.',
      headersTip: 'Zusätzliche HTTP-Header, die mit MCP-Serveranfragen gesendet werden sollen',
    },
    delete: 'MCP-Server entfernen',
    deleteConfirmTitle: 'Möchten Sie {{mcp}} entfernen?',

@@ -187,12 +187,22 @@ const translation = {
      serverIdentifier: 'Server Identifier',
      serverIdentifierTip: 'Unique identifier for the MCP server within the workspace. Lowercase letters, numbers, underscores, and hyphens only. Up to 24 characters.',
      serverIdentifierPlaceholder: 'Unique identifier, e.g., my-mcp-server',
      serverIdentifierWarning: 'The server won’t be recognized by existing apps after an ID change',
      serverIdentifierWarning: 'The server won\'t be recognized by existing apps after an ID change',
      headers: 'Headers',
      headersTip: 'Additional HTTP headers to send with MCP server requests',
      headerKey: 'Header Name',
      headerValue: 'Header Value',
      headerKeyPlaceholder: 'e.g., Authorization',
      headerValuePlaceholder: 'e.g., Bearer token123',
      addHeader: 'Add Header',
      noHeaders: 'No custom headers configured',
      maskedHeadersTip: 'Header values are masked for security. Changes will update the actual values.',
      cancel: 'Cancel',
      save: 'Save',
      confirm: 'Add & Authorize',
      timeout: 'Timeout',
      sseReadTimeout: 'SSE Read Timeout',
      timeoutPlaceholder: '30',
    },
    delete: 'Remove MCP Server',
    deleteConfirmTitle: 'Would you like to remove {{mcp}}?',

@@ -193,6 +193,16 @@ const translation = {
      confirm: 'Añadir y Autorizar',
      sseReadTimeout: 'Tiempo de espera de lectura SSE',
      timeout: 'Tiempo de espera',
      timeoutPlaceholder: '30',
      headers: 'Encabezados',
      addHeader: 'Agregar encabezado',
      headerValuePlaceholder: 'por ejemplo, Bearer token123',
      headersTip: 'Encabezados HTTP adicionales para enviar con las solicitudes del servidor MCP',
      maskedHeadersTip: 'Los valores del encabezado están enmascarados por seguridad. Los cambios actualizarán los valores reales.',
      headerKeyPlaceholder: 'por ejemplo, Authorization',
      headerValue: 'Valor del encabezado',
      noHeaders: 'No se han configurado encabezados personalizados',
      headerKey: 'Nombre del encabezado',
    },
    delete: 'Eliminar servidor MCP',
    deleteConfirmTitle: '¿Eliminar {{mcp}}?',

@@ -193,6 +193,16 @@ const translation = {
      confirm: 'افزودن و مجوزدهی',
      timeout: 'مهلت',
      sseReadTimeout: 'مهلت خواندن SSE',
      headers: 'هدرها',
      timeoutPlaceholder: '30',
      headerKey: 'نام هدر',
      headerValue: 'مقدار هدر',
      addHeader: 'هدر اضافه کنید',
      headerKeyPlaceholder: 'Authorization',
      headerValuePlaceholder: 'مثلاً، Bearer 123',
      noHeaders: 'هیچ هدر سفارشی پیکربندی نشده است',
      headersTip: 'هدرهای HTTP اضافی برای ارسال با درخواست‌های سرور MCP',
      maskedHeadersTip: 'مقدارهای هدر به خاطر امنیت مخفی شده‌اند. تغییرات مقادیر واقعی را به‌روزرسانی خواهد کرد.',
    },
    delete: 'حذف سرور MCP',
    deleteConfirmTitle: 'آیا مایل به حذف {mcp} هستید؟',

@@ -193,6 +193,16 @@ const translation = {
      confirm: 'Ajouter & Authoriser',
      sseReadTimeout: 'Délai d\'attente de lecture SSE',
      timeout: 'Délai d\'attente',
      timeoutPlaceholder: '30',
      headerValue: 'Valeur d\'en-tête',
      headerKey: 'Nom de l\'en-tête',
      noHeaders: 'Aucun en-tête personnalisé configuré',
      headers: 'En-têtes',
      headerKeyPlaceholder: 'par exemple, Authorization',
      headerValuePlaceholder: 'par exemple, Bearer token123',
      headersTip: 'En-têtes HTTP supplémentaires à envoyer avec les requêtes au serveur MCP',
      addHeader: 'Ajouter un en-tête',
      maskedHeadersTip: 'Les valeurs d\'en-tête sont masquées pour des raisons de sécurité. Les modifications mettront à jour les valeurs réelles.',
    },
    delete: 'Supprimer le Serveur MCP',
    deleteConfirmTitle: 'Souhaitez-vous supprimer {mcp}?',

@@ -198,6 +198,16 @@ const translation = {
      confirm: 'जोड़ें और अधिकृत करें',
      timeout: 'टाइमआउट',
      sseReadTimeout: 'एसएसई पढ़ने का टाइमआउट',
      headerKey: 'हेडर नाम',
      headers: 'हेडर',
      headerValue: 'हेडर मान',
      timeoutPlaceholder: '30',
      headerValuePlaceholder: 'उदाहरण के लिए, Bearer token123',
      addHeader: 'हेडर जोड़ें',
      headerKeyPlaceholder: 'उदाहरण के लिए, Authorization',
      noHeaders: 'कोई कस्टम हेडर कॉन्फ़िगर नहीं किए गए हैं',
      maskedHeadersTip: 'सुरक्षा के लिए हेडर मानों को छिपाया गया है। परिवर्तन वास्तविक मानों को अपडेट करेगा।',
      headersTip: 'MCP सर्वर अनुरोधों के साथ भेजने के लिए अतिरिक्त HTTP हेडर्स',
    },
    delete: 'MCP सर्वर हटाएँ',
    deleteConfirmTitle: '{mcp} हटाना चाहते हैं?',

@@ -175,6 +175,16 @@ const translation = {
      cancel: 'Membatalkan',
      serverIdentifierPlaceholder: 'Pengidentifikasi unik, misalnya, my-mcp-server',
      serverUrl: 'Server URL',
      headers: 'Header',
      timeoutPlaceholder: '30',
      addHeader: 'Tambahkan Header',
      headerKey: 'Nama Header',
      headerValue: 'Nilai Header',
      headersTip: 'Header HTTP tambahan untuk dikirim bersama permintaan server MCP',
      headerKeyPlaceholder: 'Authorization',
      headerValuePlaceholder: 'Bearer 123',
      noHeaders: 'Tidak ada header kustom yang dikonfigurasi',
      maskedHeadersTip: 'Nilai header disembunyikan untuk keamanan. Perubahan akan memperbarui nilai yang sebenarnya.',
    },
    operation: {
      edit: 'Mengedit',

@@ -203,6 +203,16 @@ const translation = {
      confirm: 'Aggiungi & Autorizza',
      timeout: 'Tempo scaduto',
      sseReadTimeout: 'Timeout di lettura SSE',
      headerKey: 'Nome intestazione',
      timeoutPlaceholder: '30',
      headers: 'Intestazioni',
      addHeader: 'Aggiungi intestazione',
      noHeaders: 'Nessuna intestazione personalizzata configurata',
      headerKeyPlaceholder: 'ad es., Authorization',
      headerValue: 'Valore dell\'intestazione',
      headerValuePlaceholder: 'ad esempio, Bearer token123',
      headersTip: 'Intestazioni HTTP aggiuntive da inviare con le richieste al server MCP',
      maskedHeadersTip: 'I valori dell\'intestazione sono mascherati per motivi di sicurezza. Le modifiche aggiorneranno i valori effettivi.',
    },
    delete: 'Rimuovi Server MCP',
    deleteConfirmTitle: 'Vuoi rimuovere {mcp}?',

@@ -37,8 +37,8 @@ const translation = {
      tip: 'スタジオでワークフローをツールに公開する',
    },
    mcp: {
      title: '利用可能なMCPツールはありません',
      tip: 'MCPサーバーを追加する',
      title: '利用可能な MCP ツールはありません',
      tip: 'MCP サーバーを追加する',
    },
    agent: {
      title: 'Agent strategy は利用できません',

@@ -85,13 +85,13 @@ const translation = {
      apiKeyPlaceholder: 'API キーの HTTP ヘッダー名',
      apiValuePlaceholder: 'API キーを入力してください',
      api_key_query: 'クエリパラメータ',
      queryParamPlaceholder: 'APIキーのクエリパラメータ名',
      queryParamPlaceholder: 'API キーのクエリパラメータ名',
      api_key_header: 'ヘッダー',
    },
    key: 'キー',
    value: '値',
    queryParam: 'クエリパラメータ',
    queryParamTooltip: 'APIキーのクエリパラメータとして渡す名前、例えば「https://example.com/test?key=API_KEY」の「key」。',
    queryParamTooltip: 'API キーのクエリパラメータとして渡す名前、例えば「https://example.com/test?key=API_KEY」の「key」。',
  },
  authHeaderPrefix: {
    title: '認証タイプ',

@@ -169,32 +169,42 @@ const translation = {
  noTools: 'ツールが見つかりませんでした',
  mcp: {
    create: {
      cardTitle: 'MCPサーバー(HTTP)を追加',
      cardLink: 'MCPサーバー統合について詳しく知る',
      cardTitle: 'MCP サーバー(HTTP)を追加',
      cardLink: 'MCP サーバー統合について詳しく知る',
    },
    noConfigured: '未設定',
    updateTime: '更新日時',
    toolsCount: '{{count}} 個のツール',
    noTools: '利用可能なツールはありません',
    modal: {
      title: 'MCPサーバー(HTTP)を追加',
      editTitle: 'MCPサーバー(HTTP)を編集',
      title: 'MCP サーバー(HTTP)を追加',
      editTitle: 'MCP サーバー(HTTP)を編集',
      name: '名前とアイコン',
      namePlaceholder: 'MCPサーバーの名前を入力',
      namePlaceholder: 'MCP サーバーの名前を入力',
      serverUrl: 'サーバーURL',
      serverUrlPlaceholder: 'サーバーエンドポイントのURLを入力',
      serverUrlPlaceholder: 'サーバーエンドポイントの URL を入力',
      serverUrlWarning: 'サーバーアドレスを更新すると、このサーバーに依存するアプリケーションに影響を与える可能性があります。',
      serverIdentifier: 'サーバー識別子',
      serverIdentifierTip: 'ワークスペース内でのMCPサーバーのユニーク識別子です。使用可能な文字は小文字、数字、アンダースコア、ハイフンで、最大24文字です。',
      serverIdentifierTip: 'ワークスペース内での MCP サーバーのユニーク識別子です。使用可能な文字は小文字、数字、アンダースコア、ハイフンで、最大 24 文字です。',
      serverIdentifierPlaceholder: 'ユニーク識別子(例:my-mcp-server)',
      serverIdentifierWarning: 'IDを変更すると、既存のアプリケーションではサーバーが認識できなくなります。',
      serverIdentifierWarning: 'ID を変更すると、既存のアプリケーションではサーバーが認識できなくなります。',
      cancel: 'キャンセル',
      save: '保存',
      confirm: '追加して承認',
      timeout: 'タイムアウト',
      sseReadTimeout: 'SSE 読み取りタイムアウト',
      headerValuePlaceholder: '例:Bearer token123',
      headerKeyPlaceholder: '例:Authorization',
      headers: 'ヘッダー',
      timeoutPlaceholder: '30',
      headerKey: 'ヘッダー名',
      addHeader: 'ヘッダーを追加',
      headerValue: 'ヘッダーの値',
      noHeaders: 'カスタムヘッダーは設定されていません',
      headersTip: 'MCP サーバーへのリクエストに送信する追加の HTTP ヘッダー',
      maskedHeadersTip: 'ヘッダー値はセキュリティのためマスクされています。変更は実際の値を更新します。',
    },
    delete: 'MCPサーバーを削除',
    delete: 'MCP サーバーを削除',
    deleteConfirmTitle: '{{mcp}} を削除しますか?',
    operation: {
      edit: '編集',

@@ -213,23 +223,23 @@ const translation = {
    toolUpdateConfirmTitle: 'ツールリストの更新',
    toolUpdateConfirmContent: 'ツールリストを更新すると、既存のアプリケーションに重大な影響を与える可能性があります。続行しますか?',
    toolsNum: '{{count}} 個のツールが含まれています',
    onlyTool: '1つのツールが含まれています',
    onlyTool: '1 つのツールが含まれています',
    identifier: 'サーバー識別子(クリックしてコピー)',
    server: {
      title: 'MCPサーバー',
      title: 'MCP サーバー',
      url: 'サーバーURL',
      reGen: 'サーバーURLを再生成しますか?',
      reGen: 'サーバーURL を再生成しますか?',
      addDescription: '説明を追加',
      edit: '説明を編集',
      modal: {
        addTitle: 'MCPサーバーを有効化するための説明を追加',
        addTitle: 'MCP サーバーを有効化するための説明を追加',
        editTitle: '説明を編集',
        description: '説明',
        descriptionPlaceholder: 'このツールの機能とLLM(大規模言語モデル)での使用方法を説明してください。',
        descriptionPlaceholder: 'このツールの機能と LLM(大規模言語モデル)での使用方法を説明してください。',
        parameters: 'パラメータ',
        parametersTip: '各パラメータの説明を追加して、LLMがその目的と制約を理解できるようにします。',
        parametersTip: '各パラメータの説明を追加して、LLM がその目的と制約を理解できるようにします。',
        parametersPlaceholder: 'パラメータの目的と制約',
        confirm: 'MCPサーバーを有効にする',
        confirm: 'MCP サーバーを有効にする',
      },
      publishTip: 'アプリが公開されていません。まずアプリを公開してください。',
    },

@@ -193,6 +193,16 @@ const translation = {
      confirm: '추가 및 승인',
      timeout: '타임아웃',
      sseReadTimeout: 'SSE 읽기 타임아웃',
      headers: '헤더',
      headerKeyPlaceholder: '예: Authorization',
      headerKey: '헤더 이름',
      headerValuePlaceholder: '예: Bearer token123',
      timeoutPlaceholder: '30',
      headerValue: '헤더 값',
      addHeader: '헤더 추가',
      noHeaders: '사용자 정의 헤더가 구성되어 있지 않습니다.',
      headersTip: 'MCP 서버 요청과 함께 보낼 추가 HTTP 헤더',
      maskedHeadersTip: '헤더 값은 보안상 마스킹 처리되어 있습니다. 변경 사항은 실제 값에 업데이트됩니다.',
    },
    delete: 'MCP 서버 제거',
    deleteConfirmTitle: '{mcp}를 제거하시겠습니까?',

@@ -197,6 +197,16 @@ const translation = {
      confirm: 'Dodaj i autoryzuj',
      timeout: 'Limit czasu',
      sseReadTimeout: 'Przekroczenie czasu oczekiwania na odczyt SSE',
      addHeader: 'Dodaj nagłówek',
      headers: 'Nagłówki',
      headerKeyPlaceholder: 'np. Authorization',
      timeoutPlaceholder: '30',
      headerValuePlaceholder: 'np. Bearer token123',
      headerKey: 'Nazwa nagłówka',
      headersTip: 'Dodatkowe nagłówki HTTP do wysłania z żądaniami serwera MCP',
      headerValue: 'Wartość nagłówka',
      noHeaders: 'Brak skonfigurowanych nagłówków niestandardowych',
      maskedHeadersTip: 'Wartości nagłówków są ukryte dla bezpieczeństwa. Zmiany zaktualizują rzeczywiste wartości.',
    },
    delete: 'Usuń serwer MCP',
    deleteConfirmTitle: 'Usunąć {mcp}?',

@@ -193,6 +193,16 @@ const translation = {
      confirm: 'Adicionar e Autorizar',
      sseReadTimeout: 'Tempo limite de leitura SSE',
      timeout: 'Tempo esgotado',
      timeoutPlaceholder: '30',
      headerValue: 'Valor do Cabeçalho',
      headerKeyPlaceholder: 'por exemplo, Authorization',
      addHeader: 'Adicionar Cabeçalho',
      headersTip: 'Cabeçalhos HTTP adicionais a serem enviados com as solicitações do servidor MCP',
      headers: 'Cabeçalhos',
      maskedHeadersTip: 'Os valores do cabeçalho estão mascarados por segurança. As alterações atualizarão os valores reais.',
      headerKey: 'Nome do Cabeçalho',
      noHeaders: 'Nenhum cabeçalho personalizado configurado',
      headerValuePlaceholder: 'ex: Bearer token123',
    },
    delete: 'Remover Servidor MCP',
    deleteConfirmTitle: 'Você gostaria de remover {{mcp}}?',

@@ -193,6 +193,16 @@ const translation = {
      confirm: 'Adăugare și Autorizare',
      timeout: 'Timp de așteptare',
      sseReadTimeout: 'Timp de așteptare pentru citirea SSE',
      headerKeyPlaceholder: 'de exemplu, Authorization',
      headers: 'Antete',
      addHeader: 'Adăugați antet',
      headerValuePlaceholder: 'de exemplu, Bearer token123',
      timeoutPlaceholder: '30',
      headerKey: 'Numele antetului',
      headerValue: 'Valoare Antet',
      maskedHeadersTip: 'Valorile de antet sunt mascate pentru securitate. Modificările vor actualiza valorile reale.',
      headersTip: 'Header-uri HTTP suplimentare de trimis cu cererile către serverul MCP',
      noHeaders: 'Nu sunt configurate antete personalizate.',
    },
    delete: 'Eliminare Server MCP',
    deleteConfirmTitle: 'Ștergeți {mcp}?',

@@ -193,6 +193,16 @@ const translation = {
      confirm: 'Добавить и авторизовать',
      timeout: 'Тайм-аут',
      sseReadTimeout: 'Таймаут чтения SSE',
      headerValuePlaceholder: 'например, Bearer token123',
      headers: 'Заголовки',
      headerKey: 'Название заголовка',
      timeoutPlaceholder: '30',
      addHeader: 'Добавить заголовок',
      headerValue: 'Значение заголовка',
      headerKeyPlaceholder: 'например, Authorization',
      noHeaders: 'Нет настроенных пользовательских заголовков',
      maskedHeadersTip: 'Значения заголовков скрыты для безопасности. Изменения обновят фактические значения.',
      headersTip: 'Дополнительные HTTP заголовки для отправки с запросами к серверу MCP',
    },
    delete: 'Удалить MCP сервер',
    deleteConfirmTitle: 'Вы действительно хотите удалить {mcp}?',

@@ -193,6 +193,16 @@ const translation = {
      confirm: 'Dodaj in avtoriziraj',
      timeout: 'Časovna omejitev',
      sseReadTimeout: 'SSE časovna omejitev branja',
      timeoutPlaceholder: '30',
      headers: 'Glave',
      headerKeyPlaceholder: 'npr., Authorization',
      headerValue: 'Vrednost glave',
      headerKey: 'Ime glave',
      addHeader: 'Dodaj glavo',
      headersTip: 'Dodatni HTTP glavi za poslati z zahtevami MCP strežnika',
      headerValuePlaceholder: 'npr., Bearer token123',
      noHeaders: 'Nobena prilagojena glava ni konfigurirana',
      maskedHeadersTip: 'Vrednosti glave so zakrite zaradi varnosti. Spremembe bodo posodobile dejanske vrednosti.',
    },
    delete: 'Odstrani strežnik MCP',
    deleteConfirmTitle: 'Odstraniti {mcp}?',

@@ -193,6 +193,16 @@ const translation = {
      confirm: 'เพิ่มและอนุญาต',
      timeout: 'หมดเวลา',
      sseReadTimeout: 'หมดเวลาการอ่าน SSE',
      timeoutPlaceholder: '30',
      headerValue: 'ค่าหัวข้อ',
      addHeader: 'เพิ่มหัวเรื่อง',
      headerKey: 'ชื่อหัวเรื่อง',
      headerKeyPlaceholder: 'เช่น Authorization',
      headerValuePlaceholder: 'ตัวอย่าง: Bearer token123',
      headers: 'หัวเรื่อง',
      noHeaders: 'ไม่มีการกำหนดหัวข้อที่กำหนดเอง',
      headersTip: 'HTTP header เพิ่มเติมที่จะส่งไปกับคำขอ MCP server',
      maskedHeadersTip: 'ค่าหัวถูกปกปิดเพื่อความปลอดภัย การเปลี่ยนแปลงจะปรับปรุงค่าที่แท้จริง',
    },
    delete: 'ลบเซิร์ฟเวอร์ MCP',
    deleteConfirmTitle: 'คุณต้องการลบ {mcp} หรือไม่?',

@@ -193,6 +193,16 @@ const translation = {
      confirm: 'Ekle ve Yetkilendir',
      timeout: 'Zaman aşımı',
      sseReadTimeout: 'SSE Okuma Zaman Aşımı',
      headers: 'Başlıklar',
      headerKeyPlaceholder: 'örneğin, Authorization',
      addHeader: 'Başlık Ekle',
      headerValue: 'Başlık Değeri',
      noHeaders: 'Özel başlıklar yapılandırılmamış',
      headerKey: 'Başlık Adı',
      timeoutPlaceholder: '30',
      headersTip: 'MCP sunucu istekleri ile gönderilecek ek HTTP başlıkları',
      headerValuePlaceholder: 'örneğin, Bearer token123',
      maskedHeadersTip: 'Başlık değerleri güvenlik amacıyla gizlenmiştir. Değişiklikler gerçek değerleri güncelleyecektir.',
    },
    delete: 'MCP Sunucusunu Kaldır',
    deleteConfirmTitle: '{mcp} kaldırılsın mı?',

@@ -193,6 +193,16 @@ const translation = {
      confirm: 'Додати та Авторизувати',
      timeout: 'Час вичерпано',
      sseReadTimeout: 'Тайм-аут читання SSE',
      headers: 'Заголовки',
      headerValuePlaceholder: 'наприклад, Bearer token123',
      headerValue: 'Значення заголовка',
      headerKey: 'Назва заголовка',
      timeoutPlaceholder: '30',
      addHeader: 'Додати заголовок',
      noHeaders: 'Не налаштовано спеціальні заголовки',
      headerKeyPlaceholder: 'наприклад, Authorization',
      maskedHeadersTip: 'Значення заголовків маскуються для безпеки. Зміни оновлять фактичні значення.',
      headersTip: 'Додаткові HTTP заголовки для відправлення з запитами до сервера MCP',
    },
    delete: 'Видалити сервер MCP',
    deleteConfirmTitle: 'Видалити {mcp}?',

@@ -193,6 +193,16 @@ const translation = {
      confirm: 'Thêm & Ủy quyền',
      sseReadTimeout: 'Thời gian chờ Đọc SSE',
      timeout: 'Thời gian chờ',
      headerKeyPlaceholder: 'ví dụ, Authorization',
      timeoutPlaceholder: '30',
      addHeader: 'Thêm tiêu đề',
      headers: 'Tiêu đề',
      headerValuePlaceholder: 'ví dụ: Bearer token123',
      headerKey: 'Tên tiêu đề',
      noHeaders: 'Không có tiêu đề tùy chỉnh nào được cấu hình',
      headerValue: 'Giá trị tiêu đề',
      maskedHeadersTip: 'Các giá trị tiêu đề được mã hóa để đảm bảo an ninh. Các thay đổi sẽ cập nhật các giá trị thực tế.',
      headersTip: 'Các tiêu đề HTTP bổ sung để gửi cùng với các yêu cầu máy chủ MCP',
    },
    delete: 'Xóa Máy chủ MCP',
    deleteConfirmTitle: 'Xóa {mcp}?',

@@ -81,7 +81,7 @@ const translation = {
    type: '鉴权类型',
    keyTooltip: 'HTTP 头部名称,如果你不知道是什么,可以将其保留为 Authorization 或设置为自定义值',
    queryParam: '查询参数',
    queryParamTooltip: '用于传递 API 密钥查询参数的名称, 如 "https://example.com/test?key=API_KEY" 中的 "key"参数',
    queryParamTooltip: '用于传递 API 密钥查询参数的名称,如 "https://example.com/test?key=API_KEY" 中的 "key"参数',
    types: {
      none: '无',
      api_key_header: '请求头',

@@ -188,11 +188,21 @@ const translation = {
      serverIdentifierTip: '工作空间内服务器的唯一标识。支持小写字母、数字、下划线和连字符,最多 24 个字符。',
      serverIdentifierPlaceholder: '服务器唯一标识,例如 my-mcp-server',
      serverIdentifierWarning: '更改服务器标识符后,现有应用将无法识别此服务器',
      headers: '请求头',
      headersTip: '发送到 MCP 服务器的额外 HTTP 请求头',
      headerKey: '请求头名称',
      headerValue: '请求头值',
      headerKeyPlaceholder: '例如:Authorization',
      headerValuePlaceholder: '例如:Bearer token123',
      addHeader: '添加请求头',
      noHeaders: '未配置自定义请求头',
      maskedHeadersTip: '为了安全,请求头值已被掩码处理。修改将更新实际值。',
      cancel: '取消',
      save: '保存',
      confirm: '添加并授权',
      timeout: '超时时间',
      sseReadTimeout: 'SSE 读取超时时间',
      timeoutPlaceholder: '30',
    },
    delete: '删除 MCP 服务',
    deleteConfirmTitle: '你想要删除 {{mcp}} 吗?',

@@ -193,6 +193,16 @@ const translation = {
      confirm: '新增並授權',
      sseReadTimeout: 'SSE 讀取超時',
      timeout: '超時',
      headerValue: '標頭值',
      headerKey: '標頭名稱',
      noHeaders: '沒有配置自定義標頭',
      timeoutPlaceholder: '30',
      headerValuePlaceholder: '例如,Bearer token123',
      addHeader: '新增標頭',
      headerKeyPlaceholder: '例如,Authorization',
      headersTip: '與 MCP 伺服器請求一同發送的附加 HTTP 標頭',
      maskedHeadersTip: '標頭值已被遮罩以保障安全。更改將更新實際值。',
      headers: '標頭',
    },
    delete: '刪除 MCP 伺服器',
    deleteConfirmTitle: '您確定要刪除 {{mcp}} 嗎?',

@@ -470,11 +470,6 @@ export type createDocumentResponse = {
  documents: InitialDocumentDetail[]
}

export type PrecessRule = {
  mode: ProcessMode
  rules: Rules
}

export type FullDocumentDetail = SimpleDocumentDetail & {
  batch: string
  created_api_request_id: string

@@ -497,7 +492,7 @@ export type FullDocumentDetail = SimpleDocumentDetail & {
  doc_type?: DocType | null | 'others'
  doc_metadata?: DocMetadata | null
  segment_count: number
  dataset_process_rule: PrecessRule
  dataset_process_rule: ProcessRule
  document_process_rule: ProcessRule
  [key: string]: any
}

@@ -12603,7 +12603,7 @@ snapshots:

  '@vue/compiler-sfc@3.5.17':
    dependencies:
      '@babel/parser': 7.28.0
      '@babel/parser': 7.28.3
      '@vue/compiler-core': 3.5.17
      '@vue/compiler-dom': 3.5.17
      '@vue/compiler-ssr': 3.5.17

@@ -87,6 +87,7 @@ export const useCreateMCP = () => {
    icon_background?: string | null
    timeout?: number
    sse_read_timeout?: number
    headers?: Record<string, string>
  }) => {
    return post<ToolWithProvider>('workspaces/current/tool-provider/mcp', {
      body: {

@@ -113,6 +114,7 @@ export const useUpdateMCP = ({
    provider_id: string
    timeout?: number
    sse_read_timeout?: number
    headers?: Record<string, string>
  }) => {
    return put('workspaces/current/tool-provider/mcp', {
      body: {

@@ -0,0 +1,245 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "$id": "http://json-schema.org/draft-07/schema#",
  "title": "Core schema meta-schema",
  "definitions": {
    "schemaArray": {
      "type": "array",
      "minItems": 1,
      "items": {
        "$ref": "#"
      }
    },
    "nonNegativeInteger": {
      "type": "integer",
      "minimum": 0
    },
    "nonNegativeIntegerDefault0": {
      "allOf": [
        {
          "$ref": "#/definitions/nonNegativeInteger"
        },
        {
          "default": 0
        }
      ]
    },
    "simpleTypes": {
      "enum": [
        "array",
        "boolean",
        "integer",
        "null",
        "number",
        "object",
        "string"
      ]
    },
    "stringArray": {
      "type": "array",
      "items": {
        "type": "string"
      },
      "uniqueItems": true,
      "default": []
    }
  },
  "type": [
    "object",
    "boolean"
  ],
  "properties": {
    "$id": {
      "type": "string",
      "format": "uri-reference"
    },
    "$schema": {
      "type": "string",
      "format": "uri"
    },
    "$ref": {
      "type": "string",
      "format": "uri-reference"
    },
    "$comment": {
      "type": "string"
    },
    "title": {
      "type": "string"
    },
    "description": {
      "type": "string"
    },
    "default": true,
    "readOnly": {
      "type": "boolean",
      "default": false
    },
    "writeOnly": {
      "type": "boolean",
      "default": false
    },
    "examples": {
      "type": "array",
      "items": true
    },
    "multipleOf": {
      "type": "number",
      "exclusiveMinimum": 0
    },
    "maximum": {
      "type": "number"
    },
    "exclusiveMaximum": {
      "type": "number"
    },
    "minimum": {
      "type": "number"
    },
    "exclusiveMinimum": {
      "type": "number"
    },
    "maxLength": {
      "$ref": "#/definitions/nonNegativeInteger"
    },
    "minLength": {
      "$ref": "#/definitions/nonNegativeIntegerDefault0"
    },
    "pattern": {
      "type": "string",
      "format": "regex"
    },
    "additionalItems": {
      "$ref": "#"
    },
    "items": {
      "anyOf": [
        {
          "$ref": "#"
        },
        {
          "$ref": "#/definitions/schemaArray"
        }
      ],
      "default": true
    },
    "maxItems": {
      "$ref": "#/definitions/nonNegativeInteger"
    },
    "minItems": {
      "$ref": "#/definitions/nonNegativeIntegerDefault0"
    },
    "uniqueItems": {
      "type": "boolean",
      "default": false
    },
    "contains": {
      "$ref": "#"
    },
    "maxProperties": {
      "$ref": "#/definitions/nonNegativeInteger"
    },
    "minProperties": {
      "$ref": "#/definitions/nonNegativeIntegerDefault0"
    },
    "required": {
      "$ref": "#/definitions/stringArray"
    },
    "additionalProperties": {
      "$ref": "#"
    },
    "definitions": {
      "type": "object",
      "additionalProperties": {
        "$ref": "#"
      },
      "default": {}
    },
    "properties": {
      "type": "object",
      "additionalProperties": {
        "$ref": "#"
      },
      "default": {}
    },
    "patternProperties": {
      "type": "object",
      "additionalProperties": {
        "$ref": "#"
      },
      "propertyNames": {
        "format": "regex"
      },
      "default": {}
    },
    "dependencies": {
      "type": "object",
      "additionalProperties": {
        "anyOf": [
          {
            "$ref": "#"
          },
          {
            "$ref": "#/definitions/stringArray"
          }
        ]
      }
    },
    "propertyNames": {
      "$ref": "#"
    },
    "const": true,
    "enum": {
      "type": "array",
      "items": true,
      "minItems": 1,
      "uniqueItems": true
    },
    "type": {
      "anyOf": [
        {
          "$ref": "#/definitions/simpleTypes"
        },
        {
          "type": "array",
          "items": {
            "$ref": "#/definitions/simpleTypes"
          },
          "minItems": 1,
          "uniqueItems": true
        }
      ]
    },
    "format": {
      "type": "string"
    },
    "contentMediaType": {
      "type": "string"
    },
    "contentEncoding": {
      "type": "string"
    },
    "if": {
      "$ref": "#"
    },
    "then": {
      "$ref": "#"
    },
    "else": {
      "$ref": "#"
    },
    "allOf": {
      "$ref": "#/definitions/schemaArray"
    },
    "anyOf": {
      "$ref": "#/definitions/schemaArray"
    },
    "oneOf": {
      "$ref": "#/definitions/schemaArray"
    },
    "not": {
      "$ref": "#"
    }
  },
  "default": true
}

@@ -0,0 +1,27 @@
import type { Schema } from 'jsonschema'
import { Validator } from 'jsonschema'
import draft07Schema from './draft-07.json'

const validator = new Validator()

export const draft07Validator = (schema: any) => {
  return validator.validate(schema, draft07Schema as unknown as Schema)
}

export const forbidBooleanProperties = (schema: any, path: string[] = []): string[] => {
  let errors: string[] = []

  if (schema && typeof schema === 'object' && schema.properties) {
    for (const [key, val] of Object.entries(schema.properties)) {
      if (typeof val === 'boolean') {
        errors.push(
          `Error: Property '${[...path, key].join('.')}' must not be a boolean schema`,
        )
      }
      else if (typeof val === 'object') {
        errors = errors.concat(forbidBooleanProperties(val, [...path, key]))
      }
    }
  }
  return errors
}
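
For completeness, a quick illustration of the recursive walk above (the sample schema is made up): it flags any properties entry that is a bare boolean schema, at any nesting depth, and reports the dotted path.

// Example input/output for forbidBooleanProperties.
const sample = {
  type: 'object',
  properties: {
    name: { type: 'string' },
    flags: { type: 'object', properties: { hidden: true } }, // boolean schema
  },
}
console.log(forbidBooleanProperties(sample))
// => ["Error: Property 'flags.hidden' must not be a boolean schema"]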