Merge branch 'feat/collaboration' into deploy/dev

hjlarry 2025-10-13 16:43:23 +08:00
commit 064075ab5f
82 changed files with 966 additions and 500 deletions

View File

@ -1521,6 +1521,14 @@ def transform_datasource_credentials():
auth_count = 0
for firecrawl_tenant_credential in firecrawl_tenant_credentials:
auth_count += 1
if not firecrawl_tenant_credential.credentials:
click.echo(
click.style(
f"Skipping firecrawl credential for tenant {tenant_id} due to missing credentials.",
fg="yellow",
)
)
continue
# get credential api key
credentials_json = json.loads(firecrawl_tenant_credential.credentials)
api_key = credentials_json.get("config", {}).get("api_key")
@ -1576,6 +1584,14 @@ def transform_datasource_credentials():
auth_count = 0
for jina_tenant_credential in jina_tenant_credentials:
auth_count += 1
if not jina_tenant_credential.credentials:
click.echo(
click.style(
f"Skipping jina credential for tenant {tenant_id} due to missing credentials.",
fg="yellow",
)
)
continue
# get credential api key
credentials_json = json.loads(jina_tenant_credential.credentials)
api_key = credentials_json.get("config", {}).get("api_key")

View File

@ -1,5 +1,4 @@
import flask_restx
from flask_login import current_user
from flask_restx import Resource, fields, marshal_with
from flask_restx._http import HTTPStatus
from sqlalchemy import select
@ -8,7 +7,8 @@ from werkzeug.exceptions import Forbidden
from extensions.ext_database import db
from libs.helper import TimestampField
from libs.login import login_required
from libs.login import current_user, login_required
from models.account import Account
from models.dataset import Dataset
from models.model import ApiToken, App
@ -57,6 +57,8 @@ class BaseApiKeyListResource(Resource):
def get(self, resource_id):
assert self.resource_id_field is not None, "resource_id_field must be set"
resource_id = str(resource_id)
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
keys = db.session.scalars(
select(ApiToken).where(
@ -69,8 +71,10 @@ class BaseApiKeyListResource(Resource):
def post(self, resource_id):
assert self.resource_id_field is not None, "resource_id_field must be set"
resource_id = str(resource_id)
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
if not current_user.is_editor:
if not current_user.has_edit_permission:
raise Forbidden()
current_key_count = (
@ -108,6 +112,8 @@ class BaseApiKeyResource(Resource):
assert self.resource_id_field is not None, "resource_id_field must be set"
resource_id = str(resource_id)
api_key_id = str(api_key_id)
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
# The role of the current user in the ta table must be admin or owner
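Across these console controllers, the new assert isinstance(current_user, Account) / assert current_user.current_tenant_id is not None pairs narrow the type of the current_user proxy for the type checker before any tenant-scoped query runs. A minimal sketch of why both asserts are needed; the imports match the ones above, but the view function itself is hypothetical:

from libs.login import current_user
from models.account import Account

def tenant_scoped_view() -> str:
    # current_user is a broadly typed login proxy; isinstance() narrows it to
    # Account for the type checker and fails fast if an unexpected principal
    # reaches the view.
    assert isinstance(current_user, Account)
    # current_tenant_id is Optional on Account; this narrows it to str.
    assert current_user.current_tenant_id is not None
    return current_user.current_tenant_id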

View File

@ -1,9 +1,9 @@
from flask import request
from flask_login import current_user
from flask_restx import Resource, reqparse
from libs.helper import extract_remote_ip
from libs.login import login_required
from libs.login import current_user, login_required
from models.account import Account
from services.billing_service import BillingService
from .. import console_ns
@ -17,6 +17,8 @@ class ComplianceApi(Resource):
@account_initialization_required
@only_edition_cloud
def get(self):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
parser = reqparse.RequestParser()
parser.add_argument("doc_name", type=str, required=True, location="args")
args = parser.parse_args()

View File

@ -1,7 +1,5 @@
import logging
from typing import cast
from flask_login import current_user
from flask_restx import marshal, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
@ -21,6 +19,7 @@ from core.errors.error import (
)
from core.model_runtime.errors.invoke import InvokeError
from fields.hit_testing_fields import hit_testing_record_fields
from libs.login import current_user
from models.account import Account
from services.dataset_service import DatasetService
from services.hit_testing_service import HitTestingService
@ -31,6 +30,7 @@ logger = logging.getLogger(__name__)
class DatasetsHitTestingBase:
@staticmethod
def get_and_validate_dataset(dataset_id: str):
assert isinstance(current_user, Account)
dataset = DatasetService.get_dataset(dataset_id)
if dataset is None:
raise NotFound("Dataset not found.")
@ -57,11 +57,12 @@ class DatasetsHitTestingBase:
@staticmethod
def perform_hit_testing(dataset, args):
assert isinstance(current_user, Account)
try:
response = HitTestingService.retrieve(
dataset=dataset,
query=args["query"],
account=cast(Account, current_user),
account=current_user,
retrieval_model=args["retrieval_model"],
external_retrieval_model=args["external_retrieval_model"],
limit=10,

View File

@ -25,6 +25,8 @@ def installed_app_required(view: Callable[Concatenate[InstalledApp, P], R] | Non
def decorator(view: Callable[Concatenate[InstalledApp, P], R]):
@wraps(view)
def decorated(installed_app_id: str, *args: P.args, **kwargs: P.kwargs):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
installed_app = (
db.session.query(InstalledApp)
.where(
@ -57,6 +59,7 @@ def user_allowed_to_access_app(view: Callable[Concatenate[InstalledApp, P], R] |
def decorated(installed_app: InstalledApp, *args: P.args, **kwargs: P.kwargs):
feature = FeatureService.get_system_features()
if feature.webapp_auth.enabled:
assert isinstance(current_user, Account)
app_id = installed_app.app_id
app_code = AppService.get_app_code_by_id(app_id)
res = EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(

View File

@ -1,11 +1,11 @@
from flask_login import current_user
from flask_restx import Resource, fields, marshal_with, reqparse
from constants import HIDDEN_VALUE
from controllers.console import api, console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from fields.api_based_extension_fields import api_based_extension_fields
from libs.login import login_required
from libs.login import current_user, login_required
from models.account import Account
from models.api_based_extension import APIBasedExtension
from services.api_based_extension_service import APIBasedExtensionService
from services.code_based_extension_service import CodeBasedExtensionService
@ -47,6 +47,8 @@ class APIBasedExtensionAPI(Resource):
@account_initialization_required
@marshal_with(api_based_extension_fields)
def get(self):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
tenant_id = current_user.current_tenant_id
return APIBasedExtensionService.get_all_by_tenant_id(tenant_id)
@ -68,6 +70,8 @@ class APIBasedExtensionAPI(Resource):
@account_initialization_required
@marshal_with(api_based_extension_fields)
def post(self):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, location="json")
parser.add_argument("api_endpoint", type=str, required=True, location="json")
@ -95,6 +99,8 @@ class APIBasedExtensionDetailAPI(Resource):
@account_initialization_required
@marshal_with(api_based_extension_fields)
def get(self, id):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
api_based_extension_id = str(id)
tenant_id = current_user.current_tenant_id
@ -119,6 +125,8 @@ class APIBasedExtensionDetailAPI(Resource):
@account_initialization_required
@marshal_with(api_based_extension_fields)
def post(self, id):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
api_based_extension_id = str(id)
tenant_id = current_user.current_tenant_id
@ -146,6 +154,8 @@ class APIBasedExtensionDetailAPI(Resource):
@login_required
@account_initialization_required
def delete(self, id):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
api_based_extension_id = str(id)
tenant_id = current_user.current_tenant_id

View File

@ -1,7 +1,7 @@
from flask_login import current_user
from flask_restx import Resource, fields
from libs.login import login_required
from libs.login import current_user, login_required
from models.account import Account
from services.feature_service import FeatureService
from . import api, console_ns
@ -23,6 +23,8 @@ class FeatureApi(Resource):
@cloud_utm_record
def get(self):
"""Get feature configuration for current tenant"""
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
return FeatureService.get_features(current_user.current_tenant_id).model_dump()

View File

@ -1,8 +1,6 @@
import urllib.parse
from typing import cast
import httpx
from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
import services
@ -16,6 +14,7 @@ from core.file import helpers as file_helpers
from core.helper import ssrf_proxy
from extensions.ext_database import db
from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields
from libs.login import current_user
from models.account import Account
from services.file_service import FileService
@ -65,7 +64,8 @@ class RemoteFileUploadApi(Resource):
content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
try:
user = cast(Account, current_user)
assert isinstance(current_user, Account)
user = current_user
upload_file = FileService(db.engine).upload_file(
filename=file_info.filename,
content=content,

View File

@ -1,12 +1,12 @@
from flask import request
from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
from werkzeug.exceptions import Forbidden
from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from fields.tag_fields import dataset_tag_fields
from libs.login import login_required
from libs.login import current_user, login_required
from models.account import Account
from models.model import Tag
from services.tag_service import TagService
@ -24,6 +24,8 @@ class TagListApi(Resource):
@account_initialization_required
@marshal_with(dataset_tag_fields)
def get(self):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
tag_type = request.args.get("type", type=str, default="")
keyword = request.args.get("keyword", default=None, type=str)
tags = TagService.get_tags(tag_type, current_user.current_tenant_id, keyword)
@ -34,8 +36,10 @@ class TagListApi(Resource):
@login_required
@account_initialization_required
def post(self):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
# The role of the current user in the ta table must be admin, owner, or editor
if not (current_user.is_editor or current_user.is_dataset_editor):
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
parser = reqparse.RequestParser()
@ -59,9 +63,11 @@ class TagUpdateDeleteApi(Resource):
@login_required
@account_initialization_required
def patch(self, tag_id):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
tag_id = str(tag_id)
# The role of the current user in the ta table must be admin, owner, or editor
if not (current_user.is_editor or current_user.is_dataset_editor):
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
parser = reqparse.RequestParser()
@ -81,9 +87,11 @@ class TagUpdateDeleteApi(Resource):
@login_required
@account_initialization_required
def delete(self, tag_id):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
tag_id = str(tag_id)
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
if not current_user.has_edit_permission:
raise Forbidden()
TagService.delete_tag(tag_id)
@ -97,8 +105,10 @@ class TagBindingCreateApi(Resource):
@login_required
@account_initialization_required
def post(self):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
if not (current_user.is_editor or current_user.is_dataset_editor):
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
parser = reqparse.RequestParser()
@ -123,8 +133,10 @@ class TagBindingDeleteApi(Resource):
@login_required
@account_initialization_required
def post(self):
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
if not (current_user.is_editor or current_user.is_dataset_editor):
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
parser = reqparse.RequestParser()

View File

@ -1,10 +1,10 @@
from flask_login import current_user
from flask_restx import Resource, fields
from controllers.console import api, console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from core.model_runtime.utils.encoders import jsonable_encoder
from libs.login import login_required
from libs.login import current_user, login_required
from models.account import Account
from services.agent_service import AgentService
@ -21,7 +21,9 @@ class AgentProviderListApi(Resource):
@login_required
@account_initialization_required
def get(self):
assert isinstance(current_user, Account)
user = current_user
assert user.current_tenant_id is not None
user_id = user.id
tenant_id = user.current_tenant_id
@ -43,7 +45,9 @@ class AgentProviderApi(Resource):
@login_required
@account_initialization_required
def get(self, provider_name: str):
assert isinstance(current_user, Account)
user = current_user
assert user.current_tenant_id is not None
user_id = user.id
tenant_id = user.current_tenant_id
return jsonable_encoder(AgentService.get_agent_provider(user_id, tenant_id, provider_name))

View File

@ -1,4 +1,3 @@
from flask_login import current_user
from flask_restx import Resource, fields, reqparse
from werkzeug.exceptions import Forbidden
@ -6,10 +5,18 @@ from controllers.console import api, console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.impl.exc import PluginPermissionDeniedError
from libs.login import login_required
from libs.login import current_user, login_required
from models.account import Account
from services.plugin.endpoint_service import EndpointService
def _current_account_with_tenant() -> tuple[Account, str]:
assert isinstance(current_user, Account)
tenant_id = current_user.current_tenant_id
assert tenant_id is not None
return current_user, tenant_id
@console_ns.route("/workspaces/current/endpoints/create")
class EndpointCreateApi(Resource):
@api.doc("create_endpoint")
@ -34,7 +41,7 @@ class EndpointCreateApi(Resource):
@login_required
@account_initialization_required
def post(self):
user = current_user
user, tenant_id = _current_account_with_tenant()
if not user.is_admin_or_owner:
raise Forbidden()
@ -51,7 +58,7 @@ class EndpointCreateApi(Resource):
try:
return {
"success": EndpointService.create_endpoint(
tenant_id=user.current_tenant_id,
tenant_id=tenant_id,
user_id=user.id,
plugin_unique_identifier=plugin_unique_identifier,
name=name,
@ -80,7 +87,7 @@ class EndpointListApi(Resource):
@login_required
@account_initialization_required
def get(self):
user = current_user
user, tenant_id = _current_account_with_tenant()
parser = reqparse.RequestParser()
parser.add_argument("page", type=int, required=True, location="args")
@ -93,7 +100,7 @@ class EndpointListApi(Resource):
return jsonable_encoder(
{
"endpoints": EndpointService.list_endpoints(
tenant_id=user.current_tenant_id,
tenant_id=tenant_id,
user_id=user.id,
page=page,
page_size=page_size,
@ -123,7 +130,7 @@ class EndpointListForSinglePluginApi(Resource):
@login_required
@account_initialization_required
def get(self):
user = current_user
user, tenant_id = _current_account_with_tenant()
parser = reqparse.RequestParser()
parser.add_argument("page", type=int, required=True, location="args")
@ -138,7 +145,7 @@ class EndpointListForSinglePluginApi(Resource):
return jsonable_encoder(
{
"endpoints": EndpointService.list_endpoints_for_single_plugin(
tenant_id=user.current_tenant_id,
tenant_id=tenant_id,
user_id=user.id,
plugin_id=plugin_id,
page=page,
@ -165,7 +172,7 @@ class EndpointDeleteApi(Resource):
@login_required
@account_initialization_required
def post(self):
user = current_user
user, tenant_id = _current_account_with_tenant()
parser = reqparse.RequestParser()
parser.add_argument("endpoint_id", type=str, required=True)
@ -177,9 +184,7 @@ class EndpointDeleteApi(Resource):
endpoint_id = args["endpoint_id"]
return {
"success": EndpointService.delete_endpoint(
tenant_id=user.current_tenant_id, user_id=user.id, endpoint_id=endpoint_id
)
"success": EndpointService.delete_endpoint(tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id)
}
@ -207,7 +212,7 @@ class EndpointUpdateApi(Resource):
@login_required
@account_initialization_required
def post(self):
user = current_user
user, tenant_id = _current_account_with_tenant()
parser = reqparse.RequestParser()
parser.add_argument("endpoint_id", type=str, required=True)
@ -224,7 +229,7 @@ class EndpointUpdateApi(Resource):
return {
"success": EndpointService.update_endpoint(
tenant_id=user.current_tenant_id,
tenant_id=tenant_id,
user_id=user.id,
endpoint_id=endpoint_id,
name=name,
@ -250,7 +255,7 @@ class EndpointEnableApi(Resource):
@login_required
@account_initialization_required
def post(self):
user = current_user
user, tenant_id = _current_account_with_tenant()
parser = reqparse.RequestParser()
parser.add_argument("endpoint_id", type=str, required=True)
@ -262,9 +267,7 @@ class EndpointEnableApi(Resource):
raise Forbidden()
return {
"success": EndpointService.enable_endpoint(
tenant_id=user.current_tenant_id, user_id=user.id, endpoint_id=endpoint_id
)
"success": EndpointService.enable_endpoint(tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id)
}
@ -285,7 +288,7 @@ class EndpointDisableApi(Resource):
@login_required
@account_initialization_required
def post(self):
user = current_user
user, tenant_id = _current_account_with_tenant()
parser = reqparse.RequestParser()
parser.add_argument("endpoint_id", type=str, required=True)
@ -297,7 +300,5 @@ class EndpointDisableApi(Resource):
raise Forbidden()
return {
"success": EndpointService.disable_endpoint(
tenant_id=user.current_tenant_id, user_id=user.id, endpoint_id=endpoint_id
)
"success": EndpointService.disable_endpoint(tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id)
}

View File

@ -1,7 +1,6 @@
from urllib import parse
from flask import abort, request
from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
import services
@ -26,7 +25,7 @@ from controllers.console.wraps import (
from extensions.ext_database import db
from fields.member_fields import account_with_role_list_fields
from libs.helper import extract_remote_ip
from libs.login import login_required
from libs.login import current_user, login_required
from models.account import Account, TenantAccountRole
from services.account_service import AccountService, RegisterService, TenantService
from services.errors.account import AccountAlreadyInTenantError

View File

@ -1,7 +1,6 @@
import logging
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse
from sqlalchemy import select
from werkzeug.exceptions import Unauthorized
@ -24,7 +23,7 @@ from controllers.console.wraps import (
)
from extensions.ext_database import db
from libs.helper import TimestampField
from libs.login import login_required
from libs.login import current_user, login_required
from models.account import Account, Tenant, TenantStatus
from services.account_service import TenantService
from services.feature_service import FeatureService

View File

@ -7,13 +7,13 @@ from functools import wraps
from typing import ParamSpec, TypeVar
from flask import abort, request
from flask_login import current_user
from configs import dify_config
from controllers.console.workspace.error import AccountNotInitializedError
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.account import AccountStatus
from libs.login import current_user
from models.account import Account, AccountStatus
from models.dataset import RateLimitLog
from models.model import DifySetup
from services.feature_service import FeatureService, LicenseStatus
@ -25,11 +25,16 @@ P = ParamSpec("P")
R = TypeVar("R")
def _current_account() -> Account:
assert isinstance(current_user, Account)
return current_user
def account_initialization_required(view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
# check account initialization
account = current_user
account = _current_account()
if account.status == AccountStatus.UNINITIALIZED:
raise AccountNotInitializedError()
@ -75,7 +80,9 @@ def only_edition_self_hosted(view: Callable[P, R]):
def cloud_edition_billing_enabled(view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
features = FeatureService.get_features(current_user.current_tenant_id)
account = _current_account()
assert account.current_tenant_id is not None
features = FeatureService.get_features(account.current_tenant_id)
if not features.billing.enabled:
abort(403, "Billing feature is not enabled.")
return view(*args, **kwargs)
@ -87,7 +94,10 @@ def cloud_edition_billing_resource_check(resource: str):
def interceptor(view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
features = FeatureService.get_features(current_user.current_tenant_id)
account = _current_account()
assert account.current_tenant_id is not None
tenant_id = account.current_tenant_id
features = FeatureService.get_features(tenant_id)
if features.billing.enabled:
members = features.members
apps = features.apps
@ -128,7 +138,9 @@ def cloud_edition_billing_knowledge_limit_check(resource: str):
def interceptor(view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
features = FeatureService.get_features(current_user.current_tenant_id)
account = _current_account()
assert account.current_tenant_id is not None
features = FeatureService.get_features(account.current_tenant_id)
if features.billing.enabled:
if resource == "add_segment":
if features.billing.subscription.plan == "sandbox":
@ -151,10 +163,13 @@ def cloud_edition_billing_rate_limit_check(resource: str):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
if resource == "knowledge":
knowledge_rate_limit = FeatureService.get_knowledge_rate_limit(current_user.current_tenant_id)
account = _current_account()
assert account.current_tenant_id is not None
tenant_id = account.current_tenant_id
knowledge_rate_limit = FeatureService.get_knowledge_rate_limit(tenant_id)
if knowledge_rate_limit.enabled:
current_time = int(time.time() * 1000)
key = f"rate_limit_{current_user.current_tenant_id}"
key = f"rate_limit_{tenant_id}"
redis_client.zadd(key, {current_time: current_time})
@ -165,7 +180,7 @@ def cloud_edition_billing_rate_limit_check(resource: str):
if request_count > knowledge_rate_limit.limit:
# add ratelimit record
rate_limit_log = RateLimitLog(
tenant_id=current_user.current_tenant_id,
tenant_id=tenant_id,
subscription_plan=knowledge_rate_limit.subscription_plan,
operation="knowledge",
)
@ -185,14 +200,17 @@ def cloud_utm_record(view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
with contextlib.suppress(Exception):
features = FeatureService.get_features(current_user.current_tenant_id)
account = _current_account()
assert account.current_tenant_id is not None
tenant_id = account.current_tenant_id
features = FeatureService.get_features(tenant_id)
if features.billing.enabled:
utm_info = request.cookies.get("utm_info")
if utm_info:
utm_info_dict: dict = json.loads(utm_info)
OperationService.record_utm(current_user.current_tenant_id, utm_info_dict)
OperationService.record_utm(tenant_id, utm_info_dict)
return view(*args, **kwargs)
@ -271,7 +289,9 @@ def enable_change_email(view: Callable[P, R]):
def is_allow_transfer_owner(view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
features = FeatureService.get_features(current_user.current_tenant_id)
account = _current_account()
assert account.current_tenant_id is not None
features = FeatureService.get_features(account.current_tenant_id)
if features.is_allow_transfer_workspace:
return view(*args, **kwargs)
@ -284,7 +304,9 @@ def is_allow_transfer_owner(view: Callable[P, R]):
def knowledge_pipeline_publish_enabled(view):
@wraps(view)
def decorated(*args, **kwargs):
features = FeatureService.get_features(current_user.current_tenant_id)
account = _current_account()
assert account.current_tenant_id is not None
features = FeatureService.get_features(account.current_tenant_id)
if features.knowledge_pipeline.publish_enabled:
return view(*args, **kwargs)
abort(403)
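The knowledge rate limit above records each request in a per-tenant Redis sorted set via redis_client.zadd(key, {current_time: current_time}) and then compares request_count against the limit. A minimal sketch of that sliding-window pattern with redis-py; the 60-second window and the trimming step are assumptions, since they fall outside the shown hunk:

import time

import redis

redis_client = redis.Redis()

def within_knowledge_rate_limit(tenant_id: str, limit: int, window_ms: int = 60_000) -> bool:
    # Sorted-set sliding window: member and score are both the millisecond timestamp.
    key = f"rate_limit_{tenant_id}"
    current_time = int(time.time() * 1000)
    redis_client.zadd(key, {current_time: current_time})
    # Drop entries older than the window, then count what remains (assumed steps).
    redis_client.zremrangebyscore(key, 0, current_time - window_ms)
    request_count = redis_client.zcard(key)
    return request_count <= limit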

View File

@ -1,5 +1,5 @@
import enum
from enum import Enum
from enum import StrEnum
from typing import Any
from pydantic import BaseModel, Field, ValidationInfo, field_validator
@ -218,7 +218,7 @@ class DatasourceLabel(BaseModel):
icon: str = Field(..., description="The icon of the tool")
class DatasourceInvokeFrom(Enum):
class DatasourceInvokeFrom(StrEnum):
"""
Enum class for datasource invoke
"""

View File

@ -1,5 +1,5 @@
from collections.abc import Sequence
from enum import Enum, StrEnum, auto
from enum import StrEnum, auto
from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator
@ -7,7 +7,7 @@ from core.model_runtime.entities.common_entities import I18nObject
from core.model_runtime.entities.model_entities import AIModelEntity, ModelType
class ConfigurateMethod(Enum):
class ConfigurateMethod(StrEnum):
"""
Enum class for configurate method of provider model.
"""

View File

@ -255,7 +255,7 @@ class BasePluginClient:
except Exception:
raise PluginDaemonInnerError(code=rep.code, message=rep.message)
logger.error("Error in stream reponse for plugin %s", rep.__dict__)
logger.error("Error in stream response for plugin %s", rep.__dict__)
self._handle_plugin_daemon_error(error.error_type, error.message)
raise ValueError(f"plugin daemon: {rep.message}, code: {rep.code}")
if rep.data is None:

View File

@ -34,7 +34,7 @@ class RetrievalService:
@classmethod
def retrieve(
cls,
retrieval_method: str,
retrieval_method: RetrievalMethod,
dataset_id: str,
query: str,
top_k: int,
@ -56,7 +56,7 @@ class RetrievalService:
# Optimize multithreading with thread pools
with ThreadPoolExecutor(max_workers=dify_config.RETRIEVAL_SERVICE_EXECUTORS) as executor: # type: ignore
futures = []
if retrieval_method == "keyword_search":
if retrieval_method == RetrievalMethod.KEYWORD_SEARCH:
futures.append(
executor.submit(
cls.keyword_search,
@ -220,7 +220,7 @@ class RetrievalService:
score_threshold: float | None,
reranking_model: dict | None,
all_documents: list,
retrieval_method: str,
retrieval_method: RetrievalMethod,
exceptions: list,
document_ids_filter: list[str] | None = None,
):

View File

@ -1,11 +1,11 @@
from collections.abc import Mapping
from enum import Enum
from enum import StrEnum
from typing import Any
from pydantic import BaseModel, Field
class DatasourceStreamEvent(Enum):
class DatasourceStreamEvent(StrEnum):
"""
Datasource Stream event
"""

View File

@ -1,6 +1,7 @@
import logging
import os
from configs import dify_config
from core.rag.extractor.extractor_base import BaseExtractor
from core.rag.models.document import Document
@ -49,7 +50,8 @@ class UnstructuredWordExtractor(BaseExtractor):
from unstructured.chunking.title import chunk_by_title
chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000)
max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters)
documents = []
for chunk in chunks:
text = chunk.text.strip()
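This and the following unstructured-based extractors all apply the same edit: the hard-coded 2000-character chunk size becomes dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH. A minimal sketch of the shared call, assuming elements came from the matching partition_* function:

from unstructured.chunking.title import chunk_by_title

from configs import dify_config

def chunk_elements(elements):
    # Chunk size now follows the configurable indexing limit instead of a fixed 2000.
    max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
    chunks = chunk_by_title(
        elements,
        max_characters=max_characters,
        combine_text_under_n_chars=max_characters,
    )
    return [chunk.text.strip() for chunk in chunks if chunk.text.strip()]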

View File

@ -4,6 +4,7 @@ import logging
from bs4 import BeautifulSoup
from configs import dify_config
from core.rag.extractor.extractor_base import BaseExtractor
from core.rag.models.document import Document
@ -46,7 +47,8 @@ class UnstructuredEmailExtractor(BaseExtractor):
from unstructured.chunking.title import chunk_by_title
chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000)
max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters)
documents = []
for chunk in chunks:
text = chunk.text.strip()

View File

@ -2,6 +2,7 @@ import logging
import pypandoc # type: ignore
from configs import dify_config
from core.rag.extractor.extractor_base import BaseExtractor
from core.rag.models.document import Document
@ -40,7 +41,8 @@ class UnstructuredEpubExtractor(BaseExtractor):
from unstructured.chunking.title import chunk_by_title
chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000)
max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters)
documents = []
for chunk in chunks:
text = chunk.text.strip()

View File

@ -1,5 +1,6 @@
import logging
from configs import dify_config
from core.rag.extractor.extractor_base import BaseExtractor
from core.rag.models.document import Document
@ -32,7 +33,8 @@ class UnstructuredMarkdownExtractor(BaseExtractor):
elements = partition_md(filename=self._file_path)
from unstructured.chunking.title import chunk_by_title
chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000)
max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters)
documents = []
for chunk in chunks:
text = chunk.text.strip()

View File

@ -1,5 +1,6 @@
import logging
from configs import dify_config
from core.rag.extractor.extractor_base import BaseExtractor
from core.rag.models.document import Document
@ -31,7 +32,8 @@ class UnstructuredMsgExtractor(BaseExtractor):
elements = partition_msg(filename=self._file_path)
from unstructured.chunking.title import chunk_by_title
chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000)
max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters)
documents = []
for chunk in chunks:
text = chunk.text.strip()

View File

@ -1,5 +1,6 @@
import logging
from configs import dify_config
from core.rag.extractor.extractor_base import BaseExtractor
from core.rag.models.document import Document
@ -32,7 +33,8 @@ class UnstructuredXmlExtractor(BaseExtractor):
from unstructured.chunking.title import chunk_by_title
chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000)
max_characters = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
chunks = chunk_by_title(elements, max_characters=max_characters, combine_text_under_n_chars=max_characters)
documents = []
for chunk in chunks:
text = chunk.text.strip()

View File

@ -7,6 +7,7 @@ from typing import TYPE_CHECKING, Any, Optional
from configs import dify_config
from core.rag.extractor.entity.extract_setting import ExtractSetting
from core.rag.models.document import Document
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from core.rag.splitter.fixed_text_splitter import (
EnhanceRecursiveCharacterTextSplitter,
FixedRecursiveCharacterTextSplitter,
@ -49,7 +50,7 @@ class BaseIndexProcessor(ABC):
@abstractmethod
def retrieve(
self,
retrieval_method: str,
retrieval_method: RetrievalMethod,
query: str,
dataset: Dataset,
top_k: int,

View File

@ -14,6 +14,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor
from core.rag.index_processor.constant.index_type import IndexType
from core.rag.index_processor.index_processor_base import BaseIndexProcessor
from core.rag.models.document import Document
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from core.tools.utils.text_processing_utils import remove_leading_symbols
from libs import helper
from models.dataset import Dataset, DatasetProcessRule
@ -106,7 +107,7 @@ class ParagraphIndexProcessor(BaseIndexProcessor):
def retrieve(
self,
retrieval_method: str,
retrieval_method: RetrievalMethod,
query: str,
dataset: Dataset,
top_k: int,

View File

@ -16,6 +16,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor
from core.rag.index_processor.constant.index_type import IndexType
from core.rag.index_processor.index_processor_base import BaseIndexProcessor
from core.rag.models.document import ChildDocument, Document, ParentChildStructureChunk
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from extensions.ext_database import db
from libs import helper
from models.dataset import ChildChunk, Dataset, DatasetProcessRule, DocumentSegment
@ -161,7 +162,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor):
def retrieve(
self,
retrieval_method: str,
retrieval_method: RetrievalMethod,
query: str,
dataset: Dataset,
top_k: int,

View File

@ -21,6 +21,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor
from core.rag.index_processor.constant.index_type import IndexType
from core.rag.index_processor.index_processor_base import BaseIndexProcessor
from core.rag.models.document import Document, QAStructureChunk
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from core.tools.utils.text_processing_utils import remove_leading_symbols
from libs import helper
from models.dataset import Dataset
@ -141,7 +142,7 @@ class QAIndexProcessor(BaseIndexProcessor):
def retrieve(
self,
retrieval_method: str,
retrieval_method: RetrievalMethod,
query: str,
dataset: Dataset,
top_k: int,

View File

@ -364,7 +364,7 @@ class DatasetRetrieval:
top_k = retrieval_model_config["top_k"]
# get retrieval method
if dataset.indexing_technique == "economy":
retrieval_method = "keyword_search"
retrieval_method = RetrievalMethod.KEYWORD_SEARCH
else:
retrieval_method = retrieval_model_config["search_method"]
# get reranking model
@ -623,7 +623,7 @@ class DatasetRetrieval:
if dataset.indexing_technique == "economy":
# use keyword table query
documents = RetrievalService.retrieve(
retrieval_method="keyword_search",
retrieval_method=RetrievalMethod.KEYWORD_SEARCH,
dataset_id=dataset.id,
query=query,
top_k=top_k,

View File

@ -1,7 +1,7 @@
from enum import Enum
from enum import StrEnum
class RetrievalMethod(Enum):
class RetrievalMethod(StrEnum):
SEMANTIC_SEARCH = "semantic_search"
FULL_TEXT_SEARCH = "full_text_search"
HYBRID_SEARCH = "hybrid_search"
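Several enums in this change (DatasourceInvokeFrom, ConfigurateMethod, DatasourceStreamEvent, NodeState, LimitType, RetrievalMethod) move from Enum to StrEnum, and call sites swap string literals like "keyword_search" for RetrievalMethod.KEYWORD_SEARCH. StrEnum members (Python 3.11+) are str subclasses, so equality checks, f-strings, and JSON serialization all see the underlying value, which keeps any remaining string-based comparisons working. A small illustration; the class names here are hypothetical:

from enum import Enum, StrEnum

class OldMethod(Enum):
    KEYWORD_SEARCH = "keyword_search"

class NewMethod(StrEnum):
    KEYWORD_SEARCH = "keyword_search"

# A plain Enum member is not equal to its value...
assert OldMethod.KEYWORD_SEARCH != "keyword_search"
# ...while a StrEnum member is a str, so comparisons and formatting use the value.
assert NewMethod.KEYWORD_SEARCH == "keyword_search"
assert f"{NewMethod.KEYWORD_SEARCH}" == "keyword_search"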

View File

@ -76,7 +76,8 @@ class MCPToolProviderController(ToolProviderController):
)
for remote_mcp_tool in remote_mcp_tools
]
if not db_provider.icon:
raise ValueError("Database provider icon is required")
return cls(
entity=ToolProviderEntityWithPlugin(
identity=ToolProviderIdentity(

View File

@ -172,7 +172,7 @@ class DatasetMultiRetrieverTool(DatasetRetrieverBaseTool):
if dataset.indexing_technique == "economy":
# use keyword table query
documents = RetrievalService.retrieve(
retrieval_method="keyword_search",
retrieval_method=RetrievalMethod.KEYWORD_SEARCH,
dataset_id=dataset.id,
query=query,
top_k=retrieval_model.get("top_k") or 4,

View File

@ -130,7 +130,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
if dataset.indexing_technique == "economy":
# use keyword table query
documents = RetrievalService.retrieve(
retrieval_method="keyword_search",
retrieval_method=RetrievalMethod.KEYWORD_SEARCH,
dataset_id=dataset.id,
query=query,
top_k=self.top_k,

View File

@ -5,6 +5,7 @@ Therefore, a model manager is needed to list/invoke/validate models.
"""
import json
from decimal import Decimal
from typing import cast
from core.model_manager import ModelManager
@ -118,10 +119,10 @@ class ModelInvocationUtils:
model_response="",
prompt_tokens=prompt_tokens,
answer_tokens=0,
answer_unit_price=0,
answer_price_unit=0,
answer_unit_price=Decimal(),
answer_price_unit=Decimal(),
provider_response_latency=0,
total_price=0,
total_price=Decimal(),
currency="USD",
)
@ -152,7 +153,7 @@ class ModelInvocationUtils:
raise InvokeModelError(f"Invoke error: {e}")
# update tool model invoke
tool_model_invoke.model_response = response.message.content
tool_model_invoke.model_response = str(response.message.content)
if response.usage:
tool_model_invoke.answer_tokens = response.usage.completion_tokens
tool_model_invoke.answer_unit_price = response.usage.completion_unit_price
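The defaults for answer_unit_price, answer_price_unit, and total_price switch from the integer 0 to Decimal(), matching the Numeric columns they feed. Decimal() is the exact zero value, so behaviour is unchanged while the types line up; a two-line check:

from decimal import Decimal

# Decimal() is zero and compares equal to 0, so it is a drop-in for the old default.
assert Decimal() == Decimal("0") == 0
# Decimal arithmetic stays exact where binary floats would drift.
assert Decimal("0.1") + Decimal("0.2") == Decimal("0.3")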

View File

@ -1,7 +1,7 @@
from enum import Enum, StrEnum
from enum import StrEnum
class NodeState(Enum):
class NodeState(StrEnum):
"""State of a node or edge during workflow execution."""
UNKNOWN = "unknown"

View File

@ -10,7 +10,7 @@ When limits are exceeded, the layer automatically aborts execution.
import logging
import time
from enum import Enum
from enum import StrEnum
from typing import final
from typing_extensions import override
@ -24,7 +24,7 @@ from core.workflow.graph_events import (
from core.workflow.graph_events.node import NodeRunFailedEvent, NodeRunSucceededEvent
class LimitType(Enum):
class LimitType(StrEnum):
"""Types of execution limits that can be exceeded."""
STEP_LIMIT = "step_limit"

View File

@ -2,6 +2,7 @@ from typing import Literal, Union
from pydantic import BaseModel
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from core.workflow.nodes.base import BaseNodeData
@ -63,7 +64,7 @@ class RetrievalSetting(BaseModel):
Retrieval Setting.
"""
search_method: Literal["semantic_search", "keyword_search", "full_text_search", "hybrid_search"]
search_method: RetrievalMethod
top_k: int
score_threshold: float | None = 0.5
score_threshold_enabled: bool = False
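RetrievalSetting.search_method is now typed as RetrievalMethod instead of a Literal of raw strings. Because RetrievalMethod is a StrEnum and pydantic coerces matching string values into enum members, existing node configs that store plain strings should still validate. A minimal sketch; the RetrievalMethod subset and the top_k default shown are illustrative:

from enum import StrEnum

from pydantic import BaseModel

class RetrievalMethod(StrEnum):
    SEMANTIC_SEARCH = "semantic_search"
    KEYWORD_SEARCH = "keyword_search"

class RetrievalSetting(BaseModel):
    search_method: RetrievalMethod
    top_k: int = 4

# A plain string from a stored workflow config is coerced into the enum on validation.
setting = RetrievalSetting.model_validate({"search_method": "keyword_search"})
assert setting.search_method is RetrievalMethod.KEYWORD_SEARCH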

View File

@ -37,10 +37,11 @@ config.set_main_option('sqlalchemy.url', get_engine_url())
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
from models.base import Base
from models.base import TypeBase
def get_metadata():
return Base.metadata
return TypeBase.metadata
def include_object(object, name, type_, reflected, compare_to):
if type_ == "foreign_key_constraint":

View File

@ -6,12 +6,12 @@ from sqlalchemy import DateTime, String, func
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import Mapped, mapped_column
from models.base import Base
from models.base import TypeBase
from .types import StringUUID
class DataSourceOauthBinding(Base):
class DataSourceOauthBinding(TypeBase):
__tablename__ = "data_source_oauth_bindings"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="source_binding_pkey"),
@ -19,17 +19,25 @@ class DataSourceOauthBinding(Base):
sa.Index("source_info_idx", "source_info", postgresql_using="gin"),
)
id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
access_token: Mapped[str] = mapped_column(String(255), nullable=False)
provider: Mapped[str] = mapped_column(String(255), nullable=False)
source_info = mapped_column(JSONB, nullable=False)
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
disabled: Mapped[bool | None] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false"))
source_info: Mapped[dict] = mapped_column(JSONB, nullable=False)
created_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
updated_at: Mapped[datetime] = mapped_column(
DateTime,
nullable=False,
server_default=func.current_timestamp(),
onupdate=func.current_timestamp(),
init=False,
)
disabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false"), default=False)
class DataSourceApiKeyAuthBinding(Base):
class DataSourceApiKeyAuthBinding(TypeBase):
__tablename__ = "data_source_api_key_auth_bindings"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="data_source_api_key_auth_binding_pkey"),
@ -37,14 +45,22 @@ class DataSourceApiKeyAuthBinding(Base):
sa.Index("data_source_api_key_auth_binding_provider_idx", "provider"),
)
id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
category: Mapped[str] = mapped_column(String(255), nullable=False)
provider: Mapped[str] = mapped_column(String(255), nullable=False)
credentials = mapped_column(sa.Text, nullable=True) # JSON
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
disabled: Mapped[bool | None] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false"))
credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) # JSON
created_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
updated_at: Mapped[datetime] = mapped_column(
DateTime,
nullable=False,
server_default=func.current_timestamp(),
onupdate=func.current_timestamp(),
init=False,
)
disabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false"), default=False)
def to_dict(self):
return {
@ -52,7 +68,7 @@ class DataSourceApiKeyAuthBinding(Base):
"tenant_id": self.tenant_id,
"category": self.category,
"provider": self.provider,
"credentials": json.loads(self.credentials),
"credentials": json.loads(self.credentials) if self.credentials else None,
"created_at": self.created_at.timestamp(),
"updated_at": self.updated_at.timestamp(),
"disabled": self.disabled,

View File

@ -6,41 +6,43 @@ from sqlalchemy import DateTime, String
from sqlalchemy.orm import Mapped, mapped_column
from libs.datetime_utils import naive_utc_now
from models.base import Base
from models.base import TypeBase
class CeleryTask(Base):
class CeleryTask(TypeBase):
"""Task result/status."""
__tablename__ = "celery_taskmeta"
id = mapped_column(sa.Integer, sa.Sequence("task_id_sequence"), primary_key=True, autoincrement=True)
task_id = mapped_column(String(155), unique=True)
status = mapped_column(String(50), default=states.PENDING)
result = mapped_column(sa.PickleType, nullable=True)
date_done = mapped_column(
id: Mapped[int] = mapped_column(
sa.Integer, sa.Sequence("task_id_sequence"), primary_key=True, autoincrement=True, init=False
)
task_id: Mapped[str] = mapped_column(String(155), unique=True)
status: Mapped[str] = mapped_column(String(50), default=states.PENDING)
result: Mapped[bytes | None] = mapped_column(sa.PickleType, nullable=True, default=None)
date_done: Mapped[datetime | None] = mapped_column(
DateTime,
default=lambda: naive_utc_now(),
onupdate=lambda: naive_utc_now(),
default=naive_utc_now,
onupdate=naive_utc_now,
nullable=True,
)
traceback = mapped_column(sa.Text, nullable=True)
name = mapped_column(String(155), nullable=True)
args = mapped_column(sa.LargeBinary, nullable=True)
kwargs = mapped_column(sa.LargeBinary, nullable=True)
worker = mapped_column(String(155), nullable=True)
retries: Mapped[int | None] = mapped_column(sa.Integer, nullable=True)
queue = mapped_column(String(155), nullable=True)
traceback: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None)
name: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None)
args: Mapped[bytes | None] = mapped_column(sa.LargeBinary, nullable=True, default=None)
kwargs: Mapped[bytes | None] = mapped_column(sa.LargeBinary, nullable=True, default=None)
worker: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None)
retries: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None)
queue: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None)
class CeleryTaskSet(Base):
class CeleryTaskSet(TypeBase):
"""TaskSet result."""
__tablename__ = "celery_tasksetmeta"
id: Mapped[int] = mapped_column(
sa.Integer, sa.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True
sa.Integer, sa.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True, init=False
)
taskset_id = mapped_column(String(155), unique=True)
result = mapped_column(sa.PickleType, nullable=True)
date_done: Mapped[datetime | None] = mapped_column(DateTime, default=lambda: naive_utc_now(), nullable=True)
taskset_id: Mapped[str] = mapped_column(String(155), unique=True)
result: Mapped[bytes | None] = mapped_column(sa.PickleType, nullable=True, default=None)
date_done: Mapped[datetime | None] = mapped_column(DateTime, default=naive_utc_now, nullable=True)

View File

@ -1,6 +1,7 @@
import json
from collections.abc import Mapping
from datetime import datetime
from decimal import Decimal
from typing import TYPE_CHECKING, Any, cast
from urllib.parse import urlparse
@ -13,7 +14,7 @@ from core.helper import encrypter
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_bundle import ApiToolBundle
from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration
from models.base import Base, TypeBase
from models.base import TypeBase
from .engine import db
from .model import Account, App, Tenant
@ -42,28 +43,28 @@ class ToolOAuthSystemClient(TypeBase):
# tenant level tool oauth client params (client_id, client_secret, etc.)
class ToolOAuthTenantClient(Base):
class ToolOAuthTenantClient(TypeBase):
__tablename__ = "tool_oauth_tenant_clients"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="tool_oauth_tenant_client_pkey"),
sa.UniqueConstraint("tenant_id", "plugin_id", "provider", name="unique_tool_oauth_tenant_client"),
)
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
# tenant id
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
plugin_id: Mapped[str] = mapped_column(String(512), nullable=False)
provider: Mapped[str] = mapped_column(String(255), nullable=False)
enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true"))
enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true"), init=False)
# oauth params of the tool provider
encrypted_oauth_params: Mapped[str] = mapped_column(sa.Text, nullable=False)
encrypted_oauth_params: Mapped[str] = mapped_column(sa.Text, nullable=False, init=False)
@property
def oauth_params(self) -> dict[str, Any]:
return cast(dict[str, Any], json.loads(self.encrypted_oauth_params or "{}"))
class BuiltinToolProvider(Base):
class BuiltinToolProvider(TypeBase):
"""
This table stores the tool provider information for built-in tools for each tenant.
"""
@ -75,37 +76,45 @@ class BuiltinToolProvider(Base):
)
# id of the tool provider
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
name: Mapped[str] = mapped_column(
String(256), nullable=False, server_default=sa.text("'API KEY 1'::character varying")
String(256),
nullable=False,
server_default=sa.text("'API KEY 1'::character varying"),
)
# id of the tenant
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=True)
tenant_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
# who created this tool provider
user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# name of the tool provider
provider: Mapped[str] = mapped_column(String(256), nullable=False)
# credential of the tool provider
encrypted_credentials: Mapped[str] = mapped_column(sa.Text, nullable=True)
encrypted_credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None)
created_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)")
sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False
)
updated_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)")
sa.DateTime,
nullable=False,
server_default=sa.text("CURRENT_TIMESTAMP(0)"),
onupdate=func.current_timestamp(),
init=False,
)
is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"))
is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"), default=False)
# credential type, e.g., "api-key", "oauth2"
credential_type: Mapped[str] = mapped_column(
String(32), nullable=False, server_default=sa.text("'api-key'::character varying")
String(32), nullable=False, server_default=sa.text("'api-key'::character varying"), default="api-key"
)
expires_at: Mapped[int] = mapped_column(sa.BigInteger, nullable=False, server_default=sa.text("-1"))
expires_at: Mapped[int] = mapped_column(sa.BigInteger, nullable=False, server_default=sa.text("-1"), default=-1)
@property
def credentials(self) -> dict[str, Any]:
if not self.encrypted_credentials:
return {}
return cast(dict[str, Any], json.loads(self.encrypted_credentials))
class ApiToolProvider(Base):
class ApiToolProvider(TypeBase):
"""
The table stores the api providers.
"""
@ -116,31 +125,43 @@ class ApiToolProvider(Base):
sa.UniqueConstraint("name", "tenant_id", name="unique_api_tool_provider"),
)
id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
# name of the api provider
name = mapped_column(String(255), nullable=False, server_default=sa.text("'API KEY 1'::character varying"))
name: Mapped[str] = mapped_column(
String(255),
nullable=False,
server_default=sa.text("'API KEY 1'::character varying"),
)
# icon
icon: Mapped[str] = mapped_column(String(255), nullable=False)
# original schema
schema = mapped_column(sa.Text, nullable=False)
schema: Mapped[str] = mapped_column(sa.Text, nullable=False)
schema_type_str: Mapped[str] = mapped_column(String(40), nullable=False)
# who created this tool
user_id = mapped_column(StringUUID, nullable=False)
user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# tenant id
tenant_id = mapped_column(StringUUID, nullable=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# description of the provider
description = mapped_column(sa.Text, nullable=False)
description: Mapped[str] = mapped_column(sa.Text, nullable=False)
# json format tools
tools_str = mapped_column(sa.Text, nullable=False)
tools_str: Mapped[str] = mapped_column(sa.Text, nullable=False)
# json format credentials
credentials_str = mapped_column(sa.Text, nullable=False)
credentials_str: Mapped[str] = mapped_column(sa.Text, nullable=False)
# privacy policy
privacy_policy = mapped_column(String(255), nullable=True)
privacy_policy: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
# custom_disclaimer
custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="")
created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp())
created_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
updated_at: Mapped[datetime] = mapped_column(
sa.DateTime,
nullable=False,
server_default=func.current_timestamp(),
onupdate=func.current_timestamp(),
init=False,
)
@property
def schema_type(self) -> "ApiProviderSchemaType":
@ -189,7 +210,7 @@ class ToolLabelBinding(TypeBase):
label_name: Mapped[str] = mapped_column(String(40), nullable=False)
class WorkflowToolProvider(Base):
class WorkflowToolProvider(TypeBase):
"""
The table stores the workflow providers.
"""
@ -201,7 +222,7 @@ class WorkflowToolProvider(Base):
sa.UniqueConstraint("tenant_id", "app_id", name="unique_workflow_tool_provider_app_id"),
)
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
# name of the workflow provider
name: Mapped[str] = mapped_column(String(255), nullable=False)
# label of the workflow provider
@ -219,15 +240,19 @@ class WorkflowToolProvider(Base):
# description of the provider
description: Mapped[str] = mapped_column(sa.Text, nullable=False)
# parameter configuration
parameter_configuration: Mapped[str] = mapped_column(sa.Text, nullable=False, server_default="[]")
parameter_configuration: Mapped[str] = mapped_column(sa.Text, nullable=False, server_default="[]", default="[]")
# privacy policy
privacy_policy: Mapped[str] = mapped_column(String(255), nullable=True, server_default="")
privacy_policy: Mapped[str | None] = mapped_column(String(255), nullable=True, server_default="", default=None)
created_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)")
sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False
)
updated_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)")
sa.DateTime,
nullable=False,
server_default=sa.text("CURRENT_TIMESTAMP(0)"),
onupdate=func.current_timestamp(),
init=False,
)
@property
@ -252,7 +277,7 @@ class WorkflowToolProvider(Base):
return db.session.query(App).where(App.id == self.app_id).first()
class MCPToolProvider(Base):
class MCPToolProvider(TypeBase):
"""
The table stores the mcp providers.
"""
@ -265,7 +290,7 @@ class MCPToolProvider(Base):
sa.UniqueConstraint("tenant_id", "server_identifier", name="unique_mcp_provider_server_identifier"),
)
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
# name of the mcp provider
name: Mapped[str] = mapped_column(String(40), nullable=False)
# server identifier of the mcp provider
@ -275,27 +300,33 @@ class MCPToolProvider(Base):
# hash of server_url for uniqueness check
server_url_hash: Mapped[str] = mapped_column(String(64), nullable=False)
# icon of the mcp provider
icon: Mapped[str] = mapped_column(String(255), nullable=True)
icon: Mapped[str | None] = mapped_column(String(255), nullable=True)
# tenant id
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# who created this tool
user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# encrypted credentials
encrypted_credentials: Mapped[str] = mapped_column(sa.Text, nullable=True)
encrypted_credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None)
# authed
authed: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=False)
# tools
tools: Mapped[str] = mapped_column(sa.Text, nullable=False, default="[]")
created_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)")
sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False
)
updated_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)")
sa.DateTime,
nullable=False,
server_default=sa.text("CURRENT_TIMESTAMP(0)"),
onupdate=func.current_timestamp(),
init=False,
)
timeout: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("30"), default=30.0)
sse_read_timeout: Mapped[float] = mapped_column(
sa.Float, nullable=False, server_default=sa.text("300"), default=300.0
)
timeout: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("30"))
sse_read_timeout: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("300"))
# encrypted headers for MCP server requests
encrypted_headers: Mapped[str | None] = mapped_column(sa.Text, nullable=True)
encrypted_headers: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None)
def load_user(self) -> Account | None:
return db.session.query(Account).where(Account.id == self.user_id).first()
@ -306,9 +337,11 @@ class MCPToolProvider(Base):
@property
def credentials(self) -> dict[str, Any]:
if not self.encrypted_credentials:
return {}
try:
return cast(dict[str, Any], json.loads(self.encrypted_credentials)) or {}
except Exception:
except json.JSONDecodeError:
return {}
@property
@ -321,6 +354,7 @@ class MCPToolProvider(Base):
def provider_icon(self) -> Mapping[str, str] | str:
from core.file import helpers as file_helpers
assert self.icon
try:
return json.loads(self.icon)
except json.JSONDecodeError:
@ -419,7 +453,7 @@ class MCPToolProvider(Base):
return encrypter.decrypt(self.credentials)
class ToolModelInvoke(Base):
class ToolModelInvoke(TypeBase):
"""
store the logs of tool invocations
"""
@ -427,37 +461,47 @@ class ToolModelInvoke(Base):
__tablename__ = "tool_model_invokes"
__table_args__ = (sa.PrimaryKeyConstraint("id", name="tool_model_invoke_pkey"),)
id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
# who invoke this tool
user_id = mapped_column(StringUUID, nullable=False)
user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# tenant id
tenant_id = mapped_column(StringUUID, nullable=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# provider
provider: Mapped[str] = mapped_column(String(255), nullable=False)
# type
tool_type = mapped_column(String(40), nullable=False)
tool_type: Mapped[str] = mapped_column(String(40), nullable=False)
# tool name
tool_name = mapped_column(String(128), nullable=False)
tool_name: Mapped[str] = mapped_column(String(128), nullable=False)
# invoke parameters
model_parameters = mapped_column(sa.Text, nullable=False)
model_parameters: Mapped[str] = mapped_column(sa.Text, nullable=False)
# prompt messages
prompt_messages = mapped_column(sa.Text, nullable=False)
prompt_messages: Mapped[str] = mapped_column(sa.Text, nullable=False)
# invoke response
model_response = mapped_column(sa.Text, nullable=False)
model_response: Mapped[str] = mapped_column(sa.Text, nullable=False)
prompt_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0"))
answer_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0"))
answer_unit_price = mapped_column(sa.Numeric(10, 4), nullable=False)
answer_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001"))
provider_response_latency = mapped_column(sa.Float, nullable=False, server_default=sa.text("0"))
total_price = mapped_column(sa.Numeric(10, 7))
answer_unit_price: Mapped[Decimal] = mapped_column(sa.Numeric(10, 4), nullable=False)
answer_price_unit: Mapped[Decimal] = mapped_column(
sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001")
)
provider_response_latency: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("0"))
total_price: Mapped[Decimal | None] = mapped_column(sa.Numeric(10, 7))
currency: Mapped[str] = mapped_column(String(255), nullable=False)
created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp())
created_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
updated_at: Mapped[datetime] = mapped_column(
sa.DateTime,
nullable=False,
server_default=func.current_timestamp(),
onupdate=func.current_timestamp(),
init=False,
)
@deprecated
class ToolConversationVariables(Base):
class ToolConversationVariables(TypeBase):
"""
store the conversation variables from tool invocations
"""
@ -470,18 +514,26 @@ class ToolConversationVariables(Base):
sa.Index("conversation_id_idx", "conversation_id"),
)
id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
# conversation user id
user_id = mapped_column(StringUUID, nullable=False)
user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# tenant id
tenant_id = mapped_column(StringUUID, nullable=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# conversation id
conversation_id = mapped_column(StringUUID, nullable=False)
conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# variables pool
variables_str = mapped_column(sa.Text, nullable=False)
variables_str: Mapped[str] = mapped_column(sa.Text, nullable=False)
created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp())
created_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
updated_at: Mapped[datetime] = mapped_column(
sa.DateTime,
nullable=False,
server_default=func.current_timestamp(),
onupdate=func.current_timestamp(),
init=False,
)
@property
def variables(self):
@ -519,7 +571,7 @@ class ToolFile(TypeBase):
@deprecated
class DeprecatedPublishedAppTool(Base):
class DeprecatedPublishedAppTool(TypeBase):
"""
The table stores the apps published as a tool for each person.
"""
@ -530,26 +582,34 @@ class DeprecatedPublishedAppTool(Base):
sa.UniqueConstraint("app_id", "user_id", name="unique_published_app_tool"),
)
id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
# id of the app
app_id = mapped_column(StringUUID, ForeignKey("apps.id"), nullable=False)
app_id: Mapped[str] = mapped_column(StringUUID, ForeignKey("apps.id"), nullable=False)
user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# who published this tool
description = mapped_column(sa.Text, nullable=False)
description: Mapped[str] = mapped_column(sa.Text, nullable=False)
# llm_description of the tool, for LLM
llm_description = mapped_column(sa.Text, nullable=False)
llm_description: Mapped[str] = mapped_column(sa.Text, nullable=False)
# query description, query will be seen as a parameter of the tool,
# to describe this parameter to llm, we need this field
query_description = mapped_column(sa.Text, nullable=False)
query_description: Mapped[str] = mapped_column(sa.Text, nullable=False)
# query name, the name of the query parameter
query_name = mapped_column(String(40), nullable=False)
query_name: Mapped[str] = mapped_column(String(40), nullable=False)
# name of the tool provider
tool_name = mapped_column(String(40), nullable=False)
tool_name: Mapped[str] = mapped_column(String(40), nullable=False)
# author
author = mapped_column(String(40), nullable=False)
created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"))
updated_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"))
author: Mapped[str] = mapped_column(String(40), nullable=False)
created_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False
)
updated_at: Mapped[datetime] = mapped_column(
sa.DateTime,
nullable=False,
server_default=sa.text("CURRENT_TIMESTAMP(0)"),
onupdate=func.current_timestamp(),
init=False,
)
@property
def description_i18n(self) -> "I18nObject":

View File

@ -4,46 +4,58 @@ import sqlalchemy as sa
from sqlalchemy import DateTime, String, func
from sqlalchemy.orm import Mapped, mapped_column
from models.base import Base
from models.base import TypeBase
from .engine import db
from .model import Message
from .types import StringUUID
class SavedMessage(Base):
class SavedMessage(TypeBase):
__tablename__ = "saved_messages"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="saved_message_pkey"),
sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"),
)
id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
message_id = mapped_column(StringUUID, nullable=False)
created_by_role = mapped_column(
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
message_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
created_by_role: Mapped[str] = mapped_column(
String(255), nullable=False, server_default=sa.text("'end_user'::character varying")
)
created_by = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(
DateTime,
nullable=False,
server_default=func.current_timestamp(),
init=False,
)
@property
def message(self):
return db.session.query(Message).where(Message.id == self.message_id).first()
class PinnedConversation(Base):
class PinnedConversation(TypeBase):
__tablename__ = "pinned_conversations"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="pinned_conversation_pkey"),
sa.Index("pinned_conversation_conversation_idx", "app_id", "conversation_id", "created_by_role", "created_by"),
)
id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
conversation_id: Mapped[str] = mapped_column(StringUUID)
created_by_role = mapped_column(
String(255), nullable=False, server_default=sa.text("'end_user'::character varying")
created_by_role: Mapped[str] = mapped_column(
String(255),
nullable=False,
server_default=sa.text("'end_user'::character varying"),
)
created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(
DateTime,
nullable=False,
server_default=func.current_timestamp(),
init=False,
)
created_by = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())

View File

@ -37,7 +37,7 @@ dependencies = [
"markdown~=3.5.1",
"numpy~=1.26.4",
"openpyxl~=3.1.5",
"opik~=1.7.25",
"opik~=1.8.72",
"opentelemetry-api==1.27.0",
"opentelemetry-distro==0.48b0",
"opentelemetry-exporter-otlp==1.27.0",

View File

@ -26,10 +26,9 @@ class ApiKeyAuthService:
api_key = encrypter.encrypt_token(tenant_id, args["credentials"]["config"]["api_key"])
args["credentials"]["config"]["api_key"] = api_key
data_source_api_key_binding = DataSourceApiKeyAuthBinding()
data_source_api_key_binding.tenant_id = tenant_id
data_source_api_key_binding.category = args["category"]
data_source_api_key_binding.provider = args["provider"]
data_source_api_key_binding = DataSourceApiKeyAuthBinding(
tenant_id=tenant_id, category=args["category"], provider=args["provider"]
)
data_source_api_key_binding.credentials = json.dumps(args["credentials"], ensure_ascii=False)
db.session.add(data_source_api_key_binding)
db.session.commit()
@ -48,6 +47,8 @@ class ApiKeyAuthService:
)
if not data_source_api_key_bindings:
return None
if not data_source_api_key_bindings.credentials:
return None
credentials = json.loads(data_source_api_key_bindings.credentials)
return credentials
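
The binding construction above, together with the init=False and default= arguments threaded through the model hunks, follows one migration: plain Base models mutated attribute-by-attribute become dataclass-mapped TypeBase models built through a generated __init__. A minimal sketch of that pattern, assuming TypeBase behaves like SQLAlchemy 2.0's MappedAsDataclass declarative base; the table and field names below are illustrative, not taken from this repository:

import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column


class TypeBase(MappedAsDataclass, DeclarativeBase):
    pass


class ExampleBinding(TypeBase):
    __tablename__ = "example_bindings"

    # init=False: filled by the database (or onupdate), so excluded from the generated __init__
    id: Mapped[int] = mapped_column(sa.Integer, primary_key=True, autoincrement=True, init=False)
    tenant_id: Mapped[str] = mapped_column(sa.String(36), nullable=False)
    category: Mapped[str] = mapped_column(sa.String(255), nullable=False)
    provider: Mapped[str] = mapped_column(sa.String(255), nullable=False)
    # default= mirrors the server_default so the field stays optional in __init__
    credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None)


# Required fields are now constructor keywords; optional ones can still be set afterwards.
binding = ExampleBinding(tenant_id="tenant-1", category="website_crawl", provider="firecrawl")
binding.credentials = '{"config": {"api_key": "example-key"}}'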

View File

@ -1470,7 +1470,7 @@ class DocumentService:
dataset.collection_binding_id = dataset_collection_binding.id
if not dataset.retrieval_model:
default_retrieval_model = {
"search_method": RetrievalMethod.SEMANTIC_SEARCH.value,
"search_method": RetrievalMethod.SEMANTIC_SEARCH,
"reranking_enable": False,
"reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""},
"top_k": 4,
@ -1752,7 +1752,7 @@ class DocumentService:
# dataset.collection_binding_id = dataset_collection_binding.id
# if not dataset.retrieval_model:
# default_retrieval_model = {
# "search_method": RetrievalMethod.SEMANTIC_SEARCH.value,
# "search_method": RetrievalMethod.SEMANTIC_SEARCH,
# "reranking_enable": False,
# "reranking_model": {"reranking_provider_name": "", "reranking_model_name": ""},
# "top_k": 2,
@ -2205,7 +2205,7 @@ class DocumentService:
retrieval_model = knowledge_config.retrieval_model
else:
retrieval_model = RetrievalModel(
search_method=RetrievalMethod.SEMANTIC_SEARCH.value,
search_method=RetrievalMethod.SEMANTIC_SEARCH,
reranking_enable=False,
reranking_model=RerankingModel(reranking_provider_name="", reranking_model_name=""),
top_k=4,

View File

@ -3,6 +3,8 @@ from typing import Literal
from pydantic import BaseModel
from core.rag.retrieval.retrieval_methods import RetrievalMethod
class ParentMode(StrEnum):
FULL_DOC = "full-doc"
@ -95,7 +97,7 @@ class WeightModel(BaseModel):
class RetrievalModel(BaseModel):
search_method: Literal["hybrid_search", "semantic_search", "full_text_search", "keyword_search"]
search_method: RetrievalMethod
reranking_enable: bool
reranking_model: RerankingModel | None = None
reranking_mode: str | None = None

View File

@ -2,6 +2,8 @@ from typing import Literal
from pydantic import BaseModel, field_validator
from core.rag.retrieval.retrieval_methods import RetrievalMethod
class IconInfo(BaseModel):
icon: str
@ -83,7 +85,7 @@ class RetrievalSetting(BaseModel):
Retrieval Setting.
"""
search_method: Literal["semantic_search", "full_text_search", "keyword_search", "hybrid_search"]
search_method: RetrievalMethod
top_k: int
score_threshold: float | None = 0.5
score_threshold_enabled: bool = False
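
These search_method hunks all rely on the same property: because RetrievalMethod is a StrEnum, its members compare and serialize as their string values, which is why the .value calls can be dropped, the Literal[...] unions replaced by the enum type, and raw-string callers still validate. A short sketch of that behaviour, assuming member names like those in the diff (the exact enum body in the repository is not shown here):

from enum import StrEnum

from pydantic import BaseModel


class RetrievalMethod(StrEnum):
    SEMANTIC_SEARCH = "semantic_search"
    FULL_TEXT_SEARCH = "full_text_search"
    KEYWORD_SEARCH = "keyword_search"
    HYBRID_SEARCH = "hybrid_search"


class RetrievalSetting(BaseModel):
    search_method: RetrievalMethod
    top_k: int = 4


# StrEnum members are strings, so existing comparisons and dict defaults keep working
assert RetrievalMethod.SEMANTIC_SEARCH == "semantic_search"

# Pydantic validates plain strings into the enum, so string-based callers stay compatible
setting = RetrievalSetting(search_method="keyword_search")
assert setting.search_method is RetrievalMethod.KEYWORD_SEARCH

# Call sites that read possibly-string config values can wrap them the same way
legacy_config = {"search_method": "hybrid_search"}
assert RetrievalMethod(legacy_config["search_method"]) is RetrievalMethod.HYBRID_SEARCH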

View File

@ -1,5 +1,5 @@
from collections.abc import Sequence
from enum import Enum
from enum import StrEnum
from pydantic import BaseModel, ConfigDict, model_validator
@ -27,7 +27,7 @@ from core.model_runtime.entities.provider_entities import (
from models.provider import ProviderType
class CustomConfigurationStatus(Enum):
class CustomConfigurationStatus(StrEnum):
"""
Enum class for custom configuration status.
"""

View File

@ -88,9 +88,9 @@ class ExternalDatasetService:
else:
raise ValueError(f"invalid endpoint: {endpoint}")
try:
response = httpx.post(endpoint, headers={"Authorization": f"Bearer {api_key}"})
except Exception:
raise ValueError(f"failed to connect to the endpoint: {endpoint}")
response = ssrf_proxy.post(endpoint, headers={"Authorization": f"Bearer {api_key}"})
except Exception as e:
raise ValueError(f"failed to connect to the endpoint: {endpoint}") from e
if response.status_code == 502:
raise ValueError(f"Bad Gateway: failed to connect to the endpoint: {endpoint}")
if response.status_code == 404:

View File

@ -63,7 +63,7 @@ class HitTestingService:
if metadata_condition and not document_ids_filter:
return cls.compact_retrieve_response(query, [])
all_documents = RetrievalService.retrieve(
retrieval_method=retrieval_model.get("search_method", "semantic_search"),
retrieval_method=RetrievalMethod(retrieval_model.get("search_method", RetrievalMethod.SEMANTIC_SEARCH)),
dataset_id=dataset.id,
query=query,
top_k=retrieval_model.get("top_k", 4),

View File

@ -9,6 +9,7 @@ from flask_login import current_user
from constants import DOCUMENT_EXTENSIONS
from core.plugin.impl.plugin import PluginInstaller
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from extensions.ext_database import db
from factories import variable_factory
from models.dataset import Dataset, Document, DocumentPipelineExecutionLog, Pipeline
@ -164,7 +165,7 @@ class RagPipelineTransformService:
if retrieval_model:
retrieval_setting = RetrievalSetting.model_validate(retrieval_model)
if indexing_technique == "economy":
retrieval_setting.search_method = "keyword_search"
retrieval_setting.search_method = RetrievalMethod.KEYWORD_SEARCH
knowledge_configuration.retrieval_model = retrieval_setting
else:
dataset.retrieval_model = knowledge_configuration.retrieval_model.model_dump()

View File

@ -148,7 +148,7 @@ class ApiToolManageService:
description=extra_info.get("description", ""),
schema_type_str=schema_type,
tools_str=json.dumps(jsonable_encoder(tool_bundles)),
credentials_str={},
credentials_str="{}",
privacy_policy=privacy_policy,
custom_disclaimer=custom_disclaimer,
)

View File

@ -683,7 +683,7 @@ class BuiltinToolManageService:
cache=NoOpProviderCredentialCache(),
)
original_params = encrypter.decrypt(custom_client_params.oauth_params)
new_params: dict = {
new_params = {
key: value if value != HIDDEN_VALUE else original_params.get(key, UNKNOWN_VALUE)
for key, value in client_params.items()
}

View File

@ -188,6 +188,8 @@ class MCPToolManageService:
raise
user = mcp_provider.load_user()
if not mcp_provider.icon:
raise ValueError("MCP provider icon is required")
return ToolProviderApiEntity(
id=mcp_provider.id,
name=mcp_provider.name,

View File

@ -152,7 +152,8 @@ class ToolTransformService:
if decrypt_credentials:
credentials = db_provider.credentials
if not db_provider.tenant_id:
raise ValueError(f"Required tenant_id is missing for BuiltinToolProvider with id {db_provider.id}")
# init tool configuration
encrypter, _ = create_provider_encrypter(
tenant_id=db_provider.tenant_id,

View File

@ -60,7 +60,7 @@ class TestAccountInitialization:
return "success"
# Act
with patch("controllers.console.wraps.current_user", mock_user):
with patch("controllers.console.wraps._current_account", return_value=mock_user):
result = protected_view()
# Assert
@ -77,7 +77,7 @@ class TestAccountInitialization:
return "success"
# Act & Assert
with patch("controllers.console.wraps.current_user", mock_user):
with patch("controllers.console.wraps._current_account", return_value=mock_user):
with pytest.raises(AccountNotInitializedError):
protected_view()
@ -163,7 +163,7 @@ class TestBillingResourceLimits:
return "member_added"
# Act
with patch("controllers.console.wraps.current_user"):
with patch("controllers.console.wraps._current_account", return_value=MockUser("test_user")):
with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features):
result = add_member()
@ -185,7 +185,7 @@ class TestBillingResourceLimits:
# Act & Assert
with app.test_request_context():
with patch("controllers.console.wraps.current_user", MockUser("test_user")):
with patch("controllers.console.wraps._current_account", return_value=MockUser("test_user")):
with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features):
with pytest.raises(Exception) as exc_info:
add_member()
@ -207,7 +207,7 @@ class TestBillingResourceLimits:
# Test 1: Should reject when source is datasets
with app.test_request_context("/?source=datasets"):
with patch("controllers.console.wraps.current_user", MockUser("test_user")):
with patch("controllers.console.wraps._current_account", return_value=MockUser("test_user")):
with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features):
with pytest.raises(Exception) as exc_info:
upload_document()
@ -215,7 +215,7 @@ class TestBillingResourceLimits:
# Test 2: Should allow when source is not datasets
with app.test_request_context("/?source=other"):
with patch("controllers.console.wraps.current_user", MockUser("test_user")):
with patch("controllers.console.wraps._current_account", return_value=MockUser("test_user")):
with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features):
result = upload_document()
assert result == "document_uploaded"
@ -239,7 +239,7 @@ class TestRateLimiting:
return "knowledge_success"
# Act
with patch("controllers.console.wraps.current_user"):
with patch("controllers.console.wraps._current_account", return_value=MockUser("test_user")):
with patch(
"controllers.console.wraps.FeatureService.get_knowledge_rate_limit", return_value=mock_rate_limit
):
@ -271,7 +271,7 @@ class TestRateLimiting:
# Act & Assert
with app.test_request_context():
with patch("controllers.console.wraps.current_user", MockUser("test_user")):
with patch("controllers.console.wraps._current_account", return_value=MockUser("test_user")):
with patch(
"controllers.console.wraps.FeatureService.get_knowledge_rate_limit", return_value=mock_rate_limit
):

View File

@ -1,10 +1,12 @@
import os
from pytest_mock import MockerFixture
from core.rag.extractor.firecrawl.firecrawl_app import FirecrawlApp
from tests.unit_tests.core.rag.extractor.test_notion_extractor import _mock_response
def test_firecrawl_web_extractor_crawl_mode(mocker):
def test_firecrawl_web_extractor_crawl_mode(mocker: MockerFixture):
url = "https://firecrawl.dev"
api_key = os.getenv("FIRECRAWL_API_KEY") or "fc-"
base_url = "https://api.firecrawl.dev"

View File

@ -1,5 +1,7 @@
from unittest import mock
from pytest_mock import MockerFixture
from core.rag.extractor import notion_extractor
user_id = "user1"
@ -57,7 +59,7 @@ def _remove_multiple_new_lines(text):
return text.strip()
def test_notion_page(mocker):
def test_notion_page(mocker: MockerFixture):
texts = ["Head 1", "1.1", "paragraph 1", "1.1.1"]
mocked_notion_page = {
"object": "list",
@ -77,7 +79,7 @@ def test_notion_page(mocker):
assert content == "# Head 1\n## 1.1\nparagraph 1\n### 1.1.1"
def test_notion_database(mocker):
def test_notion_database(mocker: MockerFixture):
page_title_list = ["page1", "page2", "page3"]
mocked_notion_database = {
"object": "list",

View File

@ -2,6 +2,7 @@ from unittest.mock import MagicMock, patch
import pytest
import redis
from pytest_mock import MockerFixture
from core.entities.provider_entities import ModelLoadBalancingConfiguration
from core.model_manager import LBModelManager
@ -39,7 +40,7 @@ def lb_model_manager():
return lb_model_manager
def test_lb_model_manager_fetch_next(mocker, lb_model_manager):
def test_lb_model_manager_fetch_next(mocker: MockerFixture, lb_model_manager: LBModelManager):
# initialize redis client
redis_client.initialize(redis.Redis())

View File

@ -1,4 +1,5 @@
import pytest
from pytest_mock import MockerFixture
from core.entities.provider_entities import ModelSettings
from core.model_runtime.entities.model_entities import ModelType
@ -7,19 +8,25 @@ from models.provider import LoadBalancingModelConfig, ProviderModelSetting
@pytest.fixture
def mock_provider_entity(mocker):
def mock_provider_entity(mocker: MockerFixture):
mock_entity = mocker.Mock()
mock_entity.provider = "openai"
mock_entity.configurate_methods = ["predefined-model"]
mock_entity.supported_model_types = [ModelType.LLM]
mock_entity.model_credential_schema = mocker.Mock()
mock_entity.model_credential_schema.credential_form_schemas = []
# Use PropertyMock to ensure credential_form_schemas is iterable
provider_credential_schema = mocker.Mock()
type(provider_credential_schema).credential_form_schemas = mocker.PropertyMock(return_value=[])
mock_entity.provider_credential_schema = provider_credential_schema
model_credential_schema = mocker.Mock()
type(model_credential_schema).credential_form_schemas = mocker.PropertyMock(return_value=[])
mock_entity.model_credential_schema = model_credential_schema
return mock_entity
def test__to_model_settings(mocker, mock_provider_entity):
def test__to_model_settings(mocker: MockerFixture, mock_provider_entity):
# Mocking the inputs
provider_model_settings = [
ProviderModelSetting(
@ -79,7 +86,7 @@ def test__to_model_settings(mocker, mock_provider_entity):
assert result[0].load_balancing_configs[1].name == "first"
def test__to_model_settings_only_one_lb(mocker, mock_provider_entity):
def test__to_model_settings_only_one_lb(mocker: MockerFixture, mock_provider_entity):
# Mocking the inputs
provider_model_settings = [
ProviderModelSetting(
@ -127,7 +134,7 @@ def test__to_model_settings_only_one_lb(mocker, mock_provider_entity):
assert len(result[0].load_balancing_configs) == 0
def test__to_model_settings_lb_disabled(mocker, mock_provider_entity):
def test__to_model_settings_lb_disabled(mocker: MockerFixture, mock_provider_entity):
# Mocking the inputs
provider_model_settings = [
ProviderModelSetting(

View File

@ -1533,7 +1533,7 @@ requires-dist = [
{ name = "opentelemetry-sdk", specifier = "==1.27.0" },
{ name = "opentelemetry-semantic-conventions", specifier = "==0.48b0" },
{ name = "opentelemetry-util-http", specifier = "==0.48b0" },
{ name = "opik", specifier = "~=1.7.25" },
{ name = "opik", specifier = "~=1.8.72" },
{ name = "packaging", specifier = "~=23.2" },
{ name = "pandas", extras = ["excel", "output-formatting", "performance"], specifier = "~=2.2.2" },
{ name = "psycogreen", specifier = "~=1.0.2" },
@ -4045,7 +4045,7 @@ wheels = [
[[package]]
name = "opik"
version = "1.7.43"
version = "1.8.72"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "boto3-stubs", extra = ["bedrock-runtime"] },
@ -4064,9 +4064,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "uuid6" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ba/52/cea0317bc3207bc967b48932781995d9cdb2c490e7e05caa00ff660f7205/opik-1.7.43.tar.gz", hash = "sha256:0b02522b0b74d0a67b141939deda01f8bb69690eda6b04a7cecb1c7f0649ccd0", size = 326886, upload-time = "2025-07-07T10:30:07.715Z" }
sdist = { url = "https://files.pythonhosted.org/packages/aa/08/679b60db21994cf3318d4cdd1d08417c1877b79ac20971a8d80f118c9455/opik-1.8.72.tar.gz", hash = "sha256:26fcb003dc609d96b52eaf6a12fb16eb2b69eb0d1b35d88279ec612925d23944", size = 409774, upload-time = "2025-10-10T13:22:38.2Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/76/ae/f3566bdc3c49a1a8f795b1b6e726ef211c87e31f92d870ca6d63999c9bbf/opik-1.7.43-py3-none-any.whl", hash = "sha256:a66395c8b5ea7c24846f72dafc70c74d5b8f24ffbc4c8a1b3a7f9456e550568d", size = 625356, upload-time = "2025-07-07T10:30:06.389Z" },
{ url = "https://files.pythonhosted.org/packages/f8/f5/04d35af828d127de65a36286ce5b53e7310087a6b55a56f398daa7f0c9a6/opik-1.8.72-py3-none-any.whl", hash = "sha256:697e361a8364666f36aeb197aaba7ffa0696b49f04d2257b733d436749c90a8c", size = 768233, upload-time = "2025-10-10T13:22:36.352Z" },
]
[[package]]

View File

@ -1,7 +1,7 @@
import { useTranslation } from 'react-i18next'
import { useRouter } from 'next/navigation'
import { useContext } from 'use-context-selector'
import React, { useCallback, useState } from 'react'
import React, { useCallback, useEffect, useState } from 'react'
import {
RiDeleteBinLine,
RiEditLine,
@ -16,7 +16,7 @@ import { useStore as useAppStore } from '@/app/components/app/store'
import { ToastContext } from '@/app/components/base/toast'
import { useAppContext } from '@/context/app-context'
import { useProviderContext } from '@/context/provider-context'
import { copyApp, deleteApp, exportAppConfig, updateAppInfo } from '@/service/apps'
import { copyApp, deleteApp, exportAppConfig, fetchAppDetail, updateAppInfo } from '@/service/apps'
import type { DuplicateAppModalProps } from '@/app/components/app/duplicate-modal'
import type { CreateAppModalProps } from '@/app/components/explore/create-app-modal'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
@ -31,6 +31,8 @@ import type { Operation } from './app-operations'
import AppOperations from './app-operations'
import dynamic from 'next/dynamic'
import cn from '@/utils/classnames'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
const SwitchAppModal = dynamic(() => import('@/app/components/app/switch-app-modal'), {
ssr: false,
@ -74,6 +76,19 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
const [secretEnvList, setSecretEnvList] = useState<EnvironmentVariable[]>([])
const [showExportWarning, setShowExportWarning] = useState(false)
const emitAppMetaUpdate = useCallback(() => {
if (!appDetail?.id)
return
const socket = webSocketClient.getSocket(appDetail.id)
if (socket) {
socket.emit('collaboration_event', {
type: 'app_meta_update',
data: { timestamp: Date.now() },
timestamp: Date.now(),
})
}
}, [appDetail?.id])
const onEdit: CreateAppModalProps['onConfirm'] = useCallback(async ({
name,
icon_type,
@ -102,11 +117,12 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
message: t('app.editDone'),
})
setAppDetail(app)
emitAppMetaUpdate()
}
catch {
notify({ type: 'error', message: t('app.editFailed') })
}
}, [appDetail, notify, setAppDetail, t])
}, [appDetail, notify, setAppDetail, t, emitAppMetaUpdate])
const onCopy: DuplicateAppModalProps['onConfirm'] = async ({ name, icon_type, icon, icon_background }) => {
if (!appDetail)
@ -203,6 +219,23 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
setShowConfirmDelete(false)
}, [appDetail, notify, onPlanInfoChanged, replace, setAppDetail, t])
useEffect(() => {
if (!appDetail?.id)
return
const unsubscribe = collaborationManager.onAppMetaUpdate(async () => {
try {
const res = await fetchAppDetail({ url: '/apps', id: appDetail.id })
setAppDetail({ ...res })
}
catch (error) {
console.error('failed to refresh app detail from collaboration update:', error)
}
})
return unsubscribe
}, [appDetail?.id, setAppDetail])
const { isCurrentWorkspaceEditor } = useAppContext()
if (!appDetail)

View File

@ -47,6 +47,9 @@ import { AccessMode } from '@/models/access-control'
import { fetchAppDetail } from '@/service/apps'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
import { useInvalidateAppWorkflow } from '@/service/use-workflow'
export type AppPublisherProps = {
disabled?: boolean
@ -96,6 +99,7 @@ const AppPublisher = ({
const isChatApp = ['chat', 'agent-chat', 'completion'].includes(appDetail?.mode || '')
const { data: userCanAccessApp, isLoading: isGettingUserCanAccessApp, refetch } = useGetUserCanAccessApp({ appId: appDetail?.id, enabled: false })
const { data: appAccessSubjects, isLoading: isGettingAppWhiteListSubjects } = useAppWhiteListSubjects(appDetail?.id, open && systemFeatures.webapp_auth.enabled && appDetail?.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS)
const invalidateAppWorkflow = useInvalidateAppWorkflow()
useEffect(() => {
if (systemFeatures.webapp_auth.enabled && open && appDetail)
@ -120,11 +124,27 @@ const AppPublisher = ({
try {
await onPublish?.(params)
setPublished(true)
const appId = appDetail?.id
const socket = appId ? webSocketClient.getSocket(appId) : null
if (appId)
invalidateAppWorkflow(appId)
if (socket) {
const timestamp = Date.now()
socket.emit('collaboration_event', {
type: 'app_publish_update',
data: {
action: 'published',
timestamp,
},
timestamp,
})
}
}
catch {
setPublished(false)
}
}, [onPublish])
}, [appDetail?.id, onPublish, invalidateAppWorkflow])
const handleRestore = useCallback(async () => {
try {
@ -178,6 +198,18 @@ const AppPublisher = ({
handlePublish()
}, { exactMatch: true, useCapture: true })
useEffect(() => {
const appId = appDetail?.id
if (!appId) return
const unsubscribe = collaborationManager.onAppPublishUpdate((update: any) => {
if (update?.data?.action === 'published')
invalidateAppWorkflow(appId)
})
return unsubscribe
}, [appDetail?.id, invalidateAppWorkflow])
return (
<>
<PortalToFollowElem

View File

@ -15,11 +15,12 @@ const ContentDialog = ({
onClose,
children,
}: ContentDialogProps) => {
// z-[70]: Ensures dialog appears above workflow operators (z-[60]) and other UI elements
return (
<Transition
show={show}
as='div'
className='absolute left-0 top-0 z-30 box-border h-full w-full p-2'
className='absolute left-0 top-0 z-[70] box-border h-full w-full p-2'
>
<TransitionChild>
<div

View File

@ -0,0 +1,4 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M0 4C0 1.79086 1.79086 0 4 0H12C14.2091 0 16 1.79086 16 4V12C16 14.2091 14.2091 16 12 16H4C1.79086 16 0 14.2091 0 12V4Z" fill="white" fill-opacity="0.12"/>
<path d="M3.42756 8.7358V7.62784H10.8764C11.2003 7.62784 11.4957 7.5483 11.7628 7.3892C12.0298 7.23011 12.2415 7.01705 12.3977 6.75C12.5568 6.48295 12.6364 6.1875 12.6364 5.86364C12.6364 5.53977 12.5568 5.24574 12.3977 4.98153C12.2386 4.71449 12.0256 4.50142 11.7585 4.34233C11.4943 4.18324 11.2003 4.10369 10.8764 4.10369H10.3991V3H10.8764C11.4048 3 11.8849 3.12926 12.3168 3.38778C12.7486 3.64631 13.0938 3.99148 13.3523 4.4233C13.6108 4.85511 13.7401 5.33523 13.7401 5.86364C13.7401 6.25852 13.6648 6.62926 13.5142 6.97585C13.3665 7.32244 13.1619 7.62784 12.9006 7.89205C12.6392 8.15625 12.3352 8.36364 11.9886 8.5142C11.642 8.66193 11.2713 8.7358 10.8764 8.7358H3.42756ZM6.16761 12.0554L2.29403 8.18182L6.16761 4.30824L6.9304 5.07102L3.81534 8.18182L6.9304 11.2926L6.16761 12.0554Z" fill="white"/>
</svg>


View File

@ -0,0 +1,36 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "16",
"height": "16",
"viewBox": "0 0 16 16",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M0 4C0 1.79086 1.79086 0 4 0H12C14.2091 0 16 1.79086 16 4V12C16 14.2091 14.2091 16 12 16H4C1.79086 16 0 14.2091 0 12V4Z",
"fill": "white",
"fill-opacity": "0.12"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M3.42756 8.7358V7.62784H10.8764C11.2003 7.62784 11.4957 7.5483 11.7628 7.3892C12.0298 7.23011 12.2415 7.01705 12.3977 6.75C12.5568 6.48295 12.6364 6.1875 12.6364 5.86364C12.6364 5.53977 12.5568 5.24574 12.3977 4.98153C12.2386 4.71449 12.0256 4.50142 11.7585 4.34233C11.4943 4.18324 11.2003 4.10369 10.8764 4.10369H10.3991V3H10.8764C11.4048 3 11.8849 3.12926 12.3168 3.38778C12.7486 3.64631 13.0938 3.99148 13.3523 4.4233C13.6108 4.85511 13.7401 5.33523 13.7401 5.86364C13.7401 6.25852 13.6648 6.62926 13.5142 6.97585C13.3665 7.32244 13.1619 7.62784 12.9006 7.89205C12.6392 8.15625 12.3352 8.36364 11.9886 8.5142C11.642 8.66193 11.2713 8.7358 10.8764 8.7358H3.42756ZM6.16761 12.0554L2.29403 8.18182L6.16761 4.30824L6.9304 5.07102L3.81534 8.18182L6.9304 11.2926L6.16761 12.0554Z",
"fill": "white"
},
"children": []
}
]
},
"name": "EnterKey"
}

View File

@ -0,0 +1,20 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './EnterKey.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'EnterKey'
export default Icon

View File

@ -1,6 +1,7 @@
export { default as D } from './D'
export { default as DiagonalDividingLine } from './DiagonalDividingLine'
export { default as Dify } from './Dify'
export { default as EnterKey } from './EnterKey'
export { default as Gdpr } from './Gdpr'
export { default as Github } from './Github'
export { default as Highlight } from './Highlight'

View File

@ -7,6 +7,7 @@ import { useInvalidateStrategyProviders } from '@/service/use-strategy'
import type { Plugin, PluginDeclaration, PluginManifestInMarket } from '../../types'
import { PluginType } from '../../types'
import { useInvalidDataSourceList } from '@/service/use-pipeline'
import { useInvalidDataSourceListAuth } from '@/service/use-datasource'
const useRefreshPluginList = () => {
const invalidateInstalledPluginList = useInvalidateInstalledPluginList()
@ -19,6 +20,8 @@ const useRefreshPluginList = () => {
const invalidateAllBuiltInTools = useInvalidateAllBuiltInTools()
const invalidateAllDataSources = useInvalidDataSourceList()
const invalidateDataSourceListAuth = useInvalidDataSourceListAuth()
const invalidateStrategyProviders = useInvalidateStrategyProviders()
return {
refreshPluginList: (manifest?: PluginManifestInMarket | Plugin | PluginDeclaration | null, refreshAllType?: boolean) => {
@ -32,8 +35,10 @@ const useRefreshPluginList = () => {
// TODO: update suggested tools. It's a function in the useMarketplacePlugins hook, handleUpdatePlugins
}
if ((manifest && PluginType.datasource.includes(manifest.category)) || refreshAllType)
if ((manifest && PluginType.datasource.includes(manifest.category)) || refreshAllType) {
invalidateAllDataSources()
invalidateDataSourceListAuth()
}
// model select
if ((manifest && PluginType.model.includes(manifest.category)) || refreshAllType) {

View File

@ -16,6 +16,7 @@ import {
useUpdateMCPServer,
} from '@/service/use-tools'
import cn from '@/utils/classnames'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
export type ModalProps = {
appID: string
@ -59,6 +60,21 @@ const MCPServerModal = ({
return res
}
const emitMcpServerUpdate = (action: 'created' | 'updated') => {
const socket = webSocketClient.getSocket(appID)
if (!socket) return
const timestamp = Date.now()
socket.emit('collaboration_event', {
type: 'mcp_server_update',
data: {
action,
timestamp,
},
timestamp,
})
}
const submit = async () => {
if (!data) {
const payload: any = {
@ -71,6 +87,7 @@ const MCPServerModal = ({
await createMCPServer(payload)
invalidateMCPServerDetail(appID)
emitMcpServerUpdate('created')
onHide()
}
else {
@ -83,6 +100,7 @@ const MCPServerModal = ({
payload.description = description
await updateMCPServer(payload)
invalidateMCPServerDetail(appID)
emitMcpServerUpdate('updated')
onHide()
}
}
@ -92,6 +110,7 @@ const MCPServerModal = ({
isShow={show}
onClose={onHide}
className={cn('relative !max-w-[520px] !p-0')}
highPriority
>
<div className='absolute right-5 top-5 z-10 cursor-pointer p-1.5' onClick={onHide}>
<RiCloseLine className='h-5 w-5 text-text-tertiary' />

View File

@ -1,10 +1,9 @@
import {
useCallback,
useEffect,
useMemo,
useRef,
} from 'react'
import Link from 'next/link'
import { useTranslation } from 'react-i18next'
import { RiArrowRightUpLine } from '@remixicon/react'
import { BlockEnum } from '../types'
import type {
OnSelectBlock,
@ -14,10 +13,12 @@ import type { DataSourceDefaultValue, ToolDefaultValue } from './types'
import Tools from './tools'
import { ViewType } from './view-type-select'
import cn from '@/utils/classnames'
import type { ListRef } from '@/app/components/workflow/block-selector/market-place-plugin/list'
import { getMarketplaceUrl } from '@/utils/var'
import PluginList, { type ListRef } from '@/app/components/workflow/block-selector/market-place-plugin/list'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { DEFAULT_FILE_EXTENSIONS_IN_LOCAL_FILE_DATA_SOURCE } from './constants'
import { useMarketplacePlugins } from '../../plugins/marketplace/hooks'
import { PluginType } from '../../plugins/types'
import { useGetLanguage } from '@/context/i18n'
type AllToolsProps = {
className?: string
@ -34,9 +35,26 @@ const DataSources = ({
onSelect,
dataSources,
}: AllToolsProps) => {
const { t } = useTranslation()
const language = useGetLanguage()
const pluginRef = useRef<ListRef>(null)
const wrapElemRef = useRef<HTMLDivElement>(null)
const isMatchingKeywords = (text: string, keywords: string) => {
return text.toLowerCase().includes(keywords.toLowerCase())
}
const filteredDatasources = useMemo(() => {
const hasFilter = searchText
if (!hasFilter)
return dataSources.filter(toolWithProvider => toolWithProvider.tools.length > 0)
return dataSources.filter((toolWithProvider) => {
return isMatchingKeywords(toolWithProvider.name, searchText) || toolWithProvider.tools.some((tool) => {
return tool.label[language].toLowerCase().includes(searchText.toLowerCase()) || tool.name.toLowerCase().includes(searchText.toLowerCase())
})
})
}, [searchText, dataSources, language])
const handleSelect = useCallback((_: any, toolDefaultValue: ToolDefaultValue) => {
let defaultValue: DataSourceDefaultValue = {
plugin_id: toolDefaultValue?.provider_id,
@ -55,8 +73,24 @@ const DataSources = ({
}
onSelect(BlockEnum.DataSource, toolDefaultValue && defaultValue)
}, [onSelect])
const { enable_marketplace } = useGlobalPublicStore(s => s.systemFeatures)
const {
queryPluginsWithDebounced: fetchPlugins,
plugins: notInstalledPlugins = [],
} = useMarketplacePlugins()
useEffect(() => {
if (!enable_marketplace) return
if (searchText) {
fetchPlugins({
query: searchText,
category: PluginType.datasource,
})
}
}, [searchText, enable_marketplace])
return (
<div className={cn(className)}>
<div
@ -66,24 +100,23 @@ const DataSources = ({
>
<Tools
className={toolContentClassName}
tools={dataSources}
tools={filteredDatasources}
onSelect={handleSelect as OnSelectBlock}
viewType={ViewType.flat}
hasSearchText={!!searchText}
canNotSelectMultiple
/>
{
enable_marketplace && (
<Link
className='system-sm-medium sticky bottom-0 z-10 flex h-8 cursor-pointer items-center rounded-b-lg border-[0.5px] border-t border-components-panel-border bg-components-panel-bg-blur px-4 py-1 text-text-accent-light-mode-only shadow-lg'
href={getMarketplaceUrl('')}
target='_blank'
>
<span>{t('plugin.findMoreInMarketplace')}</span>
<RiArrowRightUpLine className='ml-0.5 h-3 w-3' />
</Link>
)
}
{/* Plugins from marketplace */}
{enable_marketplace && (
<PluginList
ref={pluginRef}
wrapElemRef={wrapElemRef}
list={notInstalledPlugins}
tags={[]}
searchText={searchText}
toolContentClassName={toolContentClassName}
/>
)}
</div>
</div>
)

View File

@ -372,6 +372,14 @@ export class CollaborationManager {
return this.eventEmitter.on('appStateUpdate', callback)
}
onAppPublishUpdate(callback: (update: any) => void): () => void {
return this.eventEmitter.on('appPublishUpdate', callback)
}
onAppMetaUpdate(callback: (update: any) => void): () => void {
return this.eventEmitter.on('appMetaUpdate', callback)
}
onMcpServerUpdate(callback: (update: any) => void): () => void {
return this.eventEmitter.on('mcpServerUpdate', callback)
}
@ -540,7 +548,7 @@ export class CollaborationManager {
const oldNodesMap = new Map(oldNodes.map(node => [node.id, node]))
const newNodesMap = new Map(newNodes.map(node => [node.id, node]))
const syncDataAllowList = new Set(['_children'])
const syncDataAllowList = new Set(['_children', '_connectedSourceHandleIds', '_connectedTargetHandleIds', '_targetBranches'])
const shouldSyncDataKey = (key: string) => (syncDataAllowList.has(key) || !key.startsWith('_')) && key !== 'selected'
// Delete removed nodes
@ -812,6 +820,14 @@ export class CollaborationManager {
console.log('Processing app_state_update event:', update)
this.eventEmitter.emit('appStateUpdate', update)
}
else if (update.type === 'app_meta_update') {
console.log('Processing app_meta_update event:', update)
this.eventEmitter.emit('appMetaUpdate', update)
}
else if (update.type === 'app_publish_update') {
console.log('Processing app_publish_update event:', update)
this.eventEmitter.emit('appPublishUpdate', update)
}
else if (update.type === 'mcp_server_update') {
console.log('Processing mcp_server_update event:', update)
this.eventEmitter.emit('mcpServerUpdate', update)

View File

@ -50,7 +50,7 @@ export type GraphSyncData = {
}
export type CollaborationUpdate = {
type: 'mouse_move' | 'vars_and_features_update' | 'sync_request' | 'app_state_update' | 'mcp_server_update' | 'workflow_update' | 'comments_update' | 'node_panel_presence'
type: 'mouse_move' | 'vars_and_features_update' | 'sync_request' | 'app_state_update' | 'app_meta_update' | 'mcp_server_update' | 'workflow_update' | 'comments_update' | 'node_panel_presence' | 'app_publish_update'
userId: string
data: any
timestamp: number

View File

@ -22,6 +22,7 @@ import Avatar from '@/app/components/base/avatar'
import cn from '@/utils/classnames'
import { type UserProfile, fetchMentionableUsers } from '@/service/workflow-comment'
import { useStore, useWorkflowStore } from '../store'
import { EnterKey } from '@/app/components/base/icons/src/public/common'
type MentionInputProps = {
value: string
@ -431,6 +432,10 @@ const MentionInputInner = forwardRef<HTMLTextAreaElement, MentionInputProps>(({
}, [value, mentionedUserIds, onSubmit])
const handleKeyDown = useCallback((e: React.KeyboardEvent) => {
// Ignore key events during IME composition (e.g., Chinese, Japanese input)
if (e.nativeEvent.isComposing)
return
if (showMentionDropdown) {
if (e.key === 'ArrowDown') {
e.preventDefault()
@ -583,9 +588,13 @@ const MentionInputInner = forwardRef<HTMLTextAreaElement, MentionInputProps>(({
size='small'
disabled={loading || !value.trim()}
onClick={() => handleSubmit()}
className='gap-1'
>
{loading && <RiLoader2Line className='mr-1 h-3.5 w-3.5 animate-spin' />}
{t('common.operation.save')}
<span>{t('common.operation.save')}</span>
{!loading && (
<EnterKey className='h-4 w-4' />
)}
</Button>
</div>
</div>
@ -594,7 +603,7 @@ const MentionInputInner = forwardRef<HTMLTextAreaElement, MentionInputProps>(({
{showMentionDropdown && filteredMentionUsers.length > 0 && typeof document !== 'undefined' && createPortal(
<div
className="fixed z-[9999] max-h-40 w-64 overflow-y-auto rounded-lg border border-components-panel-border bg-components-panel-bg shadow-lg"
className="bg-components-panel-bg/95 fixed z-[9999] max-h-[248px] w-[280px] overflow-y-auto rounded-xl border-[0.5px] border-components-panel-border shadow-lg backdrop-blur-[10px]"
style={{
left: dropdownPosition.x,
[dropdownPosition.placement === 'top' ? 'bottom' : 'top']: dropdownPosition.placement === 'top'
@ -607,7 +616,7 @@ const MentionInputInner = forwardRef<HTMLTextAreaElement, MentionInputProps>(({
<div
key={user.id}
className={cn(
'flex cursor-pointer items-center gap-2 p-2 hover:bg-state-base-hover',
'flex cursor-pointer items-center gap-2 rounded-md py-1 pl-2 pr-3 hover:bg-state-base-hover',
index === selectedMentionIndex && 'bg-state-base-hover',
)}
onClick={() => insertMention(user)}

View File

@ -2,6 +2,7 @@
import type { FC, ReactNode } from 'react'
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useParams } from 'next/navigation'
import { useReactFlow, useViewport } from 'reactflow'
import { useTranslation } from 'react-i18next'
import { RiArrowDownSLine, RiArrowUpSLine, RiCheckboxCircleFill, RiCheckboxCircleLine, RiCloseLine, RiDeleteBinLine, RiMoreFill } from '@remixicon/react'
@ -16,6 +17,7 @@ import type { WorkflowCommentDetail, WorkflowCommentDetailReply } from '@/servic
import { useAppContext } from '@/context/app-context'
import { MentionInput } from './mention-input'
import { getUserColor } from '@/app/components/workflow/collaboration/utils/user-color'
import { useStore } from '../store'
type CommentThreadProps = {
comment: WorkflowCommentDetail
@ -41,9 +43,9 @@ const ThreadMessage: FC<{
avatarUrl?: string | null
createdAt: number
content: string
mentionedNames?: string[]
mentionableNames: string[]
className?: string
}> = ({ authorId, authorName, avatarUrl, createdAt, content, mentionedNames, className }) => {
}> = ({ authorId, authorName, avatarUrl, createdAt, content, mentionableNames, className }) => {
const { formatTimeFromNow } = useFormatTimeFromNow()
const { userProfile } = useAppContext()
const currentUserId = userProfile?.id
@ -54,9 +56,11 @@ const ThreadMessage: FC<{
if (!content)
return ''
const normalizedNames = Array.from(new Set((mentionedNames || [])
// Extract valid user names from mentionableNames, sorted by length (longest first)
const normalizedNames = Array.from(new Set(mentionableNames
.map(name => name.trim())
.filter(Boolean)))
normalizedNames.sort((a, b) => b.length - a.length)
if (normalizedNames.length === 0)
return content
@ -111,7 +115,7 @@ const ThreadMessage: FC<{
segments.push(<span key={`text-${cursor}`}>{content.slice(cursor)}</span>)
return segments
}, [content, mentionedNames])
}, [content, mentionableNames])
return (
<div className={cn('flex gap-3 pt-1', className)}>
@ -154,6 +158,8 @@ export const CommentThread: FC<CommentThreadProps> = memo(({
onReplyDelete,
onReplyDeleteDirect,
}) => {
const params = useParams()
const appId = params.appId as string
const { flowToScreenPosition } = useReactFlow()
const viewport = useViewport()
const { userProfile } = useAppContext()
@ -162,11 +168,26 @@ export const CommentThread: FC<CommentThreadProps> = memo(({
const [activeReplyMenuId, setActiveReplyMenuId] = useState<string | null>(null)
const [editingReply, setEditingReply] = useState<{ id: string; content: string }>({ id: '', content: '' })
const [deletingReplyId, setDeletingReplyId] = useState<string | null>(null)
const [isSubmittingEdit, setIsSubmittingEdit] = useState(false)
// Focus management refs
const replyInputRef = useRef<HTMLTextAreaElement>(null)
const threadRef = useRef<HTMLDivElement>(null)
// Get mentionable users from store
const mentionUsersFromStore = useStore(state => (
appId ? state.mentionableUsersCache[appId] : undefined
))
const mentionUsers = mentionUsersFromStore ?? []
// Extract all mentionable names for highlighting
const mentionableNames = useMemo(() => {
const names = mentionUsers
.map(user => user.name?.trim())
.filter((name): name is string => Boolean(name))
return Array.from(new Set(names))
}, [mentionUsers])
useEffect(() => {
setReplyContent('')
}, [comment.id])
@ -245,13 +266,23 @@ export const CommentThread: FC<CommentThreadProps> = memo(({
if (!onReplyEdit || !editingReply) return
const trimmed = content.trim()
if (!trimmed) return
await onReplyEdit(editingReply.id, trimmed, mentionedUserIds)
setEditingReply({ id: '', content: '' })
// P1: Restore focus to reply input after saving edit
setTimeout(() => {
replyInputRef.current?.focus()
}, 0)
setIsSubmittingEdit(true)
try {
await onReplyEdit(editingReply.id, trimmed, mentionedUserIds)
setEditingReply({ id: '', content: '' })
// P1: Restore focus to reply input after saving edit
setTimeout(() => {
replyInputRef.current?.focus()
}, 0)
}
catch (error) {
console.error('Failed to edit reply', error)
}
finally {
setIsSubmittingEdit(false)
}
}, [editingReply, onReplyEdit])
const replies = comment.replies || []
@ -296,25 +327,6 @@ export const CommentThread: FC<CommentThreadProps> = memo(({
previousReplyCountRef.current = replies.length
}, [comment.id, replies.length])
const mentionsByTarget = useMemo(() => {
const map = new Map<string, string[]>()
for (const mention of comment.mentions || []) {
const name = mention.mentioned_user_account?.name?.trim()
if (!name)
continue
const key = mention.reply_id ?? 'root'
const existing = map.get(key)
if (existing) {
if (!existing.includes(name))
existing.push(name)
}
else {
map.set(key, [name])
}
}
return map
}, [comment.mentions])
return (
<div
className='absolute z-50 w-[360px] max-w-[360px]'
@ -414,14 +426,14 @@ export const CommentThread: FC<CommentThreadProps> = memo(({
ref={messageListRef}
className='relative mt-2 flex-1 overflow-y-auto px-4 pb-4'
>
<div className='rounded-lg py-2 pl-1 transition-colors hover:bg-components-panel-on-panel-item-bg-hover'>
<div className='-mx-4 rounded-lg px-4 py-2 transition-colors hover:bg-components-panel-on-panel-item-bg-hover'>
<ThreadMessage
authorId={comment.created_by_account?.id || ''}
authorName={comment.created_by_account?.name || t('workflow.comments.fallback.user')}
avatarUrl={comment.created_by_account?.avatar_url || null}
createdAt={comment.created_at}
content={comment.content}
mentionedNames={mentionsByTarget.get('root')}
mentionableNames={mentionableNames}
/>
</div>
{replies.length > 0 && (
@ -432,7 +444,7 @@ export const CommentThread: FC<CommentThreadProps> = memo(({
return (
<div
key={reply.id}
className='group relative rounded-lg py-2 pl-1 transition-colors hover:bg-components-panel-on-panel-item-bg-hover'
className='group relative -mx-4 rounded-lg px-4 py-2 transition-colors hover:bg-components-panel-on-panel-item-bg-hover'
>
{isOwnReply && !isReplyEditing && (
<PortalToFollowElem
@ -468,7 +480,7 @@ export const CommentThread: FC<CommentThreadProps> = memo(({
</PortalToFollowElemTrigger>
</div>
<PortalToFollowElemContent
className='z-[100] w-36 rounded-xl border border-components-panel-border bg-components-panel-bg-blur shadow-lg backdrop-blur-[10px]'
className='z-[100] w-36 rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur shadow-lg backdrop-blur-[10px]'
data-reply-menu
>
{/* Menu buttons - hidden when showing delete confirm */}
@ -519,19 +531,31 @@ export const CommentThread: FC<CommentThreadProps> = memo(({
</PortalToFollowElem>
)}
{isReplyEditing ? (
<div className='rounded-lg border border-components-chat-input-border bg-components-panel-bg-blur px-3 py-2 shadow-sm'>
<MentionInput
value={editingReply?.content ?? ''}
onChange={newContent => setEditingReply(prev => prev ? { ...prev, content: newContent } : prev)}
onSubmit={handleEditSubmit}
onCancel={handleCancelEdit}
placeholder={t('workflow.comments.placeholder.editReply')}
disabled={loading}
loading={replyUpdating}
isEditing={true}
className="system-sm-regular"
autoFocus
/>
<div className='flex gap-3 pt-1'>
<div className='shrink-0'>
<Avatar
name={reply.created_by_account?.name || t('workflow.comments.fallback.user')}
avatar={reply.created_by_account?.avatar_url || null}
size={24}
className='h-8 w-8 rounded-full'
/>
</div>
<div className='min-w-0 flex-1'>
<div className='rounded-xl border border-components-chat-input-border bg-components-panel-bg-blur p-1 shadow-md backdrop-blur-[10px]'>
<MentionInput
value={editingReply?.content ?? ''}
onChange={newContent => setEditingReply(prev => prev ? { ...prev, content: newContent } : prev)}
onSubmit={handleEditSubmit}
onCancel={handleCancelEdit}
placeholder={t('workflow.comments.placeholder.editReply')}
disabled={loading}
loading={replyUpdating || isSubmittingEdit}
isEditing={true}
className="system-sm-regular"
autoFocus
/>
</div>
</div>
</div>
) : (
<ThreadMessage
@ -540,7 +564,7 @@ export const CommentThread: FC<CommentThreadProps> = memo(({
avatarUrl={reply.created_by_account?.avatar_url || null}
createdAt={reply.created_at}
content={reply.content}
mentionedNames={mentionsByTarget.get(reply.id)}
mentionableNames={mentionableNames}
/>
)}
</div>

View File

@ -0,0 +1,94 @@
import type { ComponentType } from 'react'
import { BlockEnum } from '../types'
import StartNode from './start/node'
import StartPanel from './start/panel'
import EndNode from './end/node'
import EndPanel from './end/panel'
import AnswerNode from './answer/node'
import AnswerPanel from './answer/panel'
import LLMNode from './llm/node'
import LLMPanel from './llm/panel'
import KnowledgeRetrievalNode from './knowledge-retrieval/node'
import KnowledgeRetrievalPanel from './knowledge-retrieval/panel'
import QuestionClassifierNode from './question-classifier/node'
import QuestionClassifierPanel from './question-classifier/panel'
import IfElseNode from './if-else/node'
import IfElsePanel from './if-else/panel'
import CodeNode from './code/node'
import CodePanel from './code/panel'
import TemplateTransformNode from './template-transform/node'
import TemplateTransformPanel from './template-transform/panel'
import HttpNode from './http/node'
import HttpPanel from './http/panel'
import ToolNode from './tool/node'
import ToolPanel from './tool/panel'
import VariableAssignerNode from './variable-assigner/node'
import VariableAssignerPanel from './variable-assigner/panel'
import AssignerNode from './assigner/node'
import AssignerPanel from './assigner/panel'
import ParameterExtractorNode from './parameter-extractor/node'
import ParameterExtractorPanel from './parameter-extractor/panel'
import IterationNode from './iteration/node'
import IterationPanel from './iteration/panel'
import LoopNode from './loop/node'
import LoopPanel from './loop/panel'
import DocExtractorNode from './document-extractor/node'
import DocExtractorPanel from './document-extractor/panel'
import ListFilterNode from './list-operator/node'
import ListFilterPanel from './list-operator/panel'
import AgentNode from './agent/node'
import AgentPanel from './agent/panel'
import DataSourceNode from './data-source/node'
import DataSourcePanel from './data-source/panel'
import KnowledgeBaseNode from './knowledge-base/node'
import KnowledgeBasePanel from './knowledge-base/panel'
export const NodeComponentMap: Record<string, ComponentType<any>> = {
[BlockEnum.Start]: StartNode,
[BlockEnum.End]: EndNode,
[BlockEnum.Answer]: AnswerNode,
[BlockEnum.LLM]: LLMNode,
[BlockEnum.KnowledgeRetrieval]: KnowledgeRetrievalNode,
[BlockEnum.QuestionClassifier]: QuestionClassifierNode,
[BlockEnum.IfElse]: IfElseNode,
[BlockEnum.Code]: CodeNode,
[BlockEnum.TemplateTransform]: TemplateTransformNode,
[BlockEnum.HttpRequest]: HttpNode,
[BlockEnum.Tool]: ToolNode,
[BlockEnum.VariableAssigner]: VariableAssignerNode,
[BlockEnum.Assigner]: AssignerNode,
[BlockEnum.VariableAggregator]: VariableAssignerNode,
[BlockEnum.ParameterExtractor]: ParameterExtractorNode,
[BlockEnum.Iteration]: IterationNode,
[BlockEnum.Loop]: LoopNode,
[BlockEnum.DocExtractor]: DocExtractorNode,
[BlockEnum.ListFilter]: ListFilterNode,
[BlockEnum.Agent]: AgentNode,
[BlockEnum.DataSource]: DataSourceNode,
[BlockEnum.KnowledgeBase]: KnowledgeBaseNode,
}
export const PanelComponentMap: Record<string, ComponentType<any>> = {
[BlockEnum.Start]: StartPanel,
[BlockEnum.End]: EndPanel,
[BlockEnum.Answer]: AnswerPanel,
[BlockEnum.LLM]: LLMPanel,
[BlockEnum.KnowledgeRetrieval]: KnowledgeRetrievalPanel,
[BlockEnum.QuestionClassifier]: QuestionClassifierPanel,
[BlockEnum.IfElse]: IfElsePanel,
[BlockEnum.Code]: CodePanel,
[BlockEnum.TemplateTransform]: TemplateTransformPanel,
[BlockEnum.HttpRequest]: HttpPanel,
[BlockEnum.Tool]: ToolPanel,
[BlockEnum.VariableAssigner]: VariableAssignerPanel,
[BlockEnum.VariableAggregator]: VariableAssignerPanel,
[BlockEnum.Assigner]: AssignerPanel,
[BlockEnum.ParameterExtractor]: ParameterExtractorPanel,
[BlockEnum.Iteration]: IterationPanel,
[BlockEnum.Loop]: LoopPanel,
[BlockEnum.DocExtractor]: DocExtractorPanel,
[BlockEnum.ListFilter]: ListFilterPanel,
[BlockEnum.Agent]: AgentPanel,
[BlockEnum.DataSource]: DataSourcePanel,
[BlockEnum.KnowledgeBase]: KnowledgeBasePanel,
}
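A minimal sketch of how these maps are typically consumed, assuming hypothetical resolver helpers; only BlockEnum, NodeComponentMap and PanelComponentMap come from the file above, and the helper names are illustrative rather than part of this commit.

import type { ComponentType } from 'react'
import { BlockEnum } from '../types'
import { NodeComponentMap, PanelComponentMap } from './components'

// Hypothetical resolvers: look up the concrete node/panel component for a block type,
// returning undefined so callers can fall back to a generic base component.
export const resolveNodeComponent = (type: BlockEnum): ComponentType<any> | undefined =>
  NodeComponentMap[type]

export const resolvePanelComponent = (type: BlockEnum): ComponentType<any> | undefined =>
  PanelComponentMap[type]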

View File

@ -1,101 +1,5 @@
import type { ComponentType } from 'react'
import { BlockEnum } from '../types'
import StartNode from './start/node'
import StartPanel from './start/panel'
import EndNode from './end/node'
import EndPanel from './end/panel'
import AnswerNode from './answer/node'
import AnswerPanel from './answer/panel'
import LLMNode from './llm/node'
import LLMPanel from './llm/panel'
import KnowledgeRetrievalNode from './knowledge-retrieval/node'
import KnowledgeRetrievalPanel from './knowledge-retrieval/panel'
import QuestionClassifierNode from './question-classifier/node'
import QuestionClassifierPanel from './question-classifier/panel'
import IfElseNode from './if-else/node'
import IfElsePanel from './if-else/panel'
import CodeNode from './code/node'
import CodePanel from './code/panel'
import TemplateTransformNode from './template-transform/node'
import TemplateTransformPanel from './template-transform/panel'
import HttpNode from './http/node'
import HttpPanel from './http/panel'
import ToolNode from './tool/node'
import ToolPanel from './tool/panel'
import VariableAssignerNode from './variable-assigner/node'
import VariableAssignerPanel from './variable-assigner/panel'
import AssignerNode from './assigner/node'
import AssignerPanel from './assigner/panel'
import ParameterExtractorNode from './parameter-extractor/node'
import ParameterExtractorPanel from './parameter-extractor/panel'
import IterationNode from './iteration/node'
import IterationPanel from './iteration/panel'
import LoopNode from './loop/node'
import LoopPanel from './loop/panel'
import DocExtractorNode from './document-extractor/node'
import DocExtractorPanel from './document-extractor/panel'
import ListFilterNode from './list-operator/node'
import ListFilterPanel from './list-operator/panel'
import AgentNode from './agent/node'
import AgentPanel from './agent/panel'
import DataSourceNode from './data-source/node'
import DataSourcePanel from './data-source/panel'
import KnowledgeBaseNode from './knowledge-base/node'
import KnowledgeBasePanel from './knowledge-base/panel'
import { TransferMethod } from '@/types/app'
export const NodeComponentMap: Record<string, ComponentType<any>> = {
[BlockEnum.Start]: StartNode,
[BlockEnum.End]: EndNode,
[BlockEnum.Answer]: AnswerNode,
[BlockEnum.LLM]: LLMNode,
[BlockEnum.KnowledgeRetrieval]: KnowledgeRetrievalNode,
[BlockEnum.QuestionClassifier]: QuestionClassifierNode,
[BlockEnum.IfElse]: IfElseNode,
[BlockEnum.Code]: CodeNode,
[BlockEnum.TemplateTransform]: TemplateTransformNode,
[BlockEnum.HttpRequest]: HttpNode,
[BlockEnum.Tool]: ToolNode,
[BlockEnum.VariableAssigner]: VariableAssignerNode,
[BlockEnum.Assigner]: AssignerNode,
[BlockEnum.VariableAggregator]: VariableAssignerNode,
[BlockEnum.ParameterExtractor]: ParameterExtractorNode,
[BlockEnum.Iteration]: IterationNode,
[BlockEnum.Loop]: LoopNode,
[BlockEnum.DocExtractor]: DocExtractorNode,
[BlockEnum.ListFilter]: ListFilterNode,
[BlockEnum.Agent]: AgentNode,
[BlockEnum.DataSource]: DataSourceNode,
[BlockEnum.KnowledgeBase]: KnowledgeBaseNode,
}
export const PanelComponentMap: Record<string, ComponentType<any>> = {
[BlockEnum.Start]: StartPanel,
[BlockEnum.End]: EndPanel,
[BlockEnum.Answer]: AnswerPanel,
[BlockEnum.LLM]: LLMPanel,
[BlockEnum.KnowledgeRetrieval]: KnowledgeRetrievalPanel,
[BlockEnum.QuestionClassifier]: QuestionClassifierPanel,
[BlockEnum.IfElse]: IfElsePanel,
[BlockEnum.Code]: CodePanel,
[BlockEnum.TemplateTransform]: TemplateTransformPanel,
[BlockEnum.HttpRequest]: HttpPanel,
[BlockEnum.Tool]: ToolPanel,
[BlockEnum.VariableAssigner]: VariableAssignerPanel,
[BlockEnum.VariableAggregator]: VariableAssignerPanel,
[BlockEnum.Assigner]: AssignerPanel,
[BlockEnum.ParameterExtractor]: ParameterExtractorPanel,
[BlockEnum.Iteration]: IterationPanel,
[BlockEnum.Loop]: LoopPanel,
[BlockEnum.DocExtractor]: DocExtractorPanel,
[BlockEnum.ListFilter]: ListFilterPanel,
[BlockEnum.Agent]: AgentPanel,
[BlockEnum.DataSource]: DataSourcePanel,
[BlockEnum.KnowledgeBase]: KnowledgeBasePanel,
}
export const CUSTOM_NODE_TYPE = 'custom'
export const FILE_TYPE_OPTIONS = [
{ value: 'image', i18nKey: 'image' },
{ value: 'document', i18nKey: 'doc' },

View File

@ -8,7 +8,7 @@ import { CUSTOM_NODE } from '../constants'
import {
NodeComponentMap,
PanelComponentMap,
} from './constants'
} from './components'
import BaseNode from './_base/node'
import BasePanel from './_base/components/workflow-panel'

View File

@ -29,7 +29,7 @@ const ChunkStructure = ({
<Field
fieldTitleProps={{
title: t('workflow.nodes.knowledgeBase.chunkStructure'),
tooltip: t('workflow.nodes.knowledgeBase.chunkStructure'),
tooltip: t('workflow.nodes.knowledgeBase.chunkStructureTip.message'),
operation: chunkStructure && (
<Selector
options={options}

View File

@ -55,7 +55,7 @@
"@lexical/react": "^0.36.2",
"@lexical/selection": "^0.36.2",
"@lexical/text": "^0.36.2",
"@lexical/utils": "^0.36.2",
"@lexical/utils": "^0.37.0",
"@monaco-editor/react": "^4.6.0",
"@octokit/core": "^6.1.2",
"@octokit/request-error": "^6.1.5",
@ -192,7 +192,7 @@
"globals": "^15.11.0",
"husky": "^9.1.6",
"jest": "^29.7.0",
"knip": "^5.64.1",
"knip": "^5.64.3",
"lint-staged": "^15.2.10",
"lodash": "^4.17.21",
"magicast": "^0.3.4",

View File

@ -86,8 +86,8 @@ importers:
specifier: ^0.36.2
version: 0.36.2
'@lexical/utils':
specifier: ^0.36.2
version: 0.36.2
specifier: ^0.37.0
version: 0.37.0
'@monaco-editor/react':
specifier: ^4.6.0
version: 4.7.0(monaco-editor@0.52.2)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)
@ -492,8 +492,8 @@ importers:
specifier: ^29.7.0
version: 29.7.0(@types/node@18.15.0)(ts-node@10.9.2(@types/node@18.15.0)(typescript@5.8.3))
knip:
specifier: ^5.64.1
version: 5.64.1(@types/node@18.15.0)(typescript@5.8.3)
specifier: ^5.64.3
version: 5.64.3(@types/node@18.15.0)(typescript@5.8.3)
lint-staged:
specifier: ^15.2.10
version: 15.5.2
@ -1750,170 +1750,144 @@ packages:
resolution: {integrity: sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@img/sharp-libvips-linux-arm64@1.2.0':
resolution: {integrity: sha512-RXwd0CgG+uPRX5YYrkzKyalt2OJYRiJQ8ED/fi1tq9WQW2jsQIn0tqrlR5l5dr/rjqq6AHAxURhj2DVjyQWSOA==}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@img/sharp-libvips-linux-arm@1.0.5':
resolution: {integrity: sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==}
cpu: [arm]
os: [linux]
libc: [glibc]
'@img/sharp-libvips-linux-arm@1.2.0':
resolution: {integrity: sha512-mWd2uWvDtL/nvIzThLq3fr2nnGfyr/XMXlq8ZJ9WMR6PXijHlC3ksp0IpuhK6bougvQrchUAfzRLnbsen0Cqvw==}
cpu: [arm]
os: [linux]
libc: [glibc]
'@img/sharp-libvips-linux-ppc64@1.2.0':
resolution: {integrity: sha512-Xod/7KaDDHkYu2phxxfeEPXfVXFKx70EAFZ0qyUdOjCcxbjqyJOEUpDe6RIyaunGxT34Anf9ue/wuWOqBW2WcQ==}
cpu: [ppc64]
os: [linux]
libc: [glibc]
'@img/sharp-libvips-linux-s390x@1.0.4':
resolution: {integrity: sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==}
cpu: [s390x]
os: [linux]
libc: [glibc]
'@img/sharp-libvips-linux-s390x@1.2.0':
resolution: {integrity: sha512-eMKfzDxLGT8mnmPJTNMcjfO33fLiTDsrMlUVcp6b96ETbnJmd4uvZxVJSKPQfS+odwfVaGifhsB07J1LynFehw==}
cpu: [s390x]
os: [linux]
libc: [glibc]
'@img/sharp-libvips-linux-x64@1.0.4':
resolution: {integrity: sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==}
cpu: [x64]
os: [linux]
libc: [glibc]
'@img/sharp-libvips-linux-x64@1.2.0':
resolution: {integrity: sha512-ZW3FPWIc7K1sH9E3nxIGB3y3dZkpJlMnkk7z5tu1nSkBoCgw2nSRTFHI5pB/3CQaJM0pdzMF3paf9ckKMSE9Tg==}
cpu: [x64]
os: [linux]
libc: [glibc]
'@img/sharp-libvips-linuxmusl-arm64@1.0.4':
resolution: {integrity: sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==}
cpu: [arm64]
os: [linux]
libc: [musl]
'@img/sharp-libvips-linuxmusl-arm64@1.2.0':
resolution: {integrity: sha512-UG+LqQJbf5VJ8NWJ5Z3tdIe/HXjuIdo4JeVNADXBFuG7z9zjoegpzzGIyV5zQKi4zaJjnAd2+g2nna8TZvuW9Q==}
cpu: [arm64]
os: [linux]
libc: [musl]
'@img/sharp-libvips-linuxmusl-x64@1.0.4':
resolution: {integrity: sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==}
cpu: [x64]
os: [linux]
libc: [musl]
'@img/sharp-libvips-linuxmusl-x64@1.2.0':
resolution: {integrity: sha512-SRYOLR7CXPgNze8akZwjoGBoN1ThNZoqpOgfnOxmWsklTGVfJiGJoC/Lod7aNMGA1jSsKWM1+HRX43OP6p9+6Q==}
cpu: [x64]
os: [linux]
libc: [musl]
'@img/sharp-linux-arm64@0.33.5':
resolution: {integrity: sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@img/sharp-linux-arm64@0.34.3':
resolution: {integrity: sha512-QdrKe3EvQrqwkDrtuTIjI0bu6YEJHTgEeqdzI3uWJOH6G1O8Nl1iEeVYRGdj1h5I21CqxSvQp1Yv7xeU3ZewbA==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@img/sharp-linux-arm@0.33.5':
resolution: {integrity: sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm]
os: [linux]
libc: [glibc]
'@img/sharp-linux-arm@0.34.3':
resolution: {integrity: sha512-oBK9l+h6KBN0i3dC8rYntLiVfW8D8wH+NPNT3O/WBHeW0OQWCjfWksLUaPidsrDKpJgXp3G3/hkmhptAW0I3+A==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm]
os: [linux]
libc: [glibc]
'@img/sharp-linux-ppc64@0.34.3':
resolution: {integrity: sha512-GLtbLQMCNC5nxuImPR2+RgrviwKwVql28FWZIW1zWruy6zLgA5/x2ZXk3mxj58X/tszVF69KK0Is83V8YgWhLA==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [ppc64]
os: [linux]
libc: [glibc]
'@img/sharp-linux-s390x@0.33.5':
resolution: {integrity: sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [s390x]
os: [linux]
libc: [glibc]
'@img/sharp-linux-s390x@0.34.3':
resolution: {integrity: sha512-3gahT+A6c4cdc2edhsLHmIOXMb17ltffJlxR0aC2VPZfwKoTGZec6u5GrFgdR7ciJSsHT27BD3TIuGcuRT0KmQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [s390x]
os: [linux]
libc: [glibc]
'@img/sharp-linux-x64@0.33.5':
resolution: {integrity: sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [x64]
os: [linux]
libc: [glibc]
'@img/sharp-linux-x64@0.34.3':
resolution: {integrity: sha512-8kYso8d806ypnSq3/Ly0QEw90V5ZoHh10yH0HnrzOCr6DKAPI6QVHvwleqMkVQ0m+fc7EH8ah0BB0QPuWY6zJQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [x64]
os: [linux]
libc: [glibc]
'@img/sharp-linuxmusl-arm64@0.33.5':
resolution: {integrity: sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm64]
os: [linux]
libc: [musl]
'@img/sharp-linuxmusl-arm64@0.34.3':
resolution: {integrity: sha512-vAjbHDlr4izEiXM1OTggpCcPg9tn4YriK5vAjowJsHwdBIdx0fYRsURkxLG2RLm9gyBq66gwtWI8Gx0/ov+JKQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [arm64]
os: [linux]
libc: [musl]
'@img/sharp-linuxmusl-x64@0.33.5':
resolution: {integrity: sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [x64]
os: [linux]
libc: [musl]
'@img/sharp-linuxmusl-x64@0.34.3':
resolution: {integrity: sha512-gCWUn9547K5bwvOn9l5XGAEjVTTRji4aPTqLzGXHvIr6bIDZKNTA34seMPgM0WmSf+RYBH411VavCejp3PkOeQ==}
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
cpu: [x64]
os: [linux]
libc: [musl]
'@img/sharp-wasm32@0.33.5':
resolution: {integrity: sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==}
@ -2058,6 +2032,9 @@ packages:
'@lexical/clipboard@0.36.2':
resolution: {integrity: sha512-l7z52jltlMz1HmJRmG7ZdxySPjheRRxdV/75QEnzalMtqfLPgh4G5IpycISjbX+95PgEaC6rXbcjPix0CyHDJg==}
'@lexical/clipboard@0.37.0':
resolution: {integrity: sha512-hRwASFX/ilaI5r8YOcZuQgONFshRgCPfdxfofNL7uruSFYAO6LkUhsjzZwUgf0DbmCJmbBADFw15FSthgCUhGA==}
'@lexical/code@0.36.2':
resolution: {integrity: sha512-dfS62rNo3uKwNAJQ39zC+8gYX0k8UAoW7u+JPIqx+K2VPukZlvpsPLNGft15pdWBkHc7Pv+o9gJlB6gGv+EBfA==}
@ -2073,6 +2050,9 @@ packages:
'@lexical/extension@0.36.2':
resolution: {integrity: sha512-NWxtqMFMzScq4Eemqp1ST2KREIfj57fUbn7qHv+mMnYgQZK4iIhrHKo5klonxi1oBURcxUZMIbdtH7MJ4BdisA==}
'@lexical/extension@0.37.0':
resolution: {integrity: sha512-Z58f2tIdz9bn8gltUu5cVg37qROGha38dUZv20gI2GeNugXAkoPzJYEcxlI1D/26tkevJ/7VaFUr9PTk+iKmaA==}
'@lexical/hashtag@0.36.2':
resolution: {integrity: sha512-WdmKtzXFcahQT3ShFDeHF6LCR5C8yvFCj3ImI09rZwICrYeonbMrzsBUxS1joBz0HQ+ufF9Tx+RxLvGWx6WxzQ==}
@ -2082,12 +2062,18 @@ packages:
'@lexical/html@0.36.2':
resolution: {integrity: sha512-fgqALzgKnoy93G0yFyYD4C4qJTSMZyUt4JE5kj/POFwWNOnXThIqJhQGwBvH/ibImpIfOeds2TrSr8PbStlrNg==}
'@lexical/html@0.37.0':
resolution: {integrity: sha512-oTsBc45eL8/lmF7fqGR+UCjrJYP04gumzf5nk4TczrxWL2pM4GIMLLKG1mpQI2H1MDiRLzq3T/xdI7Gh74z7Zw==}
'@lexical/link@0.36.2':
resolution: {integrity: sha512-Zb+DeHA1po8VMiOAAXsBmAHhfWmQttsUkI5oiZUmOXJruRuQ2rVr01NoxHpoEpLwHOABVNzD3PMbwov+g3c7lg==}
'@lexical/list@0.36.2':
resolution: {integrity: sha512-JpaIaE0lgNUrAR7iaCaIoETcCKG9EvZjM3G71VxiexTs7PltmEMq36LUlO2goafWurP7knG2rUpVnTcuSbYYeA==}
'@lexical/list@0.37.0':
resolution: {integrity: sha512-AOC6yAA3mfNvJKbwo+kvAbPJI+13yF2ISA65vbA578CugvJ08zIVgM+pSzxquGhD0ioJY3cXVW7+gdkCP1qu5g==}
'@lexical/mark@0.36.2':
resolution: {integrity: sha512-n0MNXtGH+1i43hglgHjpQV0093HmIiFR7Budg2BJb8ZNzO1KZRqeXAHlA5ZzJ698FkAnS4R5bqG9tZ0JJHgAuA==}
@ -2115,15 +2101,24 @@ packages:
'@lexical/selection@0.36.2':
resolution: {integrity: sha512-n96joW3HCKBmPeESR172BxVE+m8V9SdidQm4kKb9jOZ1Ota+tnam2386TeI6795TWwgjDQJPK3HZNKcX6Gb+Bg==}
'@lexical/selection@0.37.0':
resolution: {integrity: sha512-Lix1s2r71jHfsTEs4q/YqK2s3uXKOnyA3fd1VDMWysO+bZzRwEO5+qyDvENZ0WrXSDCnlibNFV1HttWX9/zqyw==}
'@lexical/table@0.36.2':
resolution: {integrity: sha512-96rNNPiVbC65i+Jn1QzIsehCS7UVUc69ovrh9Bt4+pXDebZSdZai153Q7RUq8q3AQ5ocK4/SA2kLQfMu0grj3Q==}
'@lexical/table@0.37.0':
resolution: {integrity: sha512-g7S8ml8kIujEDLWlzYKETgPCQ2U9oeWqdytRuHjHGi/rjAAGHSej5IRqTPIMxNP3VVQHnBoQ+Y9hBtjiuddhgQ==}
'@lexical/text@0.36.2':
resolution: {integrity: sha512-IbbqgRdMAD6Uk9b2+qSVoy+8RVcczrz6OgXvg39+EYD+XEC7Rbw7kDTWzuNSJJpP7vxSO8YDZSaIlP5gNH3qKA==}
'@lexical/utils@0.36.2':
resolution: {integrity: sha512-P9+t2Ob10YNGYT/PWEER+1EqH8SAjCNRn+7SBvKbr0IdleGF2JvzbJwAWaRwZs1c18P11XdQZ779dGvWlfwBIw==}
'@lexical/utils@0.37.0':
resolution: {integrity: sha512-CFp4diY/kR5RqhzQSl/7SwsMod1sgLpI1FBifcOuJ6L/S6YywGpEB4B7aV5zqW21A/jU2T+2NZtxSUn6S+9gMg==}
'@lexical/yjs@0.36.2':
resolution: {integrity: sha512-gZ66Mw+uKXTO8KeX/hNKAinXbFg3gnNYraG76lBXCwb/Ka3q34upIY9FUeGOwGVaau3iIDQhE49I+6MugAX2FQ==}
peerDependencies:
@ -2203,28 +2198,24 @@ packages:
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@next/swc-linux-arm64-musl@15.5.4':
resolution: {integrity: sha512-TOK7iTxmXFc45UrtKqWdZ1shfxuL4tnVAOuuJK4S88rX3oyVV4ZkLjtMT85wQkfBrOOvU55aLty+MV8xmcJR8A==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
libc: [musl]
'@next/swc-linux-x64-gnu@15.5.4':
resolution: {integrity: sha512-7HKolaj+481FSW/5lL0BcTkA4Ueam9SPYWyN/ib/WGAFZf0DGAN8frNpNZYFHtM4ZstrHZS3LY3vrwlIQfsiMA==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
libc: [glibc]
'@next/swc-linux-x64-musl@15.5.4':
resolution: {integrity: sha512-nlQQ6nfgN0nCO/KuyEUwwOdwQIGjOs4WNMjEUtpIQJPR2NUfmGpW2wkJln1d4nJ7oUzd1g4GivH5GoEPBgfsdw==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
libc: [musl]
'@next/swc-win32-arm64-msvc@15.5.4':
resolution: {integrity: sha512-PcR2bN7FlM32XM6eumklmyWLLbu2vs+D7nJX8OAIoWy69Kef8mfiN4e8TUv2KohprwifdpFKPzIP1njuCjD0YA==}
@ -2401,49 +2392,41 @@ packages:
resolution: {integrity: sha512-TWq+y2psMzbMtZB9USAq2bSA7NV1TMmh9lhAFbMGQ8Yp2YV4BRC/HilD6qF++efQl6shueGBFOv0LVe9BUXaIA==}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@oxc-resolver/binding-linux-arm64-musl@11.9.0':
resolution: {integrity: sha512-8WwGLfXk7yttc6rD6g53+RnYfX5B8xOot1ffthLn8oCXzVRO4cdChlmeHStxwLD/MWx8z8BGeyfyINNrsh9N2w==}
cpu: [arm64]
os: [linux]
libc: [musl]
'@oxc-resolver/binding-linux-ppc64-gnu@11.9.0':
resolution: {integrity: sha512-ZWiAXfan6actlSzayaFS/kYO2zD6k1k0fmLb1opbujXYMKepEnjjVOvKdzCIYR/zKzudqI39dGc+ywqVdsPIpQ==}
cpu: [ppc64]
os: [linux]
libc: [glibc]
'@oxc-resolver/binding-linux-riscv64-gnu@11.9.0':
resolution: {integrity: sha512-p9mCSb+Bym+eycNo9k+81wQ5SAE31E+/rtfbDmF4/7krPotkEjPsEBSc3rqunRwO+FtsUn7H68JLY7hlai49eQ==}
cpu: [riscv64]
os: [linux]
libc: [glibc]
'@oxc-resolver/binding-linux-riscv64-musl@11.9.0':
resolution: {integrity: sha512-/SePuVxgFhLPciRwsJ8kLVltr+rxh0b6riGFuoPnFXBbHFclKnjNIt3TfqzUj0/vOnslXw3cVGPpmtkm2TgCgg==}
cpu: [riscv64]
os: [linux]
libc: [musl]
'@oxc-resolver/binding-linux-s390x-gnu@11.9.0':
resolution: {integrity: sha512-zLuEjlYIzfnr1Ei2UZYQBbCTa/9deh+BEjO9rh1ai8BfEq4uj6RupTtNpgHfgAsEYdqOBVExw9EU1S6SW3RCAw==}
cpu: [s390x]
os: [linux]
libc: [glibc]
'@oxc-resolver/binding-linux-x64-gnu@11.9.0':
resolution: {integrity: sha512-cxdg73WG+aVlPu/k4lEQPRVOhWunYOUglW6OSzclZLJJAXZU0tSZ5ymKaqPRkfTsyNSAafj1cA1XYd+P9UxBgw==}
cpu: [x64]
os: [linux]
libc: [glibc]
'@oxc-resolver/binding-linux-x64-musl@11.9.0':
resolution: {integrity: sha512-sy5nkVdMvNgqcx9sIY7G6U9TYZUZC4cmMGw/wKhJNuuD2/HFGtbje62ttXSwBAbVbmJ2GgZ4ZUo/S1OMyU+/OA==}
cpu: [x64]
os: [linux]
libc: [musl]
'@oxc-resolver/binding-wasm32-wasi@11.9.0':
resolution: {integrity: sha512-dfi/a0Xh6o6nOLbJdaYuy7txncEcwkRHp9DGGZaAP7zxDiepkBZ6ewSJODQrWwhjVmMteXo+XFzEOMjsC7WUtQ==}
@ -2494,42 +2477,36 @@ packages:
engines: {node: '>= 10.0.0'}
cpu: [arm]
os: [linux]
libc: [glibc]
'@parcel/watcher-linux-arm-musl@2.5.1':
resolution: {integrity: sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==}
engines: {node: '>= 10.0.0'}
cpu: [arm]
os: [linux]
libc: [musl]
'@parcel/watcher-linux-arm64-glibc@2.5.1':
resolution: {integrity: sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==}
engines: {node: '>= 10.0.0'}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@parcel/watcher-linux-arm64-musl@2.5.1':
resolution: {integrity: sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==}
engines: {node: '>= 10.0.0'}
cpu: [arm64]
os: [linux]
libc: [musl]
'@parcel/watcher-linux-x64-glibc@2.5.1':
resolution: {integrity: sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==}
engines: {node: '>= 10.0.0'}
cpu: [x64]
os: [linux]
libc: [glibc]
'@parcel/watcher-linux-x64-musl@2.5.1':
resolution: {integrity: sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==}
engines: {node: '>= 10.0.0'}
cpu: [x64]
os: [linux]
libc: [musl]
'@parcel/watcher-win32-arm64@2.5.1':
resolution: {integrity: sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==}
@ -6190,8 +6167,8 @@ packages:
resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==}
engines: {node: '>=6'}
knip@5.64.1:
resolution: {integrity: sha512-80XnLsyeXuyxj1F4+NBtQFHxaRH0xWRw8EKwfQ6EkVZZ0bSz/kqqan08k/Qg8ajWsFPhFq+0S2RbLCBGIQtuOg==}
knip@5.64.3:
resolution: {integrity: sha512-P9dZetEZfSBwNBFwj55CAnPAMdzVLTTscWx6rdB8eBmPqXPji8F3L+hhWi+Xp+u9O6Xp2ClRDq2JENSK8Z04Qg==}
engines: {node: '>=18.18.0'}
hasBin: true
peerDependencies:
@ -6232,6 +6209,9 @@ packages:
lexical@0.36.2:
resolution: {integrity: sha512-gIDJCmSAhtxD7h95WK17Nz19wCZu92Zn0p1/R45X01S/KAsLCwEtVJ2fTvIJNFTyx3QNJTuGcm5mYgRMUwq8rg==}
lexical@0.37.0:
resolution: {integrity: sha512-r5VJR2TioQPAsZATfktnJFrGIiy6gjQN8b/+0a2u1d7/QTH7lhbB7byhGSvcq1iaa1TV/xcf/pFV55a5V5hTDQ==}
lib0@0.2.114:
resolution: {integrity: sha512-gcxmNFzA4hv8UYi8j43uPlQ7CGcyMJ2KQb5kZASw6SnAKAf10hK12i2fjrS3Cl/ugZa5Ui6WwIu1/6MIXiHttQ==}
engines: {node: '>=16'}
@ -10574,6 +10554,14 @@ snapshots:
'@lexical/utils': 0.36.2
lexical: 0.36.2
'@lexical/clipboard@0.37.0':
dependencies:
'@lexical/html': 0.37.0
'@lexical/list': 0.37.0
'@lexical/selection': 0.37.0
'@lexical/utils': 0.37.0
lexical: 0.37.0
'@lexical/code@0.36.2':
dependencies:
'@lexical/utils': 0.36.2
@ -10602,6 +10590,12 @@ snapshots:
'@preact/signals-core': 1.12.1
lexical: 0.36.2
'@lexical/extension@0.37.0':
dependencies:
'@lexical/utils': 0.37.0
'@preact/signals-core': 1.12.1
lexical: 0.37.0
'@lexical/hashtag@0.36.2':
dependencies:
'@lexical/text': 0.36.2
@ -10620,6 +10614,12 @@ snapshots:
'@lexical/utils': 0.36.2
lexical: 0.36.2
'@lexical/html@0.37.0':
dependencies:
'@lexical/selection': 0.37.0
'@lexical/utils': 0.37.0
lexical: 0.37.0
'@lexical/link@0.36.2':
dependencies:
'@lexical/extension': 0.36.2
@ -10633,6 +10633,13 @@ snapshots:
'@lexical/utils': 0.36.2
lexical: 0.36.2
'@lexical/list@0.37.0':
dependencies:
'@lexical/extension': 0.37.0
'@lexical/selection': 0.37.0
'@lexical/utils': 0.37.0
lexical: 0.37.0
'@lexical/mark@0.36.2':
dependencies:
'@lexical/utils': 0.36.2
@ -10702,6 +10709,10 @@ snapshots:
dependencies:
lexical: 0.36.2
'@lexical/selection@0.37.0':
dependencies:
lexical: 0.37.0
'@lexical/table@0.36.2':
dependencies:
'@lexical/clipboard': 0.36.2
@ -10709,6 +10720,13 @@ snapshots:
'@lexical/utils': 0.36.2
lexical: 0.36.2
'@lexical/table@0.37.0':
dependencies:
'@lexical/clipboard': 0.37.0
'@lexical/extension': 0.37.0
'@lexical/utils': 0.37.0
lexical: 0.37.0
'@lexical/text@0.36.2':
dependencies:
lexical: 0.36.2
@ -10720,6 +10738,13 @@ snapshots:
'@lexical/table': 0.36.2
lexical: 0.36.2
'@lexical/utils@0.37.0':
dependencies:
'@lexical/list': 0.37.0
'@lexical/selection': 0.37.0
'@lexical/table': 0.37.0
lexical: 0.37.0
'@lexical/yjs@0.36.2(yjs@13.6.27)':
dependencies:
'@lexical/offset': 0.36.2
@ -15558,7 +15583,7 @@ snapshots:
kleur@3.0.3: {}
knip@5.64.1(@types/node@18.15.0)(typescript@5.8.3):
knip@5.64.3(@types/node@18.15.0)(typescript@5.8.3):
dependencies:
'@nodelib/fs.walk': 1.2.8
'@types/node': 18.15.0
@ -15609,6 +15634,8 @@ snapshots:
lexical@0.36.2: {}
lexical@0.37.0: {}
lib0@0.2.114:
dependencies:
isomorphic.js: 0.2.5