diff --git a/.github/workflows/autofix.yml b/.github/workflows/autofix.yml new file mode 100644 index 0000000000..5e290c5d02 --- /dev/null +++ b/.github/workflows/autofix.yml @@ -0,0 +1,27 @@ +name: autofix.ci +on: + workflow_call: + pull_request: + push: + branches: [ "main" ] +permissions: + contents: read + +jobs: + autofix: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + # Use uv to ensure we have the same ruff version in CI and locally. + - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f + - run: | + cd api + uv sync --dev + # Fix lint errors + uv run ruff check --fix-only . + # Format code + uv run ruff format . + + - uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27 + diff --git a/api/.env.example b/api/.env.example index daa0df535b..80b1c12cd8 100644 --- a/api/.env.example +++ b/api/.env.example @@ -471,6 +471,16 @@ APP_MAX_ACTIVE_REQUESTS=0 # Celery beat configuration CELERY_BEAT_SCHEDULER_TIME=1 +# Celery schedule tasks configuration +ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false +ENABLE_CLEAN_UNUSED_DATASETS_TASK=false +ENABLE_CREATE_TIDB_SERVERLESS_TASK=false +ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false +ENABLE_CLEAN_MESSAGES=false +ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false +ENABLE_DATASETS_QUEUE_MONITOR=false +ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true + # Position configuration POSITION_TOOL_PINS= POSITION_TOOL_INCLUDES= diff --git a/api/README.md b/api/README.md index 9308d5dc44..6ab923070e 100644 --- a/api/README.md +++ b/api/README.md @@ -74,7 +74,12 @@ 10. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service. 
```bash - uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion + uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin + ``` + + Additionally, if you want to debug the celery scheduled tasks, you can use the following command in another terminal: + ```bash + uv run celery -A app.celery beat + ``` ## Testing diff --git a/api/commands.py b/api/commands.py index eec5dda26d..56659a8228 100644 --- a/api/commands.py +++ b/api/commands.py @@ -51,7 +51,7 @@ def reset_password(email, new_password, password_confirm): click.echo(click.style("Passwords do not match.", fg="red")) return - account = db.session.query(Account).filter(Account.email == email).one_or_none() + account = db.session.query(Account).where(Account.email == email).one_or_none() if not account: click.echo(click.style("Account not found for email: {}".format(email), fg="red")) @@ -90,7 +90,7 @@ def reset_email(email, new_email, email_confirm): click.echo(click.style("New emails do not match.", fg="red")) return - account = db.session.query(Account).filter(Account.email == email).one_or_none() + account = db.session.query(Account).where(Account.email == email).one_or_none() if not account: click.echo(click.style("Account not found for email: {}".format(email), fg="red")) @@ -137,8 +137,8 @@ def reset_encrypt_key_pair(): tenant.encrypt_public_key = generate_key_pair(tenant.id) - db.session.query(Provider).filter(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete() - db.session.query(ProviderModel).filter(ProviderModel.tenant_id == tenant.id).delete() + db.session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete() + db.session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete() db.session.commit() click.echo( @@ -173,7 +173,7 @@ def migrate_annotation_vector_database(): per_page = 50 apps = ( 
db.session.query(App) - .filter(App.status == "normal") + .where(App.status == "normal") .order_by(App.created_at.desc()) .limit(per_page) .offset((page - 1) * per_page) @@ -193,7 +193,7 @@ def migrate_annotation_vector_database(): try: click.echo("Creating app annotation index: {}".format(app.id)) app_annotation_setting = ( - db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app.id).first() + db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first() ) if not app_annotation_setting: @@ -203,13 +203,13 @@ def migrate_annotation_vector_database(): # get dataset_collection_binding info dataset_collection_binding = ( db.session.query(DatasetCollectionBinding) - .filter(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id) + .where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id) .first() ) if not dataset_collection_binding: click.echo("App annotation collection binding not found: {}".format(app.id)) continue - annotations = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app.id).all() + annotations = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app.id).all() dataset = Dataset( id=app.id, tenant_id=app.tenant_id, @@ -306,7 +306,7 @@ def migrate_knowledge_vector_database(): while True: try: stmt = ( - select(Dataset).filter(Dataset.indexing_technique == "high_quality").order_by(Dataset.created_at.desc()) + select(Dataset).where(Dataset.indexing_technique == "high_quality").order_by(Dataset.created_at.desc()) ) datasets = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False) @@ -333,7 +333,7 @@ def migrate_knowledge_vector_database(): if dataset.collection_binding_id: dataset_collection_binding = ( db.session.query(DatasetCollectionBinding) - .filter(DatasetCollectionBinding.id == dataset.collection_binding_id) + .where(DatasetCollectionBinding.id == dataset.collection_binding_id) 
.one_or_none() ) if dataset_collection_binding: @@ -368,7 +368,7 @@ def migrate_knowledge_vector_database(): dataset_documents = ( db.session.query(DatasetDocument) - .filter( + .where( DatasetDocument.dataset_id == dataset.id, DatasetDocument.indexing_status == "completed", DatasetDocument.enabled == True, @@ -382,7 +382,7 @@ def migrate_knowledge_vector_database(): for dataset_document in dataset_documents: segments = ( db.session.query(DocumentSegment) - .filter( + .where( DocumentSegment.document_id == dataset_document.id, DocumentSegment.status == "completed", DocumentSegment.enabled == True, @@ -469,7 +469,7 @@ def convert_to_agent_apps(): app_id = str(i.id) if app_id not in proceeded_app_ids: proceeded_app_ids.append(app_id) - app = db.session.query(App).filter(App.id == app_id).first() + app = db.session.query(App).where(App.id == app_id).first() if app is not None: apps.append(app) @@ -484,7 +484,7 @@ def convert_to_agent_apps(): db.session.commit() # update conversation mode to agent - db.session.query(Conversation).filter(Conversation.app_id == app.id).update( + db.session.query(Conversation).where(Conversation.app_id == app.id).update( {Conversation.mode: AppMode.AGENT_CHAT.value} ) @@ -561,7 +561,7 @@ def old_metadata_migration(): try: stmt = ( select(DatasetDocument) - .filter(DatasetDocument.doc_metadata.is_not(None)) + .where(DatasetDocument.doc_metadata.is_not(None)) .order_by(DatasetDocument.created_at.desc()) ) documents = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False) @@ -579,7 +579,7 @@ def old_metadata_migration(): else: dataset_metadata = ( db.session.query(DatasetMetadata) - .filter(DatasetMetadata.dataset_id == document.dataset_id, DatasetMetadata.name == key) + .where(DatasetMetadata.dataset_id == document.dataset_id, DatasetMetadata.name == key) .first() ) if not dataset_metadata: @@ -603,7 +603,7 @@ def old_metadata_migration(): else: dataset_metadata_binding = ( 
db.session.query(DatasetMetadataBinding) # type: ignore - .filter( + .where( DatasetMetadataBinding.dataset_id == document.dataset_id, DatasetMetadataBinding.document_id == document.id, DatasetMetadataBinding.metadata_id == dataset_metadata.id, @@ -718,7 +718,7 @@ where sites.id is null limit 1000""" continue try: - app = db.session.query(App).filter(App.id == app_id).first() + app = db.session.query(App).where(App.id == app_id).first() if not app: print(f"App {app_id} not found") continue diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index f1d529355d..9f1646ea7d 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -832,6 +832,41 @@ class CeleryBeatConfig(BaseSettings): ) +class CeleryScheduleTasksConfig(BaseSettings): + ENABLE_CLEAN_EMBEDDING_CACHE_TASK: bool = Field( + description="Enable clean embedding cache task", + default=False, + ) + ENABLE_CLEAN_UNUSED_DATASETS_TASK: bool = Field( + description="Enable clean unused datasets task", + default=False, + ) + ENABLE_CREATE_TIDB_SERVERLESS_TASK: bool = Field( + description="Enable create tidb service job task", + default=False, + ) + ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK: bool = Field( + description="Enable update tidb service job status task", + default=False, + ) + ENABLE_CLEAN_MESSAGES: bool = Field( + description="Enable clean messages task", + default=False, + ) + ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: bool = Field( + description="Enable mail clean document notify task", + default=False, + ) + ENABLE_DATASETS_QUEUE_MONITOR: bool = Field( + description="Enable queue monitor task", + default=False, + ) + ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: bool = Field( + description="Enable check upgradable plugin task", + default=True, + ) + + class PositionConfig(BaseSettings): POSITION_PROVIDER_PINS: str = Field( description="Comma-separated list of pinned model providers", @@ -961,5 +996,6 @@ class FeatureConfig( # hosted services config 
HostedServiceConfig, CeleryBeatConfig, + CeleryScheduleTasksConfig, ): pass diff --git a/api/controllers/console/admin.py b/api/controllers/console/admin.py index f5257fae79..8a55197fb6 100644 --- a/api/controllers/console/admin.py +++ b/api/controllers/console/admin.py @@ -56,7 +56,7 @@ class InsertExploreAppListApi(Resource): parser.add_argument("position", type=int, required=True, nullable=False, location="json") args = parser.parse_args() - app = db.session.execute(select(App).filter(App.id == args["app_id"])).scalar_one_or_none() + app = db.session.execute(select(App).where(App.id == args["app_id"])).scalar_one_or_none() if not app: raise NotFound(f"App '{args['app_id']}' is not found") @@ -74,7 +74,7 @@ class InsertExploreAppListApi(Resource): with Session(db.engine) as session: recommended_app = session.execute( - select(RecommendedApp).filter(RecommendedApp.app_id == args["app_id"]) + select(RecommendedApp).where(RecommendedApp.app_id == args["app_id"]) ).scalar_one_or_none() if not recommended_app: @@ -117,21 +117,21 @@ class InsertExploreAppApi(Resource): def delete(self, app_id): with Session(db.engine) as session: recommended_app = session.execute( - select(RecommendedApp).filter(RecommendedApp.app_id == str(app_id)) + select(RecommendedApp).where(RecommendedApp.app_id == str(app_id)) ).scalar_one_or_none() if not recommended_app: return {"result": "success"}, 204 with Session(db.engine) as session: - app = session.execute(select(App).filter(App.id == recommended_app.app_id)).scalar_one_or_none() + app = session.execute(select(App).where(App.id == recommended_app.app_id)).scalar_one_or_none() if app: app.is_public = False with Session(db.engine) as session: installed_apps = session.execute( - select(InstalledApp).filter( + select(InstalledApp).where( InstalledApp.app_id == recommended_app.app_id, InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id, ) diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py index 
47c93a15c6..d7500c415c 100644 --- a/api/controllers/console/apikey.py +++ b/api/controllers/console/apikey.py @@ -61,7 +61,7 @@ class BaseApiKeyListResource(Resource): _get_resource(resource_id, current_user.current_tenant_id, self.resource_model) keys = ( db.session.query(ApiToken) - .filter(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id) + .where(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id) .all() ) return {"items": keys} @@ -76,7 +76,7 @@ class BaseApiKeyListResource(Resource): current_key_count = ( db.session.query(ApiToken) - .filter(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id) + .where(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id) .count() ) @@ -117,7 +117,7 @@ class BaseApiKeyResource(Resource): key = ( db.session.query(ApiToken) - .filter( + .where( getattr(ApiToken, self.resource_id_field) == resource_id, ApiToken.type == self.resource_type, ApiToken.id == api_key_id, @@ -128,7 +128,7 @@ class BaseApiKeyResource(Resource): if key is None: flask_restful.abort(404, message="API key not found") - db.session.query(ApiToken).filter(ApiToken.id == api_key_id).delete() + db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete() db.session.commit() return {"result": "success"}, 204 diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index 4eef9fed43..b5b6d1f75b 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -49,7 +49,7 @@ class CompletionConversationApi(Resource): query = db.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.mode == "completion") if args["keyword"]: - query = query.join(Message, Message.conversation_id == Conversation.id).filter( + query = query.join(Message, Message.conversation_id == 
Conversation.id).where( or_( Message.query.ilike("%{}%".format(args["keyword"])), Message.answer.ilike("%{}%".format(args["keyword"])), @@ -121,7 +121,7 @@ class CompletionConversationDetailApi(Resource): conversation = ( db.session.query(Conversation) - .filter(Conversation.id == conversation_id, Conversation.app_id == app_model.id) + .where(Conversation.id == conversation_id, Conversation.app_id == app_model.id) .first() ) @@ -181,7 +181,7 @@ class ChatConversationApi(Resource): Message.conversation_id == Conversation.id, ) .join(subquery, subquery.c.conversation_id == Conversation.id) - .filter( + .where( or_( Message.query.ilike(keyword_filter), Message.answer.ilike(keyword_filter), @@ -286,7 +286,7 @@ class ChatConversationDetailApi(Resource): conversation = ( db.session.query(Conversation) - .filter(Conversation.id == conversation_id, Conversation.app_id == app_model.id) + .where(Conversation.id == conversation_id, Conversation.app_id == app_model.id) .first() ) @@ -308,7 +308,7 @@ api.add_resource(ChatConversationDetailApi, "/apps//chat-conversati def _get_conversation(app_model, conversation_id): conversation = ( db.session.query(Conversation) - .filter(Conversation.id == conversation_id, Conversation.app_id == app_model.id) + .where(Conversation.id == conversation_id, Conversation.app_id == app_model.id) .first() ) diff --git a/api/controllers/console/app/mcp_server.py b/api/controllers/console/app/mcp_server.py index 503393f264..2344fd5acb 100644 --- a/api/controllers/console/app/mcp_server.py +++ b/api/controllers/console/app/mcp_server.py @@ -26,7 +26,7 @@ class AppMCPServerController(Resource): @get_app_model @marshal_with(app_server_fields) def get(self, app_model): - server = db.session.query(AppMCPServer).filter(AppMCPServer.app_id == app_model.id).first() + server = db.session.query(AppMCPServer).where(AppMCPServer.app_id == app_model.id).first() return server @setup_required @@ -73,7 +73,7 @@ class AppMCPServerController(Resource): 
parser.add_argument("parameters", type=dict, required=True, location="json") parser.add_argument("status", type=str, required=False, location="json") args = parser.parse_args() - server = db.session.query(AppMCPServer).filter(AppMCPServer.id == args["id"]).first() + server = db.session.query(AppMCPServer).where(AppMCPServer.id == args["id"]).first() if not server: raise NotFound() @@ -104,8 +104,8 @@ class AppMCPServerRefreshController(Resource): raise NotFound() server = ( db.session.query(AppMCPServer) - .filter(AppMCPServer.id == server_id) - .filter(AppMCPServer.tenant_id == current_user.current_tenant_id) + .where(AppMCPServer.id == server_id) + .where(AppMCPServer.tenant_id == current_user.current_tenant_id) .first() ) if not server: diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py index ea659f9f5b..5e79e8dece 100644 --- a/api/controllers/console/app/message.py +++ b/api/controllers/console/app/message.py @@ -56,7 +56,7 @@ class ChatMessageListApi(Resource): conversation = ( db.session.query(Conversation) - .filter(Conversation.id == args["conversation_id"], Conversation.app_id == app_model.id) + .where(Conversation.id == args["conversation_id"], Conversation.app_id == app_model.id) .first() ) @@ -66,7 +66,7 @@ class ChatMessageListApi(Resource): if args["first_id"]: first_message = ( db.session.query(Message) - .filter(Message.conversation_id == conversation.id, Message.id == args["first_id"]) + .where(Message.conversation_id == conversation.id, Message.id == args["first_id"]) .first() ) @@ -75,7 +75,7 @@ class ChatMessageListApi(Resource): history_messages = ( db.session.query(Message) - .filter( + .where( Message.conversation_id == conversation.id, Message.created_at < first_message.created_at, Message.id != first_message.id, @@ -87,7 +87,7 @@ class ChatMessageListApi(Resource): else: history_messages = ( db.session.query(Message) - .filter(Message.conversation_id == conversation.id) + 
.where(Message.conversation_id == conversation.id) .order_by(Message.created_at.desc()) .limit(args["limit"]) .all() @@ -98,7 +98,7 @@ class ChatMessageListApi(Resource): current_page_first_message = history_messages[-1] rest_count = ( db.session.query(Message) - .filter( + .where( Message.conversation_id == conversation.id, Message.created_at < current_page_first_message.created_at, Message.id != current_page_first_message.id, @@ -167,7 +167,7 @@ class MessageAnnotationCountApi(Resource): @account_initialization_required @get_app_model def get(self, app_model): - count = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_model.id).count() + count = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_model.id).count() return {"count": count} @@ -214,7 +214,7 @@ class MessageApi(Resource): def get(self, app_model, message_id): message_id = str(message_id) - message = db.session.query(Message).filter(Message.id == message_id, Message.app_id == app_model.id).first() + message = db.session.query(Message).where(Message.id == message_id, Message.app_id == app_model.id).first() if not message: raise NotFound("Message Not Exists.") diff --git a/api/controllers/console/app/model_config.py b/api/controllers/console/app/model_config.py index f30e3e893c..029138fb6b 100644 --- a/api/controllers/console/app/model_config.py +++ b/api/controllers/console/app/model_config.py @@ -42,7 +42,7 @@ class ModelConfigResource(Resource): if app_model.mode == AppMode.AGENT_CHAT.value or app_model.is_agent: # get original app model config original_app_model_config = ( - db.session.query(AppModelConfig).filter(AppModelConfig.id == app_model.app_model_config_id).first() + db.session.query(AppModelConfig).where(AppModelConfig.id == app_model.app_model_config_id).first() ) if original_app_model_config is None: raise ValueError("Original app model config not found") diff --git a/api/controllers/console/app/site.py b/api/controllers/console/app/site.py 
index 358a5e8cdb..03418f1dd2 100644 --- a/api/controllers/console/app/site.py +++ b/api/controllers/console/app/site.py @@ -49,7 +49,7 @@ class AppSite(Resource): if not current_user.is_editor: raise Forbidden() - site = db.session.query(Site).filter(Site.app_id == app_model.id).first() + site = db.session.query(Site).where(Site.app_id == app_model.id).first() if not site: raise NotFound @@ -93,7 +93,7 @@ class AppSiteAccessTokenReset(Resource): if not current_user.is_admin_or_owner: raise Forbidden() - site = db.session.query(Site).filter(Site.app_id == app_model.id).first() + site = db.session.query(Site).where(Site.app_id == app_model.id).first() if not site: raise NotFound diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py index 3322350e25..132dc1f96b 100644 --- a/api/controllers/console/app/wraps.py +++ b/api/controllers/console/app/wraps.py @@ -11,7 +11,7 @@ from models import App, AppMode def _load_app_model(app_id: str) -> Optional[App]: app_model = ( db.session.query(App) - .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") .first() ) return app_model diff --git a/api/controllers/console/auth/error.py b/api/controllers/console/auth/error.py index 8c5e23de58..1984339add 100644 --- a/api/controllers/console/auth/error.py +++ b/api/controllers/console/auth/error.py @@ -113,3 +113,9 @@ class MemberNotInTenantError(BaseHTTPException): error_code = "member_not_in_tenant" description = "The member is not in the workspace." code = 400 + + +class AccountInFreezeError(BaseHTTPException): + error_code = "account_in_freeze" + description = "This email is temporarily unavailable." 
+ code = 400 diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py index b49f8affc8..39f8ab5787 100644 --- a/api/controllers/console/datasets/data_source.py +++ b/api/controllers/console/datasets/data_source.py @@ -30,7 +30,7 @@ class DataSourceApi(Resource): # get workspace data source integrates data_source_integrates = ( db.session.query(DataSourceOauthBinding) - .filter( + .where( DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, DataSourceOauthBinding.disabled == False, ) @@ -171,7 +171,7 @@ class DataSourceNotionApi(Resource): page_id = str(page_id) with Session(db.engine) as session: data_source_binding = session.execute( - select(DataSourceOauthBinding).filter( + select(DataSourceOauthBinding).where( db.and_( DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, DataSourceOauthBinding.provider == "notion", diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 6d996ee353..14db6706f6 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -421,7 +421,7 @@ class DatasetIndexingEstimateApi(Resource): file_ids = args["info_list"]["file_info_list"]["file_ids"] file_details = ( db.session.query(UploadFile) - .filter(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id.in_(file_ids)) + .where(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id.in_(file_ids)) .all() ) @@ -526,14 +526,14 @@ class DatasetIndexingStatusApi(Resource): dataset_id = str(dataset_id) documents = ( db.session.query(Document) - .filter(Document.dataset_id == dataset_id, Document.tenant_id == current_user.current_tenant_id) + .where(Document.dataset_id == dataset_id, Document.tenant_id == current_user.current_tenant_id) .all() ) documents_status = [] for document in documents: completed_segments = ( db.session.query(DocumentSegment) - .filter( + .where( 
DocumentSegment.completed_at.isnot(None), DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment", @@ -542,7 +542,7 @@ class DatasetIndexingStatusApi(Resource): ) total_segments = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment") + .where(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment") .count() ) # Create a dictionary with document attributes and additional fields @@ -577,7 +577,7 @@ class DatasetApiKeyApi(Resource): def get(self): keys = ( db.session.query(ApiToken) - .filter(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id) + .where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id) .all() ) return {"items": keys} @@ -593,7 +593,7 @@ class DatasetApiKeyApi(Resource): current_key_count = ( db.session.query(ApiToken) - .filter(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id) + .where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id) .count() ) @@ -629,7 +629,7 @@ class DatasetApiDeleteApi(Resource): key = ( db.session.query(ApiToken) - .filter( + .where( ApiToken.tenant_id == current_user.current_tenant_id, ApiToken.type == self.resource_type, ApiToken.id == api_key_id, @@ -640,7 +640,7 @@ class DatasetApiDeleteApi(Resource): if key is None: flask_restful.abort(404, message="API key not found") - db.session.query(ApiToken).filter(ApiToken.id == api_key_id).delete() + db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete() db.session.commit() return {"result": "success"}, 204 diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 6e039d735b..b101f704de 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ 
b/api/controllers/console/datasets/datasets_document.py @@ -126,7 +126,7 @@ class GetProcessRuleApi(Resource): # get the latest process rule dataset_process_rule = ( db.session.query(DatasetProcessRule) - .filter(DatasetProcessRule.dataset_id == document.dataset_id) + .where(DatasetProcessRule.dataset_id == document.dataset_id) .order_by(DatasetProcessRule.created_at.desc()) .limit(1) .one_or_none() @@ -178,7 +178,7 @@ class DatasetDocumentListApi(Resource): if search: search = f"%{search}%" - query = query.filter(Document.name.like(search)) + query = query.where(Document.name.like(search)) if sort.startswith("-"): sort_logic = desc @@ -214,7 +214,7 @@ class DatasetDocumentListApi(Resource): for document in documents: completed_segments = ( db.session.query(DocumentSegment) - .filter( + .where( DocumentSegment.completed_at.isnot(None), DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment", @@ -223,7 +223,7 @@ class DatasetDocumentListApi(Resource): ) total_segments = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment") + .where(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment") .count() ) document.completed_segments = completed_segments @@ -419,7 +419,7 @@ class DocumentIndexingEstimateApi(DocumentResource): file = ( db.session.query(UploadFile) - .filter(UploadFile.tenant_id == document.tenant_id, UploadFile.id == file_id) + .where(UploadFile.tenant_id == document.tenant_id, UploadFile.id == file_id) .first() ) @@ -494,7 +494,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): file_id = data_source_info["upload_file_id"] file_detail = ( db.session.query(UploadFile) - .filter(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id == file_id) + .where(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id == file_id) .first() ) @@ -570,7 +570,7 @@ class 
DocumentBatchIndexingStatusApi(DocumentResource): for document in documents: completed_segments = ( db.session.query(DocumentSegment) - .filter( + .where( DocumentSegment.completed_at.isnot(None), DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment", @@ -579,7 +579,7 @@ class DocumentBatchIndexingStatusApi(DocumentResource): ) total_segments = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment") + .where(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment") .count() ) # Create a dictionary with document attributes and additional fields @@ -613,7 +613,7 @@ class DocumentIndexingStatusApi(DocumentResource): completed_segments = ( db.session.query(DocumentSegment) - .filter( + .where( DocumentSegment.completed_at.isnot(None), DocumentSegment.document_id == str(document_id), DocumentSegment.status != "re_segment", @@ -622,7 +622,7 @@ class DocumentIndexingStatusApi(DocumentResource): ) total_segments = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.document_id == str(document_id), DocumentSegment.status != "re_segment") + .where(DocumentSegment.document_id == str(document_id), DocumentSegment.status != "re_segment") .count() ) diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py index 48142dbe73..b3704ce8b1 100644 --- a/api/controllers/console/datasets/datasets_segments.py +++ b/api/controllers/console/datasets/datasets_segments.py @@ -78,7 +78,7 @@ class DatasetDocumentSegmentListApi(Resource): query = ( select(DocumentSegment) - .filter( + .where( DocumentSegment.document_id == str(document_id), DocumentSegment.tenant_id == current_user.current_tenant_id, ) @@ -86,19 +86,19 @@ class DatasetDocumentSegmentListApi(Resource): ) if status_list: - query = query.filter(DocumentSegment.status.in_(status_list)) + query = 
query.where(DocumentSegment.status.in_(status_list)) if hit_count_gte is not None: - query = query.filter(DocumentSegment.hit_count >= hit_count_gte) + query = query.where(DocumentSegment.hit_count >= hit_count_gte) if keyword: query = query.where(DocumentSegment.content.ilike(f"%{keyword}%")) if args["enabled"].lower() != "all": if args["enabled"].lower() == "true": - query = query.filter(DocumentSegment.enabled == True) + query = query.where(DocumentSegment.enabled == True) elif args["enabled"].lower() == "false": - query = query.filter(DocumentSegment.enabled == False) + query = query.where(DocumentSegment.enabled == False) segments = db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False) @@ -285,7 +285,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) .first() ) if not segment: @@ -331,7 +331,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) .first() ) if not segment: @@ -436,7 +436,7 @@ class ChildChunkAddApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) .first() ) if not segment: @@ -493,7 +493,7 @@ class ChildChunkAddApi(Resource): segment_id = str(segment_id) segment = ( 
db.session.query(DocumentSegment) - .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) .first() ) if not segment: @@ -540,7 +540,7 @@ class ChildChunkAddApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) .first() ) if not segment: @@ -586,7 +586,7 @@ class ChildChunkUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) .first() ) if not segment: @@ -595,7 +595,7 @@ class ChildChunkUpdateApi(Resource): child_chunk_id = str(child_chunk_id) child_chunk = ( db.session.query(ChildChunk) - .filter(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id) + .where(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id) .first() ) if not child_chunk: @@ -635,7 +635,7 @@ class ChildChunkUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) .first() ) if not segment: @@ -644,7 +644,7 @@ class ChildChunkUpdateApi(Resource): child_chunk_id = str(child_chunk_id) child_chunk = ( db.session.query(ChildChunk) - .filter(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == 
current_user.current_tenant_id) + .where(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id) .first() ) if not child_chunk: diff --git a/api/controllers/console/explore/installed_app.py b/api/controllers/console/explore/installed_app.py index 29111fb865..ffdf73c368 100644 --- a/api/controllers/console/explore/installed_app.py +++ b/api/controllers/console/explore/installed_app.py @@ -34,11 +34,11 @@ class InstalledAppsListApi(Resource): if app_id: installed_apps = ( db.session.query(InstalledApp) - .filter(and_(InstalledApp.tenant_id == current_tenant_id, InstalledApp.app_id == app_id)) + .where(and_(InstalledApp.tenant_id == current_tenant_id, InstalledApp.app_id == app_id)) .all() ) else: - installed_apps = db.session.query(InstalledApp).filter(InstalledApp.tenant_id == current_tenant_id).all() + installed_apps = db.session.query(InstalledApp).where(InstalledApp.tenant_id == current_tenant_id).all() current_user.role = TenantService.get_user_role(current_user, current_user.current_tenant) installed_app_list: list[dict[str, Any]] = [ @@ -94,12 +94,12 @@ class InstalledAppsListApi(Resource): parser.add_argument("app_id", type=str, required=True, help="Invalid app_id") args = parser.parse_args() - recommended_app = db.session.query(RecommendedApp).filter(RecommendedApp.app_id == args["app_id"]).first() + recommended_app = db.session.query(RecommendedApp).where(RecommendedApp.app_id == args["app_id"]).first() if recommended_app is None: raise NotFound("App not found") current_tenant_id = current_user.current_tenant_id - app = db.session.query(App).filter(App.id == args["app_id"]).first() + app = db.session.query(App).where(App.id == args["app_id"]).first() if app is None: raise NotFound("App not found") @@ -109,7 +109,7 @@ class InstalledAppsListApi(Resource): installed_app = ( db.session.query(InstalledApp) - .filter(and_(InstalledApp.app_id == args["app_id"], InstalledApp.tenant_id == current_tenant_id)) + 
.where(and_(InstalledApp.app_id == args["app_id"], InstalledApp.tenant_id == current_tenant_id)) .first() ) diff --git a/api/controllers/console/explore/wraps.py b/api/controllers/console/explore/wraps.py index afbd78bd5b..de97fb149e 100644 --- a/api/controllers/console/explore/wraps.py +++ b/api/controllers/console/explore/wraps.py @@ -28,7 +28,7 @@ def installed_app_required(view=None): installed_app = ( db.session.query(InstalledApp) - .filter( + .where( InstalledApp.id == str(installed_app_id), InstalledApp.tenant_id == current_user.current_tenant_id ) .first() diff --git a/api/controllers/console/workspace/__init__.py b/api/controllers/console/workspace/__init__.py index 072e904caf..ef814dd738 100644 --- a/api/controllers/console/workspace/__init__.py +++ b/api/controllers/console/workspace/__init__.py @@ -21,7 +21,7 @@ def plugin_permission_required( with Session(db.engine) as session: permission = ( session.query(TenantPluginPermission) - .filter( + .where( TenantPluginPermission.tenant_id == tenant_id, ) .first() diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index 7f7e64a59c..9218ddf91d 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -9,6 +9,7 @@ from configs import dify_config from constants.languages import supported_language from controllers.console import api from controllers.console.auth.error import ( + AccountInFreezeError, EmailAlreadyInUseError, EmailChangeLimitError, EmailCodeError, @@ -68,7 +69,7 @@ class AccountInitApi(Resource): # check invitation code invitation_code = ( db.session.query(InvitationCode) - .filter( + .where( InvitationCode.code == args["invitation_code"], InvitationCode.status == "unused", ) @@ -228,7 +229,7 @@ class AccountIntegrateApi(Resource): def get(self): account = current_user - account_integrates = db.session.query(AccountIntegrate).filter(AccountIntegrate.account_id == account.id).all() + 
account_integrates = db.session.query(AccountIntegrate).where(AccountIntegrate.account_id == account.id).all() base_url = request.url_root.rstrip("/") oauth_base_path = "/console/api/oauth/login" @@ -479,21 +480,28 @@ class ChangeEmailResetApi(Resource): parser.add_argument("token", type=str, required=True, nullable=False, location="json") args = parser.parse_args() + if AccountService.is_account_in_freeze(args["new_email"]): + raise AccountInFreezeError() + + if not AccountService.check_email_unique(args["new_email"]): + raise EmailAlreadyInUseError() + reset_data = AccountService.get_change_email_data(args["token"]) if not reset_data: raise InvalidTokenError() AccountService.revoke_change_email_token(args["token"]) - if not AccountService.check_email_unique(args["new_email"]): - raise EmailAlreadyInUseError() - old_email = reset_data.get("old_email", "") if current_user.email != old_email: raise AccountNotFound() updated_account = AccountService.update_account(current_user, email=args["new_email"]) + AccountService.send_change_email_completed_notify_email( + email=args["new_email"], + ) + return updated_account diff --git a/api/controllers/console/workspace/members.py b/api/controllers/console/workspace/members.py index b1f79ffdec..f7424923b9 100644 --- a/api/controllers/console/workspace/members.py +++ b/api/controllers/console/workspace/members.py @@ -108,7 +108,7 @@ class MemberCancelInviteApi(Resource): @login_required @account_initialization_required def delete(self, member_id): - member = db.session.query(Account).filter(Account.id == str(member_id)).first() + member = db.session.query(Account).where(Account.id == str(member_id)).first() if member is None: abort(404) else: diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py index c0a4734828..09846d5c94 100644 --- a/api/controllers/console/workspace/plugin.py +++ b/api/controllers/console/workspace/plugin.py @@ -12,7 +12,8 @@ from controllers.console.wraps 
import account_initialization_required, setup_req from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.exc import PluginDaemonClientSideError from libs.login import login_required -from models.account import TenantPluginPermission +from models.account import TenantPluginAutoUpgradeStrategy, TenantPluginPermission +from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService from services.plugin.plugin_parameter_service import PluginParameterService from services.plugin.plugin_permission_service import PluginPermissionService from services.plugin.plugin_service import PluginService @@ -534,6 +535,114 @@ class PluginFetchDynamicSelectOptionsApi(Resource): return jsonable_encoder({"options": options}) +class PluginChangePreferencesApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self): + user = current_user + if not user.is_admin_or_owner: + raise Forbidden() + + req = reqparse.RequestParser() + req.add_argument("permission", type=dict, required=True, location="json") + req.add_argument("auto_upgrade", type=dict, required=True, location="json") + args = req.parse_args() + + tenant_id = user.current_tenant_id + + permission = args["permission"] + + install_permission = TenantPluginPermission.InstallPermission(permission.get("install_permission", "everyone")) + debug_permission = TenantPluginPermission.DebugPermission(permission.get("debug_permission", "everyone")) + + auto_upgrade = args["auto_upgrade"] + + strategy_setting = TenantPluginAutoUpgradeStrategy.StrategySetting( + auto_upgrade.get("strategy_setting", "fix_only") + ) + upgrade_time_of_day = auto_upgrade.get("upgrade_time_of_day", 0) + upgrade_mode = TenantPluginAutoUpgradeStrategy.UpgradeMode(auto_upgrade.get("upgrade_mode", "exclude")) + exclude_plugins = auto_upgrade.get("exclude_plugins", []) + include_plugins = auto_upgrade.get("include_plugins", []) + + # set permission + set_permission_result = 
PluginPermissionService.change_permission( + tenant_id, + install_permission, + debug_permission, + ) + if not set_permission_result: + return jsonable_encoder({"success": False, "message": "Failed to set permission"}) + + # set auto upgrade strategy + set_auto_upgrade_strategy_result = PluginAutoUpgradeService.change_strategy( + tenant_id, + strategy_setting, + upgrade_time_of_day, + upgrade_mode, + exclude_plugins, + include_plugins, + ) + if not set_auto_upgrade_strategy_result: + return jsonable_encoder({"success": False, "message": "Failed to set auto upgrade strategy"}) + + return jsonable_encoder({"success": True}) + + +class PluginFetchPreferencesApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self): + tenant_id = current_user.current_tenant_id + + permission = PluginPermissionService.get_permission(tenant_id) + permission_dict = { + "install_permission": TenantPluginPermission.InstallPermission.EVERYONE, + "debug_permission": TenantPluginPermission.DebugPermission.EVERYONE, + } + + if permission: + permission_dict["install_permission"] = permission.install_permission + permission_dict["debug_permission"] = permission.debug_permission + + auto_upgrade = PluginAutoUpgradeService.get_strategy(tenant_id) + auto_upgrade_dict = { + "strategy_setting": TenantPluginAutoUpgradeStrategy.StrategySetting.DISABLED, + "upgrade_time_of_day": 0, + "upgrade_mode": TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE, + "exclude_plugins": [], + "include_plugins": [], + } + + if auto_upgrade: + auto_upgrade_dict = { + "strategy_setting": auto_upgrade.strategy_setting, + "upgrade_time_of_day": auto_upgrade.upgrade_time_of_day, + "upgrade_mode": auto_upgrade.upgrade_mode, + "exclude_plugins": auto_upgrade.exclude_plugins, + "include_plugins": auto_upgrade.include_plugins, + } + + return jsonable_encoder({"permission": permission_dict, "auto_upgrade": auto_upgrade_dict}) + + +class PluginAutoUpgradeExcludePluginApi(Resource): + 
@setup_required + @login_required + @account_initialization_required + def post(self): + # exclude one single plugin + tenant_id = current_user.current_tenant_id + + req = reqparse.RequestParser() + req.add_argument("plugin_id", type=str, required=True, location="json") + args = req.parse_args() + + return jsonable_encoder({"success": PluginAutoUpgradeService.exclude_plugin(tenant_id, args["plugin_id"])}) + + api.add_resource(PluginDebuggingKeyApi, "/workspaces/current/plugin/debugging-key") api.add_resource(PluginListApi, "/workspaces/current/plugin/list") api.add_resource(PluginListLatestVersionsApi, "/workspaces/current/plugin/list/latest-versions") @@ -560,3 +669,7 @@ api.add_resource(PluginChangePermissionApi, "/workspaces/current/plugin/permissi api.add_resource(PluginFetchPermissionApi, "/workspaces/current/plugin/permission/fetch") api.add_resource(PluginFetchDynamicSelectOptionsApi, "/workspaces/current/plugin/parameters/dynamic-options") + +api.add_resource(PluginFetchPreferencesApi, "/workspaces/current/plugin/preferences/fetch") +api.add_resource(PluginChangePreferencesApi, "/workspaces/current/plugin/preferences/change") +api.add_resource(PluginAutoUpgradeExcludePluginApi, "/workspaces/current/plugin/preferences/autoupgrade/exclude") diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index c70bf84d2a..c4d1ef70d8 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -739,7 +739,7 @@ class ToolOAuthCallback(Resource): raise Forbidden("no oauth available client config found for this tool provider") redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider}/tool/callback" - credentials = oauth_handler.get_credentials( + credentials_response = oauth_handler.get_credentials( tenant_id=tenant_id, user_id=user_id, plugin_id=plugin_id, @@ -747,7 +747,10 @@ class ToolOAuthCallback(Resource): 
redirect_uri=redirect_uri, system_credentials=oauth_client_params, request=request, - ).credentials + ) + + credentials = credentials_response.credentials + expires_at = credentials_response.expires_at if not credentials: raise Exception("the plugin credentials failed") @@ -758,6 +761,7 @@ class ToolOAuthCallback(Resource): tenant_id=tenant_id, provider=provider, credentials=dict(credentials), + expires_at=expires_at, api_type=CredentialType.OAUTH2, ) return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback") diff --git a/api/controllers/inner_api/plugin/wraps.py b/api/controllers/inner_api/plugin/wraps.py index 50408e0929..b533614d4d 100644 --- a/api/controllers/inner_api/plugin/wraps.py +++ b/api/controllers/inner_api/plugin/wraps.py @@ -22,7 +22,7 @@ def get_user(tenant_id: str, user_id: str | None) -> Account | EndUser: user_id = "DEFAULT-USER" if user_id == "DEFAULT-USER": - user_model = session.query(EndUser).filter(EndUser.session_id == "DEFAULT-USER").first() + user_model = session.query(EndUser).where(EndUser.session_id == "DEFAULT-USER").first() if not user_model: user_model = EndUser( tenant_id=tenant_id, @@ -36,7 +36,7 @@ def get_user(tenant_id: str, user_id: str | None) -> Account | EndUser: else: user_model = AccountService.load_user(user_id) if not user_model: - user_model = session.query(EndUser).filter(EndUser.id == user_id).first() + user_model = session.query(EndUser).where(EndUser.id == user_id).first() if not user_model: raise ValueError("user not found") except Exception: @@ -71,7 +71,7 @@ def get_user_tenant(view: Optional[Callable] = None): try: tenant_model = ( db.session.query(Tenant) - .filter( + .where( Tenant.id == tenant_id, ) .first() diff --git a/api/controllers/inner_api/wraps.py b/api/controllers/inner_api/wraps.py index f3a9312dd0..9e7b3d4f29 100644 --- a/api/controllers/inner_api/wraps.py +++ b/api/controllers/inner_api/wraps.py @@ -55,7 +55,7 @@ def enterprise_inner_api_user_auth(view): if signature_base64 != token: return 
view(*args, **kwargs) - kwargs["user"] = db.session.query(EndUser).filter(EndUser.id == user_id).first() + kwargs["user"] = db.session.query(EndUser).where(EndUser.id == user_id).first() return view(*args, **kwargs) diff --git a/api/controllers/mcp/mcp.py b/api/controllers/mcp/mcp.py index ead728bfb0..87d678796f 100644 --- a/api/controllers/mcp/mcp.py +++ b/api/controllers/mcp/mcp.py @@ -30,7 +30,7 @@ class MCPAppApi(Resource): request_id = args.get("id") - server = db.session.query(AppMCPServer).filter(AppMCPServer.server_code == server_code).first() + server = db.session.query(AppMCPServer).where(AppMCPServer.server_code == server_code).first() if not server: return helper.compact_generate_response( create_mcp_error_response(request_id, types.INVALID_REQUEST, "Server Not Found") @@ -41,7 +41,7 @@ class MCPAppApi(Resource): create_mcp_error_response(request_id, types.INVALID_REQUEST, "Server is not active") ) - app = db.session.query(App).filter(App.id == server.app_id).first() + app = db.session.query(App).where(App.id == server.app_id).first() if not app: return helper.compact_generate_response( create_mcp_error_response(request_id, types.INVALID_REQUEST, "App Not Found") diff --git a/api/controllers/service_api/app/completion.py b/api/controllers/service_api/app/completion.py index 1d9890199d..7762672494 100644 --- a/api/controllers/service_api/app/completion.py +++ b/api/controllers/service_api/app/completion.py @@ -1,5 +1,6 @@ import logging +from flask import request from flask_restful import Resource, reqparse from werkzeug.exceptions import InternalServerError, NotFound @@ -23,6 +24,7 @@ from core.errors.error import ( ProviderTokenNotInitError, QuotaExceededError, ) +from core.helper.trace_id_helper import get_external_trace_id from core.model_runtime.errors.invoke import InvokeError from libs import helper from libs.helper import uuid_value @@ -111,6 +113,10 @@ class ChatApi(Resource): args = parser.parse_args() + external_trace_id = 
get_external_trace_id(request) + if external_trace_id: + args["external_trace_id"] = external_trace_id + streaming = args["response_mode"] == "streaming" try: diff --git a/api/controllers/service_api/app/site.py b/api/controllers/service_api/app/site.py index e752dfee30..c157b39f6b 100644 --- a/api/controllers/service_api/app/site.py +++ b/api/controllers/service_api/app/site.py @@ -16,7 +16,7 @@ class AppSiteApi(Resource): @marshal_with(fields.site_fields) def get(self, app_model: App): """Retrieve app site info.""" - site = db.session.query(Site).filter(Site.app_id == app_model.id).first() + site = db.session.query(Site).where(Site.app_id == app_model.id).first() if not site: raise Forbidden() diff --git a/api/controllers/service_api/app/workflow.py b/api/controllers/service_api/app/workflow.py index ac2ebf2b09..370ff911b4 100644 --- a/api/controllers/service_api/app/workflow.py +++ b/api/controllers/service_api/app/workflow.py @@ -1,6 +1,7 @@ import logging from dateutil.parser import isoparse +from flask import request from flask_restful import Resource, fields, marshal_with, reqparse from flask_restful.inputs import int_range from sqlalchemy.orm import Session, sessionmaker @@ -23,6 +24,7 @@ from core.errors.error import ( ProviderTokenNotInitError, QuotaExceededError, ) +from core.helper.trace_id_helper import get_external_trace_id from core.model_runtime.errors.invoke import InvokeError from core.workflow.entities.workflow_execution import WorkflowExecutionStatus from extensions.ext_database import db @@ -90,7 +92,9 @@ class WorkflowRunApi(Resource): parser.add_argument("files", type=list, required=False, location="json") parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") args = parser.parse_args() - + external_trace_id = get_external_trace_id(request) + if external_trace_id: + args["external_trace_id"] = external_trace_id streaming = args.get("response_mode") == "streaming" try: diff --git 
a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py index d571b21a0a..ac85c0b38d 100644 --- a/api/controllers/service_api/dataset/document.py +++ b/api/controllers/service_api/dataset/document.py @@ -63,7 +63,7 @@ class DocumentAddByTextApi(DatasetApiResource): dataset_id = str(dataset_id) tenant_id = str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise ValueError("Dataset does not exist.") @@ -136,7 +136,7 @@ class DocumentUpdateByTextApi(DatasetApiResource): args = parser.parse_args() dataset_id = str(dataset_id) tenant_id = str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise ValueError("Dataset does not exist.") @@ -206,7 +206,7 @@ class DocumentAddByFileApi(DatasetApiResource): # get dataset info dataset_id = str(dataset_id) tenant_id = str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise ValueError("Dataset does not exist.") @@ -299,7 +299,7 @@ class DocumentUpdateByFileApi(DatasetApiResource): # get dataset info dataset_id = str(dataset_id) tenant_id = str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise ValueError("Dataset does not exist.") @@ -367,7 +367,7 @@ class DocumentDeleteApi(DatasetApiResource): 
tenant_id = str(tenant_id) # get dataset info - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise ValueError("Dataset does not exist.") @@ -398,7 +398,7 @@ class DocumentListApi(DatasetApiResource): page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) search = request.args.get("keyword", default=None, type=str) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") @@ -406,7 +406,7 @@ class DocumentListApi(DatasetApiResource): if search: search = f"%{search}%" - query = query.filter(Document.name.like(search)) + query = query.where(Document.name.like(search)) query = query.order_by(desc(Document.created_at), desc(Document.position)) @@ -430,7 +430,7 @@ class DocumentIndexingStatusApi(DatasetApiResource): batch = str(batch) tenant_id = str(tenant_id) # get dataset - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # get documents @@ -441,7 +441,7 @@ class DocumentIndexingStatusApi(DatasetApiResource): for document in documents: completed_segments = ( db.session.query(DocumentSegment) - .filter( + .where( DocumentSegment.completed_at.isnot(None), DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment", @@ -450,7 +450,7 @@ class DocumentIndexingStatusApi(DatasetApiResource): ) total_segments = ( db.session.query(DocumentSegment) - 
.filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment") + .where(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment") .count() ) # Create a dictionary with document attributes and additional fields diff --git a/api/controllers/service_api/dataset/segment.py b/api/controllers/service_api/dataset/segment.py index 403b7f0a0c..31f862dc8f 100644 --- a/api/controllers/service_api/dataset/segment.py +++ b/api/controllers/service_api/dataset/segment.py @@ -42,7 +42,7 @@ class SegmentApi(DatasetApiResource): # check dataset dataset_id = str(dataset_id) tenant_id = str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # check document @@ -89,7 +89,7 @@ class SegmentApi(DatasetApiResource): tenant_id = str(tenant_id) page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # check document @@ -146,7 +146,7 @@ class DatasetSegmentApi(DatasetApiResource): # check dataset dataset_id = str(dataset_id) tenant_id = str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # check user's model setting @@ -170,7 +170,7 @@ class DatasetSegmentApi(DatasetApiResource): # check dataset dataset_id = str(dataset_id) tenant_id = 
str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # check user's model setting @@ -216,7 +216,7 @@ class DatasetSegmentApi(DatasetApiResource): # check dataset dataset_id = str(dataset_id) tenant_id = str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # check user's model setting @@ -246,7 +246,7 @@ class ChildChunkApi(DatasetApiResource): # check dataset dataset_id = str(dataset_id) tenant_id = str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") @@ -296,7 +296,7 @@ class ChildChunkApi(DatasetApiResource): # check dataset dataset_id = str(dataset_id) tenant_id = str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") @@ -343,7 +343,7 @@ class DatasetChildChunkApi(DatasetApiResource): # check dataset dataset_id = str(dataset_id) tenant_id = str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") @@ -382,7 
+382,7 @@ class DatasetChildChunkApi(DatasetApiResource): # check dataset dataset_id = str(dataset_id) tenant_id = str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") diff --git a/api/controllers/service_api/dataset/upload_file.py b/api/controllers/service_api/dataset/upload_file.py index 6382b63ea9..3b4721b5b0 100644 --- a/api/controllers/service_api/dataset/upload_file.py +++ b/api/controllers/service_api/dataset/upload_file.py @@ -17,7 +17,7 @@ class UploadFileApi(DatasetApiResource): # check dataset dataset_id = str(dataset_id) tenant_id = str(tenant_id) - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # check document @@ -31,7 +31,7 @@ class UploadFileApi(DatasetApiResource): data_source_info = document.data_source_info_dict if data_source_info and "upload_file_id" in data_source_info: file_id = data_source_info["upload_file_id"] - upload_file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first() + upload_file = db.session.query(UploadFile).where(UploadFile.id == file_id).first() if not upload_file: raise NotFound("UploadFile not found.") else: diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py index eeed321430..da81cc8bc3 100644 --- a/api/controllers/service_api/wraps.py +++ b/api/controllers/service_api/wraps.py @@ -44,7 +44,7 @@ def validate_app_token(view: Optional[Callable] = None, *, fetch_user_arg: Optio def decorated_view(*args, **kwargs): api_token = validate_and_get_api_token("app") - app_model = db.session.query(App).filter(App.id == 
api_token.app_id).first() + app_model = db.session.query(App).where(App.id == api_token.app_id).first() if not app_model: raise Forbidden("The app no longer exists.") @@ -54,7 +54,7 @@ def validate_app_token(view: Optional[Callable] = None, *, fetch_user_arg: Optio if not app_model.enable_api: raise Forbidden("The app's API service has been disabled.") - tenant = db.session.query(Tenant).filter(Tenant.id == app_model.tenant_id).first() + tenant = db.session.query(Tenant).where(Tenant.id == app_model.tenant_id).first() if tenant is None: raise ValueError("Tenant does not exist.") if tenant.status == TenantStatus.ARCHIVE: @@ -62,15 +62,15 @@ def validate_app_token(view: Optional[Callable] = None, *, fetch_user_arg: Optio tenant_account_join = ( db.session.query(Tenant, TenantAccountJoin) - .filter(Tenant.id == api_token.tenant_id) - .filter(TenantAccountJoin.tenant_id == Tenant.id) - .filter(TenantAccountJoin.role.in_(["owner"])) - .filter(Tenant.status == TenantStatus.NORMAL) + .where(Tenant.id == api_token.tenant_id) + .where(TenantAccountJoin.tenant_id == Tenant.id) + .where(TenantAccountJoin.role.in_(["owner"])) + .where(Tenant.status == TenantStatus.NORMAL) .one_or_none() ) # TODO: only owner information is required, so only one is returned. 
if tenant_account_join: tenant, ta = tenant_account_join - account = db.session.query(Account).filter(Account.id == ta.account_id).first() + account = db.session.query(Account).where(Account.id == ta.account_id).first() # Login admin if account: account.current_tenant = tenant @@ -213,15 +213,15 @@ def validate_dataset_token(view=None): api_token = validate_and_get_api_token("dataset") tenant_account_join = ( db.session.query(Tenant, TenantAccountJoin) - .filter(Tenant.id == api_token.tenant_id) - .filter(TenantAccountJoin.tenant_id == Tenant.id) - .filter(TenantAccountJoin.role.in_(["owner"])) - .filter(Tenant.status == TenantStatus.NORMAL) + .where(Tenant.id == api_token.tenant_id) + .where(TenantAccountJoin.tenant_id == Tenant.id) + .where(TenantAccountJoin.role.in_(["owner"])) + .where(Tenant.status == TenantStatus.NORMAL) .one_or_none() ) # TODO: only owner information is required, so only one is returned. if tenant_account_join: tenant, ta = tenant_account_join - account = db.session.query(Account).filter(Account.id == ta.account_id).first() + account = db.session.query(Account).where(Account.id == ta.account_id).first() # Login admin if account: account.current_tenant = tenant @@ -293,7 +293,7 @@ def create_or_update_end_user_for_user_id(app_model: App, user_id: Optional[str] end_user = ( db.session.query(EndUser) - .filter( + .where( EndUser.tenant_id == app_model.tenant_id, EndUser.app_id == app_model.id, EndUser.session_id == user_id, @@ -320,7 +320,7 @@ class DatasetApiResource(Resource): method_decorators = [validate_dataset_token] def get_dataset(self, dataset_id: str, tenant_id: str) -> Dataset: - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id, Dataset.tenant_id == tenant_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id, Dataset.tenant_id == tenant_id).first() if not dataset: raise NotFound("Dataset not found.") diff --git a/api/controllers/web/passport.py b/api/controllers/web/passport.py index 
10c3cdcf0e..acd3a8b539 100644 --- a/api/controllers/web/passport.py +++ b/api/controllers/web/passport.py @@ -3,6 +3,7 @@ from datetime import UTC, datetime, timedelta from flask import request from flask_restful import Resource +from sqlalchemy import func, select from werkzeug.exceptions import NotFound, Unauthorized from configs import dify_config @@ -42,17 +43,17 @@ class PassportResource(Resource): raise WebAppAuthRequiredError() # get site from db and check if it is normal - site = db.session.query(Site).filter(Site.code == app_code, Site.status == "normal").first() + site = db.session.scalar(select(Site).where(Site.code == app_code, Site.status == "normal")) if not site: raise NotFound() # get app from db and check if it is normal and enable_site - app_model = db.session.query(App).filter(App.id == site.app_id).first() + app_model = db.session.scalar(select(App).where(App.id == site.app_id)) if not app_model or app_model.status != "normal" or not app_model.enable_site: raise NotFound() if user_id: - end_user = ( - db.session.query(EndUser).filter(EndUser.app_id == app_model.id, EndUser.session_id == user_id).first() + end_user = db.session.scalar( + select(EndUser).where(EndUser.app_id == app_model.id, EndUser.session_id == user_id) ) if end_user: @@ -121,11 +122,11 @@ def exchange_token_for_existing_web_user(app_code: str, enterprise_user_decoded: if not user_auth_type: raise Unauthorized("Missing auth_type in the token.") - site = db.session.query(Site).filter(Site.code == app_code, Site.status == "normal").first() + site = db.session.scalar(select(Site).where(Site.code == app_code, Site.status == "normal")) if not site: raise NotFound() - app_model = db.session.query(App).filter(App.id == site.app_id).first() + app_model = db.session.scalar(select(App).where(App.id == site.app_id)) if not app_model or app_model.status != "normal" or not app_model.enable_site: raise NotFound() @@ -140,16 +141,14 @@ def exchange_token_for_existing_web_user(app_code: str, 
enterprise_user_decoded: end_user = None if end_user_id: - end_user = db.session.query(EndUser).filter(EndUser.id == end_user_id).first() + end_user = db.session.scalar(select(EndUser).where(EndUser.id == end_user_id)) if session_id: - end_user = ( - db.session.query(EndUser) - .filter( + end_user = db.session.scalar( + select(EndUser).where( EndUser.session_id == session_id, EndUser.tenant_id == app_model.tenant_id, EndUser.app_id == app_model.id, ) - .first() ) if not end_user: if not session_id: @@ -187,8 +186,8 @@ def _exchange_for_public_app_token(app_model, site, token_decoded): user_id = token_decoded.get("user_id") end_user = None if user_id: - end_user = ( - db.session.query(EndUser).filter(EndUser.app_id == app_model.id, EndUser.session_id == user_id).first() + end_user = db.session.scalar( + select(EndUser).where(EndUser.app_id == app_model.id, EndUser.session_id == user_id) ) if not end_user: @@ -224,6 +223,8 @@ def generate_session_id(): """ while True: session_id = str(uuid.uuid4()) - existing_count = db.session.query(EndUser).filter(EndUser.session_id == session_id).count() + existing_count = db.session.scalar( + select(func.count()).select_from(EndUser).where(EndUser.session_id == session_id) + ) if existing_count == 0: return session_id diff --git a/api/controllers/web/site.py b/api/controllers/web/site.py index 0564b15ea3..3c133499b7 100644 --- a/api/controllers/web/site.py +++ b/api/controllers/web/site.py @@ -57,7 +57,7 @@ class AppSiteApi(WebApiResource): def get(self, app_model, end_user): """Retrieve app site info.""" # get site - site = db.session.query(Site).filter(Site.app_id == app_model.id).first() + site = db.session.query(Site).where(Site.app_id == app_model.id).first() if not site: raise Forbidden() diff --git a/api/controllers/web/wraps.py b/api/controllers/web/wraps.py index 154bddfc5c..ae6f14a689 100644 --- a/api/controllers/web/wraps.py +++ b/api/controllers/web/wraps.py @@ -3,6 +3,7 @@ from functools import wraps from flask 
import request from flask_restful import Resource +from sqlalchemy import select from werkzeug.exceptions import BadRequest, NotFound, Unauthorized from controllers.web.error import WebAppAuthAccessDeniedError, WebAppAuthRequiredError @@ -48,8 +49,8 @@ def decode_jwt_token(): decoded = PassportService().verify(tk) app_code = decoded.get("app_code") app_id = decoded.get("app_id") - app_model = db.session.query(App).filter(App.id == app_id).first() - site = db.session.query(Site).filter(Site.code == app_code).first() + app_model = db.session.scalar(select(App).where(App.id == app_id)) + site = db.session.scalar(select(Site).where(Site.code == app_code)) if not app_model: raise NotFound() if not app_code or not site: @@ -57,7 +58,7 @@ def decode_jwt_token(): if app_model.enable_site is False: raise BadRequest("Site is disabled.") end_user_id = decoded.get("end_user_id") - end_user = db.session.query(EndUser).filter(EndUser.id == end_user_id).first() + end_user = db.session.scalar(select(EndUser).where(EndUser.id == end_user_id)) if not end_user: raise NotFound() diff --git a/api/core/agent/base_agent_runner.py b/api/core/agent/base_agent_runner.py index 28bf4a9a23..1f3c218d59 100644 --- a/api/core/agent/base_agent_runner.py +++ b/api/core/agent/base_agent_runner.py @@ -99,7 +99,7 @@ class BaseAgentRunner(AppRunner): # get how many agent thoughts have been created self.agent_thought_count = ( db.session.query(MessageAgentThought) - .filter( + .where( MessageAgentThought.message_id == self.message.id, ) .count() @@ -336,7 +336,7 @@ class BaseAgentRunner(AppRunner): Save agent thought """ updated_agent_thought = ( - db.session.query(MessageAgentThought).filter(MessageAgentThought.id == agent_thought.id).first() + db.session.query(MessageAgentThought).where(MessageAgentThought.id == agent_thought.id).first() ) if not updated_agent_thought: raise ValueError("agent thought not found") @@ -496,7 +496,7 @@ class BaseAgentRunner(AppRunner): return result def 
organize_agent_user_prompt(self, message: Message) -> UserPromptMessage: - files = db.session.query(MessageFile).filter(MessageFile.message_id == message.id).all() + files = db.session.query(MessageFile).where(MessageFile.message_id == message.id).all() if not files: return UserPromptMessage(content=message.query) if message.app_model_config: diff --git a/api/core/app/apps/README.md b/api/core/app/apps/README.md deleted file mode 100644 index 7a57bb3658..0000000000 --- a/api/core/app/apps/README.md +++ /dev/null @@ -1,48 +0,0 @@ -## Guidelines for Database Connection Management in App Runner and Task Pipeline - -Due to the presence of tasks in App Runner that require long execution times, such as LLM generation and external requests, Flask-Sqlalchemy's strategy for database connection pooling is to allocate one connection (transaction) per request. This approach keeps a connection occupied even during non-DB tasks, leading to the inability to acquire new connections during high concurrency requests due to multiple long-running tasks. - -Therefore, the database operations in App Runner and Task Pipeline must ensure connections are closed immediately after use, and it's better to pass IDs rather than Model objects to avoid detach errors. - -Examples: - -1. Creating a new record: - - ```python - app = App(id=1) - db.session.add(app) - db.session.commit() - db.session.refresh(app) # Retrieve table default values, like created_at, cached in the app object, won't affect after close - - # Handle non-long-running tasks or store the content of the App instance in memory (via variable assignment). - - db.session.close() - - return app.id - ``` - -2. Fetching a record from the table: - - ```python - app = db.session.query(App).filter(App.id == app_id).first() - - created_at = app.created_at - - db.session.close() - - # Handle tasks (include long-running). - - ``` - -3. 
Updating a table field: - - ```python - app = db.session.query(App).filter(App.id == app_id).first() - - app.updated_at = time.utcnow() - db.session.commit() - db.session.close() - - return app_id - ``` - diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index bd5ad9c51b..610a5bb278 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -7,7 +7,8 @@ from typing import Any, Literal, Optional, Union, overload from flask import Flask, current_app from pydantic import ValidationError -from sqlalchemy.orm import sessionmaker +from sqlalchemy import select +from sqlalchemy.orm import Session, sessionmaker import contexts from configs import dify_config @@ -23,6 +24,7 @@ from core.app.apps.message_based_app_generator import MessageBasedAppGenerator from core.app.apps.message_based_app_queue_manager import MessageBasedAppQueueManager from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom from core.app.entities.task_entities import ChatbotAppBlockingResponse, ChatbotAppStreamResponse +from core.helper.trace_id_helper import extract_external_trace_id_from_args from core.model_runtime.errors.invoke import InvokeAuthorizationError from core.ops.ops_trace_manager import TraceQueueManager from core.prompt.utils.get_thread_messages_length import get_thread_messages_length @@ -112,7 +114,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): query = query.replace("\x00", "") inputs = args["inputs"] - extras = {"auto_generate_conversation_name": args.get("auto_generate_name", False)} + extras = { + "auto_generate_conversation_name": args.get("auto_generate_name", False), + **extract_external_trace_id_from_args(args), + } # get conversation conversation = None @@ -482,21 +487,52 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): """ with preserve_flask_contexts(flask_app, context_vars=context): - try: 
- # get conversation and message - conversation = self._get_conversation(conversation_id) - message = self._get_message(message_id) + # get conversation and message + conversation = self._get_conversation(conversation_id) + message = self._get_message(message_id) - # chatbot app - runner = AdvancedChatAppRunner( - application_generate_entity=application_generate_entity, - queue_manager=queue_manager, - conversation=conversation, - message=message, - dialogue_count=self._dialogue_count, - variable_loader=variable_loader, + with Session(db.engine, expire_on_commit=False) as session: + workflow = session.scalar( + select(Workflow).where( + Workflow.tenant_id == application_generate_entity.app_config.tenant_id, + Workflow.app_id == application_generate_entity.app_config.app_id, + Workflow.id == application_generate_entity.app_config.workflow_id, + ) ) + if workflow is None: + raise ValueError("Workflow not found") + # Determine system_user_id based on invocation source + is_external_api_call = application_generate_entity.invoke_from in { + InvokeFrom.WEB_APP, + InvokeFrom.SERVICE_API, + } + + if is_external_api_call: + # For external API calls, use end user's session ID + end_user = session.scalar(select(EndUser).where(EndUser.id == application_generate_entity.user_id)) + system_user_id = end_user.session_id if end_user else "" + else: + # For internal calls, use the original user ID + system_user_id = application_generate_entity.user_id + + app = session.scalar(select(App).where(App.id == application_generate_entity.app_config.app_id)) + if app is None: + raise ValueError("App not found") + + runner = AdvancedChatAppRunner( + application_generate_entity=application_generate_entity, + queue_manager=queue_manager, + conversation=conversation, + message=message, + dialogue_count=self._dialogue_count, + variable_loader=variable_loader, + workflow=workflow, + system_user_id=system_user_id, + app=app, + ) + + try: runner.run() except GenerateTaskStoppedError: pass diff 
--git a/api/core/app/apps/advanced_chat/app_runner.py b/api/core/app/apps/advanced_chat/app_runner.py index af15324f46..a75e17af64 100644 --- a/api/core/app/apps/advanced_chat/app_runner.py +++ b/api/core/app/apps/advanced_chat/app_runner.py @@ -1,6 +1,6 @@ import logging from collections.abc import Mapping -from typing import Any, cast +from typing import Any, Optional, cast from sqlalchemy import select from sqlalchemy.orm import Session @@ -9,13 +9,19 @@ from configs import dify_config from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfig from core.app.apps.base_app_queue_manager import AppQueueManager from core.app.apps.workflow_app_runner import WorkflowBasedAppRunner -from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom +from core.app.entities.app_invoke_entities import ( + AdvancedChatAppGenerateEntity, + AppGenerateEntity, + InvokeFrom, +) from core.app.entities.queue_entities import ( QueueAnnotationReplyEvent, QueueStopEvent, QueueTextChunkEvent, ) +from core.app.features.annotation_reply.annotation_reply import AnnotationReplyFeature from core.moderation.base import ModerationError +from core.moderation.input_moderation import InputModeration from core.variables.variables import VariableUnion from core.workflow.callbacks import WorkflowCallback, WorkflowLoggingCallback from core.workflow.entities.variable_pool import VariablePool @@ -23,8 +29,9 @@ from core.workflow.system_variable import SystemVariable from core.workflow.variable_loader import VariableLoader from core.workflow.workflow_entry import WorkflowEntry from extensions.ext_database import db +from models import Workflow from models.enums import UserFrom -from models.model import App, Conversation, EndUser, Message +from models.model import App, Conversation, Message, MessageAnnotation from models.workflow import ConversationVariable, WorkflowType logger = logging.getLogger(__name__) @@ -37,42 +44,38 @@ class 
AdvancedChatAppRunner(WorkflowBasedAppRunner): def __init__( self, + *, application_generate_entity: AdvancedChatAppGenerateEntity, queue_manager: AppQueueManager, conversation: Conversation, message: Message, dialogue_count: int, variable_loader: VariableLoader, + workflow: Workflow, + system_user_id: str, + app: App, ) -> None: - super().__init__(queue_manager, variable_loader) + super().__init__( + queue_manager=queue_manager, + variable_loader=variable_loader, + app_id=application_generate_entity.app_config.app_id, + ) self.application_generate_entity = application_generate_entity self.conversation = conversation self.message = message self._dialogue_count = dialogue_count - - def _get_app_id(self) -> str: - return self.application_generate_entity.app_config.app_id + self._workflow = workflow + self.system_user_id = system_user_id + self._app = app def run(self) -> None: app_config = self.application_generate_entity.app_config app_config = cast(AdvancedChatAppConfig, app_config) - app_record = db.session.query(App).filter(App.id == app_config.app_id).first() + app_record = db.session.query(App).where(App.id == app_config.app_id).first() if not app_record: raise ValueError("App not found") - workflow = self.get_workflow(app_model=app_record, workflow_id=app_config.workflow_id) - if not workflow: - raise ValueError("Workflow not initialized") - - user_id: str | None = None - if self.application_generate_entity.invoke_from in {InvokeFrom.WEB_APP, InvokeFrom.SERVICE_API}: - end_user = db.session.query(EndUser).filter(EndUser.id == self.application_generate_entity.user_id).first() - if end_user: - user_id = end_user.session_id - else: - user_id = self.application_generate_entity.user_id - workflow_callbacks: list[WorkflowCallback] = [] if dify_config.DEBUG: workflow_callbacks.append(WorkflowLoggingCallback()) @@ -80,14 +83,14 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): if self.application_generate_entity.single_iteration_run: # if only single iteration 
run is requested graph, variable_pool = self._get_graph_and_variable_pool_of_single_iteration( - workflow=workflow, + workflow=self._workflow, node_id=self.application_generate_entity.single_iteration_run.node_id, user_inputs=dict(self.application_generate_entity.single_iteration_run.inputs), ) elif self.application_generate_entity.single_loop_run: # if only single loop run is requested graph, variable_pool = self._get_graph_and_variable_pool_of_single_loop( - workflow=workflow, + workflow=self._workflow, node_id=self.application_generate_entity.single_loop_run.node_id, user_inputs=dict(self.application_generate_entity.single_loop_run.inputs), ) @@ -98,7 +101,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): # moderation if self.handle_input_moderation( - app_record=app_record, + app_record=self._app, app_generate_entity=self.application_generate_entity, inputs=inputs, query=query, @@ -108,7 +111,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): # annotation reply if self.handle_annotation_reply( - app_record=app_record, + app_record=self._app, message=self.message, query=query, app_generate_entity=self.application_generate_entity, @@ -128,7 +131,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): ConversationVariable.from_variable( app_id=self.conversation.app_id, conversation_id=self.conversation.id, variable=variable ) - for variable in workflow.conversation_variables + for variable in self._workflow.conversation_variables ] session.add_all(db_conversation_variables) # Convert database entities to variables. 
@@ -141,7 +144,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): query=query, files=files, conversation_id=self.conversation.id, - user_id=user_id, + user_id=self.system_user_id, dialogue_count=self._dialogue_count, app_id=app_config.app_id, workflow_id=app_config.workflow_id, @@ -152,25 +155,25 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): variable_pool = VariablePool( system_variables=system_inputs, user_inputs=inputs, - environment_variables=workflow.environment_variables, + environment_variables=self._workflow.environment_variables, # Based on the definition of `VariableUnion`, # `list[Variable]` can be safely used as `list[VariableUnion]` since they are compatible. conversation_variables=cast(list[VariableUnion], conversation_variables), ) # init graph - graph = self._init_graph(graph_config=workflow.graph_dict) + graph = self._init_graph(graph_config=self._workflow.graph_dict) db.session.close() # RUN WORKFLOW workflow_entry = WorkflowEntry( - tenant_id=workflow.tenant_id, - app_id=workflow.app_id, - workflow_id=workflow.id, - workflow_type=WorkflowType.value_of(workflow.type), + tenant_id=self._workflow.tenant_id, + app_id=self._workflow.app_id, + workflow_id=self._workflow.id, + workflow_type=WorkflowType.value_of(self._workflow.type), graph=graph, - graph_config=workflow.graph_dict, + graph_config=self._workflow.graph_dict, user_id=self.application_generate_entity.user_id, user_from=( UserFrom.ACCOUNT @@ -241,3 +244,51 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): self._publish_event(QueueTextChunkEvent(text=text)) self._publish_event(QueueStopEvent(stopped_by=stopped_by)) + + def query_app_annotations_to_reply( + self, app_record: App, message: Message, query: str, user_id: str, invoke_from: InvokeFrom + ) -> Optional[MessageAnnotation]: + """ + Query app annotations to reply + :param app_record: app record + :param message: message + :param query: query + :param user_id: user id + :param invoke_from: invoke from + :return: + 
""" + annotation_reply_feature = AnnotationReplyFeature() + return annotation_reply_feature.query( + app_record=app_record, message=message, query=query, user_id=user_id, invoke_from=invoke_from + ) + + def moderation_for_inputs( + self, + *, + app_id: str, + tenant_id: str, + app_generate_entity: AppGenerateEntity, + inputs: Mapping[str, Any], + query: str | None = None, + message_id: str, + ) -> tuple[bool, Mapping[str, Any], str]: + """ + Process sensitive_word_avoidance. + :param app_id: app id + :param tenant_id: tenant id + :param app_generate_entity: app generate entity + :param inputs: inputs + :param query: query + :param message_id: message id + :return: + """ + moderation_feature = InputModeration() + return moderation_feature.check( + app_id=app_id, + tenant_id=tenant_id, + app_config=app_generate_entity.app_config, + inputs=dict(inputs), + query=query or "", + message_id=message_id, + trace_manager=app_generate_entity.trace_manager, + ) diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index 337b779b50..dc27076a4d 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -559,6 +559,7 @@ class AdvancedChatAppGenerateTaskPipeline: outputs=event.outputs, conversation_id=self._conversation_id, trace_manager=trace_manager, + external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), ) workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( session=session, @@ -590,6 +591,7 @@ class AdvancedChatAppGenerateTaskPipeline: exceptions_count=event.exceptions_count, conversation_id=None, trace_manager=trace_manager, + external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), ) workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( session=session, @@ -622,6 +624,7 @@ class 
AdvancedChatAppGenerateTaskPipeline: conversation_id=self._conversation_id, trace_manager=trace_manager, exceptions_count=event.exceptions_count, + external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), ) workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( session=session, @@ -653,6 +656,7 @@ class AdvancedChatAppGenerateTaskPipeline: error_message=event.get_stop_reason(), conversation_id=self._conversation_id, trace_manager=trace_manager, + external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), ) workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( session=session, diff --git a/api/core/app/apps/agent_chat/app_runner.py b/api/core/app/apps/agent_chat/app_runner.py index 71328f6d1b..39d6ba39f5 100644 --- a/api/core/app/apps/agent_chat/app_runner.py +++ b/api/core/app/apps/agent_chat/app_runner.py @@ -45,7 +45,7 @@ class AgentChatAppRunner(AppRunner): app_config = application_generate_entity.app_config app_config = cast(AgentChatAppConfig, app_config) - app_record = db.session.query(App).filter(App.id == app_config.app_id).first() + app_record = db.session.query(App).where(App.id == app_config.app_id).first() if not app_record: raise ValueError("App not found") @@ -183,10 +183,10 @@ class AgentChatAppRunner(AppRunner): if {ModelFeature.MULTI_TOOL_CALL, ModelFeature.TOOL_CALL}.intersection(model_schema.features or []): agent_entity.strategy = AgentEntity.Strategy.FUNCTION_CALLING - conversation_result = db.session.query(Conversation).filter(Conversation.id == conversation.id).first() + conversation_result = db.session.query(Conversation).where(Conversation.id == conversation.id).first() if conversation_result is None: raise ValueError("Conversation not found") - message_result = db.session.query(Message).filter(Message.id == message.id).first() + message_result = db.session.query(Message).where(Message.id == message.id).first() 
if message_result is None: raise ValueError("Message not found") db.session.close() diff --git a/api/core/app/apps/chat/app_runner.py b/api/core/app/apps/chat/app_runner.py index 39597fc036..894d7906d5 100644 --- a/api/core/app/apps/chat/app_runner.py +++ b/api/core/app/apps/chat/app_runner.py @@ -43,7 +43,7 @@ class ChatAppRunner(AppRunner): app_config = application_generate_entity.app_config app_config = cast(ChatAppConfig, app_config) - app_record = db.session.query(App).filter(App.id == app_config.app_id).first() + app_record = db.session.query(App).where(App.id == app_config.app_id).first() if not app_record: raise ValueError("App not found") diff --git a/api/core/app/apps/completion/app_generator.py b/api/core/app/apps/completion/app_generator.py index 195e7e2e3d..9356bd1cea 100644 --- a/api/core/app/apps/completion/app_generator.py +++ b/api/core/app/apps/completion/app_generator.py @@ -248,7 +248,7 @@ class CompletionAppGenerator(MessageBasedAppGenerator): """ message = ( db.session.query(Message) - .filter( + .where( Message.id == message_id, Message.app_id == app_model.id, Message.from_source == ("api" if isinstance(user, EndUser) else "console"), diff --git a/api/core/app/apps/completion/app_runner.py b/api/core/app/apps/completion/app_runner.py index 80fdd0b80e..50d2a0036c 100644 --- a/api/core/app/apps/completion/app_runner.py +++ b/api/core/app/apps/completion/app_runner.py @@ -36,7 +36,7 @@ class CompletionAppRunner(AppRunner): app_config = application_generate_entity.app_config app_config = cast(CompletionAppConfig, app_config) - app_record = db.session.query(App).filter(App.id == app_config.app_id).first() + app_record = db.session.query(App).where(App.id == app_config.app_id).first() if not app_record: raise ValueError("App not found") diff --git a/api/core/app/apps/message_based_app_generator.py b/api/core/app/apps/message_based_app_generator.py index d50cf1c941..7dd9904eeb 100644 --- a/api/core/app/apps/message_based_app_generator.py +++ 
b/api/core/app/apps/message_based_app_generator.py @@ -85,7 +85,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): if conversation: app_model_config = ( db.session.query(AppModelConfig) - .filter(AppModelConfig.id == conversation.app_model_config_id, AppModelConfig.app_id == app_model.id) + .where(AppModelConfig.id == conversation.app_model_config_id, AppModelConfig.app_id == app_model.id) .first() ) @@ -151,13 +151,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): introduction = self._get_conversation_introduction(application_generate_entity) # get conversation name - if isinstance(application_generate_entity, AdvancedChatAppGenerateEntity): - query = application_generate_entity.query or "New conversation" - else: - query = next(iter(application_generate_entity.inputs.values()), "New conversation") - if isinstance(query, int): - query = str(query) - query = query or "New conversation" + query = application_generate_entity.query or "New conversation" conversation_name = (query[:20] + "…") if len(query) > 20 else query if not conversation: @@ -259,7 +253,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): :param conversation_id: conversation id :return: conversation """ - conversation = db.session.query(Conversation).filter(Conversation.id == conversation_id).first() + conversation = db.session.query(Conversation).where(Conversation.id == conversation_id).first() if not conversation: raise ConversationNotExistsError("Conversation not exists") @@ -272,7 +266,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): :param message_id: message id :return: message """ - message = db.session.query(Message).filter(Message.id == message_id).first() + message = db.session.query(Message).where(Message.id == message_id).first() if message is None: raise MessageNotExistsError("Message not exists") diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index 6f560b3253..4c36f63c71 100644 --- 
a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -7,7 +7,8 @@ from typing import Any, Literal, Optional, Union, overload from flask import Flask, current_app from pydantic import ValidationError -from sqlalchemy.orm import sessionmaker +from sqlalchemy import select +from sqlalchemy.orm import Session, sessionmaker import contexts from configs import dify_config @@ -22,6 +23,7 @@ from core.app.apps.workflow.generate_response_converter import WorkflowAppGenera from core.app.apps.workflow.generate_task_pipeline import WorkflowAppGenerateTaskPipeline from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity from core.app.entities.task_entities import WorkflowAppBlockingResponse, WorkflowAppStreamResponse +from core.helper.trace_id_helper import extract_external_trace_id_from_args from core.model_runtime.errors.invoke import InvokeAuthorizationError from core.ops.ops_trace_manager import TraceQueueManager from core.repositories import DifyCoreRepositoryFactory @@ -123,6 +125,10 @@ class WorkflowAppGenerator(BaseAppGenerator): ) inputs: Mapping[str, Any] = args["inputs"] + + extras = { + **extract_external_trace_id_from_args(args), + } workflow_run_id = str(uuid.uuid4()) # init application generate entity application_generate_entity = WorkflowAppGenerateEntity( @@ -142,6 +148,7 @@ class WorkflowAppGenerator(BaseAppGenerator): call_depth=call_depth, trace_manager=trace_manager, workflow_execution_id=workflow_run_id, + extras=extras, ) contexts.plugin_tool_providers.set({}) @@ -439,17 +446,44 @@ class WorkflowAppGenerator(BaseAppGenerator): """ with preserve_flask_contexts(flask_app, context_vars=context): - try: - # workflow app - runner = WorkflowAppRunner( - application_generate_entity=application_generate_entity, - queue_manager=queue_manager, - workflow_thread_pool_id=workflow_thread_pool_id, - variable_loader=variable_loader, + with Session(db.engine, expire_on_commit=False) as session: 
+ workflow = session.scalar( + select(Workflow).where( + Workflow.tenant_id == application_generate_entity.app_config.tenant_id, + Workflow.app_id == application_generate_entity.app_config.app_id, + Workflow.id == application_generate_entity.app_config.workflow_id, + ) ) + if workflow is None: + raise ValueError("Workflow not found") + # Determine system_user_id based on invocation source + is_external_api_call = application_generate_entity.invoke_from in { + InvokeFrom.WEB_APP, + InvokeFrom.SERVICE_API, + } + + if is_external_api_call: + # For external API calls, use end user's session ID + end_user = session.scalar(select(EndUser).where(EndUser.id == application_generate_entity.user_id)) + system_user_id = end_user.session_id if end_user else "" + else: + # For internal calls, use the original user ID + system_user_id = application_generate_entity.user_id + + runner = WorkflowAppRunner( + application_generate_entity=application_generate_entity, + queue_manager=queue_manager, + workflow_thread_pool_id=workflow_thread_pool_id, + variable_loader=variable_loader, + workflow=workflow, + system_user_id=system_user_id, + ) + + try: runner.run() - except GenerateTaskStoppedError: + except GenerateTaskStoppedError as e: + logger.warning(f"Task stopped: {str(e)}") pass except InvokeAuthorizationError: queue_manager.publish_error( @@ -465,8 +499,6 @@ class WorkflowAppGenerator(BaseAppGenerator): except Exception as e: logger.exception("Unknown Error when generating") queue_manager.publish_error(e, PublishFrom.APPLICATION_MANAGER) - finally: - db.session.close() def _handle_response( self, diff --git a/api/core/app/apps/workflow/app_runner.py b/api/core/app/apps/workflow/app_runner.py index 3a66ffa578..4f4c1460ae 100644 --- a/api/core/app/apps/workflow/app_runner.py +++ b/api/core/app/apps/workflow/app_runner.py @@ -14,10 +14,8 @@ from core.workflow.entities.variable_pool import VariablePool from core.workflow.system_variable import SystemVariable from 
core.workflow.variable_loader import VariableLoader from core.workflow.workflow_entry import WorkflowEntry -from extensions.ext_database import db from models.enums import UserFrom -from models.model import App, EndUser -from models.workflow import WorkflowType +from models.workflow import Workflow, WorkflowType logger = logging.getLogger(__name__) @@ -29,22 +27,23 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): def __init__( self, + *, application_generate_entity: WorkflowAppGenerateEntity, queue_manager: AppQueueManager, variable_loader: VariableLoader, workflow_thread_pool_id: Optional[str] = None, + workflow: Workflow, + system_user_id: str, ) -> None: - """ - :param application_generate_entity: application generate entity - :param queue_manager: application queue manager - :param workflow_thread_pool_id: workflow thread pool id - """ - super().__init__(queue_manager, variable_loader) + super().__init__( + queue_manager=queue_manager, + variable_loader=variable_loader, + app_id=application_generate_entity.app_config.app_id, + ) self.application_generate_entity = application_generate_entity self.workflow_thread_pool_id = workflow_thread_pool_id - - def _get_app_id(self) -> str: - return self.application_generate_entity.app_config.app_id + self._workflow = workflow + self._sys_user_id = system_user_id def run(self) -> None: """ @@ -53,24 +52,6 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): app_config = self.application_generate_entity.app_config app_config = cast(WorkflowAppConfig, app_config) - user_id = None - if self.application_generate_entity.invoke_from in {InvokeFrom.WEB_APP, InvokeFrom.SERVICE_API}: - end_user = db.session.query(EndUser).filter(EndUser.id == self.application_generate_entity.user_id).first() - if end_user: - user_id = end_user.session_id - else: - user_id = self.application_generate_entity.user_id - - app_record = db.session.query(App).filter(App.id == app_config.app_id).first() - if not app_record: - raise ValueError("App not 
found") - - workflow = self.get_workflow(app_model=app_record, workflow_id=app_config.workflow_id) - if not workflow: - raise ValueError("Workflow not initialized") - - db.session.close() - workflow_callbacks: list[WorkflowCallback] = [] if dify_config.DEBUG: workflow_callbacks.append(WorkflowLoggingCallback()) @@ -79,14 +60,14 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): if self.application_generate_entity.single_iteration_run: # if only single iteration run is requested graph, variable_pool = self._get_graph_and_variable_pool_of_single_iteration( - workflow=workflow, + workflow=self._workflow, node_id=self.application_generate_entity.single_iteration_run.node_id, user_inputs=self.application_generate_entity.single_iteration_run.inputs, ) elif self.application_generate_entity.single_loop_run: # if only single loop run is requested graph, variable_pool = self._get_graph_and_variable_pool_of_single_loop( - workflow=workflow, + workflow=self._workflow, node_id=self.application_generate_entity.single_loop_run.node_id, user_inputs=self.application_generate_entity.single_loop_run.inputs, ) @@ -98,7 +79,7 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): system_inputs = SystemVariable( files=files, - user_id=user_id, + user_id=self._sys_user_id, app_id=app_config.app_id, workflow_id=app_config.workflow_id, workflow_execution_id=self.application_generate_entity.workflow_execution_id, @@ -107,21 +88,21 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): variable_pool = VariablePool( system_variables=system_inputs, user_inputs=inputs, - environment_variables=workflow.environment_variables, + environment_variables=self._workflow.environment_variables, conversation_variables=[], ) # init graph - graph = self._init_graph(graph_config=workflow.graph_dict) + graph = self._init_graph(graph_config=self._workflow.graph_dict) # RUN WORKFLOW workflow_entry = WorkflowEntry( - tenant_id=workflow.tenant_id, - app_id=workflow.app_id, - workflow_id=workflow.id, - 
workflow_type=WorkflowType.value_of(workflow.type), + tenant_id=self._workflow.tenant_id, + app_id=self._workflow.app_id, + workflow_id=self._workflow.id, + workflow_type=WorkflowType.value_of(self._workflow.type), graph=graph, - graph_config=workflow.graph_dict, + graph_config=self._workflow.graph_dict, user_id=self.application_generate_entity.user_id, user_from=( UserFrom.ACCOUNT diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index 9a39b2e01e..e31a316c56 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -490,6 +490,7 @@ class WorkflowAppGenerateTaskPipeline: outputs=event.outputs, conversation_id=None, trace_manager=trace_manager, + external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), ) # save workflow app log @@ -524,6 +525,7 @@ class WorkflowAppGenerateTaskPipeline: exceptions_count=event.exceptions_count, conversation_id=None, trace_manager=trace_manager, + external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), ) # save workflow app log @@ -561,6 +563,7 @@ class WorkflowAppGenerateTaskPipeline: conversation_id=None, trace_manager=trace_manager, exceptions_count=event.exceptions_count if isinstance(event, QueueWorkflowFailedEvent) else 0, + external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), ) # save workflow app log diff --git a/api/core/app/apps/workflow_app_runner.py b/api/core/app/apps/workflow_app_runner.py index 2f4d234ecd..948ea95e63 100644 --- a/api/core/app/apps/workflow_app_runner.py +++ b/api/core/app/apps/workflow_app_runner.py @@ -1,8 +1,7 @@ from collections.abc import Mapping -from typing import Any, Optional, cast +from typing import Any, cast from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom -from core.app.apps.base_app_runner import AppRunner from 
core.app.entities.queue_entities import ( AppQueueEvent, QueueAgentLogEvent, @@ -65,18 +64,20 @@ from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING from core.workflow.system_variable import SystemVariable from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader, load_into_variable_pool from core.workflow.workflow_entry import WorkflowEntry -from extensions.ext_database import db -from models.model import App from models.workflow import Workflow -class WorkflowBasedAppRunner(AppRunner): - def __init__(self, queue_manager: AppQueueManager, variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER) -> None: - self.queue_manager = queue_manager +class WorkflowBasedAppRunner: + def __init__( + self, + *, + queue_manager: AppQueueManager, + variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER, + app_id: str, + ) -> None: + self._queue_manager = queue_manager self._variable_loader = variable_loader - - def _get_app_id(self) -> str: - raise NotImplementedError("not implemented") + self._app_id = app_id def _init_graph(self, graph_config: Mapping[str, Any]) -> Graph: """ @@ -693,21 +694,5 @@ class WorkflowBasedAppRunner(AppRunner): ) ) - def get_workflow(self, app_model: App, workflow_id: str) -> Optional[Workflow]: - """ - Get workflow - """ - # fetch workflow by workflow_id - workflow = ( - db.session.query(Workflow) - .filter( - Workflow.tenant_id == app_model.tenant_id, Workflow.app_id == app_model.id, Workflow.id == workflow_id - ) - .first() - ) - - # return workflow - return workflow - def _publish_event(self, event: AppQueueEvent) -> None: - self.queue_manager.publish(event, PublishFrom.APPLICATION_MANAGER) + self._queue_manager.publish(event, PublishFrom.APPLICATION_MANAGER) diff --git a/api/core/app/features/annotation_reply/annotation_reply.py b/api/core/app/features/annotation_reply/annotation_reply.py index 83fd3debad..54dc69302a 100644 --- a/api/core/app/features/annotation_reply/annotation_reply.py +++ 
b/api/core/app/features/annotation_reply/annotation_reply.py @@ -26,7 +26,7 @@ class AnnotationReplyFeature: :return: """ annotation_setting = ( - db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_record.id).first() + db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_record.id).first() ) if not annotation_setting: diff --git a/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py b/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py index 3c8c7bb5a2..888434798a 100644 --- a/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py +++ b/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py @@ -471,7 +471,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline): :return: """ agent_thought: Optional[MessageAgentThought] = ( - db.session.query(MessageAgentThought).filter(MessageAgentThought.id == event.agent_thought_id).first() + db.session.query(MessageAgentThought).where(MessageAgentThought.id == event.agent_thought_id).first() ) if agent_thought: diff --git a/api/core/app/task_pipeline/message_cycle_manager.py b/api/core/app/task_pipeline/message_cycle_manager.py index 2343081eaf..824da0b934 100644 --- a/api/core/app/task_pipeline/message_cycle_manager.py +++ b/api/core/app/task_pipeline/message_cycle_manager.py @@ -81,7 +81,7 @@ class MessageCycleManager: def _generate_conversation_name_worker(self, flask_app: Flask, conversation_id: str, query: str): with flask_app.app_context(): # get conversation and message - conversation = db.session.query(Conversation).filter(Conversation.id == conversation_id).first() + conversation = db.session.query(Conversation).where(Conversation.id == conversation_id).first() if not conversation: return @@ -140,7 +140,7 @@ class MessageCycleManager: :param event: event :return: """ - message_file = db.session.query(MessageFile).filter(MessageFile.id == event.message_file_id).first() + message_file = 
db.session.query(MessageFile).where(MessageFile.id == event.message_file_id).first() if message_file and message_file.url is not None: # get tool file id diff --git a/api/core/callback_handler/index_tool_callback_handler.py b/api/core/callback_handler/index_tool_callback_handler.py index a3a7b4b812..c55ba5e0fe 100644 --- a/api/core/callback_handler/index_tool_callback_handler.py +++ b/api/core/callback_handler/index_tool_callback_handler.py @@ -49,7 +49,7 @@ class DatasetIndexToolCallbackHandler: for document in documents: if document.metadata is not None: document_id = document.metadata["document_id"] - dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == document_id).first() + dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == document_id).first() if not dataset_document: _logger.warning( "Expected DatasetDocument record to exist, but none was found, document_id=%s", @@ -59,7 +59,7 @@ class DatasetIndexToolCallbackHandler: if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX: child_chunk = ( db.session.query(ChildChunk) - .filter( + .where( ChildChunk.index_node_id == document.metadata["doc_id"], ChildChunk.dataset_id == dataset_document.dataset_id, ChildChunk.document_id == dataset_document.id, @@ -69,18 +69,18 @@ class DatasetIndexToolCallbackHandler: if child_chunk: segment = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.id == child_chunk.segment_id) + .where(DocumentSegment.id == child_chunk.segment_id) .update( {DocumentSegment.hit_count: DocumentSegment.hit_count + 1}, synchronize_session=False ) ) else: - query = db.session.query(DocumentSegment).filter( + query = db.session.query(DocumentSegment).where( DocumentSegment.index_node_id == document.metadata["doc_id"] ) if "dataset_id" in document.metadata: - query = query.filter(DocumentSegment.dataset_id == document.metadata["dataset_id"]) + query = query.where(DocumentSegment.dataset_id == document.metadata["dataset_id"]) # 
add hit count to document segment query.update({DocumentSegment.hit_count: DocumentSegment.hit_count + 1}, synchronize_session=False) diff --git a/api/core/entities/provider_configuration.py b/api/core/entities/provider_configuration.py index b316a3a51e..24b8edbc36 100644 --- a/api/core/entities/provider_configuration.py +++ b/api/core/entities/provider_configuration.py @@ -191,7 +191,7 @@ class ProviderConfiguration(BaseModel): provider_record = ( db.session.query(Provider) - .filter( + .where( Provider.tenant_id == self.tenant_id, Provider.provider_type == ProviderType.CUSTOM.value, Provider.provider_name.in_(provider_names), @@ -351,7 +351,7 @@ class ProviderConfiguration(BaseModel): provider_model_record = ( db.session.query(ProviderModel) - .filter( + .where( ProviderModel.tenant_id == self.tenant_id, ProviderModel.provider_name.in_(provider_names), ProviderModel.model_name == model, @@ -481,7 +481,7 @@ class ProviderConfiguration(BaseModel): return ( db.session.query(ProviderModelSetting) - .filter( + .where( ProviderModelSetting.tenant_id == self.tenant_id, ProviderModelSetting.provider_name.in_(provider_names), ProviderModelSetting.model_type == model_type.to_origin_model_type(), @@ -560,7 +560,7 @@ class ProviderConfiguration(BaseModel): return ( db.session.query(LoadBalancingModelConfig) - .filter( + .where( LoadBalancingModelConfig.tenant_id == self.tenant_id, LoadBalancingModelConfig.provider_name.in_(provider_names), LoadBalancingModelConfig.model_type == model_type.to_origin_model_type(), @@ -583,7 +583,7 @@ class ProviderConfiguration(BaseModel): load_balancing_config_count = ( db.session.query(LoadBalancingModelConfig) - .filter( + .where( LoadBalancingModelConfig.tenant_id == self.tenant_id, LoadBalancingModelConfig.provider_name.in_(provider_names), LoadBalancingModelConfig.model_type == model_type.to_origin_model_type(), @@ -627,7 +627,7 @@ class ProviderConfiguration(BaseModel): model_setting = ( db.session.query(ProviderModelSetting) - .filter( 
+ .where( ProviderModelSetting.tenant_id == self.tenant_id, ProviderModelSetting.provider_name.in_(provider_names), ProviderModelSetting.model_type == model_type.to_origin_model_type(), @@ -693,7 +693,7 @@ class ProviderConfiguration(BaseModel): preferred_model_provider = ( db.session.query(TenantPreferredModelProvider) - .filter( + .where( TenantPreferredModelProvider.tenant_id == self.tenant_id, TenantPreferredModelProvider.provider_name.in_(provider_names), ) diff --git a/api/core/external_data_tool/api/api.py b/api/core/external_data_tool/api/api.py index 53acdf075f..2099a9e34c 100644 --- a/api/core/external_data_tool/api/api.py +++ b/api/core/external_data_tool/api/api.py @@ -32,7 +32,7 @@ class ApiExternalDataTool(ExternalDataTool): # get api_based_extension api_based_extension = ( db.session.query(APIBasedExtension) - .filter(APIBasedExtension.tenant_id == tenant_id, APIBasedExtension.id == api_based_extension_id) + .where(APIBasedExtension.tenant_id == tenant_id, APIBasedExtension.id == api_based_extension_id) .first() ) @@ -56,7 +56,7 @@ class ApiExternalDataTool(ExternalDataTool): # get api_based_extension api_based_extension = ( db.session.query(APIBasedExtension) - .filter(APIBasedExtension.tenant_id == self.tenant_id, APIBasedExtension.id == api_based_extension_id) + .where(APIBasedExtension.tenant_id == self.tenant_id, APIBasedExtension.id == api_based_extension_id) .first() ) diff --git a/api/core/helper/encrypter.py b/api/core/helper/encrypter.py index 1e40997a8b..f761d20374 100644 --- a/api/core/helper/encrypter.py +++ b/api/core/helper/encrypter.py @@ -15,7 +15,7 @@ def encrypt_token(tenant_id: str, token: str): from models.account import Tenant from models.engine import db - if not (tenant := db.session.query(Tenant).filter(Tenant.id == tenant_id).first()): + if not (tenant := db.session.query(Tenant).where(Tenant.id == tenant_id).first()): raise ValueError(f"Tenant with id {tenant_id} not found") encrypted_token = rsa.encrypt(token, 
tenant.encrypt_public_key) return base64.b64encode(encrypted_token).decode() diff --git a/api/core/helper/marketplace.py b/api/core/helper/marketplace.py index 65bf4fc1db..fe3078923d 100644 --- a/api/core/helper/marketplace.py +++ b/api/core/helper/marketplace.py @@ -25,9 +25,29 @@ def batch_fetch_plugin_manifests(plugin_ids: list[str]) -> Sequence[MarketplaceP url = str(marketplace_api_url / "api/v1/plugins/batch") response = requests.post(url, json={"plugin_ids": plugin_ids}) response.raise_for_status() + return [MarketplacePluginDeclaration(**plugin) for plugin in response.json()["data"]["plugins"]] +def batch_fetch_plugin_manifests_ignore_deserialization_error( + plugin_ids: list[str], +) -> Sequence[MarketplacePluginDeclaration]: + if len(plugin_ids) == 0: + return [] + + url = str(marketplace_api_url / "api/v1/plugins/batch") + response = requests.post(url, json={"plugin_ids": plugin_ids}) + response.raise_for_status() + result: list[MarketplacePluginDeclaration] = [] + for plugin in response.json()["data"]["plugins"]: + try: + result.append(MarketplacePluginDeclaration(**plugin)) + except Exception as e: + pass + + return result + + def record_install_plugin_event(plugin_unique_identifier: str): url = str(marketplace_api_url / "api/v1/stats/plugins/install_count") response = requests.post(url, json={"unique_identifier": plugin_unique_identifier}) diff --git a/api/core/helper/trace_id_helper.py b/api/core/helper/trace_id_helper.py new file mode 100644 index 0000000000..e90c3194f2 --- /dev/null +++ b/api/core/helper/trace_id_helper.py @@ -0,0 +1,42 @@ +import re +from collections.abc import Mapping +from typing import Any, Optional + + +def is_valid_trace_id(trace_id: str) -> bool: + """ + Check if the trace_id is valid. + + Requirements: 1-128 characters, only letters, numbers, '-', and '_'. 
+ """ + return bool(re.match(r"^[a-zA-Z0-9\-_]{1,128}$", trace_id)) + + +def get_external_trace_id(request: Any) -> Optional[str]: + """ + Retrieve the trace_id from the request. + + Priority: header ('X-Trace-Id'), then parameters, then JSON body. Returns None if not provided or invalid. + """ + trace_id = request.headers.get("X-Trace-Id") + if not trace_id: + trace_id = request.args.get("trace_id") + if not trace_id and getattr(request, "is_json", False): + json_data = getattr(request, "json", None) + if json_data: + trace_id = json_data.get("trace_id") + if isinstance(trace_id, str) and is_valid_trace_id(trace_id): + return trace_id + return None + + +def extract_external_trace_id_from_args(args: Mapping[str, Any]) -> dict: + """ + Extract 'external_trace_id' from args. + + Returns a dict suitable for use in extras. Returns an empty dict if not found. + """ + trace_id = args.get("external_trace_id") + if trace_id: + return {"external_trace_id": trace_id} + return {} diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py index 305a9190d5..fc5d0547fc 100644 --- a/api/core/indexing_runner.py +++ b/api/core/indexing_runner.py @@ -59,7 +59,7 @@ class IndexingRunner: # get the process rule processing_rule = ( db.session.query(DatasetProcessRule) - .filter(DatasetProcessRule.id == dataset_document.dataset_process_rule_id) + .where(DatasetProcessRule.id == dataset_document.dataset_process_rule_id) .first() ) if not processing_rule: @@ -119,12 +119,12 @@ class IndexingRunner: db.session.delete(document_segment) if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX: # delete child chunks - db.session.query(ChildChunk).filter(ChildChunk.segment_id == document_segment.id).delete() + db.session.query(ChildChunk).where(ChildChunk.segment_id == document_segment.id).delete() db.session.commit() # get the process rule processing_rule = ( db.session.query(DatasetProcessRule) - .filter(DatasetProcessRule.id == dataset_document.dataset_process_rule_id) + 
.where(DatasetProcessRule.id == dataset_document.dataset_process_rule_id) .first() ) if not processing_rule: @@ -212,7 +212,7 @@ class IndexingRunner: # get the process rule processing_rule = ( db.session.query(DatasetProcessRule) - .filter(DatasetProcessRule.id == dataset_document.dataset_process_rule_id) + .where(DatasetProcessRule.id == dataset_document.dataset_process_rule_id) .first() ) @@ -316,7 +316,7 @@ class IndexingRunner: # delete image files and related db records image_upload_file_ids = get_image_upload_file_ids(document.page_content) for upload_file_id in image_upload_file_ids: - image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first() + image_file = db.session.query(UploadFile).where(UploadFile.id == upload_file_id).first() if image_file is None: continue try: @@ -346,7 +346,7 @@ class IndexingRunner: raise ValueError("no upload file found") file_detail = ( - db.session.query(UploadFile).filter(UploadFile.id == data_source_info["upload_file_id"]).one_or_none() + db.session.query(UploadFile).where(UploadFile.id == data_source_info["upload_file_id"]).one_or_none() ) if file_detail: @@ -599,7 +599,7 @@ class IndexingRunner: keyword.create(documents) if dataset.indexing_technique != "high_quality": document_ids = [document.metadata["doc_id"] for document in documents] - db.session.query(DocumentSegment).filter( + db.session.query(DocumentSegment).where( DocumentSegment.document_id == document_id, DocumentSegment.dataset_id == dataset_id, DocumentSegment.index_node_id.in_(document_ids), @@ -630,7 +630,7 @@ class IndexingRunner: index_processor.load(dataset, chunk_documents, with_keywords=False) document_ids = [document.metadata["doc_id"] for document in chunk_documents] - db.session.query(DocumentSegment).filter( + db.session.query(DocumentSegment).where( DocumentSegment.document_id == dataset_document.id, DocumentSegment.dataset_id == dataset.id, DocumentSegment.index_node_id.in_(document_ids), @@ -672,8 +672,7 @@ class 
IndexingRunner: if extra_update_params: update_params.update(extra_update_params) - - db.session.query(DatasetDocument).filter_by(id=document_id).update(update_params) + db.session.query(DatasetDocument).filter_by(id=document_id).update(update_params) # type: ignore db.session.commit() @staticmethod diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index f7fd93be4a..331ac933c8 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -114,7 +114,8 @@ class LLMGenerator: ), ) - questions = output_parser.parse(cast(str, response.message.content)) + text_content = response.message.get_text_content() + questions = output_parser.parse(text_content) if text_content else [] except InvokeError: questions = [] except Exception: diff --git a/api/core/llm_generator/output_parser/suggested_questions_after_answer.py b/api/core/llm_generator/output_parser/suggested_questions_after_answer.py index c451bf514c..98cdc4c8b7 100644 --- a/api/core/llm_generator/output_parser/suggested_questions_after_answer.py +++ b/api/core/llm_generator/output_parser/suggested_questions_after_answer.py @@ -15,5 +15,4 @@ class SuggestedQuestionsAfterAnswerOutputParser: json_obj = json.loads(action_match.group(0).strip()) else: json_obj = [] - return json_obj diff --git a/api/core/mcp/server/streamable_http.py b/api/core/mcp/server/streamable_http.py index 20ff7e7524..496b5432a0 100644 --- a/api/core/mcp/server/streamable_http.py +++ b/api/core/mcp/server/streamable_http.py @@ -28,7 +28,7 @@ class MCPServerStreamableHTTPRequestHandler: ): self.app = app self.request = request - mcp_server = db.session.query(AppMCPServer).filter(AppMCPServer.app_id == self.app.id).first() + mcp_server = db.session.query(AppMCPServer).where(AppMCPServer.app_id == self.app.id).first() if not mcp_server: raise ValueError("MCP server not found") self.mcp_server: AppMCPServer = mcp_server @@ -192,7 +192,7 @@ class 
MCPServerStreamableHTTPRequestHandler: def retrieve_end_user(self): return ( db.session.query(EndUser) - .filter(EndUser.external_user_id == self.mcp_server.id, EndUser.type == "mcp") + .where(EndUser.external_user_id == self.mcp_server.id, EndUser.type == "mcp") .first() ) diff --git a/api/core/memory/token_buffer_memory.py b/api/core/memory/token_buffer_memory.py index a9f0a92e5d..7ce124594a 100644 --- a/api/core/memory/token_buffer_memory.py +++ b/api/core/memory/token_buffer_memory.py @@ -67,7 +67,7 @@ class TokenBufferMemory: prompt_messages: list[PromptMessage] = [] for message in messages: - files = db.session.query(MessageFile).filter(MessageFile.message_id == message.id).all() + files = db.session.query(MessageFile).where(MessageFile.message_id == message.id).all() if files: file_extra_config = None if self.conversation.mode in {AppMode.AGENT_CHAT, AppMode.COMPLETION, AppMode.CHAT}: diff --git a/api/core/model_runtime/entities/message_entities.py b/api/core/model_runtime/entities/message_entities.py index 9d010ae28d..83dc7f0525 100644 --- a/api/core/model_runtime/entities/message_entities.py +++ b/api/core/model_runtime/entities/message_entities.py @@ -156,6 +156,23 @@ class PromptMessage(ABC, BaseModel): """ return not self.content + def get_text_content(self) -> str: + """ + Get text content from prompt message. 
+ + :return: Text content as string, empty string if no text content + """ + if isinstance(self.content, str): + return self.content + elif isinstance(self.content, list): + text_parts = [] + for item in self.content: + if isinstance(item, TextPromptMessageContent): + text_parts.append(item.data) + return "".join(text_parts) + else: + return "" + @field_validator("content", mode="before") @classmethod def validate_content(cls, v): diff --git a/api/core/moderation/api/api.py b/api/core/moderation/api/api.py index c65a3885fd..332381555b 100644 --- a/api/core/moderation/api/api.py +++ b/api/core/moderation/api/api.py @@ -89,7 +89,7 @@ class ApiModeration(Moderation): def _get_api_based_extension(tenant_id: str, api_based_extension_id: str) -> Optional[APIBasedExtension]: extension = ( db.session.query(APIBasedExtension) - .filter(APIBasedExtension.tenant_id == tenant_id, APIBasedExtension.id == api_based_extension_id) + .where(APIBasedExtension.tenant_id == tenant_id, APIBasedExtension.id == api_based_extension_id) .first() ) diff --git a/api/core/ops/aliyun_trace/aliyun_trace.py b/api/core/ops/aliyun_trace/aliyun_trace.py index db8fec4ee9..cf367efdf0 100644 --- a/api/core/ops/aliyun_trace/aliyun_trace.py +++ b/api/core/ops/aliyun_trace/aliyun_trace.py @@ -101,7 +101,8 @@ class AliyunDataTrace(BaseTraceInstance): raise ValueError(f"Aliyun get run url failed: {str(e)}") def workflow_trace(self, trace_info: WorkflowTraceInfo): - trace_id = convert_to_trace_id(trace_info.workflow_run_id) + external_trace_id = trace_info.metadata.get("external_trace_id") + trace_id = external_trace_id or convert_to_trace_id(trace_info.workflow_run_id) workflow_span_id = convert_to_span_id(trace_info.workflow_run_id, "workflow") self.add_workflow_span(trace_id, workflow_span_id, trace_info) @@ -119,7 +120,7 @@ class AliyunDataTrace(BaseTraceInstance): user_id = message_data.from_account_id if message_data.from_end_user_id: end_user_data: Optional[EndUser] = ( - 
db.session.query(EndUser).filter(EndUser.id == message_data.from_end_user_id).first() + db.session.query(EndUser).where(EndUser.id == message_data.from_end_user_id).first() ) if end_user_data is not None: user_id = end_user_data.session_id @@ -243,14 +244,14 @@ class AliyunDataTrace(BaseTraceInstance): if not app_id: raise ValueError("No app_id found in trace_info metadata") - app = session.query(App).filter(App.id == app_id).first() + app = session.query(App).where(App.id == app_id).first() if not app: raise ValueError(f"App with id {app_id} not found") if not app.created_by: raise ValueError(f"App with id {app_id} has no creator (created_by is None)") - service_account = session.query(Account).filter(Account.id == app.created_by).first() + service_account = session.query(Account).where(Account.id == app.created_by).first() if not service_account: raise ValueError(f"Creator account with id {app.created_by} not found for app {app_id}") current_tenant = ( diff --git a/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py b/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py index 8b3ce0c448..1b72a4775a 100644 --- a/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py +++ b/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py @@ -153,7 +153,8 @@ class ArizePhoenixDataTrace(BaseTraceInstance): } workflow_metadata.update(trace_info.metadata) - trace_id = uuid_to_trace_id(trace_info.workflow_run_id) + external_trace_id = trace_info.metadata.get("external_trace_id") + trace_id = external_trace_id or uuid_to_trace_id(trace_info.workflow_run_id) span_id = RandomIdGenerator().generate_span_id() context = SpanContext( trace_id=trace_id, @@ -296,7 +297,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance): # Add end user data if available if trace_info.message_data.from_end_user_id: end_user_data: Optional[EndUser] = ( - db.session.query(EndUser).filter(EndUser.id == trace_info.message_data.from_end_user_id).first() + db.session.query(EndUser).where(EndUser.id == 
trace_info.message_data.from_end_user_id).first() ) if end_user_data is not None: message_metadata["end_user_id"] = end_user_data.session_id @@ -702,7 +703,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance): WorkflowNodeExecutionModel.process_data, WorkflowNodeExecutionModel.execution_metadata, ) - .filter(WorkflowNodeExecutionModel.workflow_run_id == workflow_run_id) + .where(WorkflowNodeExecutionModel.workflow_run_id == workflow_run_id) .all() ) return workflow_nodes diff --git a/api/core/ops/base_trace_instance.py b/api/core/ops/base_trace_instance.py index 8593198bc2..f8e428daf1 100644 --- a/api/core/ops/base_trace_instance.py +++ b/api/core/ops/base_trace_instance.py @@ -44,14 +44,14 @@ class BaseTraceInstance(ABC): """ with Session(db.engine, expire_on_commit=False) as session: # Get the app to find its creator - app = session.query(App).filter(App.id == app_id).first() + app = session.query(App).where(App.id == app_id).first() if not app: raise ValueError(f"App with id {app_id} not found") if not app.created_by: raise ValueError(f"App with id {app_id} has no creator (created_by is None)") - service_account = session.query(Account).filter(Account.id == app.created_by).first() + service_account = session.query(Account).where(Account.id == app.created_by).first() if not service_account: raise ValueError(f"Creator account with id {app.created_by} not found for app {app_id}") diff --git a/api/core/ops/langfuse_trace/langfuse_trace.py b/api/core/ops/langfuse_trace/langfuse_trace.py index 4a7e66d27c..f4a59ef3a7 100644 --- a/api/core/ops/langfuse_trace/langfuse_trace.py +++ b/api/core/ops/langfuse_trace/langfuse_trace.py @@ -67,13 +67,14 @@ class LangFuseDataTrace(BaseTraceInstance): self.generate_name_trace(trace_info) def workflow_trace(self, trace_info: WorkflowTraceInfo): - trace_id = trace_info.workflow_run_id + external_trace_id = trace_info.metadata.get("external_trace_id") + trace_id = external_trace_id or trace_info.workflow_run_id user_id = 
trace_info.metadata.get("user_id") metadata = trace_info.metadata metadata["workflow_app_log_id"] = trace_info.workflow_app_log_id if trace_info.message_id: - trace_id = trace_info.message_id + trace_id = external_trace_id or trace_info.message_id name = TraceTaskName.MESSAGE_TRACE.value trace_data = LangfuseTrace( id=trace_id, @@ -243,7 +244,7 @@ class LangFuseDataTrace(BaseTraceInstance): user_id = message_data.from_account_id if message_data.from_end_user_id: end_user_data: Optional[EndUser] = ( - db.session.query(EndUser).filter(EndUser.id == message_data.from_end_user_id).first() + db.session.query(EndUser).where(EndUser.id == message_data.from_end_user_id).first() ) if end_user_data is not None: user_id = end_user_data.session_id diff --git a/api/core/ops/langsmith_trace/langsmith_trace.py b/api/core/ops/langsmith_trace/langsmith_trace.py index 8a559c4929..c97846dc9b 100644 --- a/api/core/ops/langsmith_trace/langsmith_trace.py +++ b/api/core/ops/langsmith_trace/langsmith_trace.py @@ -65,7 +65,8 @@ class LangSmithDataTrace(BaseTraceInstance): self.generate_name_trace(trace_info) def workflow_trace(self, trace_info: WorkflowTraceInfo): - trace_id = trace_info.message_id or trace_info.workflow_run_id + external_trace_id = trace_info.metadata.get("external_trace_id") + trace_id = external_trace_id or trace_info.message_id or trace_info.workflow_run_id if trace_info.start_time is None: trace_info.start_time = datetime.now() message_dotted_order = ( @@ -261,7 +262,7 @@ class LangSmithDataTrace(BaseTraceInstance): if message_data.from_end_user_id: end_user_data: Optional[EndUser] = ( - db.session.query(EndUser).filter(EndUser.id == message_data.from_end_user_id).first() + db.session.query(EndUser).where(EndUser.id == message_data.from_end_user_id).first() ) if end_user_data is not None: end_user_id = end_user_data.session_id diff --git a/api/core/ops/opik_trace/opik_trace.py b/api/core/ops/opik_trace/opik_trace.py index be4997a5bf..6079b2faef 100644 --- 
a/api/core/ops/opik_trace/opik_trace.py +++ b/api/core/ops/opik_trace/opik_trace.py @@ -96,7 +96,8 @@ class OpikDataTrace(BaseTraceInstance): self.generate_name_trace(trace_info) def workflow_trace(self, trace_info: WorkflowTraceInfo): - dify_trace_id = trace_info.workflow_run_id + external_trace_id = trace_info.metadata.get("external_trace_id") + dify_trace_id = external_trace_id or trace_info.workflow_run_id opik_trace_id = prepare_opik_uuid(trace_info.start_time, dify_trace_id) workflow_metadata = wrap_metadata( trace_info.metadata, message_id=trace_info.message_id, workflow_app_log_id=trace_info.workflow_app_log_id @@ -104,7 +105,7 @@ class OpikDataTrace(BaseTraceInstance): root_span_id = None if trace_info.message_id: - dify_trace_id = trace_info.message_id + dify_trace_id = external_trace_id or trace_info.message_id opik_trace_id = prepare_opik_uuid(trace_info.start_time, dify_trace_id) trace_data = { @@ -283,7 +284,7 @@ class OpikDataTrace(BaseTraceInstance): if message_data.from_end_user_id: end_user_data: Optional[EndUser] = ( - db.session.query(EndUser).filter(EndUser.id == message_data.from_end_user_id).first() + db.session.query(EndUser).where(EndUser.id == message_data.from_end_user_id).first() ) if end_user_data is not None: end_user_id = end_user_data.session_id diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index 5c9b9d27b7..2b546b47cc 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -218,7 +218,7 @@ class OpsTraceManager: """ trace_config_data: Optional[TraceAppConfig] = ( db.session.query(TraceAppConfig) - .filter(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider) + .where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider) .first() ) @@ -226,7 +226,7 @@ class OpsTraceManager: return None # decrypt_token - app = db.session.query(App).filter(App.id == app_id).first() + app = 
db.session.query(App).where(App.id == app_id).first() if not app: raise ValueError("App not found") @@ -253,7 +253,7 @@ class OpsTraceManager: if app_id is None: return None - app: Optional[App] = db.session.query(App).filter(App.id == app_id).first() + app: Optional[App] = db.session.query(App).where(App.id == app_id).first() if app is None: return None @@ -293,18 +293,18 @@ class OpsTraceManager: @classmethod def get_app_config_through_message_id(cls, message_id: str): app_model_config = None - message_data = db.session.query(Message).filter(Message.id == message_id).first() + message_data = db.session.query(Message).where(Message.id == message_id).first() if not message_data: return None conversation_id = message_data.conversation_id - conversation_data = db.session.query(Conversation).filter(Conversation.id == conversation_id).first() + conversation_data = db.session.query(Conversation).where(Conversation.id == conversation_id).first() if not conversation_data: return None if conversation_data.app_model_config_id: app_model_config = ( db.session.query(AppModelConfig) - .filter(AppModelConfig.id == conversation_data.app_model_config_id) + .where(AppModelConfig.id == conversation_data.app_model_config_id) .first() ) elif conversation_data.app_model_config_id is None and conversation_data.override_model_configs: @@ -331,7 +331,7 @@ class OpsTraceManager: if tracing_provider is not None: raise ValueError(f"Invalid tracing provider: {tracing_provider}") - app_config: Optional[App] = db.session.query(App).filter(App.id == app_id).first() + app_config: Optional[App] = db.session.query(App).where(App.id == app_id).first() if not app_config: raise ValueError("App not found") app_config.tracing = json.dumps( @@ -349,7 +349,7 @@ class OpsTraceManager: :param app_id: app id :return: """ - app: Optional[App] = db.session.query(App).filter(App.id == app_id).first() + app: Optional[App] = db.session.query(App).where(App.id == app_id).first() if not app: raise ValueError("App 
not found") if not app.tracing: @@ -520,6 +520,10 @@ class TraceTask: "app_id": workflow_run.app_id, } + external_trace_id = self.kwargs.get("external_trace_id") + if external_trace_id: + metadata["external_trace_id"] = external_trace_id + workflow_trace_info = WorkflowTraceInfo( workflow_data=workflow_run.to_dict(), conversation_id=conversation_id, diff --git a/api/core/ops/utils.py b/api/core/ops/utils.py index 36d060afd2..573e8cac88 100644 --- a/api/core/ops/utils.py +++ b/api/core/ops/utils.py @@ -3,6 +3,8 @@ from datetime import datetime from typing import Optional, Union from urllib.parse import urlparse +from sqlalchemy import select + from extensions.ext_database import db from models.model import Message @@ -20,7 +22,7 @@ def filter_none_values(data: dict): def get_message_data(message_id: str): - return db.session.query(Message).filter(Message.id == message_id).first() + return db.session.scalar(select(Message).where(Message.id == message_id)) @contextmanager diff --git a/api/core/ops/weave_trace/weave_trace.py b/api/core/ops/weave_trace/weave_trace.py index 445c6a8741..a34b3b780c 100644 --- a/api/core/ops/weave_trace/weave_trace.py +++ b/api/core/ops/weave_trace/weave_trace.py @@ -87,7 +87,8 @@ class WeaveDataTrace(BaseTraceInstance): self.generate_name_trace(trace_info) def workflow_trace(self, trace_info: WorkflowTraceInfo): - trace_id = trace_info.message_id or trace_info.workflow_run_id + external_trace_id = trace_info.metadata.get("external_trace_id") + trace_id = external_trace_id or trace_info.message_id or trace_info.workflow_run_id if trace_info.start_time is None: trace_info.start_time = datetime.now() @@ -234,7 +235,7 @@ class WeaveDataTrace(BaseTraceInstance): if message_data.from_end_user_id: end_user_data: Optional[EndUser] = ( - db.session.query(EndUser).filter(EndUser.id == message_data.from_end_user_id).first() + db.session.query(EndUser).where(EndUser.id == message_data.from_end_user_id).first() ) if end_user_data is not None: 
end_user_id = end_user_data.session_id diff --git a/api/core/plugin/backwards_invocation/app.py b/api/core/plugin/backwards_invocation/app.py index 4e43561a15..e8c9bed099 100644 --- a/api/core/plugin/backwards_invocation/app.py +++ b/api/core/plugin/backwards_invocation/app.py @@ -193,9 +193,9 @@ class PluginAppBackwardsInvocation(BaseBackwardsInvocation): get the user by user id """ - user = db.session.query(EndUser).filter(EndUser.id == user_id).first() + user = db.session.query(EndUser).where(EndUser.id == user_id).first() if not user: - user = db.session.query(Account).filter(Account.id == user_id).first() + user = db.session.query(Account).where(Account.id == user_id).first() if not user: raise ValueError("user not found") @@ -208,7 +208,7 @@ class PluginAppBackwardsInvocation(BaseBackwardsInvocation): get app """ try: - app = db.session.query(App).filter(App.id == app_id).filter(App.tenant_id == tenant_id).first() + app = db.session.query(App).where(App.id == app_id).where(App.tenant_id == tenant_id).first() except Exception: raise ValueError("app not found") diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py index 1d44bad808..2cb96ac7bb 100644 --- a/api/core/plugin/entities/plugin_daemon.py +++ b/api/core/plugin/entities/plugin_daemon.py @@ -194,6 +194,7 @@ class PluginOAuthCredentialsResponse(BaseModel): metadata: Mapping[str, Any] = Field( default_factory=dict, description="The metadata of the OAuth, like avatar url, name, etc." ) + expires_at: int = Field(default=-1, description="The expires at time of the credentials. 
UTC timestamp.") credentials: Mapping[str, Any] = Field(description="The credentials of the OAuth.") diff --git a/api/core/plugin/impl/oauth.py b/api/core/plugin/impl/oauth.py index d73e5d9f9e..7f022992ff 100644 --- a/api/core/plugin/impl/oauth.py +++ b/api/core/plugin/impl/oauth.py @@ -84,6 +84,41 @@ class OAuthHandler(BasePluginClient): except Exception as e: raise ValueError(f"Error getting credentials: {e}") + def refresh_credentials( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + redirect_uri: str, + system_credentials: Mapping[str, Any], + credentials: Mapping[str, Any], + ) -> PluginOAuthCredentialsResponse: + try: + response = self._request_with_plugin_daemon_response_stream( + "POST", + f"plugin/{tenant_id}/dispatch/oauth/refresh_credentials", + PluginOAuthCredentialsResponse, + data={ + "user_id": user_id, + "data": { + "provider": provider, + "redirect_uri": redirect_uri, + "system_credentials": system_credentials, + "credentials": credentials, + }, + }, + headers={ + "X-Plugin-ID": plugin_id, + "Content-Type": "application/json", + }, + ) + for resp in response: + return resp + raise ValueError("No response received from plugin daemon for refresh credentials request.") + except Exception as e: + raise ValueError(f"Error refreshing credentials: {e}") + def _convert_request_to_raw_data(self, request: Request) -> bytes: """ Convert a Request object to raw HTTP data. 
diff --git a/api/core/provider_manager.py b/api/core/provider_manager.py index a31f0a303a..88addd7e68 100644 --- a/api/core/provider_manager.py +++ b/api/core/provider_manager.py @@ -275,7 +275,7 @@ class ProviderManager: # Get the corresponding TenantDefaultModel record default_model = ( db.session.query(TenantDefaultModel) - .filter( + .where( TenantDefaultModel.tenant_id == tenant_id, TenantDefaultModel.model_type == model_type.to_origin_model_type(), ) @@ -367,7 +367,7 @@ class ProviderManager: # Get the list of available models from get_configurations and check if it is LLM default_model = ( db.session.query(TenantDefaultModel) - .filter( + .where( TenantDefaultModel.tenant_id == tenant_id, TenantDefaultModel.model_type == model_type.to_origin_model_type(), ) @@ -541,7 +541,7 @@ class ProviderManager: db.session.rollback() existed_provider_record = ( db.session.query(Provider) - .filter( + .where( Provider.tenant_id == tenant_id, Provider.provider_name == ModelProviderID(provider_name).provider_name, Provider.provider_type == ProviderType.SYSTEM.value, diff --git a/api/core/rag/datasource/keyword/jieba/jieba.py b/api/core/rag/datasource/keyword/jieba/jieba.py index be1765feee..7796a47955 100644 --- a/api/core/rag/datasource/keyword/jieba/jieba.py +++ b/api/core/rag/datasource/keyword/jieba/jieba.py @@ -94,11 +94,11 @@ class Jieba(BaseKeyword): documents = [] for chunk_index in sorted_chunk_indices: - segment_query = db.session.query(DocumentSegment).filter( + segment_query = db.session.query(DocumentSegment).where( DocumentSegment.dataset_id == self.dataset.id, DocumentSegment.index_node_id == chunk_index ) if document_ids_filter: - segment_query = segment_query.filter(DocumentSegment.document_id.in_(document_ids_filter)) + segment_query = segment_query.where(DocumentSegment.document_id.in_(document_ids_filter)) segment = segment_query.first() if segment: @@ -215,7 +215,7 @@ class Jieba(BaseKeyword): def _update_segment_keywords(self, dataset_id: str, node_id: 
str, keywords: list[str]): document_segment = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.dataset_id == dataset_id, DocumentSegment.index_node_id == node_id) + .where(DocumentSegment.dataset_id == dataset_id, DocumentSegment.index_node_id == node_id) .first() ) if document_segment: diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py index 5a6903d3d5..e872a4e375 100644 --- a/api/core/rag/datasource/retrieval_service.py +++ b/api/core/rag/datasource/retrieval_service.py @@ -127,7 +127,7 @@ class RetrievalService: external_retrieval_model: Optional[dict] = None, metadata_filtering_conditions: Optional[dict] = None, ): - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: return [] metadata_condition = ( @@ -145,7 +145,7 @@ class RetrievalService: @classmethod def _get_dataset(cls, dataset_id: str) -> Optional[Dataset]: with Session(db.engine) as session: - return session.query(Dataset).filter(Dataset.id == dataset_id).first() + return session.query(Dataset).where(Dataset.id == dataset_id).first() @classmethod def keyword_search( @@ -294,7 +294,7 @@ class RetrievalService: dataset_documents = { doc.id: doc for doc in db.session.query(DatasetDocument) - .filter(DatasetDocument.id.in_(document_ids)) + .where(DatasetDocument.id.in_(document_ids)) .options(load_only(DatasetDocument.id, DatasetDocument.doc_form, DatasetDocument.dataset_id)) .all() } @@ -318,7 +318,7 @@ class RetrievalService: child_index_node_id = document.metadata.get("doc_id") child_chunk = ( - db.session.query(ChildChunk).filter(ChildChunk.index_node_id == child_index_node_id).first() + db.session.query(ChildChunk).where(ChildChunk.index_node_id == child_index_node_id).first() ) if not child_chunk: @@ -326,7 +326,7 @@ class RetrievalService: segment = ( db.session.query(DocumentSegment) - .filter( + .where( 
DocumentSegment.dataset_id == dataset_document.dataset_id, DocumentSegment.enabled == True, DocumentSegment.status == "completed", @@ -381,7 +381,7 @@ class RetrievalService: segment = ( db.session.query(DocumentSegment) - .filter( + .where( DocumentSegment.dataset_id == dataset_document.dataset_id, DocumentSegment.enabled == True, DocumentSegment.status == "completed", diff --git a/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py b/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py index 46aefef11d..b0f0eeca38 100644 --- a/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py +++ b/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py @@ -6,7 +6,7 @@ from uuid import UUID, uuid4 from numpy import ndarray from pgvecto_rs.sqlalchemy import VECTOR # type: ignore from pydantic import BaseModel, model_validator -from sqlalchemy import Float, String, create_engine, insert, select, text +from sqlalchemy import Float, create_engine, insert, select, text from sqlalchemy import text as sql_text from sqlalchemy.dialects import postgresql from sqlalchemy.orm import Mapped, Session, mapped_column @@ -67,7 +67,7 @@ class PGVectoRS(BaseVector): postgresql.UUID(as_uuid=True), primary_key=True, ) - text: Mapped[str] = mapped_column(String) + text: Mapped[str] meta: Mapped[dict] = mapped_column(postgresql.JSONB) vector: Mapped[ndarray] = mapped_column(VECTOR(dim)) diff --git a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py index 05fa73011a..dfb95a1839 100644 --- a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py +++ b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py @@ -443,7 +443,7 @@ class QdrantVectorFactory(AbstractVectorFactory): if dataset.collection_binding_id: dataset_collection_binding = ( db.session.query(DatasetCollectionBinding) - .filter(DatasetCollectionBinding.id == dataset.collection_binding_id) + .where(DatasetCollectionBinding.id == dataset.collection_binding_id) .one_or_none() ) if 
dataset_collection_binding: diff --git a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py index 552068c99e..9ed6e7369b 100644 --- a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py +++ b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py @@ -118,10 +118,21 @@ class TableStoreVector(BaseVector): def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: top_k = kwargs.get("top_k", 4) - return self._search_by_vector(query_vector, top_k) + document_ids_filter = kwargs.get("document_ids_filter") + filtered_list = None + if document_ids_filter: + filtered_list = ["document_id=" + item for item in document_ids_filter] + score_threshold = float(kwargs.get("score_threshold") or 0.0) + return self._search_by_vector(query_vector, filtered_list, top_k, score_threshold) def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: - return self._search_by_full_text(query) + top_k = kwargs.get("top_k", 4) + document_ids_filter = kwargs.get("document_ids_filter") + filtered_list = None + if document_ids_filter: + filtered_list = ["document_id=" + item for item in document_ids_filter] + + return self._search_by_full_text(query, filtered_list, top_k) def delete(self) -> None: self._delete_table_if_exist() @@ -230,32 +241,51 @@ class TableStoreVector(BaseVector): primary_key = [("id", id)] row = tablestore.Row(primary_key) self._tablestore_client.delete_row(self._table_name, row, None) - logging.info("Tablestore delete row successfully. 
id:%s", id) def _search_by_metadata(self, key: str, value: str) -> list[str]: query = tablestore.SearchQuery( tablestore.TermQuery(self._tags_field, str(key) + "=" + str(value)), - limit=100, + limit=1000, get_total_count=False, ) + rows: list[str] = [] + next_token = None + while True: + if next_token is not None: + query.next_token = next_token - search_response = self._tablestore_client.search( - table_name=self._table_name, - index_name=self._index_name, - search_query=query, - columns_to_get=tablestore.ColumnsToGet(return_type=tablestore.ColumnReturnType.ALL_FROM_INDEX), - ) + search_response = self._tablestore_client.search( + table_name=self._table_name, + index_name=self._index_name, + search_query=query, + columns_to_get=tablestore.ColumnsToGet( + column_names=[Field.PRIMARY_KEY.value], return_type=tablestore.ColumnReturnType.SPECIFIED + ), + ) - return [row[0][0][1] for row in search_response.rows] + if search_response is not None: + rows.extend([row[0][0][1] for row in search_response.rows]) - def _search_by_vector(self, query_vector: list[float], top_k: int) -> list[Document]: - ots_query = tablestore.KnnVectorQuery( + if search_response is None or search_response.next_token == b"": + break + else: + next_token = search_response.next_token + + return rows + + def _search_by_vector( + self, query_vector: list[float], document_ids_filter: list[str] | None, top_k: int, score_threshold: float + ) -> list[Document]: + knn_vector_query = tablestore.KnnVectorQuery( field_name=Field.VECTOR.value, top_k=top_k, float32_query_vector=query_vector, ) + if document_ids_filter: + knn_vector_query.filter = tablestore.TermsQuery(self._tags_field, document_ids_filter) + sort = tablestore.Sort(sorters=[tablestore.ScoreSort(sort_order=tablestore.SortOrder.DESC)]) - search_query = tablestore.SearchQuery(ots_query, limit=top_k, get_total_count=False, sort=sort) + search_query = tablestore.SearchQuery(knn_vector_query, limit=top_k, get_total_count=False, sort=sort) 
search_response = self._tablestore_client.search( table_name=self._table_name, @@ -263,30 +293,42 @@ class TableStoreVector(BaseVector): search_query=search_query, columns_to_get=tablestore.ColumnsToGet(return_type=tablestore.ColumnReturnType.ALL_FROM_INDEX), ) - logging.info( - "Tablestore search successfully. request_id:%s", - search_response.request_id, - ) - return self._to_query_result(search_response) - - def _to_query_result(self, search_response: tablestore.SearchResponse) -> list[Document]: documents = [] - for row in search_response.rows: - documents.append( - Document( - page_content=row[1][2][1], - vector=json.loads(row[1][3][1]), - metadata=json.loads(row[1][0][1]), - ) - ) + for search_hit in search_response.search_hits: + if search_hit.score > score_threshold: + ots_column_map = {} + for col in search_hit.row[1]: + ots_column_map[col[0]] = col[1] + vector_str = ots_column_map.get(Field.VECTOR.value) + metadata_str = ots_column_map.get(Field.METADATA_KEY.value) + + vector = json.loads(vector_str) if vector_str else None + metadata = json.loads(metadata_str) if metadata_str else {} + + metadata["score"] = search_hit.score + + documents.append( + Document( + page_content=ots_column_map.get(Field.CONTENT_KEY.value) or "", + vector=vector, + metadata=metadata, + ) + ) + documents = sorted(documents, key=lambda x: x.metadata["score"] if x.metadata else 0, reverse=True) return documents - def _search_by_full_text(self, query: str) -> list[Document]: + def _search_by_full_text(self, query: str, document_ids_filter: list[str] | None, top_k: int) -> list[Document]: + bool_query = tablestore.BoolQuery(must_queries=[], filter_queries=[], should_queries=[], must_not_queries=[]) + bool_query.must_queries.append(tablestore.MatchQuery(text=query, field_name=Field.CONTENT_KEY.value)) + + if document_ids_filter: + bool_query.filter_queries.append(tablestore.TermsQuery(self._tags_field, document_ids_filter)) + search_query = tablestore.SearchQuery( - 
query=tablestore.MatchQuery(text=query, field_name=Field.CONTENT_KEY.value), + query=bool_query, sort=tablestore.Sort(sorters=[tablestore.ScoreSort(sort_order=tablestore.SortOrder.DESC)]), - limit=100, + limit=top_k, ) search_response = self._tablestore_client.search( table_name=self._table_name, @@ -295,7 +337,25 @@ class TableStoreVector(BaseVector): columns_to_get=tablestore.ColumnsToGet(return_type=tablestore.ColumnReturnType.ALL_FROM_INDEX), ) - return self._to_query_result(search_response) + documents = [] + for search_hit in search_response.search_hits: + ots_column_map = {} + for col in search_hit.row[1]: + ots_column_map[col[0]] = col[1] + + vector_str = ots_column_map.get(Field.VECTOR.value) + metadata_str = ots_column_map.get(Field.METADATA_KEY.value) + vector = json.loads(vector_str) if vector_str else None + metadata = json.loads(metadata_str) if metadata_str else {} + + documents.append( + Document( + page_content=ots_column_map.get(Field.CONTENT_KEY.value) or "", + vector=vector, + metadata=metadata, + ) + ) + return documents class TableStoreVectorFactory(AbstractVectorFactory): diff --git a/api/core/rag/datasource/vdb/tencent/tencent_vector.py b/api/core/rag/datasource/vdb/tencent/tencent_vector.py index 84746d23ea..23ed8a3344 100644 --- a/api/core/rag/datasource/vdb/tencent/tencent_vector.py +++ b/api/core/rag/datasource/vdb/tencent/tencent_vector.py @@ -284,7 +284,8 @@ class TencentVector(BaseVector): # Compatible with version 1.1.3 and below. 
meta = json.loads(meta) score = 1 - result.get("score", 0.0) - score = result.get("score", 0.0) + else: + score = result.get("score", 0.0) if score > score_threshold: meta["score"] = score doc = Document(page_content=result.get(self.field_text), metadata=meta) diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py index 6f895b12af..ba6a9654f0 100644 --- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py @@ -418,13 +418,13 @@ class TidbOnQdrantVector(BaseVector): class TidbOnQdrantVectorFactory(AbstractVectorFactory): def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> TidbOnQdrantVector: tidb_auth_binding = ( - db.session.query(TidbAuthBinding).filter(TidbAuthBinding.tenant_id == dataset.tenant_id).one_or_none() + db.session.query(TidbAuthBinding).where(TidbAuthBinding.tenant_id == dataset.tenant_id).one_or_none() ) if not tidb_auth_binding: with redis_client.lock("create_tidb_serverless_cluster_lock", timeout=900): tidb_auth_binding = ( db.session.query(TidbAuthBinding) - .filter(TidbAuthBinding.tenant_id == dataset.tenant_id) + .where(TidbAuthBinding.tenant_id == dataset.tenant_id) .one_or_none() ) if tidb_auth_binding: @@ -433,7 +433,7 @@ class TidbOnQdrantVectorFactory(AbstractVectorFactory): else: idle_tidb_auth_binding = ( db.session.query(TidbAuthBinding) - .filter(TidbAuthBinding.active == False, TidbAuthBinding.status == "ACTIVE") + .where(TidbAuthBinding.active == False, TidbAuthBinding.status == "ACTIVE") .limit(1) .one_or_none() ) diff --git a/api/core/rag/datasource/vdb/vector_factory.py b/api/core/rag/datasource/vdb/vector_factory.py index 00080b0fae..e018f7d3d4 100644 --- a/api/core/rag/datasource/vdb/vector_factory.py +++ b/api/core/rag/datasource/vdb/vector_factory.py @@ -47,7 +47,7 @@ class Vector: if 
dify_config.VECTOR_STORE_WHITELIST_ENABLE: whitelist = ( db.session.query(Whitelist) - .filter(Whitelist.tenant_id == self._dataset.tenant_id, Whitelist.category == "vector_db") + .where(Whitelist.tenant_id == self._dataset.tenant_id, Whitelist.category == "vector_db") .one_or_none() ) if whitelist: diff --git a/api/core/rag/docstore/dataset_docstore.py b/api/core/rag/docstore/dataset_docstore.py index 398b0daad9..f844770a20 100644 --- a/api/core/rag/docstore/dataset_docstore.py +++ b/api/core/rag/docstore/dataset_docstore.py @@ -42,7 +42,7 @@ class DatasetDocumentStore: @property def docs(self) -> dict[str, Document]: document_segments = ( - db.session.query(DocumentSegment).filter(DocumentSegment.dataset_id == self._dataset.id).all() + db.session.query(DocumentSegment).where(DocumentSegment.dataset_id == self._dataset.id).all() ) output = {} @@ -63,7 +63,7 @@ class DatasetDocumentStore: def add_documents(self, docs: Sequence[Document], allow_update: bool = True, save_child: bool = False) -> None: max_position = ( db.session.query(func.max(DocumentSegment.position)) - .filter(DocumentSegment.document_id == self._document_id) + .where(DocumentSegment.document_id == self._document_id) .scalar() ) @@ -147,7 +147,7 @@ class DatasetDocumentStore: segment_document.tokens = tokens if save_child and doc.children: # delete the existing child chunks - db.session.query(ChildChunk).filter( + db.session.query(ChildChunk).where( ChildChunk.tenant_id == self._dataset.tenant_id, ChildChunk.dataset_id == self._dataset.id, ChildChunk.document_id == self._document_id, @@ -230,7 +230,7 @@ class DatasetDocumentStore: def get_document_segment(self, doc_id: str) -> Optional[DocumentSegment]: document_segment = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.dataset_id == self._dataset.id, DocumentSegment.index_node_id == doc_id) + .where(DocumentSegment.dataset_id == self._dataset.id, DocumentSegment.index_node_id == doc_id) .first() ) diff --git 
a/api/core/rag/extractor/notion_extractor.py b/api/core/rag/extractor/notion_extractor.py index eca955ddd1..875626eb34 100644 --- a/api/core/rag/extractor/notion_extractor.py +++ b/api/core/rag/extractor/notion_extractor.py @@ -331,9 +331,10 @@ class NotionExtractor(BaseExtractor): last_edited_time = self.get_notion_last_edited_time() data_source_info = document_model.data_source_info_dict data_source_info["last_edited_time"] = last_edited_time - update_params = {DocumentModel.data_source_info: json.dumps(data_source_info)} - db.session.query(DocumentModel).filter_by(id=document_model.id).update(update_params) + db.session.query(DocumentModel).filter_by(id=document_model.id).update( + {DocumentModel.data_source_info: json.dumps(data_source_info)} + ) # type: ignore db.session.commit() def get_notion_last_edited_time(self) -> str: @@ -365,7 +366,7 @@ class NotionExtractor(BaseExtractor): def _get_access_token(cls, tenant_id: str, notion_workspace_id: str) -> str: data_source_binding = ( db.session.query(DataSourceOauthBinding) - .filter( + .where( db.and_( DataSourceOauthBinding.tenant_id == tenant_id, DataSourceOauthBinding.provider == "notion", diff --git a/api/core/rag/index_processor/processor/parent_child_index_processor.py b/api/core/rag/index_processor/processor/parent_child_index_processor.py index 158fc819ee..6183f0900c 100644 --- a/api/core/rag/index_processor/processor/parent_child_index_processor.py +++ b/api/core/rag/index_processor/processor/parent_child_index_processor.py @@ -121,7 +121,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): child_node_ids = ( db.session.query(ChildChunk.index_node_id) .join(DocumentSegment, ChildChunk.segment_id == DocumentSegment.id) - .filter( + .where( DocumentSegment.dataset_id == dataset.id, DocumentSegment.index_node_id.in_(node_ids), ChildChunk.dataset_id == dataset.id, @@ -131,7 +131,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): child_node_ids = [child_node_id[0] for child_node_id in 
child_node_ids] vector.delete_by_ids(child_node_ids) if delete_child_chunks: - db.session.query(ChildChunk).filter( + db.session.query(ChildChunk).where( ChildChunk.dataset_id == dataset.id, ChildChunk.index_node_id.in_(child_node_ids) ).delete() db.session.commit() @@ -139,7 +139,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): vector.delete() if delete_child_chunks: - db.session.query(ChildChunk).filter(ChildChunk.dataset_id == dataset.id).delete() + db.session.query(ChildChunk).where(ChildChunk.dataset_id == dataset.id).delete() db.session.commit() def retrieve( diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index 3d0f0f97bc..a25bc65646 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -135,7 +135,7 @@ class DatasetRetrieval: available_datasets = [] for dataset_id in dataset_ids: # get dataset from dataset id - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() # pass if dataset is not available if not dataset: @@ -242,7 +242,7 @@ class DatasetRetrieval: dataset = db.session.query(Dataset).filter_by(id=segment.dataset_id).first() document = ( db.session.query(DatasetDocument) - .filter( + .where( DatasetDocument.id == segment.document_id, DatasetDocument.enabled == True, DatasetDocument.archived == False, @@ -327,7 +327,7 @@ class DatasetRetrieval: if dataset_id: # get retrieval model config - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if dataset: results = [] if dataset.provider == "external": @@ -516,14 +516,14 @@ class DatasetRetrieval: if document.metadata is not None: dataset_document = ( db.session.query(DatasetDocument) - .filter(DatasetDocument.id == 
document.metadata["document_id"]) + .where(DatasetDocument.id == document.metadata["document_id"]) .first() ) if dataset_document: if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX: child_chunk = ( db.session.query(ChildChunk) - .filter( + .where( ChildChunk.index_node_id == document.metadata["doc_id"], ChildChunk.dataset_id == dataset_document.dataset_id, ChildChunk.document_id == dataset_document.id, @@ -533,7 +533,7 @@ class DatasetRetrieval: if child_chunk: segment = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.id == child_chunk.segment_id) + .where(DocumentSegment.id == child_chunk.segment_id) .update( {DocumentSegment.hit_count: DocumentSegment.hit_count + 1}, synchronize_session=False, @@ -541,13 +541,13 @@ class DatasetRetrieval: ) db.session.commit() else: - query = db.session.query(DocumentSegment).filter( + query = db.session.query(DocumentSegment).where( DocumentSegment.index_node_id == document.metadata["doc_id"] ) # if 'dataset_id' in document.metadata: if "dataset_id" in document.metadata: - query = query.filter(DocumentSegment.dataset_id == document.metadata["dataset_id"]) + query = query.where(DocumentSegment.dataset_id == document.metadata["dataset_id"]) # add hit count to document segment query.update( @@ -600,7 +600,7 @@ class DatasetRetrieval: ): with flask_app.app_context(): with Session(db.engine) as session: - dataset = session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: return [] @@ -685,7 +685,7 @@ class DatasetRetrieval: available_datasets = [] for dataset_id in dataset_ids: # get dataset from dataset id - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() # pass if dataset is not available if not dataset: @@ -862,7 +862,7 @@ class 
DatasetRetrieval: metadata_filtering_conditions: Optional[MetadataFilteringCondition], inputs: dict, ) -> tuple[Optional[dict[str, list[str]]], Optional[MetadataCondition]]: - document_query = db.session.query(DatasetDocument).filter( + document_query = db.session.query(DatasetDocument).where( DatasetDocument.dataset_id.in_(dataset_ids), DatasetDocument.indexing_status == "completed", DatasetDocument.enabled == True, @@ -930,9 +930,9 @@ class DatasetRetrieval: raise ValueError("Invalid metadata filtering mode") if filters: if metadata_filtering_conditions and metadata_filtering_conditions.logical_operator == "and": # type: ignore - document_query = document_query.filter(and_(*filters)) + document_query = document_query.where(and_(*filters)) else: - document_query = document_query.filter(or_(*filters)) + document_query = document_query.where(or_(*filters)) documents = document_query.all() # group by dataset_id metadata_filter_document_ids = defaultdict(list) if documents else None # type: ignore @@ -958,7 +958,7 @@ class DatasetRetrieval: self, dataset_ids: list, query: str, tenant_id: str, user_id: str, metadata_model_config: ModelConfig ) -> Optional[list[dict[str, Any]]]: # get all metadata field - metadata_fields = db.session.query(DatasetMetadata).filter(DatasetMetadata.dataset_id.in_(dataset_ids)).all() + metadata_fields = db.session.query(DatasetMetadata).where(DatasetMetadata.dataset_id.in_(dataset_ids)).all() all_metadata_fields = [metadata_field.name for metadata_field in metadata_fields] # get metadata model config if metadata_model_config is None: diff --git a/api/core/tools/custom_tool/provider.py b/api/core/tools/custom_tool/provider.py index fbe1d79137..95fab6151a 100644 --- a/api/core/tools/custom_tool/provider.py +++ b/api/core/tools/custom_tool/provider.py @@ -178,7 +178,7 @@ class ApiToolProviderController(ToolProviderController): # get tenant api providers db_providers: list[ApiToolProvider] = ( db.session.query(ApiToolProvider) - 
.filter(ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.name == self.entity.identity.name) + .where(ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.name == self.entity.identity.name) .all() ) diff --git a/api/core/tools/tool_file_manager.py b/api/core/tools/tool_file_manager.py index ece02f9d59..ff054041cf 100644 --- a/api/core/tools/tool_file_manager.py +++ b/api/core/tools/tool_file_manager.py @@ -160,7 +160,7 @@ class ToolFileManager: with Session(self._engine, expire_on_commit=False) as session: tool_file: ToolFile | None = ( session.query(ToolFile) - .filter( + .where( ToolFile.id == id, ) .first() @@ -184,7 +184,7 @@ class ToolFileManager: with Session(self._engine, expire_on_commit=False) as session: message_file: MessageFile | None = ( session.query(MessageFile) - .filter( + .where( MessageFile.id == id, ) .first() @@ -204,7 +204,7 @@ class ToolFileManager: tool_file: ToolFile | None = ( session.query(ToolFile) - .filter( + .where( ToolFile.id == tool_file_id, ) .first() @@ -228,7 +228,7 @@ class ToolFileManager: with Session(self._engine, expire_on_commit=False) as session: tool_file: ToolFile | None = ( session.query(ToolFile) - .filter( + .where( ToolFile.id == tool_file_id, ) .first() diff --git a/api/core/tools/tool_label_manager.py b/api/core/tools/tool_label_manager.py index 4787d7d79c..cdfefbadb3 100644 --- a/api/core/tools/tool_label_manager.py +++ b/api/core/tools/tool_label_manager.py @@ -29,7 +29,7 @@ class ToolLabelManager: raise ValueError("Unsupported tool type") # delete old labels - db.session.query(ToolLabelBinding).filter(ToolLabelBinding.tool_id == provider_id).delete() + db.session.query(ToolLabelBinding).where(ToolLabelBinding.tool_id == provider_id).delete() # insert new labels for label in labels: @@ -57,7 +57,7 @@ class ToolLabelManager: labels = ( db.session.query(ToolLabelBinding.label_name) - .filter( + .where( ToolLabelBinding.tool_id == provider_id, ToolLabelBinding.tool_type == controller.provider_type.value, ) 
@@ -90,7 +90,7 @@ class ToolLabelManager: provider_ids.append(controller.provider_id) labels: list[ToolLabelBinding] = ( - db.session.query(ToolLabelBinding).filter(ToolLabelBinding.tool_id.in_(provider_ids)).all() + db.session.query(ToolLabelBinding).where(ToolLabelBinding.tool_id.in_(provider_ids)).all() ) tool_labels: dict[str, list[str]] = {label.tool_id: [] for label in labels} diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 7822bc389c..71c237c7f7 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -1,16 +1,19 @@ import json import logging import mimetypes -from collections.abc import Generator +import time +from collections.abc import Generator, Mapping from os import listdir, path from threading import Lock from typing import TYPE_CHECKING, Any, Literal, Optional, Union, cast +from pydantic import TypeAdapter from yarl import URL import contexts from core.helper.provider_cache import ToolProviderCredentialsCache from core.plugin.entities.plugin import ToolProviderID +from core.plugin.impl.oauth import OAuthHandler from core.plugin.impl.tool import PluginToolManager from core.tools.__base.tool_provider import ToolProviderController from core.tools.__base.tool_runtime import ToolRuntime @@ -195,7 +198,7 @@ class ToolManager: try: builtin_provider = ( db.session.query(BuiltinToolProvider) - .filter( + .where( BuiltinToolProvider.tenant_id == tenant_id, BuiltinToolProvider.id == credential_id, ) @@ -213,7 +216,7 @@ class ToolManager: # use the default provider builtin_provider = ( db.session.query(BuiltinToolProvider) - .filter( + .where( BuiltinToolProvider.tenant_id == tenant_id, (BuiltinToolProvider.provider == str(provider_id_entity)) | (BuiltinToolProvider.provider == provider_id_entity.provider_name), @@ -226,7 +229,7 @@ class ToolManager: else: builtin_provider = ( db.session.query(BuiltinToolProvider) - .filter(BuiltinToolProvider.tenant_id == tenant_id, (BuiltinToolProvider.provider == 
provider_id)) + .where(BuiltinToolProvider.tenant_id == tenant_id, (BuiltinToolProvider.provider == provider_id)) .order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc()) .first() ) @@ -244,12 +247,47 @@ class ToolManager: tenant_id=tenant_id, provider=provider_id, credential_id=builtin_provider.id ), ) + + # decrypt the credentials + decrypted_credentials: Mapping[str, Any] = encrypter.decrypt(builtin_provider.credentials) + + # check if the credentials is expired + if builtin_provider.expires_at != -1 and (builtin_provider.expires_at - 60) < int(time.time()): + # TODO: circular import + from services.tools.builtin_tools_manage_service import BuiltinToolManageService + + # refresh the credentials + tool_provider = ToolProviderID(provider_id) + provider_name = tool_provider.provider_name + redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider_id}/tool/callback" + system_credentials = BuiltinToolManageService.get_oauth_client(tenant_id, provider_id) + oauth_handler = OAuthHandler() + # refresh the credentials + refreshed_credentials = oauth_handler.refresh_credentials( + tenant_id=tenant_id, + user_id=builtin_provider.user_id, + plugin_id=tool_provider.plugin_id, + provider=provider_name, + redirect_uri=redirect_uri, + system_credentials=system_credentials or {}, + credentials=decrypted_credentials, + ) + # update the credentials + builtin_provider.encrypted_credentials = ( + TypeAdapter(dict[str, Any]) + .dump_json(encrypter.encrypt(dict(refreshed_credentials.credentials))) + .decode("utf-8") + ) + builtin_provider.expires_at = refreshed_credentials.expires_at + db.session.commit() + decrypted_credentials = refreshed_credentials.credentials + return cast( BuiltinTool, builtin_tool.fork_tool_runtime( runtime=ToolRuntime( tenant_id=tenant_id, - credentials=encrypter.decrypt(builtin_provider.credentials), + credentials=dict(decrypted_credentials), 
credential_type=CredentialType.of(builtin_provider.credential_type), runtime_parameters={}, invoke_from=invoke_from, @@ -278,7 +316,7 @@ class ToolManager: elif provider_type == ToolProviderType.WORKFLOW: workflow_provider = ( db.session.query(WorkflowToolProvider) - .filter(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == provider_id) + .where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == provider_id) .first() ) @@ -578,7 +616,7 @@ class ToolManager: ORDER BY tenant_id, provider, is_default DESC, created_at DESC """ ids = [row.id for row in db.session.execute(db.text(sql), {"tenant_id": tenant_id}).all()] - return db.session.query(BuiltinToolProvider).filter(BuiltinToolProvider.id.in_(ids)).all() + return db.session.query(BuiltinToolProvider).where(BuiltinToolProvider.id.in_(ids)).all() @classmethod def list_providers_from_api( @@ -626,7 +664,7 @@ class ToolManager: # get db api providers if "api" in filters: db_api_providers: list[ApiToolProvider] = ( - db.session.query(ApiToolProvider).filter(ApiToolProvider.tenant_id == tenant_id).all() + db.session.query(ApiToolProvider).where(ApiToolProvider.tenant_id == tenant_id).all() ) api_provider_controllers: list[dict[str, Any]] = [ @@ -649,7 +687,7 @@ class ToolManager: if "workflow" in filters: # get workflow providers workflow_providers: list[WorkflowToolProvider] = ( - db.session.query(WorkflowToolProvider).filter(WorkflowToolProvider.tenant_id == tenant_id).all() + db.session.query(WorkflowToolProvider).where(WorkflowToolProvider.tenant_id == tenant_id).all() ) workflow_provider_controllers: list[WorkflowToolProviderController] = [] @@ -693,7 +731,7 @@ class ToolManager: """ provider: ApiToolProvider | None = ( db.session.query(ApiToolProvider) - .filter( + .where( ApiToolProvider.id == provider_id, ApiToolProvider.tenant_id == tenant_id, ) @@ -730,7 +768,7 @@ class ToolManager: """ provider: MCPToolProvider | None = ( db.session.query(MCPToolProvider) - .filter( + 
.where( MCPToolProvider.server_identifier == provider_id, MCPToolProvider.tenant_id == tenant_id, ) @@ -755,7 +793,7 @@ class ToolManager: provider_name = provider provider_obj: ApiToolProvider | None = ( db.session.query(ApiToolProvider) - .filter( + .where( ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.name == provider, ) @@ -847,7 +885,7 @@ class ToolManager: try: workflow_provider: WorkflowToolProvider | None = ( db.session.query(WorkflowToolProvider) - .filter(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == provider_id) + .where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == provider_id) .first() ) @@ -864,7 +902,7 @@ class ToolManager: try: api_provider: ApiToolProvider | None = ( db.session.query(ApiToolProvider) - .filter(ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.id == provider_id) + .where(ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.id == provider_id) .first() ) @@ -881,7 +919,7 @@ class ToolManager: try: mcp_provider: MCPToolProvider | None = ( db.session.query(MCPToolProvider) - .filter(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.server_identifier == provider_id) + .where(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.server_identifier == provider_id) .first() ) @@ -973,7 +1011,9 @@ class ToolManager: if variable is None: raise ToolParameterError(f"Variable {tool_input.value} does not exist") parameter_value = variable.value - elif tool_input.type in {"mixed", "constant"}: + elif tool_input.type == "constant": + parameter_value = tool_input.value + elif tool_input.type == "mixed": segment_group = variable_pool.convert_template(str(tool_input.value)) parameter_value = segment_group.text else: diff --git a/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py b/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py index 2cbc4b9821..7eb4bc017a 100644 --- 
a/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py @@ -87,7 +87,7 @@ class DatasetMultiRetrieverTool(DatasetRetrieverBaseTool): index_node_ids = [document.metadata["doc_id"] for document in all_documents if document.metadata] segments = ( db.session.query(DocumentSegment) - .filter( + .where( DocumentSegment.dataset_id.in_(self.dataset_ids), DocumentSegment.completed_at.isnot(None), DocumentSegment.status == "completed", @@ -114,7 +114,7 @@ class DatasetMultiRetrieverTool(DatasetRetrieverBaseTool): dataset = db.session.query(Dataset).filter_by(id=segment.dataset_id).first() document = ( db.session.query(Document) - .filter( + .where( Document.id == segment.document_id, Document.enabled == True, Document.archived == False, @@ -163,7 +163,7 @@ class DatasetMultiRetrieverTool(DatasetRetrieverBaseTool): ): with flask_app.app_context(): dataset = ( - db.session.query(Dataset).filter(Dataset.tenant_id == self.tenant_id, Dataset.id == dataset_id).first() + db.session.query(Dataset).where(Dataset.tenant_id == self.tenant_id, Dataset.id == dataset_id).first() ) if not dataset: diff --git a/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py index a4d2de3b1c..567275531e 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py @@ -1,5 +1,5 @@ from abc import abstractmethod -from typing import Any, Optional +from typing import Optional from msal_extensions.persistence import ABC # type: ignore from pydantic import BaseModel, ConfigDict @@ -21,11 +21,7 @@ class DatasetRetrieverBaseTool(BaseModel, ABC): model_config = ConfigDict(arbitrary_types_allowed=True) @abstractmethod - def _run( - self, - *args: Any, - **kwargs: Any, - ) -> Any: + def _run(self, query: str) -> str: """Use the tool. 
Add run_manager: Optional[CallbackManagerForToolRun] = None diff --git a/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py index ff1d9021ce..f7689d7707 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py @@ -57,7 +57,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool): def _run(self, query: str) -> str: dataset = ( - db.session.query(Dataset).filter(Dataset.tenant_id == self.tenant_id, Dataset.id == self.dataset_id).first() + db.session.query(Dataset).where(Dataset.tenant_id == self.tenant_id, Dataset.id == self.dataset_id).first() ) if not dataset: @@ -190,7 +190,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool): dataset = db.session.query(Dataset).filter_by(id=segment.dataset_id).first() document = ( db.session.query(DatasetDocument) # type: ignore - .filter( + .where( DatasetDocument.id == segment.document_id, DatasetDocument.enabled == True, DatasetDocument.archived == False, diff --git a/api/core/tools/workflow_as_tool/provider.py b/api/core/tools/workflow_as_tool/provider.py index 7661e1e6a5..83f5f558d5 100644 --- a/api/core/tools/workflow_as_tool/provider.py +++ b/api/core/tools/workflow_as_tool/provider.py @@ -84,7 +84,7 @@ class WorkflowToolProviderController(ToolProviderController): """ workflow: Workflow | None = ( db.session.query(Workflow) - .filter(Workflow.app_id == db_provider.app_id, Workflow.version == db_provider.version) + .where(Workflow.app_id == db_provider.app_id, Workflow.version == db_provider.version) .first() ) @@ -190,7 +190,7 @@ class WorkflowToolProviderController(ToolProviderController): db_providers: WorkflowToolProvider | None = ( db.session.query(WorkflowToolProvider) - .filter( + .where( WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.app_id == self.provider_id, ) diff --git a/api/core/tools/workflow_as_tool/tool.py 
b/api/core/tools/workflow_as_tool/tool.py index 10bf8ca640..8b89c2a7a9 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -142,12 +142,12 @@ class WorkflowTool(Tool): if not version: workflow = ( db.session.query(Workflow) - .filter(Workflow.app_id == app_id, Workflow.version != "draft") + .where(Workflow.app_id == app_id, Workflow.version != "draft") .order_by(Workflow.created_at.desc()) .first() ) else: - workflow = db.session.query(Workflow).filter(Workflow.app_id == app_id, Workflow.version == version).first() + workflow = db.session.query(Workflow).where(Workflow.app_id == app_id, Workflow.version == version).first() if not workflow: raise ValueError("workflow not found or not published") @@ -158,7 +158,7 @@ class WorkflowTool(Tool): """ get the app by app id """ - app = db.session.query(App).filter(App.id == app_id).first() + app = db.session.query(App).where(App.id == app_id).first() if not app: raise ValueError("app not found") diff --git a/api/core/workflow/nodes/agent/agent_node.py b/api/core/workflow/nodes/agent/agent_node.py index 8cf33ac81e..c83303034e 100644 --- a/api/core/workflow/nodes/agent/agent_node.py +++ b/api/core/workflow/nodes/agent/agent_node.py @@ -309,7 +309,7 @@ class AgentNode(BaseNode): } ) value = tool_value - if parameter.type == "model-selector": + if parameter.type == AgentStrategyParameter.AgentStrategyParameterType.MODEL_SELECTOR: value = cast(dict[str, Any], value) model_instance, model_schema = self._fetch_model(value) # memory config diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 5f092dc2f1..34b0afc75d 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -228,7 +228,7 @@ class KnowledgeRetrievalNode(BaseNode): # Subquery: Count the number of 
available documents for each dataset subquery = ( db.session.query(Document.dataset_id, func.count(Document.id).label("available_document_count")) - .filter( + .where( Document.indexing_status == "completed", Document.enabled == True, Document.archived == False, @@ -242,8 +242,8 @@ class KnowledgeRetrievalNode(BaseNode): results = ( db.session.query(Dataset) .outerjoin(subquery, Dataset.id == subquery.c.dataset_id) - .filter(Dataset.tenant_id == self.tenant_id, Dataset.id.in_(dataset_ids)) - .filter((subquery.c.available_document_count > 0) | (Dataset.provider == "external")) + .where(Dataset.tenant_id == self.tenant_id, Dataset.id.in_(dataset_ids)) + .where((subquery.c.available_document_count > 0) | (Dataset.provider == "external")) .all() ) @@ -370,7 +370,7 @@ class KnowledgeRetrievalNode(BaseNode): dataset = db.session.query(Dataset).filter_by(id=segment.dataset_id).first() # type: ignore document = ( db.session.query(Document) - .filter( + .where( Document.id == segment.document_id, Document.enabled == True, Document.archived == False, @@ -415,7 +415,7 @@ class KnowledgeRetrievalNode(BaseNode): def _get_metadata_filter_condition( self, dataset_ids: list, query: str, node_data: KnowledgeRetrievalNodeData ) -> tuple[Optional[dict[str, list[str]]], Optional[MetadataCondition]]: - document_query = db.session.query(Document).filter( + document_query = db.session.query(Document).where( Document.dataset_id.in_(dataset_ids), Document.indexing_status == "completed", Document.enabled == True, @@ -462,7 +462,7 @@ class KnowledgeRetrievalNode(BaseNode): expected_value = self.graph_runtime_state.variable_pool.convert_template( expected_value ).value[0] - if expected_value.value_type == "number": # type: ignore + if expected_value.value_type in {"number", "integer", "float"}: # type: ignore expected_value = expected_value.value # type: ignore elif expected_value.value_type == "string": # type: ignore expected_value = re.sub(r"[\r\n\t]+", " ", expected_value.text).strip() # 
type: ignore @@ -493,9 +493,9 @@ class KnowledgeRetrievalNode(BaseNode): node_data.metadata_filtering_conditions and node_data.metadata_filtering_conditions.logical_operator == "and" ): # type: ignore - document_query = document_query.filter(and_(*filters)) + document_query = document_query.where(and_(*filters)) else: - document_query = document_query.filter(or_(*filters)) + document_query = document_query.where(or_(*filters)) documents = document_query.all() # group by dataset_id metadata_filter_document_ids = defaultdict(list) if documents else None # type: ignore @@ -507,7 +507,7 @@ class KnowledgeRetrievalNode(BaseNode): self, dataset_ids: list, query: str, node_data: KnowledgeRetrievalNodeData ) -> list[dict[str, Any]]: # get all metadata field - metadata_fields = db.session.query(DatasetMetadata).filter(DatasetMetadata.dataset_id.in_(dataset_ids)).all() + metadata_fields = db.session.query(DatasetMetadata).where(DatasetMetadata.dataset_id.in_(dataset_ids)).all() all_metadata_fields = [metadata_field.name for metadata_field in metadata_fields] if node_data.metadata_model_config is None: raise ValueError("metadata_model_config is required") diff --git a/api/core/workflow/nodes/list_operator/node.py b/api/core/workflow/nodes/list_operator/node.py index ae9401b056..b91fc622f6 100644 --- a/api/core/workflow/nodes/list_operator/node.py +++ b/api/core/workflow/nodes/list_operator/node.py @@ -184,11 +184,10 @@ class ListOperatorNode(BaseNode): value = int(self.graph_runtime_state.variable_pool.convert_template(self._node_data.extract_by.serial).text) if value < 1: raise ValueError(f"Invalid serial index: must be >= 1, got {value}") + if value > len(variable.value): + raise InvalidKeyError(f"Invalid serial index: must be <= {len(variable.value)}, got {value}") value -= 1 - if len(variable.value) > int(value): - result = variable.value[value] - else: - result = "" + result = variable.value[value] return variable.model_copy(update={"value": [result]}) diff --git 
a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index 91e7312805..90a0397b67 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -565,7 +565,7 @@ class LLMNode(BaseNode): retriever_resources=original_retriever_resource, context=context_str.strip() ) - def _convert_to_original_retriever_resource(self, context_dict: dict): + def _convert_to_original_retriever_resource(self, context_dict: dict) -> RetrievalSourceMetadata | None: if ( "metadata" in context_dict and "_source" in context_dict["metadata"] diff --git a/api/core/workflow/nodes/tool/entities.py b/api/core/workflow/nodes/tool/entities.py index f0a44d919b..4f47fb1efc 100644 --- a/api/core/workflow/nodes/tool/entities.py +++ b/api/core/workflow/nodes/tool/entities.py @@ -54,7 +54,7 @@ class ToolNodeData(BaseNodeData, ToolEntity): for val in value: if not isinstance(val, str): raise ValueError("value must be a list of strings") - elif typ == "constant" and not isinstance(value, str | int | float | bool): + elif typ == "constant" and not isinstance(value, str | int | float | bool | dict): raise ValueError("value must be a string, int, float, or bool") return typ diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index 140fe71f60..f437ac841d 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -316,7 +316,14 @@ class ToolNode(BaseNode): variables[variable_name] = variable_value elif message.type == ToolInvokeMessage.MessageType.FILE: assert message.meta is not None - assert isinstance(message.meta, File) + assert isinstance(message.meta, dict) + # Validate that meta contains a 'file' key + if "file" not in message.meta: + raise ToolNodeError("File message is missing 'file' key in meta") + + # Validate that the file is an instance of File + if not isinstance(message.meta["file"], File): + raise ToolNodeError(f"Expected File object but got 
{type(message.meta['file']).__name__}") files.append(message.meta["file"]) elif message.type == ToolInvokeMessage.MessageType.LOG: assert isinstance(message.message, ToolInvokeMessage.LogMessage) diff --git a/api/core/workflow/workflow_cycle_manager.py b/api/core/workflow/workflow_cycle_manager.py index f844aada95..03f670707e 100644 --- a/api/core/workflow/workflow_cycle_manager.py +++ b/api/core/workflow/workflow_cycle_manager.py @@ -85,6 +85,7 @@ class WorkflowCycleManager: outputs: Mapping[str, Any] | None = None, conversation_id: Optional[str] = None, trace_manager: Optional[TraceQueueManager] = None, + external_trace_id: Optional[str] = None, ) -> WorkflowExecution: workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id) @@ -96,7 +97,7 @@ class WorkflowCycleManager: total_steps=total_steps, ) - self._add_trace_task_if_needed(trace_manager, workflow_execution, conversation_id) + self._add_trace_task_if_needed(trace_manager, workflow_execution, conversation_id, external_trace_id) self._workflow_execution_repository.save(workflow_execution) return workflow_execution @@ -111,6 +112,7 @@ class WorkflowCycleManager: exceptions_count: int = 0, conversation_id: Optional[str] = None, trace_manager: Optional[TraceQueueManager] = None, + external_trace_id: Optional[str] = None, ) -> WorkflowExecution: execution = self._get_workflow_execution_or_raise_error(workflow_run_id) @@ -123,7 +125,7 @@ class WorkflowCycleManager: exceptions_count=exceptions_count, ) - self._add_trace_task_if_needed(trace_manager, execution, conversation_id) + self._add_trace_task_if_needed(trace_manager, execution, conversation_id, external_trace_id) self._workflow_execution_repository.save(execution) return execution @@ -139,6 +141,7 @@ class WorkflowCycleManager: conversation_id: Optional[str] = None, trace_manager: Optional[TraceQueueManager] = None, exceptions_count: int = 0, + external_trace_id: Optional[str] = None, ) -> WorkflowExecution: workflow_execution = 
self._get_workflow_execution_or_raise_error(workflow_run_id) now = naive_utc_now() @@ -154,7 +157,7 @@ class WorkflowCycleManager: ) self._fail_running_node_executions(workflow_execution.id_, error_message, now) - self._add_trace_task_if_needed(trace_manager, workflow_execution, conversation_id) + self._add_trace_task_if_needed(trace_manager, workflow_execution, conversation_id, external_trace_id) self._workflow_execution_repository.save(workflow_execution) return workflow_execution @@ -312,6 +315,7 @@ class WorkflowCycleManager: trace_manager: Optional[TraceQueueManager], workflow_execution: WorkflowExecution, conversation_id: Optional[str], + external_trace_id: Optional[str], ) -> None: """Add trace task if trace manager is provided.""" if trace_manager: @@ -321,6 +325,7 @@ class WorkflowCycleManager: workflow_execution=workflow_execution, conversation_id=conversation_id, user_id=trace_manager.user_id, + external_trace_id=external_trace_id, ) ) diff --git a/api/docker/entrypoint.sh b/api/docker/entrypoint.sh index 18d4f4885d..4de9a25c2f 100755 --- a/api/docker/entrypoint.sh +++ b/api/docker/entrypoint.sh @@ -5,6 +5,11 @@ set -e if [[ "${MIGRATION_ENABLED}" == "true" ]]; then echo "Running migrations" flask upgrade-db + # Pure migration mode + if [[ "${MODE}" == "migration" ]]; then + echo "Migration completed, exiting normally" + exit 0 + fi fi if [[ "${MODE}" == "worker" ]]; then @@ -22,7 +27,7 @@ if [[ "${MODE}" == "worker" ]]; then exec celery -A app.celery worker -P ${CELERY_WORKER_CLASS:-gevent} $CONCURRENCY_OPTION \ --max-tasks-per-child ${MAX_TASK_PRE_CHILD:-50} --loglevel ${LOG_LEVEL:-INFO} \ - -Q ${CELERY_QUEUES:-dataset,mail,ops_trace,app_deletion} + -Q ${CELERY_QUEUES:-dataset,mail,ops_trace,app_deletion,plugin} elif [[ "${MODE}" == "beat" ]]; then exec celery -A app.celery beat --loglevel ${LOG_LEVEL:-INFO} diff --git a/api/events/event_handlers/create_document_index.py b/api/events/event_handlers/create_document_index.py index cb48bd92a0..dc50ca8d96 
100644 --- a/api/events/event_handlers/create_document_index.py +++ b/api/events/event_handlers/create_document_index.py @@ -22,7 +22,7 @@ def handle(sender, **kwargs): document = ( db.session.query(Document) - .filter( + .where( Document.id == document_id, Document.dataset_id == dataset_id, ) diff --git a/api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py b/api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py index 14396e9920..b8b5a89dc5 100644 --- a/api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py +++ b/api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py @@ -13,7 +13,7 @@ def handle(sender, **kwargs): dataset_ids = get_dataset_ids_from_model_config(app_model_config) - app_dataset_joins = db.session.query(AppDatasetJoin).filter(AppDatasetJoin.app_id == app.id).all() + app_dataset_joins = db.session.query(AppDatasetJoin).where(AppDatasetJoin.app_id == app.id).all() removed_dataset_ids: set[str] = set() if not app_dataset_joins: @@ -27,7 +27,7 @@ def handle(sender, **kwargs): if removed_dataset_ids: for dataset_id in removed_dataset_ids: - db.session.query(AppDatasetJoin).filter( + db.session.query(AppDatasetJoin).where( AppDatasetJoin.app_id == app.id, AppDatasetJoin.dataset_id == dataset_id ).delete() diff --git a/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py b/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py index dd2efed94b..cf4ba69833 100644 --- a/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py +++ b/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py @@ -15,7 +15,7 @@ def handle(sender, **kwargs): published_workflow = cast(Workflow, published_workflow) dataset_ids = get_dataset_ids_from_workflow(published_workflow) - app_dataset_joins = 
db.session.query(AppDatasetJoin).filter(AppDatasetJoin.app_id == app.id).all() + app_dataset_joins = db.session.query(AppDatasetJoin).where(AppDatasetJoin.app_id == app.id).all() removed_dataset_ids: set[str] = set() if not app_dataset_joins: @@ -29,7 +29,7 @@ def handle(sender, **kwargs): if removed_dataset_ids: for dataset_id in removed_dataset_ids: - db.session.query(AppDatasetJoin).filter( + db.session.query(AppDatasetJoin).where( AppDatasetJoin.app_id == app.id, AppDatasetJoin.dataset_id == dataset_id ).delete() diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index 6279b1ad36..2c2846ba26 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -64,49 +64,62 @@ def init_app(app: DifyApp) -> Celery: celery_app.set_default() app.extensions["celery"] = celery_app - imports = [ - "schedule.clean_embedding_cache_task", - "schedule.clean_unused_datasets_task", - "schedule.create_tidb_serverless_task", - "schedule.update_tidb_serverless_status_task", - "schedule.clean_messages", - "schedule.mail_clean_document_notify_task", - "schedule.queue_monitor_task", - ] + imports = [] day = dify_config.CELERY_BEAT_SCHEDULER_TIME - beat_schedule = { - "clean_embedding_cache_task": { + + # if you add a new task, please add the switch to CeleryScheduleTasksConfig + beat_schedule = {} + if dify_config.ENABLE_CLEAN_EMBEDDING_CACHE_TASK: + imports.append("schedule.clean_embedding_cache_task") + beat_schedule["clean_embedding_cache_task"] = { "task": "schedule.clean_embedding_cache_task.clean_embedding_cache_task", "schedule": timedelta(days=day), - }, - "clean_unused_datasets_task": { + } + if dify_config.ENABLE_CLEAN_UNUSED_DATASETS_TASK: + imports.append("schedule.clean_unused_datasets_task") + beat_schedule["clean_unused_datasets_task"] = { "task": "schedule.clean_unused_datasets_task.clean_unused_datasets_task", "schedule": timedelta(days=day), - }, - "create_tidb_serverless_task": { + } + if 
dify_config.ENABLE_CREATE_TIDB_SERVERLESS_TASK: + imports.append("schedule.create_tidb_serverless_task") + beat_schedule["create_tidb_serverless_task"] = { "task": "schedule.create_tidb_serverless_task.create_tidb_serverless_task", "schedule": crontab(minute="0", hour="*"), - }, - "update_tidb_serverless_status_task": { + } + if dify_config.ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK: + imports.append("schedule.update_tidb_serverless_status_task") + beat_schedule["update_tidb_serverless_status_task"] = { "task": "schedule.update_tidb_serverless_status_task.update_tidb_serverless_status_task", "schedule": timedelta(minutes=10), - }, - "clean_messages": { + } + if dify_config.ENABLE_CLEAN_MESSAGES: + imports.append("schedule.clean_messages") + beat_schedule["clean_messages"] = { "task": "schedule.clean_messages.clean_messages", "schedule": timedelta(days=day), - }, - # every Monday - "mail_clean_document_notify_task": { + } + if dify_config.ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: + imports.append("schedule.mail_clean_document_notify_task") + beat_schedule["mail_clean_document_notify_task"] = { "task": "schedule.mail_clean_document_notify_task.mail_clean_document_notify_task", "schedule": crontab(minute="0", hour="10", day_of_week="1"), - }, - "datasets-queue-monitor": { + } + if dify_config.ENABLE_DATASETS_QUEUE_MONITOR: + imports.append("schedule.queue_monitor_task") + beat_schedule["datasets-queue-monitor"] = { "task": "schedule.queue_monitor_task.queue_monitor_task", "schedule": timedelta( minutes=dify_config.QUEUE_MONITOR_INTERVAL if dify_config.QUEUE_MONITOR_INTERVAL else 30 ), - }, - } + } + if dify_config.ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: + imports.append("schedule.check_upgradable_plugin_task") + beat_schedule["check_upgradable_plugin_task"] = { + "task": "schedule.check_upgradable_plugin_task.check_upgradable_plugin_task", + "schedule": crontab(minute="*/15"), + } + celery_app.conf.update(beat_schedule=beat_schedule, imports=imports) return celery_app diff 
--git a/api/extensions/ext_login.py b/api/extensions/ext_login.py index 11d1856ac4..9b18e25eaa 100644 --- a/api/extensions/ext_login.py +++ b/api/extensions/ext_login.py @@ -40,9 +40,9 @@ def load_user_from_request(request_from_flask_login): if workspace_id: tenant_account_join = ( db.session.query(Tenant, TenantAccountJoin) - .filter(Tenant.id == workspace_id) - .filter(TenantAccountJoin.tenant_id == Tenant.id) - .filter(TenantAccountJoin.role == "owner") + .where(Tenant.id == workspace_id) + .where(TenantAccountJoin.tenant_id == Tenant.id) + .where(TenantAccountJoin.role == "owner") .one_or_none() ) if tenant_account_join: @@ -70,7 +70,7 @@ def load_user_from_request(request_from_flask_login): end_user_id = decoded.get("end_user_id") if not end_user_id: raise Unauthorized("Invalid Authorization token.") - end_user = db.session.query(EndUser).filter(EndUser.id == decoded["end_user_id"]).first() + end_user = db.session.query(EndUser).where(EndUser.id == decoded["end_user_id"]).first() if not end_user: raise NotFound("End user not found.") return end_user @@ -78,12 +78,12 @@ def load_user_from_request(request_from_flask_login): server_code = request.view_args.get("server_code") if request.view_args else None if not server_code: raise Unauthorized("Invalid Authorization token.") - app_mcp_server = db.session.query(AppMCPServer).filter(AppMCPServer.server_code == server_code).first() + app_mcp_server = db.session.query(AppMCPServer).where(AppMCPServer.server_code == server_code).first() if not app_mcp_server: raise NotFound("App MCP server not found.") end_user = ( db.session.query(EndUser) - .filter(EndUser.external_user_id == app_mcp_server.id, EndUser.type == "mcp") + .where(EndUser.external_user_id == app_mcp_server.id, EndUser.type == "mcp") .first() ) if not end_user: diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py index adf4cf68ee..9836113077 100644 --- a/api/factories/file_factory.py +++ b/api/factories/file_factory.py @@ -262,13 
+262,11 @@ def _build_from_tool_file( transfer_method: FileTransferMethod, strict_type_validation: bool = False, ) -> File: - tool_file = ( - db.session.query(ToolFile) - .filter( + tool_file = db.session.scalar( + select(ToolFile).where( ToolFile.id == mapping.get("tool_file_id"), ToolFile.tenant_id == tenant_id, ) - .first() ) if tool_file is None: @@ -276,7 +274,7 @@ def _build_from_tool_file( extension = "." + tool_file.file_key.split(".")[-1] if "." in tool_file.file_key else ".bin" - detected_file_type = _standardize_file_type(extension="." + extension, mime_type=tool_file.mimetype) + detected_file_type = _standardize_file_type(extension=extension, mime_type=tool_file.mimetype) specified_type = mapping.get("type") diff --git a/api/libs/email_i18n.py b/api/libs/email_i18n.py new file mode 100644 index 0000000000..b7c9f3ec6c --- /dev/null +++ b/api/libs/email_i18n.py @@ -0,0 +1,474 @@ +""" +Email Internationalization Module + +This module provides a centralized, elegant way to handle email internationalization +in Dify. It follows Domain-Driven Design principles with proper type hints and +eliminates the need for repetitive language switching logic. 
+""" + +from dataclasses import dataclass +from enum import Enum +from typing import Any, Optional, Protocol + +from flask import render_template +from pydantic import BaseModel, Field + +from extensions.ext_mail import mail +from services.feature_service import BrandingModel, FeatureService + + +class EmailType(Enum): + """Enumeration of supported email types.""" + + RESET_PASSWORD = "reset_password" + INVITE_MEMBER = "invite_member" + EMAIL_CODE_LOGIN = "email_code_login" + CHANGE_EMAIL_OLD = "change_email_old" + CHANGE_EMAIL_NEW = "change_email_new" + CHANGE_EMAIL_COMPLETED = "change_email_completed" + OWNER_TRANSFER_CONFIRM = "owner_transfer_confirm" + OWNER_TRANSFER_OLD_NOTIFY = "owner_transfer_old_notify" + OWNER_TRANSFER_NEW_NOTIFY = "owner_transfer_new_notify" + ACCOUNT_DELETION_SUCCESS = "account_deletion_success" + ACCOUNT_DELETION_VERIFICATION = "account_deletion_verification" + ENTERPRISE_CUSTOM = "enterprise_custom" + QUEUE_MONITOR_ALERT = "queue_monitor_alert" + DOCUMENT_CLEAN_NOTIFY = "document_clean_notify" + + +class EmailLanguage(Enum): + """Supported email languages with fallback handling.""" + + EN_US = "en-US" + ZH_HANS = "zh-Hans" + + @classmethod + def from_language_code(cls, language_code: str) -> "EmailLanguage": + """Convert a language code to EmailLanguage with fallback to English.""" + if language_code == "zh-Hans": + return cls.ZH_HANS + return cls.EN_US + + +@dataclass(frozen=True) +class EmailTemplate: + """Immutable value object representing an email template configuration.""" + + subject: str + template_path: str + branded_template_path: str + + +@dataclass(frozen=True) +class EmailContent: + """Immutable value object containing rendered email content.""" + + subject: str + html_content: str + template_context: dict[str, Any] + + +class EmailI18nConfig(BaseModel): + """Configuration for email internationalization.""" + + model_config = {"frozen": True, "extra": "forbid"} + + templates: dict[EmailType, dict[EmailLanguage, 
EmailTemplate]] = Field( + default_factory=dict, description="Mapping of email types to language-specific templates" + ) + + def get_template(self, email_type: EmailType, language: EmailLanguage) -> EmailTemplate: + """Get template configuration for specific email type and language.""" + type_templates = self.templates.get(email_type) + if not type_templates: + raise ValueError(f"No templates configured for email type: {email_type}") + + template = type_templates.get(language) + if not template: + # Fallback to English if specific language not found + template = type_templates.get(EmailLanguage.EN_US) + if not template: + raise ValueError(f"No template found for {email_type} in {language} or English") + + return template + + +class EmailRenderer(Protocol): + """Protocol for email template renderers.""" + + def render_template(self, template_path: str, **context: Any) -> str: + """Render email template with given context.""" + ... + + +class FlaskEmailRenderer: + """Flask-based email template renderer.""" + + def render_template(self, template_path: str, **context: Any) -> str: + """Render email template using Flask's render_template.""" + return render_template(template_path, **context) + + +class BrandingService(Protocol): + """Protocol for branding service abstraction.""" + + def get_branding_config(self) -> BrandingModel: + """Get current branding configuration.""" + ... + + +class FeatureBrandingService: + """Feature service based branding implementation.""" + + def get_branding_config(self) -> BrandingModel: + """Get branding configuration from feature service.""" + return FeatureService.get_system_features().branding + + +class EmailSender(Protocol): + """Protocol for email sending abstraction.""" + + def send_email(self, to: str, subject: str, html_content: str) -> None: + """Send email with given parameters.""" + ... 
+ + +class FlaskMailSender: + """Flask-Mail based email sender.""" + + def send_email(self, to: str, subject: str, html_content: str) -> None: + """Send email using Flask-Mail.""" + if mail.is_inited(): + mail.send(to=to, subject=subject, html=html_content) + + +class EmailI18nService: + """ + Main service for internationalized email handling. + + This service provides a clean API for sending internationalized emails + with proper branding support and template management. + """ + + def __init__( + self, + config: EmailI18nConfig, + renderer: EmailRenderer, + branding_service: BrandingService, + sender: EmailSender, + ) -> None: + self._config = config + self._renderer = renderer + self._branding_service = branding_service + self._sender = sender + + def send_email( + self, + email_type: EmailType, + language_code: str, + to: str, + template_context: Optional[dict[str, Any]] = None, + ) -> None: + """ + Send internationalized email with branding support. + + Args: + email_type: Type of email to send + language_code: Target language code + to: Recipient email address + template_context: Additional context for template rendering + """ + if template_context is None: + template_context = {} + + language = EmailLanguage.from_language_code(language_code) + email_content = self._render_email_content(email_type, language, template_context) + + self._sender.send_email(to=to, subject=email_content.subject, html_content=email_content.html_content) + + def send_change_email( + self, + language_code: str, + to: str, + code: str, + phase: str, + ) -> None: + """ + Send change email notification with phase-specific handling. + + Args: + language_code: Target language code + to: Recipient email address + code: Verification code + phase: Either 'old_email' or 'new_email' + """ + if phase == "old_email": + email_type = EmailType.CHANGE_EMAIL_OLD + elif phase == "new_email": + email_type = EmailType.CHANGE_EMAIL_NEW + else: + raise ValueError(f"Invalid phase: {phase}. 
Must be 'old_email' or 'new_email'") + + self.send_email( + email_type=email_type, + language_code=language_code, + to=to, + template_context={ + "to": to, + "code": code, + }, + ) + + def send_raw_email( + self, + to: str | list[str], + subject: str, + html_content: str, + ) -> None: + """ + Send a raw email directly without template processing. + + This method is provided for backward compatibility with legacy email + sending that uses pre-rendered HTML content (e.g., enterprise emails + with custom templates). + + Args: + to: Recipient email address(es) + subject: Email subject + html_content: Pre-rendered HTML content + """ + if isinstance(to, list): + for recipient in to: + self._sender.send_email(to=recipient, subject=subject, html_content=html_content) + else: + self._sender.send_email(to=to, subject=subject, html_content=html_content) + + def _render_email_content( + self, + email_type: EmailType, + language: EmailLanguage, + template_context: dict[str, Any], + ) -> EmailContent: + """Render email content with branding and internationalization.""" + template_config = self._config.get_template(email_type, language) + branding = self._branding_service.get_branding_config() + + # Determine template path based on branding + template_path = template_config.branded_template_path if branding.enabled else template_config.template_path + + # Prepare template context with branding information + full_context = { + **template_context, + "branding_enabled": branding.enabled, + "application_title": branding.application_title if branding.enabled else "Dify", + } + + # Render template + html_content = self._renderer.render_template(template_path, **full_context) + + # Apply templating to subject with all context variables + subject = template_config.subject + try: + subject = subject.format(**full_context) + except KeyError: + # If template variables are missing, fall back to basic formatting + if branding.enabled and "{application_title}" in subject: + subject = 
subject.format(application_title=branding.application_title) + + return EmailContent( + subject=subject, + html_content=html_content, + template_context=full_context, + ) + + +def create_default_email_config() -> EmailI18nConfig: + """Create default email i18n configuration with all supported templates.""" + templates: dict[EmailType, dict[EmailLanguage, EmailTemplate]] = { + EmailType.RESET_PASSWORD: { + EmailLanguage.EN_US: EmailTemplate( + subject="Set Your {application_title} Password", + template_path="reset_password_mail_template_en-US.html", + branded_template_path="without-brand/reset_password_mail_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="设置您的 {application_title} 密码", + template_path="reset_password_mail_template_zh-CN.html", + branded_template_path="without-brand/reset_password_mail_template_zh-CN.html", + ), + }, + EmailType.INVITE_MEMBER: { + EmailLanguage.EN_US: EmailTemplate( + subject="Join {application_title} Workspace Now", + template_path="invite_member_mail_template_en-US.html", + branded_template_path="without-brand/invite_member_mail_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="立即加入 {application_title} 工作空间", + template_path="invite_member_mail_template_zh-CN.html", + branded_template_path="without-brand/invite_member_mail_template_zh-CN.html", + ), + }, + EmailType.EMAIL_CODE_LOGIN: { + EmailLanguage.EN_US: EmailTemplate( + subject="{application_title} Login Code", + template_path="email_code_login_mail_template_en-US.html", + branded_template_path="without-brand/email_code_login_mail_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="{application_title} 登录验证码", + template_path="email_code_login_mail_template_zh-CN.html", + branded_template_path="without-brand/email_code_login_mail_template_zh-CN.html", + ), + }, + EmailType.CHANGE_EMAIL_OLD: { + EmailLanguage.EN_US: EmailTemplate( + subject="Check your current email", + 
template_path="change_mail_confirm_old_template_en-US.html", + branded_template_path="without-brand/change_mail_confirm_old_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="检测您现在的邮箱", + template_path="change_mail_confirm_old_template_zh-CN.html", + branded_template_path="without-brand/change_mail_confirm_old_template_zh-CN.html", + ), + }, + EmailType.CHANGE_EMAIL_NEW: { + EmailLanguage.EN_US: EmailTemplate( + subject="Confirm your new email address", + template_path="change_mail_confirm_new_template_en-US.html", + branded_template_path="without-brand/change_mail_confirm_new_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="确认您的邮箱地址变更", + template_path="change_mail_confirm_new_template_zh-CN.html", + branded_template_path="without-brand/change_mail_confirm_new_template_zh-CN.html", + ), + }, + EmailType.CHANGE_EMAIL_COMPLETED: { + EmailLanguage.EN_US: EmailTemplate( + subject="Your login email has been changed", + template_path="change_mail_completed_template_en-US.html", + branded_template_path="without-brand/change_mail_completed_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="您的登录邮箱已更改", + template_path="change_mail_completed_template_zh-CN.html", + branded_template_path="without-brand/change_mail_completed_template_zh-CN.html", + ), + }, + EmailType.OWNER_TRANSFER_CONFIRM: { + EmailLanguage.EN_US: EmailTemplate( + subject="Verify Your Request to Transfer Workspace Ownership", + template_path="transfer_workspace_owner_confirm_template_en-US.html", + branded_template_path="without-brand/transfer_workspace_owner_confirm_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="验证您转移工作空间所有权的请求", + template_path="transfer_workspace_owner_confirm_template_zh-CN.html", + branded_template_path="without-brand/transfer_workspace_owner_confirm_template_zh-CN.html", + ), + }, + EmailType.OWNER_TRANSFER_OLD_NOTIFY: { + EmailLanguage.EN_US: EmailTemplate( + 
subject="Workspace ownership has been transferred", + template_path="transfer_workspace_old_owner_notify_template_en-US.html", + branded_template_path="without-brand/transfer_workspace_old_owner_notify_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="工作区所有权已转移", + template_path="transfer_workspace_old_owner_notify_template_zh-CN.html", + branded_template_path="without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html", + ), + }, + EmailType.OWNER_TRANSFER_NEW_NOTIFY: { + EmailLanguage.EN_US: EmailTemplate( + subject="You are now the owner of {WorkspaceName}", + template_path="transfer_workspace_new_owner_notify_template_en-US.html", + branded_template_path="without-brand/transfer_workspace_new_owner_notify_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="您现在是 {WorkspaceName} 的所有者", + template_path="transfer_workspace_new_owner_notify_template_zh-CN.html", + branded_template_path="without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html", + ), + }, + EmailType.ACCOUNT_DELETION_SUCCESS: { + EmailLanguage.EN_US: EmailTemplate( + subject="Your Dify.AI Account Has Been Successfully Deleted", + template_path="delete_account_success_template_en-US.html", + branded_template_path="delete_account_success_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="您的 Dify.AI 账户已成功删除", + template_path="delete_account_success_template_zh-CN.html", + branded_template_path="delete_account_success_template_zh-CN.html", + ), + }, + EmailType.ACCOUNT_DELETION_VERIFICATION: { + EmailLanguage.EN_US: EmailTemplate( + subject="Dify.AI Account Deletion and Verification", + template_path="delete_account_code_email_template_en-US.html", + branded_template_path="delete_account_code_email_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="Dify.AI 账户删除和验证", + template_path="delete_account_code_email_template_zh-CN.html", + 
branded_template_path="delete_account_code_email_template_zh-CN.html", + ), + }, + EmailType.QUEUE_MONITOR_ALERT: { + EmailLanguage.EN_US: EmailTemplate( + subject="Alert: Dataset Queue pending tasks exceeded the limit", + template_path="queue_monitor_alert_email_template_en-US.html", + branded_template_path="queue_monitor_alert_email_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="警报:数据集队列待处理任务超过限制", + template_path="queue_monitor_alert_email_template_zh-CN.html", + branded_template_path="queue_monitor_alert_email_template_zh-CN.html", + ), + }, + EmailType.DOCUMENT_CLEAN_NOTIFY: { + EmailLanguage.EN_US: EmailTemplate( + subject="Dify Knowledge base auto disable notification", + template_path="clean_document_job_mail_template-US.html", + branded_template_path="clean_document_job_mail_template-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="Dify 知识库自动禁用通知", + template_path="clean_document_job_mail_template_zh-CN.html", + branded_template_path="clean_document_job_mail_template_zh-CN.html", + ), + }, + } + + return EmailI18nConfig(templates=templates) + + +# Singleton instance for application-wide use +def get_default_email_i18n_service() -> EmailI18nService: + """Get configured email i18n service with default dependencies.""" + config = create_default_email_config() + renderer = FlaskEmailRenderer() + branding_service = FeatureBrandingService() + sender = FlaskMailSender() + + return EmailI18nService( + config=config, + renderer=renderer, + branding_service=branding_service, + sender=sender, + ) + + +# Global instance +_email_i18n_service: Optional[EmailI18nService] = None + + +def get_email_i18n_service() -> EmailI18nService: + """Get global email i18n service instance.""" + global _email_i18n_service + if _email_i18n_service is None: + _email_i18n_service = get_default_email_i18n_service() + return _email_i18n_service diff --git a/api/libs/oauth_data_source.py b/api/libs/oauth_data_source.py index 
78f827584c..987c5d7135 100644 --- a/api/libs/oauth_data_source.py +++ b/api/libs/oauth_data_source.py @@ -3,6 +3,7 @@ from typing import Any import requests from flask_login import current_user +from sqlalchemy import select from extensions.ext_database import db from libs.datetime_utils import naive_utc_now @@ -61,16 +62,12 @@ class NotionOAuth(OAuthDataSource): "total": len(pages), } # save data source binding - data_source_binding = ( - db.session.query(DataSourceOauthBinding) - .filter( - db.and_( - DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, - DataSourceOauthBinding.provider == "notion", - DataSourceOauthBinding.access_token == access_token, - ) + data_source_binding = db.session.scalar( + select(DataSourceOauthBinding).where( + DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, + DataSourceOauthBinding.provider == "notion", + DataSourceOauthBinding.access_token == access_token, ) - .first() ) if data_source_binding: data_source_binding.source_info = source_info @@ -101,16 +98,12 @@ class NotionOAuth(OAuthDataSource): "total": len(pages), } # save data source binding - data_source_binding = ( - db.session.query(DataSourceOauthBinding) - .filter( - db.and_( - DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, - DataSourceOauthBinding.provider == "notion", - DataSourceOauthBinding.access_token == access_token, - ) + data_source_binding = db.session.scalar( + select(DataSourceOauthBinding).where( + DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, + DataSourceOauthBinding.provider == "notion", + DataSourceOauthBinding.access_token == access_token, ) - .first() ) if data_source_binding: data_source_binding.source_info = source_info @@ -129,18 +122,15 @@ class NotionOAuth(OAuthDataSource): def sync_data_source(self, binding_id: str): # save data source binding - data_source_binding = ( - db.session.query(DataSourceOauthBinding) - .filter( - db.and_( - DataSourceOauthBinding.tenant_id == 
current_user.current_tenant_id, - DataSourceOauthBinding.provider == "notion", - DataSourceOauthBinding.id == binding_id, - DataSourceOauthBinding.disabled == False, - ) + data_source_binding = db.session.scalar( + select(DataSourceOauthBinding).where( + DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, + DataSourceOauthBinding.provider == "notion", + DataSourceOauthBinding.id == binding_id, + DataSourceOauthBinding.disabled == False, ) - .first() ) + if data_source_binding: # get all authorized pages pages = self.get_authorized_pages(data_source_binding.access_token) diff --git a/api/libs/rsa.py b/api/libs/rsa.py index da279eb32b..ed7a0eb116 100644 --- a/api/libs/rsa.py +++ b/api/libs/rsa.py @@ -1,4 +1,5 @@ import hashlib +import os from typing import Union from Crypto.Cipher import AES @@ -17,7 +18,7 @@ def generate_key_pair(tenant_id: str) -> str: pem_private = private_key.export_key() pem_public = public_key.export_key() - filepath = "privkeys/{tenant_id}".format(tenant_id=tenant_id) + "/private.pem" + filepath = os.path.join("privkeys", tenant_id, "private.pem") storage.save(filepath, pem_private) @@ -47,7 +48,7 @@ def encrypt(text: str, public_key: Union[str, bytes]) -> bytes: def get_decrypt_decoding(tenant_id: str) -> tuple[RSA.RsaKey, object]: - filepath = "privkeys/{tenant_id}".format(tenant_id=tenant_id) + "/private.pem" + filepath = os.path.join("privkeys", tenant_id, "private.pem") cache_key = "tenant_privkey:{hash}".format(hash=hashlib.sha3_256(filepath.encode()).hexdigest()) private_key = redis_client.get(cache_key) diff --git a/api/migrations/versions/2025_07_22_0019-375fe79ead14_oauth_refresh_token.py b/api/migrations/versions/2025_07_22_0019-375fe79ead14_oauth_refresh_token.py new file mode 100644 index 0000000000..76d0cb2940 --- /dev/null +++ b/api/migrations/versions/2025_07_22_0019-375fe79ead14_oauth_refresh_token.py @@ -0,0 +1,34 @@ +"""oauth_refresh_token + +Revision ID: 375fe79ead14 +Revises: 1a83934ad6d1 +Create Date: 
2025-07-22 00:19:45.599636 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '375fe79ead14' +down_revision = '1a83934ad6d1' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('expires_at', sa.BigInteger(), server_default=sa.text('-1'), nullable=False)) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + + with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op: + batch_op.drop_column('expires_at') + + # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_07_23_1508-8bcc02c9bd07_add_tenant_plugin_autoupgrade_table.py b/api/migrations/versions/2025_07_23_1508-8bcc02c9bd07_add_tenant_plugin_autoupgrade_table.py new file mode 100644 index 0000000000..4ff0402a97 --- /dev/null +++ b/api/migrations/versions/2025_07_23_1508-8bcc02c9bd07_add_tenant_plugin_autoupgrade_table.py @@ -0,0 +1,42 @@ +"""add_tenant_plugin_autoupgrade_table + +Revision ID: 8bcc02c9bd07 +Revises: 375fe79ead14 +Create Date: 2025-07-23 15:08:50.161441 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '8bcc02c9bd07' +down_revision = '375fe79ead14' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('tenant_plugin_auto_upgrade_strategies', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False), + sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False), + sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False), + sa.Column('exclude_plugins', sa.ARRAY(sa.String(length=255)), nullable=False), + sa.Column('include_plugins', sa.ARRAY(sa.String(length=255)), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'), + sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + + op.drop_table('tenant_plugin_auto_upgrade_strategies') + # ### end Alembic commands ### diff --git a/api/models/account.py b/api/models/account.py index 1af571bc01..d63c5d7fb5 100644 --- a/api/models/account.py +++ b/api/models/account.py @@ -1,9 +1,10 @@ import enum import json +from datetime import datetime from typing import Optional, cast from flask_login import UserMixin # type: ignore -from sqlalchemy import func +from sqlalchemy import func, select from sqlalchemy.orm import Mapped, mapped_column, reconstructor from models.base import Base @@ -85,21 +86,23 @@ class Account(UserMixin, Base): __table_args__ = (db.PrimaryKeyConstraint("id", name="account_pkey"), db.Index("account_email_idx", "email")) id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) - name = db.Column(db.String(255), nullable=False) - email = db.Column(db.String(255), nullable=False) - password = db.Column(db.String(255), nullable=True) - password_salt = db.Column(db.String(255), nullable=True) - avatar = db.Column(db.String(255)) - interface_language = db.Column(db.String(255)) - interface_theme = db.Column(db.String(255)) - timezone = db.Column(db.String(255)) - last_login_at = db.Column(db.DateTime) - last_login_ip = db.Column(db.String(255)) - last_active_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - status = db.Column(db.String(16), nullable=False, server_default=db.text("'active'::character varying")) - initialized_at = db.Column(db.DateTime) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + name: Mapped[str] = mapped_column(db.String(255)) + email: Mapped[str] = mapped_column(db.String(255)) + password: Mapped[Optional[str]] = mapped_column(db.String(255)) + password_salt: Mapped[Optional[str]] = mapped_column(db.String(255)) + avatar: Mapped[Optional[str]] = 
mapped_column(db.String(255), nullable=True) + interface_language: Mapped[Optional[str]] = mapped_column(db.String(255)) + interface_theme: Mapped[Optional[str]] = mapped_column(db.String(255), nullable=True) + timezone: Mapped[Optional[str]] = mapped_column(db.String(255)) + last_login_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime, nullable=True) + last_login_ip: Mapped[Optional[str]] = mapped_column(db.String(255), nullable=True) + last_active_at: Mapped[datetime] = mapped_column( + db.DateTime, server_default=func.current_timestamp(), nullable=False + ) + status: Mapped[str] = mapped_column(db.String(16), server_default=db.text("'active'::character varying")) + initialized_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime, nullable=True) + created_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp(), nullable=False) + updated_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp(), nullable=False) @reconstructor def init_on_load(self): @@ -116,7 +119,7 @@ class Account(UserMixin, Base): @current_tenant.setter def current_tenant(self, tenant: "Tenant"): - ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=self.id).first() + ta = db.session.scalar(select(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=self.id).limit(1)) if ta: self.role = TenantAccountRole(ta.role) self._current_tenant = tenant @@ -132,9 +135,9 @@ class Account(UserMixin, Base): tuple[Tenant, TenantAccountJoin], ( db.session.query(Tenant, TenantAccountJoin) - .filter(Tenant.id == tenant_id) - .filter(TenantAccountJoin.tenant_id == Tenant.id) - .filter(TenantAccountJoin.account_id == self.id) + .where(Tenant.id == tenant_id) + .where(TenantAccountJoin.tenant_id == Tenant.id) + .where(TenantAccountJoin.account_id == self.id) .one_or_none() ), ) @@ -143,7 +146,7 @@ class Account(UserMixin, Base): return tenant, join = tenant_account_join - self.role = join.role + 
self.role = TenantAccountRole(join.role) self._current_tenant = tenant @property @@ -158,11 +161,11 @@ class Account(UserMixin, Base): def get_by_openid(cls, provider: str, open_id: str): account_integrate = ( db.session.query(AccountIntegrate) - .filter(AccountIntegrate.provider == provider, AccountIntegrate.open_id == open_id) + .where(AccountIntegrate.provider == provider, AccountIntegrate.open_id == open_id) .one_or_none() ) if account_integrate: - return db.session.query(Account).filter(Account.id == account_integrate.account_id).one_or_none() + return db.session.query(Account).where(Account.id == account_integrate.account_id).one_or_none() return None # check current_user.current_tenant.current_role in ['admin', 'owner'] @@ -196,19 +199,19 @@ class Tenant(Base): __tablename__ = "tenants" __table_args__ = (db.PrimaryKeyConstraint("id", name="tenant_pkey"),) - id: Mapped[str] = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - name = db.Column(db.String(255), nullable=False) + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + name: Mapped[str] = mapped_column(db.String(255)) encrypt_public_key = db.Column(db.Text) - plan = db.Column(db.String(255), nullable=False, server_default=db.text("'basic'::character varying")) - status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying")) - custom_config = db.Column(db.Text) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + plan: Mapped[str] = mapped_column(db.String(255), server_default=db.text("'basic'::character varying")) + status: Mapped[str] = mapped_column(db.String(255), server_default=db.text("'normal'::character varying")) + custom_config: Mapped[Optional[str]] = mapped_column(db.Text) + created_at: Mapped[datetime] = mapped_column(db.DateTime, 
server_default=func.current_timestamp(), nullable=False) + updated_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp()) def get_accounts(self) -> list[Account]: return ( db.session.query(Account) - .filter(Account.id == TenantAccountJoin.account_id, TenantAccountJoin.tenant_id == self.id) + .where(Account.id == TenantAccountJoin.account_id, TenantAccountJoin.tenant_id == self.id) .all() ) @@ -230,14 +233,14 @@ class TenantAccountJoin(Base): db.UniqueConstraint("tenant_id", "account_id", name="unique_tenant_account_join"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - account_id = db.Column(StringUUID, nullable=False) - current = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - role = db.Column(db.String(16), nullable=False, server_default="normal") - invited_by = db.Column(StringUUID, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id: Mapped[str] = mapped_column(StringUUID) + account_id: Mapped[str] = mapped_column(StringUUID) + current: Mapped[bool] = mapped_column(db.Boolean, server_default=db.text("false")) + role: Mapped[str] = mapped_column(db.String(16), server_default="normal") + invited_by: Mapped[Optional[str]] = mapped_column(StringUUID) + created_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp()) class AccountIntegrate(Base): @@ -248,13 +251,13 @@ class AccountIntegrate(Base): db.UniqueConstraint("provider", "open_id", name="unique_provider_open_id"), ) - id = db.Column(StringUUID, 
server_default=db.text("uuid_generate_v4()")) - account_id = db.Column(StringUUID, nullable=False) - provider = db.Column(db.String(16), nullable=False) - open_id = db.Column(db.String(255), nullable=False) - encrypted_token = db.Column(db.String(255), nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + account_id: Mapped[str] = mapped_column(StringUUID) + provider: Mapped[str] = mapped_column(db.String(16)) + open_id: Mapped[str] = mapped_column(db.String(255)) + encrypted_token: Mapped[str] = mapped_column(db.String(255)) + created_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp()) class InvitationCode(Base): @@ -265,15 +268,15 @@ class InvitationCode(Base): db.Index("invitation_codes_code_idx", "code", "status"), ) - id = db.Column(db.Integer, nullable=False) - batch = db.Column(db.String(255), nullable=False) - code = db.Column(db.String(32), nullable=False) - status = db.Column(db.String(16), nullable=False, server_default=db.text("'unused'::character varying")) - used_at = db.Column(db.DateTime) - used_by_tenant_id = db.Column(StringUUID) - used_by_account_id = db.Column(StringUUID) - deprecated_at = db.Column(db.DateTime) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + id: Mapped[int] = mapped_column(db.Integer) + batch: Mapped[str] = mapped_column(db.String(255)) + code: Mapped[str] = mapped_column(db.String(32)) + status: Mapped[str] = mapped_column(db.String(16), server_default=db.text("'unused'::character varying")) + used_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime) + used_by_tenant_id: 
Mapped[Optional[str]] = mapped_column(StringUUID) + used_by_account_id: Mapped[Optional[str]] = mapped_column(StringUUID) + deprecated_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime, nullable=True) + created_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=db.text("CURRENT_TIMESTAMP(0)")) class TenantPluginPermission(Base): @@ -299,3 +302,35 @@ class TenantPluginPermission(Base): db.String(16), nullable=False, server_default="everyone" ) debug_permission: Mapped[DebugPermission] = mapped_column(db.String(16), nullable=False, server_default="noone") + + +class TenantPluginAutoUpgradeStrategy(Base): + class StrategySetting(enum.StrEnum): + DISABLED = "disabled" + FIX_ONLY = "fix_only" + LATEST = "latest" + + class UpgradeMode(enum.StrEnum): + ALL = "all" + PARTIAL = "partial" + EXCLUDE = "exclude" + + __tablename__ = "tenant_plugin_auto_upgrade_strategies" + __table_args__ = ( + db.PrimaryKeyConstraint("id", name="tenant_plugin_auto_upgrade_strategy_pkey"), + db.UniqueConstraint("tenant_id", name="unique_tenant_plugin_auto_upgrade_strategy"), + ) + + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + strategy_setting: Mapped[StrategySetting] = mapped_column(db.String(16), nullable=False, server_default="fix_only") + upgrade_time_of_day: Mapped[int] = mapped_column(db.Integer, nullable=False, default=0) # seconds of the day + upgrade_mode: Mapped[UpgradeMode] = mapped_column(db.String(16), nullable=False, server_default="exclude") + exclude_plugins: Mapped[list[str]] = mapped_column( + db.ARRAY(db.String(255)), nullable=False + ) # plugin_id (author/name) + include_plugins: Mapped[list[str]] = mapped_column( + db.ARRAY(db.String(255)), nullable=False + ) # plugin_id (author/name) + created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = db.Column(db.DateTime, nullable=False, 
server_default=func.current_timestamp()) diff --git a/api/models/api_based_extension.py b/api/models/api_based_extension.py index 5a70e18622..3cef5a0fb2 100644 --- a/api/models/api_based_extension.py +++ b/api/models/api_based_extension.py @@ -1,6 +1,7 @@ import enum from sqlalchemy import func +from sqlalchemy.orm import mapped_column from .base import Base from .engine import db @@ -21,9 +22,9 @@ class APIBasedExtension(Base): db.Index("api_based_extension_tenant_idx", "tenant_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - name = db.Column(db.String(255), nullable=False) - api_endpoint = db.Column(db.String(255), nullable=False) - api_key = db.Column(db.Text, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + name = mapped_column(db.String(255), nullable=False) + api_endpoint = mapped_column(db.String(255), nullable=False) + api_key = mapped_column(db.Text, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) diff --git a/api/models/dataset.py b/api/models/dataset.py index a26788df0d..f287d9a3fc 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -8,12 +8,13 @@ import os import pickle import re import time +from datetime import datetime from json import JSONDecodeError -from typing import Any, cast +from typing import Any, Optional, cast -from sqlalchemy import func +from sqlalchemy import func, select from sqlalchemy.dialects.postgresql import JSONB -from sqlalchemy.orm import Mapped +from sqlalchemy.orm import Mapped, mapped_column from configs import dify_config from core.rag.index_processor.constant.built_in_field import BuiltInField, MetadataDataSource @@ -45,25 +46,25 @@ class Dataset(Base): 
INDEXING_TECHNIQUE_LIST = ["high_quality", "economy", None] PROVIDER_LIST = ["vendor", "external", None] - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - name = db.Column(db.String(255), nullable=False) - description = db.Column(db.Text, nullable=True) - provider = db.Column(db.String(255), nullable=False, server_default=db.text("'vendor'::character varying")) - permission = db.Column(db.String(255), nullable=False, server_default=db.text("'only_me'::character varying")) - data_source_type = db.Column(db.String(255)) - indexing_technique = db.Column(db.String(255), nullable=True) - index_struct = db.Column(db.Text, nullable=True) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_by = db.Column(StringUUID, nullable=True) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - embedding_model = db.Column(db.String(255), nullable=True) - embedding_model_provider = db.Column(db.String(255), nullable=True) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id: Mapped[str] = mapped_column(StringUUID) + name: Mapped[str] = mapped_column(db.String(255)) + description = mapped_column(db.Text, nullable=True) + provider: Mapped[str] = mapped_column(db.String(255), server_default=db.text("'vendor'::character varying")) + permission: Mapped[str] = mapped_column(db.String(255), server_default=db.text("'only_me'::character varying")) + data_source_type = mapped_column(db.String(255)) + indexing_technique: Mapped[Optional[str]] = mapped_column(db.String(255)) + index_struct = mapped_column(db.Text, nullable=True) + created_by = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_by = mapped_column(StringUUID, 
nullable=True) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + embedding_model = db.Column(db.String(255), nullable=True) # TODO: mapped_column + embedding_model_provider = db.Column(db.String(255), nullable=True) # TODO: mapped_column keyword_number = db.Column(db.Integer, nullable=True, server_default=db.text("10")) - collection_binding_id = db.Column(StringUUID, nullable=True) - retrieval_model = db.Column(JSONB, nullable=True) - built_in_field_enabled = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) + collection_binding_id = mapped_column(StringUUID, nullable=True) + retrieval_model = mapped_column(JSONB, nullable=True) + built_in_field_enabled = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) icon_info = db.Column(JSONB, nullable=True) runtime_mode = db.Column(db.String(255), nullable=True, server_default=db.text("'general'::character varying")) pipeline_id = db.Column(StringUUID, nullable=True) @@ -89,7 +90,7 @@ class Dataset(Base): @property def dataset_keyword_table(self): dataset_keyword_table = ( - db.session.query(DatasetKeywordTable).filter(DatasetKeywordTable.dataset_id == self.id).first() + db.session.query(DatasetKeywordTable).where(DatasetKeywordTable.dataset_id == self.id).first() ) if dataset_keyword_table: return dataset_keyword_table @@ -116,7 +117,7 @@ class Dataset(Base): def latest_process_rule(self): return ( db.session.query(DatasetProcessRule) - .filter(DatasetProcessRule.dataset_id == self.id) + .where(DatasetProcessRule.dataset_id == self.id) .order_by(DatasetProcessRule.created_at.desc()) .first() ) @@ -125,19 +126,19 @@ class Dataset(Base): def app_count(self): return ( db.session.query(func.count(AppDatasetJoin.id)) - .filter(AppDatasetJoin.dataset_id == self.id, App.id == AppDatasetJoin.app_id) + .where(AppDatasetJoin.dataset_id == self.id, App.id == AppDatasetJoin.app_id) .scalar() ) @property def document_count(self): - return 
db.session.query(func.count(Document.id)).filter(Document.dataset_id == self.id).scalar() + return db.session.query(func.count(Document.id)).where(Document.dataset_id == self.id).scalar() @property def available_document_count(self): return ( db.session.query(func.count(Document.id)) - .filter( + .where( Document.dataset_id == self.id, Document.indexing_status == "completed", Document.enabled == True, @@ -150,7 +151,7 @@ class Dataset(Base): def available_segment_count(self): return ( db.session.query(func.count(DocumentSegment.id)) - .filter( + .where( DocumentSegment.dataset_id == self.id, DocumentSegment.status == "completed", DocumentSegment.enabled == True, @@ -163,7 +164,7 @@ class Dataset(Base): return ( db.session.query(Document) .with_entities(func.coalesce(func.sum(Document.word_count), 0)) - .filter(Document.dataset_id == self.id) + .where(Document.dataset_id == self.id) .scalar() ) @@ -192,7 +193,7 @@ class Dataset(Base): tags = ( db.session.query(Tag) .join(TagBinding, Tag.id == TagBinding.tag_id) - .filter( + .where( TagBinding.target_id == self.id, TagBinding.tenant_id == self.tenant_id, Tag.tenant_id == self.tenant_id, @@ -208,14 +209,14 @@ class Dataset(Base): if self.provider != "external": return None external_knowledge_binding = ( - db.session.query(ExternalKnowledgeBindings).filter(ExternalKnowledgeBindings.dataset_id == self.id).first() + db.session.query(ExternalKnowledgeBindings).where(ExternalKnowledgeBindings.dataset_id == self.id).first() ) if not external_knowledge_binding: return None - external_knowledge_api = ( - db.session.query(ExternalKnowledgeApis) - .filter(ExternalKnowledgeApis.id == external_knowledge_binding.external_knowledge_api_id) - .first() + external_knowledge_api = db.session.scalar( + select(ExternalKnowledgeApis).where( + ExternalKnowledgeApis.id == external_knowledge_binding.external_knowledge_api_id + ) ) if not external_knowledge_api: return None @@ -236,7 +237,7 @@ class Dataset(Base): @property def 
doc_metadata(self): - dataset_metadatas = db.session.query(DatasetMetadata).filter(DatasetMetadata.dataset_id == self.id).all() + dataset_metadatas = db.session.query(DatasetMetadata).where(DatasetMetadata.dataset_id == self.id).all() doc_metadata = [ { @@ -297,12 +298,12 @@ class DatasetProcessRule(Base): db.Index("dataset_process_rule_dataset_id_idx", "dataset_id"), ) - id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) - dataset_id = db.Column(StringUUID, nullable=False) - mode = db.Column(db.String(255), nullable=False, server_default=db.text("'automatic'::character varying")) - rules = db.Column(db.Text, nullable=True) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) + dataset_id = mapped_column(StringUUID, nullable=False) + mode = mapped_column(db.String(255), nullable=False, server_default=db.text("'automatic'::character varying")) + rules = mapped_column(db.Text, nullable=True) + created_by = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) MODES = ["automatic", "custom", "hierarchical"] PRE_PROCESSING_RULES = ["remove_stopwords", "remove_extra_spaces", "remove_urls_emails"] @@ -341,62 +342,64 @@ class Document(Base): ) # initial fields - id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - dataset_id = db.Column(StringUUID, nullable=False) - position = db.Column(db.Integer, nullable=False) - data_source_type = db.Column(db.String(255), nullable=False) - data_source_info = db.Column(db.Text, nullable=True) - dataset_process_rule_id = db.Column(StringUUID, nullable=True) - batch = db.Column(db.String(255), nullable=False) - name = 
db.Column(db.String(255), nullable=False) - created_from = db.Column(db.String(255), nullable=False) - created_by = db.Column(StringUUID, nullable=False) - created_api_request_id = db.Column(StringUUID, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + dataset_id = mapped_column(StringUUID, nullable=False) + position = mapped_column(db.Integer, nullable=False) + data_source_type = mapped_column(db.String(255), nullable=False) + data_source_info = mapped_column(db.Text, nullable=True) + dataset_process_rule_id = mapped_column(StringUUID, nullable=True) + batch = mapped_column(db.String(255), nullable=False) + name = mapped_column(db.String(255), nullable=False) + created_from = mapped_column(db.String(255), nullable=False) + created_by = mapped_column(StringUUID, nullable=False) + created_api_request_id = mapped_column(StringUUID, nullable=True) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) # start processing - processing_started_at = db.Column(db.DateTime, nullable=True) + processing_started_at = mapped_column(db.DateTime, nullable=True) # parsing - file_id = db.Column(db.Text, nullable=True) - word_count = db.Column(db.Integer, nullable=True) - parsing_completed_at = db.Column(db.DateTime, nullable=True) + file_id = mapped_column(db.Text, nullable=True) + word_count = mapped_column(db.Integer, nullable=True) + parsing_completed_at = mapped_column(db.DateTime, nullable=True) # cleaning - cleaning_completed_at = db.Column(db.DateTime, nullable=True) + cleaning_completed_at = mapped_column(db.DateTime, nullable=True) # split - splitting_completed_at = db.Column(db.DateTime, nullable=True) + splitting_completed_at = mapped_column(db.DateTime, nullable=True) # indexing - tokens = db.Column(db.Integer, 
nullable=True) - indexing_latency = db.Column(db.Float, nullable=True) - completed_at = db.Column(db.DateTime, nullable=True) + tokens = mapped_column(db.Integer, nullable=True) + indexing_latency = mapped_column(db.Float, nullable=True) + completed_at = mapped_column(db.DateTime, nullable=True) # pause - is_paused = db.Column(db.Boolean, nullable=True, server_default=db.text("false")) - paused_by = db.Column(StringUUID, nullable=True) - paused_at = db.Column(db.DateTime, nullable=True) + is_paused = mapped_column(db.Boolean, nullable=True, server_default=db.text("false")) + paused_by = mapped_column(StringUUID, nullable=True) + paused_at = mapped_column(db.DateTime, nullable=True) # error - error = db.Column(db.Text, nullable=True) - stopped_at = db.Column(db.DateTime, nullable=True) + error = mapped_column(db.Text, nullable=True) + stopped_at = mapped_column(db.DateTime, nullable=True) # basic fields - indexing_status = db.Column(db.String(255), nullable=False, server_default=db.text("'waiting'::character varying")) - enabled = db.Column(db.Boolean, nullable=False, server_default=db.text("true")) - disabled_at = db.Column(db.DateTime, nullable=True) - disabled_by = db.Column(StringUUID, nullable=True) - archived = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - archived_reason = db.Column(db.String(255), nullable=True) - archived_by = db.Column(StringUUID, nullable=True) - archived_at = db.Column(db.DateTime, nullable=True) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - doc_type = db.Column(db.String(40), nullable=True) - doc_metadata = db.Column(JSONB, nullable=True) - doc_form = db.Column(db.String(255), nullable=False, server_default=db.text("'text_model'::character varying")) - doc_language = db.Column(db.String(255), nullable=True) + indexing_status = mapped_column( + db.String(255), nullable=False, server_default=db.text("'waiting'::character varying") + ) + enabled = 
mapped_column(db.Boolean, nullable=False, server_default=db.text("true")) + disabled_at = mapped_column(db.DateTime, nullable=True) + disabled_by = mapped_column(StringUUID, nullable=True) + archived = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) + archived_reason = mapped_column(db.String(255), nullable=True) + archived_by = mapped_column(StringUUID, nullable=True) + archived_at = mapped_column(db.DateTime, nullable=True) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + doc_type = mapped_column(db.String(40), nullable=True) + doc_metadata = mapped_column(JSONB, nullable=True) + doc_form = mapped_column(db.String(255), nullable=False, server_default=db.text("'text_model'::character varying")) + doc_language = mapped_column(db.String(255), nullable=True) DATA_SOURCES = ["upload_file", "notion_import", "website_crawl"] @@ -437,7 +440,7 @@ class Document(Base): data_source_info_dict = json.loads(self.data_source_info) file_detail = ( db.session.query(UploadFile) - .filter(UploadFile.id == data_source_info_dict["upload_file_id"]) + .where(UploadFile.id == data_source_info_dict["upload_file_id"]) .one_or_none() ) if file_detail: @@ -470,24 +473,24 @@ class Document(Base): @property def dataset(self): - return db.session.query(Dataset).filter(Dataset.id == self.dataset_id).one_or_none() + return db.session.query(Dataset).where(Dataset.id == self.dataset_id).one_or_none() @property def segment_count(self): - return db.session.query(DocumentSegment).filter(DocumentSegment.document_id == self.id).count() + return db.session.query(DocumentSegment).where(DocumentSegment.document_id == self.id).count() @property def hit_count(self): return ( db.session.query(DocumentSegment) .with_entities(func.coalesce(func.sum(DocumentSegment.hit_count), 0)) - .filter(DocumentSegment.document_id == self.id) + .where(DocumentSegment.document_id == self.id) .scalar() ) @property def uploader(self): - user = 
db.session.query(Account).filter(Account.id == self.created_by).first() + user = db.session.query(Account).where(Account.id == self.created_by).first() return user.name if user else None @property @@ -504,7 +507,7 @@ class Document(Base): document_metadatas = ( db.session.query(DatasetMetadata) .join(DatasetMetadataBinding, DatasetMetadataBinding.metadata_id == DatasetMetadata.id) - .filter( + .where( DatasetMetadataBinding.dataset_id == self.dataset_id, DatasetMetadataBinding.document_id == self.id ) .all() @@ -684,58 +687,58 @@ class DocumentSegment(Base): ) # initial fields - id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - dataset_id = db.Column(StringUUID, nullable=False) - document_id = db.Column(StringUUID, nullable=False) + id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + dataset_id = mapped_column(StringUUID, nullable=False) + document_id = mapped_column(StringUUID, nullable=False) position: Mapped[int] - content = db.Column(db.Text, nullable=False) - answer = db.Column(db.Text, nullable=True) - word_count = db.Column(db.Integer, nullable=False) - tokens = db.Column(db.Integer, nullable=False) + content = mapped_column(db.Text, nullable=False) + answer = mapped_column(db.Text, nullable=True) + word_count: Mapped[int] + tokens: Mapped[int] # indexing fields - keywords = db.Column(db.JSON, nullable=True) - index_node_id = db.Column(db.String(255), nullable=True) - index_node_hash = db.Column(db.String(255), nullable=True) + keywords = mapped_column(db.JSON, nullable=True) + index_node_id = mapped_column(db.String(255), nullable=True) + index_node_hash = mapped_column(db.String(255), nullable=True) # basic fields - hit_count = db.Column(db.Integer, nullable=False, default=0) - enabled = db.Column(db.Boolean, nullable=False, server_default=db.text("true")) - 
disabled_at = db.Column(db.DateTime, nullable=True) - disabled_by = db.Column(StringUUID, nullable=True) - status = db.Column(db.String(255), nullable=False, server_default=db.text("'waiting'::character varying")) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_by = db.Column(StringUUID, nullable=True) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - indexing_at = db.Column(db.DateTime, nullable=True) - completed_at = db.Column(db.DateTime, nullable=True) - error = db.Column(db.Text, nullable=True) - stopped_at = db.Column(db.DateTime, nullable=True) + hit_count = mapped_column(db.Integer, nullable=False, default=0) + enabled = mapped_column(db.Boolean, nullable=False, server_default=db.text("true")) + disabled_at = mapped_column(db.DateTime, nullable=True) + disabled_by = mapped_column(StringUUID, nullable=True) + status: Mapped[str] = mapped_column(db.String(255), server_default=db.text("'waiting'::character varying")) + created_by = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_by = mapped_column(StringUUID, nullable=True) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + indexing_at = mapped_column(db.DateTime, nullable=True) + completed_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime, nullable=True) + error = mapped_column(db.Text, nullable=True) + stopped_at = mapped_column(db.DateTime, nullable=True) @property def dataset(self): - return db.session.query(Dataset).filter(Dataset.id == self.dataset_id).first() + return db.session.scalar(select(Dataset).where(Dataset.id == self.dataset_id)) @property def document(self): - return db.session.query(Document).filter(Document.id == self.document_id).first() + return 
db.session.scalar(select(Document).where(Document.id == self.document_id)) @property def previous_segment(self): - return ( - db.session.query(DocumentSegment) - .filter(DocumentSegment.document_id == self.document_id, DocumentSegment.position == self.position - 1) - .first() + return db.session.scalar( + select(DocumentSegment).where( + DocumentSegment.document_id == self.document_id, DocumentSegment.position == self.position - 1 + ) ) @property def next_segment(self): - return ( - db.session.query(DocumentSegment) - .filter(DocumentSegment.document_id == self.document_id, DocumentSegment.position == self.position + 1) - .first() + return db.session.scalar( + select(DocumentSegment).where( + DocumentSegment.document_id == self.document_id, DocumentSegment.position == self.position + 1 + ) ) @property @@ -746,7 +749,7 @@ class DocumentSegment(Base): if rules.parent_mode and rules.parent_mode != ParentMode.FULL_DOC: child_chunks = ( db.session.query(ChildChunk) - .filter(ChildChunk.segment_id == self.id) + .where(ChildChunk.segment_id == self.id) .order_by(ChildChunk.position.asc()) .all() ) @@ -763,7 +766,7 @@ class DocumentSegment(Base): if rules.parent_mode: child_chunks = ( db.session.query(ChildChunk) - .filter(ChildChunk.segment_id == self.id) + .where(ChildChunk.segment_id == self.id) .order_by(ChildChunk.position.asc()) .all() ) @@ -832,37 +835,37 @@ class ChildChunk(Base): ) # initial fields - id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - dataset_id = db.Column(StringUUID, nullable=False) - document_id = db.Column(StringUUID, nullable=False) - segment_id = db.Column(StringUUID, nullable=False) - position = db.Column(db.Integer, nullable=False) - content = db.Column(db.Text, nullable=False) - word_count = db.Column(db.Integer, nullable=False) + id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) + tenant_id = 
mapped_column(StringUUID, nullable=False) + dataset_id = mapped_column(StringUUID, nullable=False) + document_id = mapped_column(StringUUID, nullable=False) + segment_id = mapped_column(StringUUID, nullable=False) + position = mapped_column(db.Integer, nullable=False) + content = mapped_column(db.Text, nullable=False) + word_count = mapped_column(db.Integer, nullable=False) # indexing fields - index_node_id = db.Column(db.String(255), nullable=True) - index_node_hash = db.Column(db.String(255), nullable=True) - type = db.Column(db.String(255), nullable=False, server_default=db.text("'automatic'::character varying")) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) - updated_by = db.Column(StringUUID, nullable=True) - updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) - indexing_at = db.Column(db.DateTime, nullable=True) - completed_at = db.Column(db.DateTime, nullable=True) - error = db.Column(db.Text, nullable=True) + index_node_id = mapped_column(db.String(255), nullable=True) + index_node_hash = mapped_column(db.String(255), nullable=True) + type = mapped_column(db.String(255), nullable=False, server_default=db.text("'automatic'::character varying")) + created_by = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + updated_by = mapped_column(StringUUID, nullable=True) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + indexing_at = mapped_column(db.DateTime, nullable=True) + completed_at = mapped_column(db.DateTime, nullable=True) + error = mapped_column(db.Text, nullable=True) @property def dataset(self): - return db.session.query(Dataset).filter(Dataset.id == self.dataset_id).first() + return db.session.query(Dataset).where(Dataset.id == 
self.dataset_id).first() @property def document(self): - return db.session.query(Document).filter(Document.id == self.document_id).first() + return db.session.query(Document).where(Document.id == self.document_id).first() @property def segment(self): - return db.session.query(DocumentSegment).filter(DocumentSegment.id == self.segment_id).first() + return db.session.query(DocumentSegment).where(DocumentSegment.id == self.segment_id).first() class AppDatasetJoin(Base): @@ -872,10 +875,10 @@ class AppDatasetJoin(Base): db.Index("app_dataset_join_app_dataset_idx", "dataset_id", "app_id"), ) - id = db.Column(StringUUID, primary_key=True, nullable=False, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) - dataset_id = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp()) + id = mapped_column(StringUUID, primary_key=True, nullable=False, server_default=db.text("uuid_generate_v4()")) + app_id = mapped_column(StringUUID, nullable=False) + dataset_id = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=db.func.current_timestamp()) @property def app(self): @@ -889,14 +892,14 @@ class DatasetQuery(Base): db.Index("dataset_query_dataset_id_idx", "dataset_id"), ) - id = db.Column(StringUUID, primary_key=True, nullable=False, server_default=db.text("uuid_generate_v4()")) - dataset_id = db.Column(StringUUID, nullable=False) - content = db.Column(db.Text, nullable=False) - source = db.Column(db.String(255), nullable=False) - source_app_id = db.Column(StringUUID, nullable=True) - created_by_role = db.Column(db.String, nullable=False) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp()) + id = mapped_column(StringUUID, primary_key=True, nullable=False, server_default=db.text("uuid_generate_v4()")) + 
dataset_id = mapped_column(StringUUID, nullable=False) + content = mapped_column(db.Text, nullable=False) + source = mapped_column(db.String(255), nullable=False) + source_app_id = mapped_column(StringUUID, nullable=True) + created_by_role = mapped_column(db.String, nullable=False) + created_by = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=db.func.current_timestamp()) class DatasetKeywordTable(Base): @@ -906,10 +909,10 @@ class DatasetKeywordTable(Base): db.Index("dataset_keyword_table_dataset_id_idx", "dataset_id"), ) - id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) - dataset_id = db.Column(StringUUID, nullable=False, unique=True) - keyword_table = db.Column(db.Text, nullable=False) - data_source_type = db.Column( + id = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) + dataset_id = mapped_column(StringUUID, nullable=False, unique=True) + keyword_table = mapped_column(db.Text, nullable=False) + data_source_type = mapped_column( db.String(255), nullable=False, server_default=db.text("'database'::character varying") ) @@ -952,14 +955,14 @@ class Embedding(Base): db.Index("created_at_idx", "created_at"), ) - id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) - model_name = db.Column( + id = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) + model_name = mapped_column( db.String(255), nullable=False, server_default=db.text("'text-embedding-ada-002'::character varying") ) - hash = db.Column(db.String(64), nullable=False) - embedding = db.Column(db.LargeBinary, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - provider_name = db.Column(db.String(255), nullable=False, server_default=db.text("''::character varying")) + hash = mapped_column(db.String(64), nullable=False) + 
embedding = mapped_column(db.LargeBinary, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + provider_name = mapped_column(db.String(255), nullable=False, server_default=db.text("''::character varying")) def set_embedding(self, embedding_data: list[float]): self.embedding = pickle.dumps(embedding_data, protocol=pickle.HIGHEST_PROTOCOL) @@ -975,12 +978,12 @@ class DatasetCollectionBinding(Base): db.Index("provider_model_name_idx", "provider_name", "model_name"), ) - id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) - provider_name = db.Column(db.String(255), nullable=False) - model_name = db.Column(db.String(255), nullable=False) - type = db.Column(db.String(40), server_default=db.text("'dataset'::character varying"), nullable=False) - collection_name = db.Column(db.String(64), nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) + provider_name = mapped_column(db.String(255), nullable=False) + model_name = mapped_column(db.String(255), nullable=False) + type = mapped_column(db.String(40), server_default=db.text("'dataset'::character varying"), nullable=False) + collection_name = mapped_column(db.String(64), nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) class TidbAuthBinding(Base): @@ -992,15 +995,15 @@ class TidbAuthBinding(Base): db.Index("tidb_auth_bindings_created_at_idx", "created_at"), db.Index("tidb_auth_bindings_status_idx", "status"), ) - id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=True) - cluster_id = db.Column(db.String(255), nullable=False) - cluster_name = db.Column(db.String(255), nullable=False) - active = db.Column(db.Boolean, 
nullable=False, server_default=db.text("false")) - status = db.Column(db.String(255), nullable=False, server_default=db.text("CREATING")) - account = db.Column(db.String(255), nullable=False) - password = db.Column(db.String(255), nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=True) + cluster_id = mapped_column(db.String(255), nullable=False) + cluster_name = mapped_column(db.String(255), nullable=False) + active = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) + status = mapped_column(db.String(255), nullable=False, server_default=db.text("CREATING")) + account = mapped_column(db.String(255), nullable=False) + password = mapped_column(db.String(255), nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) class Whitelist(Base): @@ -1009,10 +1012,10 @@ class Whitelist(Base): db.PrimaryKeyConstraint("id", name="whitelists_pkey"), db.Index("whitelists_tenant_idx", "tenant_id"), ) - id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=True) - category = db.Column(db.String(255), nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=True) + category = mapped_column(db.String(255), nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) class DatasetPermission(Base): @@ -1024,12 +1027,12 @@ class DatasetPermission(Base): db.Index("idx_dataset_permissions_tenant_id", "tenant_id"), ) - id = db.Column(StringUUID, 
server_default=db.text("uuid_generate_v4()"), primary_key=True) - dataset_id = db.Column(StringUUID, nullable=False) - account_id = db.Column(StringUUID, nullable=False) - tenant_id = db.Column(StringUUID, nullable=False) - has_permission = db.Column(db.Boolean, nullable=False, server_default=db.text("true")) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"), primary_key=True) + dataset_id = mapped_column(StringUUID, nullable=False) + account_id = mapped_column(StringUUID, nullable=False) + tenant_id = mapped_column(StringUUID, nullable=False) + has_permission = mapped_column(db.Boolean, nullable=False, server_default=db.text("true")) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) class ExternalKnowledgeApis(Base): @@ -1040,15 +1043,15 @@ class ExternalKnowledgeApis(Base): db.Index("external_knowledge_apis_name_idx", "name"), ) - id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) - name = db.Column(db.String(255), nullable=False) - description = db.Column(db.String(255), nullable=False) - tenant_id = db.Column(StringUUID, nullable=False) - settings = db.Column(db.Text, nullable=True) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_by = db.Column(StringUUID, nullable=True) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) + name = mapped_column(db.String(255), nullable=False) + description = mapped_column(db.String(255), nullable=False) + tenant_id = mapped_column(StringUUID, nullable=False) + settings = mapped_column(db.Text, nullable=True) + created_by = mapped_column(StringUUID, nullable=False) + created_at = 
mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_by = mapped_column(StringUUID, nullable=True) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) def to_dict(self): return { @@ -1073,11 +1076,11 @@ class ExternalKnowledgeApis(Base): def dataset_bindings(self): external_knowledge_bindings = ( db.session.query(ExternalKnowledgeBindings) - .filter(ExternalKnowledgeBindings.external_knowledge_api_id == self.id) + .where(ExternalKnowledgeBindings.external_knowledge_api_id == self.id) .all() ) dataset_ids = [binding.dataset_id for binding in external_knowledge_bindings] - datasets = db.session.query(Dataset).filter(Dataset.id.in_(dataset_ids)).all() + datasets = db.session.query(Dataset).where(Dataset.id.in_(dataset_ids)).all() dataset_bindings = [] for dataset in datasets: dataset_bindings.append({"id": dataset.id, "name": dataset.name}) @@ -1095,15 +1098,15 @@ class ExternalKnowledgeBindings(Base): db.Index("external_knowledge_bindings_external_knowledge_api_idx", "external_knowledge_api_id"), ) - id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - external_knowledge_api_id = db.Column(StringUUID, nullable=False) - dataset_id = db.Column(StringUUID, nullable=False) - external_knowledge_id = db.Column(db.Text, nullable=False) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_by = db.Column(StringUUID, nullable=True) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + external_knowledge_api_id = mapped_column(StringUUID, nullable=False) + dataset_id = mapped_column(StringUUID, nullable=False) 
+ external_knowledge_id = mapped_column(db.Text, nullable=False) + created_by = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_by = mapped_column(StringUUID, nullable=True) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) class DatasetAutoDisableLog(Base): @@ -1115,12 +1118,12 @@ class DatasetAutoDisableLog(Base): db.Index("dataset_auto_disable_log_created_atx", "created_at"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - dataset_id = db.Column(StringUUID, nullable=False) - document_id = db.Column(StringUUID, nullable=False) - notified = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + dataset_id = mapped_column(StringUUID, nullable=False) + document_id = mapped_column(StringUUID, nullable=False) + notified = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) + created_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) class RateLimitLog(Base): @@ -1131,11 +1134,11 @@ class RateLimitLog(Base): db.Index("rate_limit_log_operation_idx", "operation"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - subscription_plan = db.Column(db.String(255), nullable=False) - operation = db.Column(db.String(255), nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = 
mapped_column(StringUUID, nullable=False) + subscription_plan = mapped_column(db.String(255), nullable=False) + operation = mapped_column(db.String(255), nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) class DatasetMetadata(Base): @@ -1146,15 +1149,15 @@ class DatasetMetadata(Base): db.Index("dataset_metadata_dataset_idx", "dataset_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - dataset_id = db.Column(StringUUID, nullable=False) - type = db.Column(db.String(255), nullable=False) - name = db.Column(db.String(255), nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) - updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) - created_by = db.Column(StringUUID, nullable=False) - updated_by = db.Column(StringUUID, nullable=True) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + dataset_id = mapped_column(StringUUID, nullable=False) + type = mapped_column(db.String(255), nullable=False) + name = mapped_column(db.String(255), nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + created_by = mapped_column(StringUUID, nullable=False) + updated_by = mapped_column(StringUUID, nullable=True) class DatasetMetadataBinding(Base): @@ -1167,13 +1170,13 @@ class DatasetMetadataBinding(Base): db.Index("dataset_metadata_binding_document_idx", "document_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - dataset_id = db.Column(StringUUID, nullable=False) - metadata_id = 
db.Column(StringUUID, nullable=False) - document_id = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - created_by = db.Column(StringUUID, nullable=False) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + dataset_id = mapped_column(StringUUID, nullable=False) + metadata_id = mapped_column(StringUUID, nullable=False) + document_id = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + created_by = mapped_column(StringUUID, nullable=False) class PipelineBuiltInTemplate(Base): # type: ignore[name-defined] diff --git a/api/models/model.py b/api/models/model.py index 96b4b6fcac..a007a88de4 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -40,8 +40,8 @@ class DifySetup(Base): __tablename__ = "dify_setups" __table_args__ = (db.PrimaryKeyConstraint("version", name="dify_setup_pkey"),) - version = db.Column(db.String(255), nullable=False) - setup_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + version = mapped_column(db.String(255), nullable=False) + setup_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) class AppMode(StrEnum): @@ -76,31 +76,31 @@ class App(Base): __tablename__ = "apps" __table_args__ = (db.PrimaryKeyConstraint("id", name="app_pkey"), db.Index("app_tenant_id_idx", "tenant_id")) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False) - name = db.Column(db.String(255), nullable=False) - description = db.Column(db.Text, nullable=False, server_default=db.text("''::character varying")) - mode: Mapped[str] = mapped_column(db.String(255), nullable=False) - icon_type = db.Column(db.String(255), nullable=True) # image, emoji + id: 
Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id: Mapped[str] = mapped_column(StringUUID) + name: Mapped[str] = mapped_column(db.String(255)) + description: Mapped[str] = mapped_column(db.Text, server_default=db.text("''::character varying")) + mode: Mapped[str] = mapped_column(db.String(255)) + icon_type: Mapped[Optional[str]] = mapped_column(db.String(255)) # image, emoji icon = db.Column(db.String(255)) - icon_background = db.Column(db.String(255)) - app_model_config_id = db.Column(StringUUID, nullable=True) - workflow_id = db.Column(StringUUID, nullable=True) - status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying")) - enable_site = db.Column(db.Boolean, nullable=False) - enable_api = db.Column(db.Boolean, nullable=False) - api_rpm = db.Column(db.Integer, nullable=False, server_default=db.text("0")) - api_rph = db.Column(db.Integer, nullable=False, server_default=db.text("0")) - is_demo = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - is_public = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - is_universal = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - tracing = db.Column(db.Text, nullable=True) - max_active_requests: Mapped[Optional[int]] = mapped_column(nullable=True) - created_by = db.Column(StringUUID, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_by = db.Column(StringUUID, nullable=True) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - use_icon_as_answer_icon = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) + icon_background: Mapped[Optional[str]] = mapped_column(db.String(255)) + app_model_config_id = mapped_column(StringUUID, nullable=True) + workflow_id = mapped_column(StringUUID, nullable=True) + status: Mapped[str] = 
mapped_column(db.String(255), server_default=db.text("'normal'::character varying")) + enable_site: Mapped[bool] = mapped_column(db.Boolean) + enable_api: Mapped[bool] = mapped_column(db.Boolean) + api_rpm: Mapped[int] = mapped_column(db.Integer, server_default=db.text("0")) + api_rph: Mapped[int] = mapped_column(db.Integer, server_default=db.text("0")) + is_demo: Mapped[bool] = mapped_column(db.Boolean, server_default=db.text("false")) + is_public: Mapped[bool] = mapped_column(db.Boolean, server_default=db.text("false")) + is_universal: Mapped[bool] = mapped_column(db.Boolean, server_default=db.text("false")) + tracing = mapped_column(db.Text, nullable=True) + max_active_requests: Mapped[Optional[int]] + created_by = mapped_column(StringUUID, nullable=True) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_by = mapped_column(StringUUID, nullable=True) + updated_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + use_icon_as_answer_icon: Mapped[bool] = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) @property def desc_or_prompt(self): @@ -115,13 +115,13 @@ class App(Base): @property def site(self): - site = db.session.query(Site).filter(Site.app_id == self.id).first() + site = db.session.query(Site).where(Site.app_id == self.id).first() return site @property def app_model_config(self): if self.app_model_config_id: - return db.session.query(AppModelConfig).filter(AppModelConfig.id == self.app_model_config_id).first() + return db.session.query(AppModelConfig).where(AppModelConfig.id == self.app_model_config_id).first() return None @@ -130,7 +130,7 @@ class App(Base): if self.workflow_id: from .workflow import Workflow - return db.session.query(Workflow).filter(Workflow.id == self.workflow_id).first() + return db.session.query(Workflow).where(Workflow.id == self.workflow_id).first() return None @@ -140,7 +140,7 @@ class 
App(Base): @property def tenant(self): - tenant = db.session.query(Tenant).filter(Tenant.id == self.tenant_id).first() + tenant = db.session.query(Tenant).where(Tenant.id == self.tenant_id).first() return tenant @property @@ -284,7 +284,7 @@ class App(Base): tags = ( db.session.query(Tag) .join(TagBinding, Tag.id == TagBinding.tag_id) - .filter( + .where( TagBinding.target_id == self.id, TagBinding.tenant_id == self.tenant_id, Tag.tenant_id == self.tenant_id, @@ -298,7 +298,7 @@ class App(Base): @property def author_name(self): if self.created_by: - account = db.session.query(Account).filter(Account.id == self.created_by).first() + account = db.session.query(Account).where(Account.id == self.created_by).first() if account: return account.name @@ -309,38 +309,38 @@ class AppModelConfig(Base): __tablename__ = "app_model_configs" __table_args__ = (db.PrimaryKeyConstraint("id", name="app_model_config_pkey"), db.Index("app_app_id_idx", "app_id")) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) - provider = db.Column(db.String(255), nullable=True) - model_id = db.Column(db.String(255), nullable=True) - configs = db.Column(db.JSON, nullable=True) - created_by = db.Column(StringUUID, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_by = db.Column(StringUUID, nullable=True) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - opening_statement = db.Column(db.Text) - suggested_questions = db.Column(db.Text) - suggested_questions_after_answer = db.Column(db.Text) - speech_to_text = db.Column(db.Text) - text_to_speech = db.Column(db.Text) - more_like_this = db.Column(db.Text) - model = db.Column(db.Text) - user_input_form = db.Column(db.Text) - dataset_query_variable = db.Column(db.String(255)) - pre_prompt = db.Column(db.Text) - agent_mode = db.Column(db.Text) - sensitive_word_avoidance = 
db.Column(db.Text) - retriever_resource = db.Column(db.Text) - prompt_type = db.Column(db.String(255), nullable=False, server_default=db.text("'simple'::character varying")) - chat_prompt_config = db.Column(db.Text) - completion_prompt_config = db.Column(db.Text) - dataset_configs = db.Column(db.Text) - external_data_tools = db.Column(db.Text) - file_upload = db.Column(db.Text) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + app_id = mapped_column(StringUUID, nullable=False) + provider = mapped_column(db.String(255), nullable=True) + model_id = mapped_column(db.String(255), nullable=True) + configs = mapped_column(db.JSON, nullable=True) + created_by = mapped_column(StringUUID, nullable=True) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_by = mapped_column(StringUUID, nullable=True) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + opening_statement = mapped_column(db.Text) + suggested_questions = mapped_column(db.Text) + suggested_questions_after_answer = mapped_column(db.Text) + speech_to_text = mapped_column(db.Text) + text_to_speech = mapped_column(db.Text) + more_like_this = mapped_column(db.Text) + model = mapped_column(db.Text) + user_input_form = mapped_column(db.Text) + dataset_query_variable = mapped_column(db.String(255)) + pre_prompt = mapped_column(db.Text) + agent_mode = mapped_column(db.Text) + sensitive_word_avoidance = mapped_column(db.Text) + retriever_resource = mapped_column(db.Text) + prompt_type = mapped_column(db.String(255), nullable=False, server_default=db.text("'simple'::character varying")) + chat_prompt_config = mapped_column(db.Text) + completion_prompt_config = mapped_column(db.Text) + dataset_configs = mapped_column(db.Text) + external_data_tools = mapped_column(db.Text) + file_upload = mapped_column(db.Text) @property def app(self): - app = db.session.query(App).filter(App.id == 
self.app_id).first() + app = db.session.query(App).where(App.id == self.app_id).first() return app @property @@ -374,7 +374,7 @@ class AppModelConfig(Base): @property def annotation_reply_dict(self) -> dict: annotation_setting = ( - db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == self.app_id).first() + db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == self.app_id).first() ) if annotation_setting: collection_binding_detail = annotation_setting.collection_binding_detail @@ -563,23 +563,23 @@ class RecommendedApp(Base): db.Index("recommended_app_is_listed_idx", "is_listed", "language"), ) - id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) - description = db.Column(db.JSON, nullable=False) - copyright = db.Column(db.String(255), nullable=False) - privacy_policy = db.Column(db.String(255), nullable=False) + id = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) + app_id = mapped_column(StringUUID, nullable=False) + description = mapped_column(db.JSON, nullable=False) + copyright = mapped_column(db.String(255), nullable=False) + privacy_policy = mapped_column(db.String(255), nullable=False) custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="") - category = db.Column(db.String(255), nullable=False) - position = db.Column(db.Integer, nullable=False, default=0) - is_listed = db.Column(db.Boolean, nullable=False, default=True) - install_count = db.Column(db.Integer, nullable=False, default=0) - language = db.Column(db.String(255), nullable=False, server_default=db.text("'en-US'::character varying")) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + category = mapped_column(db.String(255), nullable=False) + position = 
mapped_column(db.Integer, nullable=False, default=0) + is_listed = mapped_column(db.Boolean, nullable=False, default=True) + install_count = mapped_column(db.Integer, nullable=False, default=0) + language = mapped_column(db.String(255), nullable=False, server_default=db.text("'en-US'::character varying")) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) @property def app(self): - app = db.session.query(App).filter(App.id == self.app_id).first() + app = db.session.query(App).where(App.id == self.app_id).first() return app @@ -592,23 +592,23 @@ class InstalledApp(Base): db.UniqueConstraint("tenant_id", "app_id", name="unique_tenant_app"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - app_id = db.Column(StringUUID, nullable=False) - app_owner_tenant_id = db.Column(StringUUID, nullable=False) - position = db.Column(db.Integer, nullable=False, default=0) - is_pinned = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - last_used_at = db.Column(db.DateTime, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + app_id = mapped_column(StringUUID, nullable=False) + app_owner_tenant_id = mapped_column(StringUUID, nullable=False) + position = mapped_column(db.Integer, nullable=False, default=0) + is_pinned = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) + last_used_at = mapped_column(db.DateTime, nullable=True) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) @property def app(self): - app = db.session.query(App).filter(App.id == self.app_id).first() + app = 
db.session.query(App).where(App.id == self.app_id).first() return app @property def tenant(self): - tenant = db.session.query(Tenant).filter(Tenant.id == self.tenant_id).first() + tenant = db.session.query(Tenant).where(Tenant.id == self.tenant_id).first() return tenant @@ -620,42 +620,42 @@ class Conversation(Base): ) id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) - app_model_config_id = db.Column(StringUUID, nullable=True) - model_provider = db.Column(db.String(255), nullable=True) - override_model_configs = db.Column(db.Text) - model_id = db.Column(db.String(255), nullable=True) + app_id = mapped_column(StringUUID, nullable=False) + app_model_config_id = mapped_column(StringUUID, nullable=True) + model_provider = mapped_column(db.String(255), nullable=True) + override_model_configs = mapped_column(db.Text) + model_id = mapped_column(db.String(255), nullable=True) mode: Mapped[str] = mapped_column(db.String(255)) - name = db.Column(db.String(255), nullable=False) - summary = db.Column(db.Text) + name = mapped_column(db.String(255), nullable=False) + summary = mapped_column(db.Text) _inputs: Mapped[dict] = mapped_column("inputs", db.JSON) - introduction = db.Column(db.Text) - system_instruction = db.Column(db.Text) - system_instruction_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0")) - status = db.Column(db.String(255), nullable=False) + introduction = mapped_column(db.Text) + system_instruction = mapped_column(db.Text) + system_instruction_tokens = mapped_column(db.Integer, nullable=False, server_default=db.text("0")) + status = mapped_column(db.String(255), nullable=False) # The `invoke_from` records how the conversation is created. # # Its value corresponds to the members of `InvokeFrom`. 
# (api/core/app/entities/app_invoke_entities.py) - invoke_from = db.Column(db.String(255), nullable=True) + invoke_from = mapped_column(db.String(255), nullable=True) # ref: ConversationSource. - from_source = db.Column(db.String(255), nullable=False) - from_end_user_id = db.Column(StringUUID) - from_account_id = db.Column(StringUUID) - read_at = db.Column(db.DateTime) - read_account_id = db.Column(StringUUID) + from_source = mapped_column(db.String(255), nullable=False) + from_end_user_id = mapped_column(StringUUID) + from_account_id = mapped_column(StringUUID) + read_at = mapped_column(db.DateTime) + read_account_id = mapped_column(StringUUID) dialogue_count: Mapped[int] = mapped_column(default=0) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) messages = db.relationship("Message", backref="conversation", lazy="select", passive_deletes="all") message_annotations = db.relationship( "MessageAnnotation", backref="conversation", lazy="select", passive_deletes="all" ) - is_deleted = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) + is_deleted = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) @property def inputs(self): @@ -716,7 +716,7 @@ class Conversation(Base): model_config["configs"] = override_model_configs else: app_model_config = ( - db.session.query(AppModelConfig).filter(AppModelConfig.id == self.app_model_config_id).first() + db.session.query(AppModelConfig).where(AppModelConfig.id == self.app_model_config_id).first() ) if app_model_config: model_config = app_model_config.to_dict() @@ -739,21 +739,21 @@ class Conversation(Base): @property def annotated(self): - return 
db.session.query(MessageAnnotation).filter(MessageAnnotation.conversation_id == self.id).count() > 0 + return db.session.query(MessageAnnotation).where(MessageAnnotation.conversation_id == self.id).count() > 0 @property def annotation(self): - return db.session.query(MessageAnnotation).filter(MessageAnnotation.conversation_id == self.id).first() + return db.session.query(MessageAnnotation).where(MessageAnnotation.conversation_id == self.id).first() @property def message_count(self): - return db.session.query(Message).filter(Message.conversation_id == self.id).count() + return db.session.query(Message).where(Message.conversation_id == self.id).count() @property def user_feedback_stats(self): like = ( db.session.query(MessageFeedback) - .filter( + .where( MessageFeedback.conversation_id == self.id, MessageFeedback.from_source == "user", MessageFeedback.rating == "like", @@ -763,7 +763,7 @@ class Conversation(Base): dislike = ( db.session.query(MessageFeedback) - .filter( + .where( MessageFeedback.conversation_id == self.id, MessageFeedback.from_source == "user", MessageFeedback.rating == "dislike", @@ -777,7 +777,7 @@ class Conversation(Base): def admin_feedback_stats(self): like = ( db.session.query(MessageFeedback) - .filter( + .where( MessageFeedback.conversation_id == self.id, MessageFeedback.from_source == "admin", MessageFeedback.rating == "like", @@ -787,7 +787,7 @@ class Conversation(Base): dislike = ( db.session.query(MessageFeedback) - .filter( + .where( MessageFeedback.conversation_id == self.id, MessageFeedback.from_source == "admin", MessageFeedback.rating == "dislike", @@ -799,7 +799,7 @@ class Conversation(Base): @property def status_count(self): - messages = db.session.query(Message).filter(Message.conversation_id == self.id).all() + messages = db.session.query(Message).where(Message.conversation_id == self.id).all() status_counts = { WorkflowExecutionStatus.RUNNING: 0, WorkflowExecutionStatus.SUCCEEDED: 0, @@ -826,19 +826,19 @@ class 
Conversation(Base): def first_message(self): return ( db.session.query(Message) - .filter(Message.conversation_id == self.id) + .where(Message.conversation_id == self.id) .order_by(Message.created_at.asc()) .first() ) @property def app(self): - return db.session.query(App).filter(App.id == self.app_id).first() + return db.session.query(App).where(App.id == self.app_id).first() @property def from_end_user_session_id(self): if self.from_end_user_id: - end_user = db.session.query(EndUser).filter(EndUser.id == self.from_end_user_id).first() + end_user = db.session.query(EndUser).where(EndUser.id == self.from_end_user_id).first() if end_user: return end_user.session_id @@ -847,7 +847,7 @@ class Conversation(Base): @property def from_account_name(self): if self.from_account_id: - account = db.session.query(Account).filter(Account.id == self.from_account_id).first() + account = db.session.query(Account).where(Account.id == self.from_account_id).first() if account: return account.name @@ -898,36 +898,36 @@ class Message(Base): ) id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) - model_provider = db.Column(db.String(255), nullable=True) - model_id = db.Column(db.String(255), nullable=True) - override_model_configs = db.Column(db.Text) - conversation_id = db.Column(StringUUID, db.ForeignKey("conversations.id"), nullable=False) + app_id = mapped_column(StringUUID, nullable=False) + model_provider = mapped_column(db.String(255), nullable=True) + model_id = mapped_column(db.String(255), nullable=True) + override_model_configs = mapped_column(db.Text) + conversation_id = mapped_column(StringUUID, db.ForeignKey("conversations.id"), nullable=False) _inputs: Mapped[dict] = mapped_column("inputs", db.JSON) - query: Mapped[str] = db.Column(db.Text, nullable=False) - message = db.Column(db.JSON, nullable=False) - message_tokens: Mapped[int] = db.Column(db.Integer, nullable=False, 
server_default=db.text("0")) - message_unit_price = db.Column(db.Numeric(10, 4), nullable=False) - message_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) - answer: Mapped[str] = db.Column(db.Text, nullable=False) - answer_tokens: Mapped[int] = db.Column(db.Integer, nullable=False, server_default=db.text("0")) - answer_unit_price = db.Column(db.Numeric(10, 4), nullable=False) - answer_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) - parent_message_id = db.Column(StringUUID, nullable=True) - provider_response_latency = db.Column(db.Float, nullable=False, server_default=db.text("0")) - total_price = db.Column(db.Numeric(10, 7)) - currency = db.Column(db.String(255), nullable=False) - status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying")) - error = db.Column(db.Text) - message_metadata = db.Column(db.Text) - invoke_from: Mapped[Optional[str]] = db.Column(db.String(255), nullable=True) - from_source = db.Column(db.String(255), nullable=False) - from_end_user_id: Mapped[Optional[str]] = db.Column(StringUUID) - from_account_id: Mapped[Optional[str]] = db.Column(StringUUID) + query: Mapped[str] = mapped_column(db.Text, nullable=False) + message = mapped_column(db.JSON, nullable=False) + message_tokens: Mapped[int] = mapped_column(db.Integer, nullable=False, server_default=db.text("0")) + message_unit_price = mapped_column(db.Numeric(10, 4), nullable=False) + message_price_unit = mapped_column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) + answer: Mapped[str] = db.Column(db.Text, nullable=False) # TODO make it mapped_column + answer_tokens: Mapped[int] = mapped_column(db.Integer, nullable=False, server_default=db.text("0")) + answer_unit_price = mapped_column(db.Numeric(10, 4), nullable=False) + answer_price_unit = mapped_column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) + parent_message_id 
= mapped_column(StringUUID, nullable=True) + provider_response_latency = mapped_column(db.Float, nullable=False, server_default=db.text("0")) + total_price = mapped_column(db.Numeric(10, 7)) + currency = mapped_column(db.String(255), nullable=False) + status = mapped_column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying")) + error = mapped_column(db.Text) + message_metadata = mapped_column(db.Text) + invoke_from: Mapped[Optional[str]] = mapped_column(db.String(255), nullable=True) + from_source = mapped_column(db.String(255), nullable=False) + from_end_user_id: Mapped[Optional[str]] = mapped_column(StringUUID) + from_account_id: Mapped[Optional[str]] = mapped_column(StringUUID) created_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - agent_based = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - workflow_run_id: Mapped[str] = db.Column(StringUUID) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + agent_based = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) + workflow_run_id: Mapped[Optional[str]] = mapped_column(StringUUID) @property def inputs(self): @@ -1042,7 +1042,7 @@ class Message(Base): def user_feedback(self): feedback = ( db.session.query(MessageFeedback) - .filter(MessageFeedback.message_id == self.id, MessageFeedback.from_source == "user") + .where(MessageFeedback.message_id == self.id, MessageFeedback.from_source == "user") .first() ) return feedback @@ -1051,30 +1051,30 @@ class Message(Base): def admin_feedback(self): feedback = ( db.session.query(MessageFeedback) - .filter(MessageFeedback.message_id == self.id, MessageFeedback.from_source == "admin") + .where(MessageFeedback.message_id == self.id, MessageFeedback.from_source == "admin") .first() ) return feedback @property def 
feedbacks(self): - feedbacks = db.session.query(MessageFeedback).filter(MessageFeedback.message_id == self.id).all() + feedbacks = db.session.query(MessageFeedback).where(MessageFeedback.message_id == self.id).all() return feedbacks @property def annotation(self): - annotation = db.session.query(MessageAnnotation).filter(MessageAnnotation.message_id == self.id).first() + annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.message_id == self.id).first() return annotation @property def annotation_hit_history(self): annotation_history = ( - db.session.query(AppAnnotationHitHistory).filter(AppAnnotationHitHistory.message_id == self.id).first() + db.session.query(AppAnnotationHitHistory).where(AppAnnotationHitHistory.message_id == self.id).first() ) if annotation_history: annotation = ( db.session.query(MessageAnnotation) - .filter(MessageAnnotation.id == annotation_history.annotation_id) + .where(MessageAnnotation.id == annotation_history.annotation_id) .first() ) return annotation @@ -1082,11 +1082,9 @@ class Message(Base): @property def app_model_config(self): - conversation = db.session.query(Conversation).filter(Conversation.id == self.conversation_id).first() + conversation = db.session.query(Conversation).where(Conversation.id == self.conversation_id).first() if conversation: - return ( - db.session.query(AppModelConfig).filter(AppModelConfig.id == conversation.app_model_config_id).first() - ) + return db.session.query(AppModelConfig).where(AppModelConfig.id == conversation.app_model_config_id).first() return None @@ -1102,7 +1100,7 @@ class Message(Base): def agent_thoughts(self): return ( db.session.query(MessageAgentThought) - .filter(MessageAgentThought.message_id == self.id) + .where(MessageAgentThought.message_id == self.id) .order_by(MessageAgentThought.position.asc()) .all() ) @@ -1115,8 +1113,8 @@ class Message(Base): def message_files(self): from factories import file_factory - message_files = 
db.session.query(MessageFile).filter(MessageFile.message_id == self.id).all() - current_app = db.session.query(App).filter(App.id == self.app_id).first() + message_files = db.session.query(MessageFile).where(MessageFile.message_id == self.id).all() + current_app = db.session.query(App).where(App.id == self.app_id).first() if not current_app: raise ValueError(f"App {self.app_id} not found") @@ -1180,7 +1178,7 @@ class Message(Base): if self.workflow_run_id: from .workflow import WorkflowRun - return db.session.query(WorkflowRun).filter(WorkflowRun.id == self.workflow_run_id).first() + return db.session.query(WorkflowRun).where(WorkflowRun.id == self.workflow_run_id).first() return None @@ -1241,21 +1239,21 @@ class MessageFeedback(Base): db.Index("message_feedback_conversation_idx", "conversation_id", "from_source", "rating"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) - conversation_id = db.Column(StringUUID, nullable=False) - message_id = db.Column(StringUUID, nullable=False) - rating = db.Column(db.String(255), nullable=False) - content = db.Column(db.Text) - from_source = db.Column(db.String(255), nullable=False) - from_end_user_id = db.Column(StringUUID) - from_account_id = db.Column(StringUUID) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + app_id = mapped_column(StringUUID, nullable=False) + conversation_id = mapped_column(StringUUID, nullable=False) + message_id = mapped_column(StringUUID, nullable=False) + rating = mapped_column(db.String(255), nullable=False) + content = mapped_column(db.Text) + from_source = mapped_column(db.String(255), nullable=False) + from_end_user_id = mapped_column(StringUUID) + from_account_id = mapped_column(StringUUID) + 
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) @property def from_account(self): - account = db.session.query(Account).filter(Account.id == self.from_account_id).first() + account = db.session.query(Account).where(Account.id == self.from_account_id).first() return account def to_dict(self): @@ -1303,16 +1301,16 @@ class MessageFile(Base): self.created_by_role = created_by_role.value self.created_by = created_by - id: Mapped[str] = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - message_id: Mapped[str] = db.Column(StringUUID, nullable=False) - type: Mapped[str] = db.Column(db.String(255), nullable=False) - transfer_method: Mapped[str] = db.Column(db.String(255), nullable=False) - url: Mapped[Optional[str]] = db.Column(db.Text, nullable=True) - belongs_to: Mapped[Optional[str]] = db.Column(db.String(255), nullable=True) - upload_file_id: Mapped[Optional[str]] = db.Column(StringUUID, nullable=True) - created_by_role: Mapped[str] = db.Column(db.String(255), nullable=False) - created_by: Mapped[str] = db.Column(StringUUID, nullable=False) - created_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + type: Mapped[str] = mapped_column(db.String(255), nullable=False) + transfer_method: Mapped[str] = mapped_column(db.String(255), nullable=False) + url: Mapped[Optional[str]] = mapped_column(db.Text, nullable=True) + belongs_to: Mapped[Optional[str]] = mapped_column(db.String(255), nullable=True) + upload_file_id: Mapped[Optional[str]] = mapped_column(StringUUID, nullable=True) + created_by_role: Mapped[str] = mapped_column(db.String(255), nullable=False) + created_by: Mapped[str] = 
mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) class MessageAnnotation(Base): @@ -1324,25 +1322,25 @@ class MessageAnnotation(Base): db.Index("message_annotation_message_idx", "message_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) - conversation_id = db.Column(StringUUID, db.ForeignKey("conversations.id"), nullable=True) - message_id = db.Column(StringUUID, nullable=True) + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + app_id: Mapped[str] = mapped_column(StringUUID) + conversation_id: Mapped[Optional[str]] = mapped_column(StringUUID, db.ForeignKey("conversations.id")) + message_id: Mapped[Optional[str]] = mapped_column(StringUUID) question = db.Column(db.Text, nullable=True) - content = db.Column(db.Text, nullable=False) - hit_count = db.Column(db.Integer, nullable=False, server_default=db.text("0")) - account_id = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + content = mapped_column(db.Text, nullable=False) + hit_count = mapped_column(db.Integer, nullable=False, server_default=db.text("0")) + account_id = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) @property def account(self): - account = db.session.query(Account).filter(Account.id == self.account_id).first() + account = db.session.query(Account).where(Account.id == self.account_id).first() return account @property def annotation_create_account(self): - account = 
db.session.query(Account).filter(Account.id == self.account_id).first() + account = db.session.query(Account).where(Account.id == self.account_id).first() return account @@ -1356,31 +1354,31 @@ class AppAnnotationHitHistory(Base): db.Index("app_annotation_hit_histories_message_idx", "message_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) - annotation_id: Mapped[str] = db.Column(StringUUID, nullable=False) - source = db.Column(db.Text, nullable=False) - question = db.Column(db.Text, nullable=False) - account_id = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - score = db.Column(Float, nullable=False, server_default=db.text("0")) - message_id = db.Column(StringUUID, nullable=False) - annotation_question = db.Column(db.Text, nullable=False) - annotation_content = db.Column(db.Text, nullable=False) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + app_id = mapped_column(StringUUID, nullable=False) + annotation_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + source = mapped_column(db.Text, nullable=False) + question = mapped_column(db.Text, nullable=False) + account_id = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + score = mapped_column(Float, nullable=False, server_default=db.text("0")) + message_id = mapped_column(StringUUID, nullable=False) + annotation_question = mapped_column(db.Text, nullable=False) + annotation_content = mapped_column(db.Text, nullable=False) @property def account(self): account = ( db.session.query(Account) .join(MessageAnnotation, MessageAnnotation.account_id == Account.id) - .filter(MessageAnnotation.id == self.annotation_id) + .where(MessageAnnotation.id == self.annotation_id) .first() ) return account @property def 
annotation_create_account(self): - account = db.session.query(Account).filter(Account.id == self.account_id).first() + account = db.session.query(Account).where(Account.id == self.account_id).first() return account @@ -1391,14 +1389,14 @@ class AppAnnotationSetting(Base): db.Index("app_annotation_settings_app_idx", "app_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) - score_threshold = db.Column(Float, nullable=False, server_default=db.text("0")) - collection_binding_id = db.Column(StringUUID, nullable=False) - created_user_id = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_user_id = db.Column(StringUUID, nullable=False) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + app_id = mapped_column(StringUUID, nullable=False) + score_threshold = mapped_column(Float, nullable=False, server_default=db.text("0")) + collection_binding_id = mapped_column(StringUUID, nullable=False) + created_user_id = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_user_id = mapped_column(StringUUID, nullable=False) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) @property def collection_binding_detail(self): @@ -1406,7 +1404,7 @@ class AppAnnotationSetting(Base): collection_binding_detail = ( db.session.query(DatasetCollectionBinding) - .filter(DatasetCollectionBinding.id == self.collection_binding_id) + .where(DatasetCollectionBinding.id == self.collection_binding_id) .first() ) return collection_binding_detail @@ -1419,14 +1417,14 @@ class OperationLog(Base): db.Index("operation_log_account_action_idx", "tenant_id", "account_id", 
"action"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - account_id = db.Column(StringUUID, nullable=False) - action = db.Column(db.String(255), nullable=False) - content = db.Column(db.JSON) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - created_ip = db.Column(db.String(255), nullable=False) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + account_id = mapped_column(StringUUID, nullable=False) + action = mapped_column(db.String(255), nullable=False) + content = mapped_column(db.JSON) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + created_ip = mapped_column(db.String(255), nullable=False) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) class EndUser(Base, UserMixin): @@ -1437,16 +1435,16 @@ class EndUser(Base, UserMixin): db.Index("end_user_tenant_session_id_idx", "tenant_id", "session_id", "type"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False) - app_id = db.Column(StringUUID, nullable=True) - type = db.Column(db.String(255), nullable=False) - external_user_id = db.Column(db.String(255), nullable=True) - name = db.Column(db.String(255)) - is_anonymous = db.Column(db.Boolean, nullable=False, server_default=db.text("true")) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + app_id = mapped_column(StringUUID, nullable=True) + type = mapped_column(db.String(255), nullable=False) + external_user_id = mapped_column(db.String(255), nullable=True) + name = 
mapped_column(db.String(255)) + is_anonymous = mapped_column(db.Boolean, nullable=False, server_default=db.text("true")) session_id: Mapped[str] = mapped_column() - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) class AppMCPServer(Base): @@ -1456,23 +1454,23 @@ class AppMCPServer(Base): db.UniqueConstraint("tenant_id", "app_id", name="unique_app_mcp_server_tenant_app_id"), db.UniqueConstraint("server_code", name="unique_app_mcp_server_server_code"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - app_id = db.Column(StringUUID, nullable=False) - name = db.Column(db.String(255), nullable=False) - description = db.Column(db.String(255), nullable=False) - server_code = db.Column(db.String(255), nullable=False) - status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying")) - parameters = db.Column(db.Text, nullable=False) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + app_id = mapped_column(StringUUID, nullable=False) + name = mapped_column(db.String(255), nullable=False) + description = mapped_column(db.String(255), nullable=False) + server_code = mapped_column(db.String(255), nullable=False) + status = mapped_column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying")) + parameters = mapped_column(db.Text, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, 
server_default=func.current_timestamp()) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) @staticmethod def generate_server_code(n): while True: result = generate_string(n) - while db.session.query(AppMCPServer).filter(AppMCPServer.server_code == result).count() > 0: + while db.session.query(AppMCPServer).where(AppMCPServer.server_code == result).count() > 0: result = generate_string(n) return result @@ -1490,30 +1488,30 @@ class Site(Base): db.Index("site_code_idx", "code", "status"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) - title = db.Column(db.String(255), nullable=False) - icon_type = db.Column(db.String(255), nullable=True) - icon = db.Column(db.String(255)) - icon_background = db.Column(db.String(255)) - description = db.Column(db.Text) - default_language = db.Column(db.String(255), nullable=False) - chat_color_theme = db.Column(db.String(255)) - chat_color_theme_inverted = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - copyright = db.Column(db.String(255)) - privacy_policy = db.Column(db.String(255)) - show_workflow_steps = db.Column(db.Boolean, nullable=False, server_default=db.text("true")) - use_icon_as_answer_icon = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + app_id = mapped_column(StringUUID, nullable=False) + title = mapped_column(db.String(255), nullable=False) + icon_type = mapped_column(db.String(255), nullable=True) + icon = mapped_column(db.String(255)) + icon_background = mapped_column(db.String(255)) + description = mapped_column(db.Text) + default_language = mapped_column(db.String(255), nullable=False) + chat_color_theme = mapped_column(db.String(255)) + chat_color_theme_inverted 
= mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) + copyright = mapped_column(db.String(255)) + privacy_policy = mapped_column(db.String(255)) + show_workflow_steps = mapped_column(db.Boolean, nullable=False, server_default=db.text("true")) + use_icon_as_answer_icon = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) _custom_disclaimer: Mapped[str] = mapped_column("custom_disclaimer", sa.TEXT, default="") - customize_domain = db.Column(db.String(255)) - customize_token_strategy = db.Column(db.String(255), nullable=False) - prompt_public = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying")) - created_by = db.Column(StringUUID, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_by = db.Column(StringUUID, nullable=True) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - code = db.Column(db.String(255)) + customize_domain = mapped_column(db.String(255)) + customize_token_strategy = mapped_column(db.String(255), nullable=False) + prompt_public = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) + status = mapped_column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying")) + created_by = mapped_column(StringUUID, nullable=True) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_by = mapped_column(StringUUID, nullable=True) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + code = mapped_column(db.String(255)) @property def custom_disclaimer(self): @@ -1529,7 +1527,7 @@ class Site(Base): def generate_code(n): while True: result = generate_string(n) - while db.session.query(Site).filter(Site.code == result).count() > 0: + 
while db.session.query(Site).where(Site.code == result).count() > 0: result = generate_string(n) return result @@ -1548,19 +1546,19 @@ class ApiToken(Base): db.Index("api_token_tenant_idx", "tenant_id", "type"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=True) - tenant_id = db.Column(StringUUID, nullable=True) - type = db.Column(db.String(16), nullable=False) - token = db.Column(db.String(255), nullable=False) - last_used_at = db.Column(db.DateTime, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + app_id = mapped_column(StringUUID, nullable=True) + tenant_id = mapped_column(StringUUID, nullable=True) + type = mapped_column(db.String(16), nullable=False) + token = mapped_column(db.String(255), nullable=False) + last_used_at = mapped_column(db.DateTime, nullable=True) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) @staticmethod def generate_api_key(prefix, n): while True: result = prefix + generate_string(n) - if db.session.query(ApiToken).filter(ApiToken.token == result).count() > 0: + if db.session.query(ApiToken).where(ApiToken.token == result).count() > 0: continue return result @@ -1572,23 +1570,23 @@ class UploadFile(Base): db.Index("upload_file_tenant_idx", "tenant_id"), ) - id: Mapped[str] = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False) - storage_type: Mapped[str] = db.Column(db.String(255), nullable=False) - key: Mapped[str] = db.Column(db.String(255), nullable=False) - name: Mapped[str] = db.Column(db.String(255), nullable=False) - size: Mapped[int] = db.Column(db.Integer, nullable=False) - extension: Mapped[str] = db.Column(db.String(255), nullable=False) - mime_type: Mapped[str] = 
db.Column(db.String(255), nullable=True) - created_by_role: Mapped[str] = db.Column( + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + storage_type: Mapped[str] = mapped_column(db.String(255), nullable=False) + key: Mapped[str] = mapped_column(db.String(255), nullable=False) + name: Mapped[str] = mapped_column(db.String(255), nullable=False) + size: Mapped[int] = mapped_column(db.Integer, nullable=False) + extension: Mapped[str] = mapped_column(db.String(255), nullable=False) + mime_type: Mapped[str] = mapped_column(db.String(255), nullable=True) + created_by_role: Mapped[str] = mapped_column( db.String(255), nullable=False, server_default=db.text("'account'::character varying") ) - created_by: Mapped[str] = db.Column(StringUUID, nullable=False) - created_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - used: Mapped[bool] = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - used_by: Mapped[str | None] = db.Column(StringUUID, nullable=True) - used_at: Mapped[datetime | None] = db.Column(db.DateTime, nullable=True) - hash: Mapped[str | None] = db.Column(db.String(255), nullable=True) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + used: Mapped[bool] = mapped_column(db.Boolean, nullable=False, server_default=db.text("false")) + used_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True) + used_at: Mapped[datetime | None] = mapped_column(db.DateTime, nullable=True) + hash: Mapped[str | None] = mapped_column(db.String(255), nullable=True) source_url: Mapped[str] = mapped_column(sa.TEXT, default="") def __init__( @@ -1634,14 +1632,14 @@ class ApiRequest(Base): db.Index("api_request_token_idx", "tenant_id", "api_token_id"), ) - id = 
db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - api_token_id = db.Column(StringUUID, nullable=False) - path = db.Column(db.String(255), nullable=False) - request = db.Column(db.Text, nullable=True) - response = db.Column(db.Text, nullable=True) - ip = db.Column(db.String(255), nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + api_token_id = mapped_column(StringUUID, nullable=False) + path = mapped_column(db.String(255), nullable=False) + request = mapped_column(db.Text, nullable=True) + response = mapped_column(db.Text, nullable=True) + ip = mapped_column(db.String(255), nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) class MessageChain(Base): @@ -1651,12 +1649,12 @@ class MessageChain(Base): db.Index("message_chain_message_id_idx", "message_id"), ) - id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) - message_id = db.Column(StringUUID, nullable=False) - type = db.Column(db.String(255), nullable=False) - input = db.Column(db.Text, nullable=True) - output = db.Column(db.Text, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp()) + id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) + message_id = mapped_column(StringUUID, nullable=False) + type = mapped_column(db.String(255), nullable=False) + input = mapped_column(db.Text, nullable=True) + output = mapped_column(db.Text, nullable=True) + created_at = mapped_column(db.DateTime, nullable=False, server_default=db.func.current_timestamp()) class MessageAgentThought(Base): @@ -1667,34 +1665,34 @@ class 
MessageAgentThought(Base): db.Index("message_agent_thought_message_chain_id_idx", "message_chain_id"), ) - id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) - message_id = db.Column(StringUUID, nullable=False) - message_chain_id = db.Column(StringUUID, nullable=True) - position = db.Column(db.Integer, nullable=False) - thought = db.Column(db.Text, nullable=True) - tool = db.Column(db.Text, nullable=True) - tool_labels_str = db.Column(db.Text, nullable=False, server_default=db.text("'{}'::text")) - tool_meta_str = db.Column(db.Text, nullable=False, server_default=db.text("'{}'::text")) - tool_input = db.Column(db.Text, nullable=True) - observation = db.Column(db.Text, nullable=True) - # plugin_id = db.Column(StringUUID, nullable=True) ## for future design - tool_process_data = db.Column(db.Text, nullable=True) - message = db.Column(db.Text, nullable=True) - message_token = db.Column(db.Integer, nullable=True) - message_unit_price = db.Column(db.Numeric, nullable=True) - message_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) - message_files = db.Column(db.Text, nullable=True) + id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) + message_id = mapped_column(StringUUID, nullable=False) + message_chain_id = mapped_column(StringUUID, nullable=True) + position = mapped_column(db.Integer, nullable=False) + thought = mapped_column(db.Text, nullable=True) + tool = mapped_column(db.Text, nullable=True) + tool_labels_str = mapped_column(db.Text, nullable=False, server_default=db.text("'{}'::text")) + tool_meta_str = mapped_column(db.Text, nullable=False, server_default=db.text("'{}'::text")) + tool_input = mapped_column(db.Text, nullable=True) + observation = mapped_column(db.Text, nullable=True) + # plugin_id = mapped_column(StringUUID, nullable=True) ## for future design + tool_process_data = mapped_column(db.Text, nullable=True) + message = 
mapped_column(db.Text, nullable=True) + message_token = mapped_column(db.Integer, nullable=True) + message_unit_price = mapped_column(db.Numeric, nullable=True) + message_price_unit = mapped_column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) + message_files = mapped_column(db.Text, nullable=True) answer = db.Column(db.Text, nullable=True) - answer_token = db.Column(db.Integer, nullable=True) - answer_unit_price = db.Column(db.Numeric, nullable=True) - answer_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) - tokens = db.Column(db.Integer, nullable=True) - total_price = db.Column(db.Numeric, nullable=True) - currency = db.Column(db.String, nullable=True) - latency = db.Column(db.Float, nullable=True) - created_by_role = db.Column(db.String, nullable=False) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp()) + answer_token = mapped_column(db.Integer, nullable=True) + answer_unit_price = mapped_column(db.Numeric, nullable=True) + answer_price_unit = mapped_column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) + tokens = mapped_column(db.Integer, nullable=True) + total_price = mapped_column(db.Numeric, nullable=True) + currency = mapped_column(db.String, nullable=True) + latency = mapped_column(db.Float, nullable=True) + created_by_role = mapped_column(db.String, nullable=False) + created_by = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=db.func.current_timestamp()) @property def files(self) -> list: @@ -1780,24 +1778,24 @@ class DatasetRetrieverResource(Base): db.Index("dataset_retriever_resource_message_id_idx", "message_id"), ) - id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) - message_id = db.Column(StringUUID, nullable=False) - position = db.Column(db.Integer, 
nullable=False) - dataset_id = db.Column(StringUUID, nullable=False) - dataset_name = db.Column(db.Text, nullable=False) - document_id = db.Column(StringUUID, nullable=True) - document_name = db.Column(db.Text, nullable=False) - data_source_type = db.Column(db.Text, nullable=True) - segment_id = db.Column(StringUUID, nullable=True) - score = db.Column(db.Float, nullable=True) - content = db.Column(db.Text, nullable=False) - hit_count = db.Column(db.Integer, nullable=True) - word_count = db.Column(db.Integer, nullable=True) - segment_position = db.Column(db.Integer, nullable=True) - index_node_hash = db.Column(db.Text, nullable=True) - retriever_from = db.Column(db.Text, nullable=False) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp()) + id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) + message_id = mapped_column(StringUUID, nullable=False) + position = mapped_column(db.Integer, nullable=False) + dataset_id = mapped_column(StringUUID, nullable=False) + dataset_name = mapped_column(db.Text, nullable=False) + document_id = mapped_column(StringUUID, nullable=True) + document_name = mapped_column(db.Text, nullable=False) + data_source_type = mapped_column(db.Text, nullable=True) + segment_id = mapped_column(StringUUID, nullable=True) + score = mapped_column(db.Float, nullable=True) + content = mapped_column(db.Text, nullable=False) + hit_count = mapped_column(db.Integer, nullable=True) + word_count = mapped_column(db.Integer, nullable=True) + segment_position = mapped_column(db.Integer, nullable=True) + index_node_hash = mapped_column(db.Text, nullable=True) + retriever_from = mapped_column(db.Text, nullable=False) + created_by = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=db.func.current_timestamp()) class Tag(Base): @@ -1810,12 +1808,12 @@ class 
Tag(Base): TAG_TYPE_LIST = ["knowledge", "app"] - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=True) - type = db.Column(db.String(16), nullable=False) - name = db.Column(db.String(255), nullable=False) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=True) + type = mapped_column(db.String(16), nullable=False) + name = mapped_column(db.String(255), nullable=False) + created_by = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) class TagBinding(Base): @@ -1826,12 +1824,12 @@ class TagBinding(Base): db.Index("tag_bind_tag_id_idx", "tag_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=True) - tag_id = db.Column(StringUUID, nullable=True) - target_id = db.Column(StringUUID, nullable=True) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=True) + tag_id = mapped_column(StringUUID, nullable=True) + target_id = mapped_column(StringUUID, nullable=True) + created_by = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) class TraceAppConfig(Base): @@ -1841,15 +1839,15 @@ class TraceAppConfig(Base): db.Index("trace_app_config_app_id_idx", "app_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) - tracing_provider = 
db.Column(db.String(255), nullable=True) - tracing_config = db.Column(db.JSON, nullable=True) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column( + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + app_id = mapped_column(StringUUID, nullable=False) + tracing_provider = mapped_column(db.String(255), nullable=True) + tracing_config = mapped_column(db.JSON, nullable=True) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column( db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() ) - is_active = db.Column(db.Boolean, nullable=False, server_default=db.text("true")) + is_active = mapped_column(db.Boolean, nullable=False, server_default=db.text("true")) @property def tracing_config_dict(self): diff --git a/api/models/source.py b/api/models/source.py index f6e0900ae6..100e0d96ef 100644 --- a/api/models/source.py +++ b/api/models/source.py @@ -2,6 +2,7 @@ import json from sqlalchemy import func from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.orm import mapped_column from models.base import Base @@ -17,14 +18,14 @@ class DataSourceOauthBinding(Base): db.Index("source_info_idx", "source_info", postgresql_using="gin"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - access_token = db.Column(db.String(255), nullable=False) - provider = db.Column(db.String(255), nullable=False) - source_info = db.Column(JSONB, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - disabled = db.Column(db.Boolean, nullable=True, server_default=db.text("false")) + id = mapped_column(StringUUID, 
server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + access_token = mapped_column(db.String(255), nullable=False) + provider = mapped_column(db.String(255), nullable=False) + source_info = mapped_column(JSONB, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + disabled = mapped_column(db.Boolean, nullable=True, server_default=db.text("false")) class DataSourceApiKeyAuthBinding(Base): @@ -35,14 +36,14 @@ class DataSourceApiKeyAuthBinding(Base): db.Index("data_source_api_key_auth_binding_provider_idx", "provider"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - category = db.Column(db.String(255), nullable=False) - provider = db.Column(db.String(255), nullable=False) - credentials = db.Column(db.Text, nullable=True) # JSON - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - disabled = db.Column(db.Boolean, nullable=True, server_default=db.text("false")) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = mapped_column(StringUUID, nullable=False) + category = mapped_column(db.String(255), nullable=False) + provider = mapped_column(db.String(255), nullable=False) + credentials = mapped_column(db.Text, nullable=True) # JSON + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + disabled = mapped_column(db.Boolean, nullable=True, server_default=db.text("false")) def to_dict(self): return { diff --git a/api/models/task.py b/api/models/task.py index 
1a4b606ff5..3e5ebd2099 100644 --- a/api/models/task.py +++ b/api/models/task.py @@ -1,4 +1,8 @@ +from datetime import datetime +from typing import Optional + from celery import states # type: ignore +from sqlalchemy.orm import Mapped, mapped_column from libs.datetime_utils import naive_utc_now from models.base import Base @@ -11,23 +15,23 @@ class CeleryTask(Base): __tablename__ = "celery_taskmeta" - id = db.Column(db.Integer, db.Sequence("task_id_sequence"), primary_key=True, autoincrement=True) - task_id = db.Column(db.String(155), unique=True) - status = db.Column(db.String(50), default=states.PENDING) - result = db.Column(db.PickleType, nullable=True) - date_done = db.Column( + id = mapped_column(db.Integer, db.Sequence("task_id_sequence"), primary_key=True, autoincrement=True) + task_id = mapped_column(db.String(155), unique=True) + status = mapped_column(db.String(50), default=states.PENDING) + result = mapped_column(db.PickleType, nullable=True) + date_done = mapped_column( db.DateTime, default=lambda: naive_utc_now(), onupdate=lambda: naive_utc_now(), nullable=True, ) - traceback = db.Column(db.Text, nullable=True) - name = db.Column(db.String(155), nullable=True) - args = db.Column(db.LargeBinary, nullable=True) - kwargs = db.Column(db.LargeBinary, nullable=True) - worker = db.Column(db.String(155), nullable=True) - retries = db.Column(db.Integer, nullable=True) - queue = db.Column(db.String(155), nullable=True) + traceback = mapped_column(db.Text, nullable=True) + name = mapped_column(db.String(155), nullable=True) + args = mapped_column(db.LargeBinary, nullable=True) + kwargs = mapped_column(db.LargeBinary, nullable=True) + worker = mapped_column(db.String(155), nullable=True) + retries = mapped_column(db.Integer, nullable=True) + queue = mapped_column(db.String(155), nullable=True) class CeleryTaskSet(Base): @@ -35,7 +39,9 @@ class CeleryTaskSet(Base): __tablename__ = "celery_tasksetmeta" - id = db.Column(db.Integer, db.Sequence("taskset_id_sequence"), 
autoincrement=True, primary_key=True) - taskset_id = db.Column(db.String(155), unique=True) - result = db.Column(db.PickleType, nullable=True) - date_done = db.Column(db.DateTime, default=lambda: naive_utc_now(), nullable=True) + id: Mapped[int] = mapped_column( + db.Integer, db.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True + ) + taskset_id = mapped_column(db.String(155), unique=True) + result = mapped_column(db.PickleType, nullable=True) + date_done: Mapped[Optional[datetime]] = mapped_column(db.DateTime, default=lambda: naive_utc_now(), nullable=True) diff --git a/api/models/tools.py b/api/models/tools.py index 2f94b4bb87..857a41059d 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -93,6 +93,7 @@ class BuiltinToolProvider(Base): credential_type: Mapped[str] = mapped_column( db.String(32), nullable=False, server_default=db.text("'api-key'::character varying") ) + expires_at: Mapped[int] = mapped_column(db.BigInteger, nullable=False, server_default=db.text("-1")) @property def credentials(self) -> dict: @@ -109,26 +110,26 @@ class ApiToolProvider(Base): db.UniqueConstraint("name", "tenant_id", name="unique_api_tool_provider"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) # name of the api provider - name = db.Column(db.String(255), nullable=False, server_default=db.text("'API KEY 1'::character varying")) + name = mapped_column(db.String(255), nullable=False, server_default=db.text("'API KEY 1'::character varying")) # icon - icon = db.Column(db.String(255), nullable=False) + icon = mapped_column(db.String(255), nullable=False) # original schema - schema = db.Column(db.Text, nullable=False) - schema_type_str: Mapped[str] = db.Column(db.String(40), nullable=False) + schema = mapped_column(db.Text, nullable=False) + schema_type_str: Mapped[str] = mapped_column(db.String(40), nullable=False) # who created this tool - user_id 
= db.Column(StringUUID, nullable=False) + user_id = mapped_column(StringUUID, nullable=False) # tenant id - tenant_id = db.Column(StringUUID, nullable=False) + tenant_id = mapped_column(StringUUID, nullable=False) # description of the provider - description = db.Column(db.Text, nullable=False) + description = mapped_column(db.Text, nullable=False) # json format tools - tools_str = db.Column(db.Text, nullable=False) + tools_str = mapped_column(db.Text, nullable=False) # json format credentials - credentials_str = db.Column(db.Text, nullable=False) + credentials_str = mapped_column(db.Text, nullable=False) # privacy policy - privacy_policy = db.Column(db.String(255), nullable=True) + privacy_policy = mapped_column(db.String(255), nullable=True) # custom_disclaimer custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="") @@ -151,11 +152,11 @@ class ApiToolProvider(Base): def user(self) -> Account | None: if not self.user_id: return None - return db.session.query(Account).filter(Account.id == self.user_id).first() + return db.session.query(Account).where(Account.id == self.user_id).first() @property def tenant(self) -> Tenant | None: - return db.session.query(Tenant).filter(Tenant.id == self.tenant_id).first() + return db.session.query(Tenant).where(Tenant.id == self.tenant_id).first() class ToolLabelBinding(Base): @@ -221,11 +222,11 @@ class WorkflowToolProvider(Base): @property def user(self) -> Account | None: - return db.session.query(Account).filter(Account.id == self.user_id).first() + return db.session.query(Account).where(Account.id == self.user_id).first() @property def tenant(self) -> Tenant | None: - return db.session.query(Tenant).filter(Tenant.id == self.tenant_id).first() + return db.session.query(Tenant).where(Tenant.id == self.tenant_id).first() @property def parameter_configurations(self) -> list[WorkflowToolParameterConfiguration]: @@ -233,7 +234,7 @@ class WorkflowToolProvider(Base): @property def app(self) -> App | None: - return 
db.session.query(App).filter(App.id == self.app_id).first() + return db.session.query(App).where(App.id == self.app_id).first() class MCPToolProvider(Base): @@ -278,11 +279,11 @@ class MCPToolProvider(Base): ) def load_user(self) -> Account | None: - return db.session.query(Account).filter(Account.id == self.user_id).first() + return db.session.query(Account).where(Account.id == self.user_id).first() @property def tenant(self) -> Tenant | None: - return db.session.query(Tenant).filter(Tenant.id == self.tenant_id).first() + return db.session.query(Tenant).where(Tenant.id == self.tenant_id).first() @property def credentials(self) -> dict: @@ -347,33 +348,33 @@ class ToolModelInvoke(Base): __tablename__ = "tool_model_invokes" __table_args__ = (db.PrimaryKeyConstraint("id", name="tool_model_invoke_pkey"),) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) # who invoke this tool - user_id = db.Column(StringUUID, nullable=False) + user_id = mapped_column(StringUUID, nullable=False) # tenant id - tenant_id = db.Column(StringUUID, nullable=False) + tenant_id = mapped_column(StringUUID, nullable=False) # provider - provider = db.Column(db.String(255), nullable=False) + provider = mapped_column(db.String(255), nullable=False) # type - tool_type = db.Column(db.String(40), nullable=False) + tool_type = mapped_column(db.String(40), nullable=False) # tool name - tool_name = db.Column(db.String(128), nullable=False) + tool_name = mapped_column(db.String(128), nullable=False) # invoke parameters - model_parameters = db.Column(db.Text, nullable=False) + model_parameters = mapped_column(db.Text, nullable=False) # prompt messages - prompt_messages = db.Column(db.Text, nullable=False) + prompt_messages = mapped_column(db.Text, nullable=False) # invoke response - model_response = db.Column(db.Text, nullable=False) + model_response = mapped_column(db.Text, nullable=False) - 
prompt_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0")) - answer_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0")) - answer_unit_price = db.Column(db.Numeric(10, 4), nullable=False) - answer_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) - provider_response_latency = db.Column(db.Float, nullable=False, server_default=db.text("0")) - total_price = db.Column(db.Numeric(10, 7)) - currency = db.Column(db.String(255), nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + prompt_tokens = mapped_column(db.Integer, nullable=False, server_default=db.text("0")) + answer_tokens = mapped_column(db.Integer, nullable=False, server_default=db.text("0")) + answer_unit_price = mapped_column(db.Numeric(10, 4), nullable=False) + answer_price_unit = mapped_column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) + provider_response_latency = mapped_column(db.Float, nullable=False, server_default=db.text("0")) + total_price = mapped_column(db.Numeric(10, 7)) + currency = mapped_column(db.String(255), nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) @deprecated @@ -390,18 +391,18 @@ class ToolConversationVariables(Base): db.Index("conversation_id_idx", "conversation_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) # conversation user id - user_id = db.Column(StringUUID, nullable=False) + user_id = mapped_column(StringUUID, nullable=False) # tenant id - tenant_id = db.Column(StringUUID, nullable=False) + tenant_id = mapped_column(StringUUID, 
nullable=False) # conversation id - conversation_id = db.Column(StringUUID, nullable=False) + conversation_id = mapped_column(StringUUID, nullable=False) # variables pool - variables_str = db.Column(db.Text, nullable=False) + variables_str = mapped_column(db.Text, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) @property def variables(self) -> Any: @@ -450,26 +451,26 @@ class DeprecatedPublishedAppTool(Base): db.UniqueConstraint("app_id", "user_id", name="unique_published_app_tool"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) # id of the app - app_id = db.Column(StringUUID, ForeignKey("apps.id"), nullable=False) + app_id = mapped_column(StringUUID, ForeignKey("apps.id"), nullable=False) - user_id: Mapped[str] = db.Column(StringUUID, nullable=False) + user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # who published this tool - description = db.Column(db.Text, nullable=False) + description = mapped_column(db.Text, nullable=False) # llm_description of the tool, for LLM - llm_description = db.Column(db.Text, nullable=False) + llm_description = mapped_column(db.Text, nullable=False) # query description, query will be seem as a parameter of the tool, # to describe this parameter to llm, we need this field - query_description = db.Column(db.Text, nullable=False) + query_description = mapped_column(db.Text, nullable=False) # query name, the name of the query parameter - query_name = db.Column(db.String(40), nullable=False) + query_name = mapped_column(db.String(40), nullable=False) # name of the 
tool provider - tool_name = db.Column(db.String(40), nullable=False) + tool_name = mapped_column(db.String(40), nullable=False) # author - author = db.Column(db.String(40), nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) - updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + author = mapped_column(db.String(40), nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + updated_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) @property def description_i18n(self) -> I18nObject: diff --git a/api/models/web.py b/api/models/web.py index fe2f0c47f8..ce00f4010f 100644 --- a/api/models/web.py +++ b/api/models/web.py @@ -15,16 +15,18 @@ class SavedMessage(Base): db.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) - message_id = db.Column(StringUUID, nullable=False) - created_by_role = db.Column(db.String(255), nullable=False, server_default=db.text("'end_user'::character varying")) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + app_id = mapped_column(StringUUID, nullable=False) + message_id = mapped_column(StringUUID, nullable=False) + created_by_role = mapped_column( + db.String(255), nullable=False, server_default=db.text("'end_user'::character varying") + ) + created_by = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) @property def message(self): - return db.session.query(Message).filter(Message.id == 
self.message_id).first() + return db.session.query(Message).where(Message.id == self.message_id).first() class PinnedConversation(Base): @@ -34,9 +36,11 @@ class PinnedConversation(Base): db.Index("pinned_conversation_conversation_idx", "app_id", "conversation_id", "created_by_role", "created_by"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - app_id = db.Column(StringUUID, nullable=False) + id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + app_id = mapped_column(StringUUID, nullable=False) conversation_id: Mapped[str] = mapped_column(StringUUID) - created_by_role = db.Column(db.String(255), nullable=False, server_default=db.text("'end_user'::character varying")) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + created_by_role = mapped_column( + db.String(255), nullable=False, server_default=db.text("'end_user'::character varying") + ) + created_by = mapped_column(StringUUID, nullable=False) + created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) diff --git a/api/models/workflow.py b/api/models/workflow.py index dd123478f8..9e4484d36a 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -355,7 +355,7 @@ class Workflow(Base): return ( db.session.query(WorkflowToolProvider) - .filter(WorkflowToolProvider.tenant_id == self.tenant_id, WorkflowToolProvider.app_id == self.app_id) + .where(WorkflowToolProvider.tenant_id == self.tenant_id, WorkflowToolProvider.app_id == self.app_id) .count() > 0 ) @@ -579,12 +579,12 @@ class WorkflowRun(Base): from models.model import Message return ( - db.session.query(Message).filter(Message.app_id == self.app_id, Message.workflow_run_id == self.id).first() + db.session.query(Message).where(Message.app_id == self.app_id, Message.workflow_run_id == self.id).first() ) @property def workflow(self): - return 
db.session.query(Workflow).filter(Workflow.id == self.workflow_id).first() + return db.session.query(Workflow).where(Workflow.id == self.workflow_id).first() def to_dict(self): return { diff --git a/api/pyproject.toml b/api/pyproject.toml index 7f1efa671f..7ec8a91198 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "dify-api" -version = "1.6.0" +version = "1.7.0" requires-python = ">=3.11,<3.13" dependencies = [ diff --git a/api/schedule/check_upgradable_plugin_task.py b/api/schedule/check_upgradable_plugin_task.py new file mode 100644 index 0000000000..c1d6018827 --- /dev/null +++ b/api/schedule/check_upgradable_plugin_task.py @@ -0,0 +1,49 @@ +import time + +import click + +import app +from extensions.ext_database import db +from models.account import TenantPluginAutoUpgradeStrategy +from tasks.process_tenant_plugin_autoupgrade_check_task import process_tenant_plugin_autoupgrade_check_task + +AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60 # 15 minutes + + +@app.celery.task(queue="plugin") +def check_upgradable_plugin_task(): + click.echo(click.style("Start check upgradable plugin.", fg="green")) + start_at = time.perf_counter() + + now_seconds_of_day = time.time() % 86400 - 30 # we assume the tz is UTC + click.echo(click.style("Now seconds of day: {}".format(now_seconds_of_day), fg="green")) + + strategies = ( + db.session.query(TenantPluginAutoUpgradeStrategy) + .filter( + TenantPluginAutoUpgradeStrategy.upgrade_time_of_day >= now_seconds_of_day, + TenantPluginAutoUpgradeStrategy.upgrade_time_of_day + < now_seconds_of_day + AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL, + TenantPluginAutoUpgradeStrategy.strategy_setting + != TenantPluginAutoUpgradeStrategy.StrategySetting.DISABLED, + ) + .all() + ) + + for strategy in strategies: + process_tenant_plugin_autoupgrade_check_task.delay( + strategy.tenant_id, + strategy.strategy_setting, + strategy.upgrade_time_of_day, + strategy.upgrade_mode, + strategy.exclude_plugins, + 
strategy.include_plugins, + ) + + end_at = time.perf_counter() + click.echo( + click.style( + "Checked upgradable plugin success latency: {}".format(end_at - start_at), + fg="green", + ) + ) diff --git a/api/schedule/clean_embedding_cache_task.py b/api/schedule/clean_embedding_cache_task.py index 9efe120b7a..024e3d6f50 100644 --- a/api/schedule/clean_embedding_cache_task.py +++ b/api/schedule/clean_embedding_cache_task.py @@ -21,7 +21,7 @@ def clean_embedding_cache_task(): try: embedding_ids = ( db.session.query(Embedding.id) - .filter(Embedding.created_at < thirty_days_ago) + .where(Embedding.created_at < thirty_days_ago) .order_by(Embedding.created_at.desc()) .limit(100) .all() diff --git a/api/schedule/clean_messages.py b/api/schedule/clean_messages.py index d02bc81f33..a6851e36e5 100644 --- a/api/schedule/clean_messages.py +++ b/api/schedule/clean_messages.py @@ -36,7 +36,7 @@ def clean_messages(): # Main query with join and filter messages = ( db.session.query(Message) - .filter(Message.created_at < plan_sandbox_clean_message_day) + .where(Message.created_at < plan_sandbox_clean_message_day) .order_by(Message.created_at.desc()) .limit(100) .all() @@ -66,25 +66,25 @@ def clean_messages(): plan = plan_cache.decode() if plan == "sandbox": # clean related message - db.session.query(MessageFeedback).filter(MessageFeedback.message_id == message.id).delete( + db.session.query(MessageFeedback).where(MessageFeedback.message_id == message.id).delete( synchronize_session=False ) - db.session.query(MessageAnnotation).filter(MessageAnnotation.message_id == message.id).delete( + db.session.query(MessageAnnotation).where(MessageAnnotation.message_id == message.id).delete( synchronize_session=False ) - db.session.query(MessageChain).filter(MessageChain.message_id == message.id).delete( + db.session.query(MessageChain).where(MessageChain.message_id == message.id).delete( synchronize_session=False ) - db.session.query(MessageAgentThought).filter(MessageAgentThought.message_id 
== message.id).delete( + db.session.query(MessageAgentThought).where(MessageAgentThought.message_id == message.id).delete( synchronize_session=False ) - db.session.query(MessageFile).filter(MessageFile.message_id == message.id).delete( + db.session.query(MessageFile).where(MessageFile.message_id == message.id).delete( synchronize_session=False ) - db.session.query(SavedMessage).filter(SavedMessage.message_id == message.id).delete( + db.session.query(SavedMessage).where(SavedMessage.message_id == message.id).delete( synchronize_session=False ) - db.session.query(Message).filter(Message.id == message.id).delete() + db.session.query(Message).where(Message.id == message.id).delete() db.session.commit() end_at = time.perf_counter() click.echo(click.style("Cleaned messages from db success latency: {}".format(end_at - start_at), fg="green")) diff --git a/api/schedule/clean_unused_datasets_task.py b/api/schedule/clean_unused_datasets_task.py index c0cd42a226..72e2e73e65 100644 --- a/api/schedule/clean_unused_datasets_task.py +++ b/api/schedule/clean_unused_datasets_task.py @@ -27,7 +27,7 @@ def clean_unused_datasets_task(): # Subquery for counting new documents document_subquery_new = ( db.session.query(Document.dataset_id, func.count(Document.id).label("document_count")) - .filter( + .where( Document.indexing_status == "completed", Document.enabled == True, Document.archived == False, @@ -40,7 +40,7 @@ def clean_unused_datasets_task(): # Subquery for counting old documents document_subquery_old = ( db.session.query(Document.dataset_id, func.count(Document.id).label("document_count")) - .filter( + .where( Document.indexing_status == "completed", Document.enabled == True, Document.archived == False, @@ -55,7 +55,7 @@ def clean_unused_datasets_task(): select(Dataset) .outerjoin(document_subquery_new, Dataset.id == document_subquery_new.c.dataset_id) .outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id) - .filter( + .where( Dataset.created_at < 
plan_sandbox_clean_day, func.coalesce(document_subquery_new.c.document_count, 0) == 0, func.coalesce(document_subquery_old.c.document_count, 0) > 0, @@ -72,7 +72,7 @@ def clean_unused_datasets_task(): for dataset in datasets: dataset_query = ( db.session.query(DatasetQuery) - .filter(DatasetQuery.created_at > plan_sandbox_clean_day, DatasetQuery.dataset_id == dataset.id) + .where(DatasetQuery.created_at > plan_sandbox_clean_day, DatasetQuery.dataset_id == dataset.id) .all() ) if not dataset_query or len(dataset_query) == 0: @@ -80,7 +80,7 @@ def clean_unused_datasets_task(): # add auto disable log documents = ( db.session.query(Document) - .filter( + .where( Document.dataset_id == dataset.id, Document.enabled == True, Document.archived == False, @@ -99,9 +99,7 @@ def clean_unused_datasets_task(): index_processor.clean(dataset, None) # update document - update_params = {Document.enabled: False} - - db.session.query(Document).filter_by(dataset_id=dataset.id).update(update_params) + db.session.query(Document).filter_by(dataset_id=dataset.id).update({Document.enabled: False}) db.session.commit() click.echo(click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green")) except Exception as e: @@ -113,7 +111,7 @@ def clean_unused_datasets_task(): # Subquery for counting new documents document_subquery_new = ( db.session.query(Document.dataset_id, func.count(Document.id).label("document_count")) - .filter( + .where( Document.indexing_status == "completed", Document.enabled == True, Document.archived == False, @@ -126,7 +124,7 @@ def clean_unused_datasets_task(): # Subquery for counting old documents document_subquery_old = ( db.session.query(Document.dataset_id, func.count(Document.id).label("document_count")) - .filter( + .where( Document.indexing_status == "completed", Document.enabled == True, Document.archived == False, @@ -141,7 +139,7 @@ def clean_unused_datasets_task(): select(Dataset) .outerjoin(document_subquery_new, Dataset.id == 
document_subquery_new.c.dataset_id) .outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id) - .filter( + .where( Dataset.created_at < plan_pro_clean_day, func.coalesce(document_subquery_new.c.document_count, 0) == 0, func.coalesce(document_subquery_old.c.document_count, 0) > 0, @@ -157,7 +155,7 @@ def clean_unused_datasets_task(): for dataset in datasets: dataset_query = ( db.session.query(DatasetQuery) - .filter(DatasetQuery.created_at > plan_pro_clean_day, DatasetQuery.dataset_id == dataset.id) + .where(DatasetQuery.created_at > plan_pro_clean_day, DatasetQuery.dataset_id == dataset.id) .all() ) if not dataset_query or len(dataset_query) == 0: @@ -176,9 +174,7 @@ def clean_unused_datasets_task(): index_processor.clean(dataset, None) # update document - update_params = {Document.enabled: False} - - db.session.query(Document).filter_by(dataset_id=dataset.id).update(update_params) + db.session.query(Document).filter_by(dataset_id=dataset.id).update({Document.enabled: False}) db.session.commit() click.echo( click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green") diff --git a/api/schedule/create_tidb_serverless_task.py b/api/schedule/create_tidb_serverless_task.py index 8a02278de8..91953354e6 100644 --- a/api/schedule/create_tidb_serverless_task.py +++ b/api/schedule/create_tidb_serverless_task.py @@ -20,7 +20,7 @@ def create_tidb_serverless_task(): try: # check the number of idle tidb serverless idle_tidb_serverless_number = ( - db.session.query(TidbAuthBinding).filter(TidbAuthBinding.active == False).count() + db.session.query(TidbAuthBinding).where(TidbAuthBinding.active == False).count() ) if idle_tidb_serverless_number >= tidb_serverless_number: break diff --git a/api/schedule/mail_clean_document_notify_task.py b/api/schedule/mail_clean_document_notify_task.py index 5ee813e1de..5911c98b0a 100644 --- a/api/schedule/mail_clean_document_notify_task.py +++ b/api/schedule/mail_clean_document_notify_task.py 
@@ -3,12 +3,12 @@ import time from collections import defaultdict import click -from flask import render_template # type: ignore import app from configs import dify_config from extensions.ext_database import db from extensions.ext_mail import mail +from libs.email_i18n import EmailType, get_email_i18n_service from models.account import Account, Tenant, TenantAccountJoin from models.dataset import Dataset, DatasetAutoDisableLog from services.feature_service import FeatureService @@ -30,7 +30,7 @@ def mail_clean_document_notify_task(): # send document clean notify mail try: dataset_auto_disable_logs = ( - db.session.query(DatasetAutoDisableLog).filter(DatasetAutoDisableLog.notified == False).all() + db.session.query(DatasetAutoDisableLog).where(DatasetAutoDisableLog.notified == False).all() ) # group by tenant_id dataset_auto_disable_logs_map: dict[str, list[DatasetAutoDisableLog]] = defaultdict(list) @@ -45,7 +45,7 @@ def mail_clean_document_notify_task(): if plan != "sandbox": knowledge_details = [] # check tenant - tenant = db.session.query(Tenant).filter(Tenant.id == tenant_id).first() + tenant = db.session.query(Tenant).where(Tenant.id == tenant_id).first() if not tenant: continue # check current owner @@ -54,7 +54,7 @@ def mail_clean_document_notify_task(): ) if not current_owner_join: continue - account = db.session.query(Account).filter(Account.id == current_owner_join.account_id).first() + account = db.session.query(Account).where(Account.id == current_owner_join.account_id).first() if not account: continue @@ -67,19 +67,21 @@ def mail_clean_document_notify_task(): ) for dataset_id, document_ids in dataset_auto_dataset_map.items(): - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if dataset: document_count = len(document_ids) knowledge_details.append(rf"Knowledge base {dataset.name}: {document_count} documents") if knowledge_details: - html_content = 
render_template( - "clean_document_job_mail_template-US.html", - userName=account.email, - knowledge_details=knowledge_details, - url=url, - ) - mail.send( - to=account.email, subject="Dify Knowledge base auto disable notification", html=html_content + email_service = get_email_i18n_service() + email_service.send_email( + email_type=EmailType.DOCUMENT_CLEAN_NOTIFY, + language_code="en-US", + to=account.email, + template_context={ + "userName": account.email, + "knowledge_details": knowledge_details, + "url": url, + }, ) # update notified to True diff --git a/api/schedule/queue_monitor_task.py b/api/schedule/queue_monitor_task.py index e3a7021b9d..a05e1358ed 100644 --- a/api/schedule/queue_monitor_task.py +++ b/api/schedule/queue_monitor_task.py @@ -3,13 +3,12 @@ from datetime import datetime from urllib.parse import urlparse import click -from flask import render_template from redis import Redis import app from configs import dify_config from extensions.ext_database import db -from extensions.ext_mail import mail +from libs.email_i18n import EmailType, get_email_i18n_service # Create a dedicated Redis connection (using the same configuration as Celery) celery_broker_url = dify_config.CELERY_BROKER_URL @@ -39,18 +38,20 @@ def queue_monitor_task(): alter_emails = dify_config.QUEUE_MONITOR_ALERT_EMAILS if alter_emails: to_list = alter_emails.split(",") + email_service = get_email_i18n_service() for to in to_list: try: current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S") - html_content = render_template( - "queue_monitor_alert_email_template_en-US.html", - queue_name=queue_name, - queue_length=queue_length, - threshold=threshold, - alert_time=current_time, - ) - mail.send( - to=to, subject="Alert: Dataset Queue pending tasks exceeded the limit", html=html_content + email_service.send_email( + email_type=EmailType.QUEUE_MONITOR_ALERT, + language_code="en-US", + to=to, + template_context={ + "queue_name": queue_name, + "queue_length": queue_length, + "threshold": 
threshold, + "alert_time": current_time, + }, ) except Exception as e: logging.exception(click.style("Exception occurred during sending email", fg="red")) diff --git a/api/schedule/update_tidb_serverless_status_task.py b/api/schedule/update_tidb_serverless_status_task.py index ce4ecb6e7c..4d6c1f1877 100644 --- a/api/schedule/update_tidb_serverless_status_task.py +++ b/api/schedule/update_tidb_serverless_status_task.py @@ -17,7 +17,7 @@ def update_tidb_serverless_status_task(): # check the number of idle tidb serverless tidb_serverless_list = ( db.session.query(TidbAuthBinding) - .filter(TidbAuthBinding.active == False, TidbAuthBinding.status == "CREATING") + .where(TidbAuthBinding.active == False, TidbAuthBinding.status == "CREATING") .all() ) if len(tidb_serverless_list) == 0: diff --git a/api/services/account_service.py b/api/services/account_service.py index 352efb2f0c..e11f1580e5 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -29,6 +29,7 @@ from models.account import ( Tenant, TenantAccountJoin, TenantAccountRole, + TenantPluginAutoUpgradeStrategy, TenantStatus, ) from models.model import DifySetup @@ -53,7 +54,10 @@ from services.errors.workspace import WorkSpaceNotAllowedCreateError, Workspaces from services.feature_service import FeatureService from tasks.delete_account_task import delete_account_task from tasks.mail_account_deletion_task import send_account_deletion_verification_code -from tasks.mail_change_mail_task import send_change_mail_task +from tasks.mail_change_mail_task import ( + send_change_mail_completed_notification_task, + send_change_mail_task, +) from tasks.mail_email_code_login import send_email_code_login_mail_task from tasks.mail_invite_member_task import send_invite_member_mail_task from tasks.mail_owner_transfer_task import ( @@ -460,6 +464,22 @@ class AccountService: cls.change_email_rate_limiter.increment_rate_limit(account_email) return token + @classmethod + def 
send_change_email_completed_notify_email( + cls, + account: Optional[Account] = None, + email: Optional[str] = None, + language: Optional[str] = "en-US", + ): + account_email = account.email if account else email + if account_email is None: + raise ValueError("Email must be provided.") + + send_change_mail_completed_notification_task.delay( + language=language, + to=account_email, + ) + @classmethod def send_owner_transfer_email( cls, @@ -642,7 +662,7 @@ class AccountService: ) ) - account = db.session.query(Account).filter(Account.email == email).first() + account = db.session.query(Account).where(Account.email == email).first() if not account: return None @@ -651,6 +671,12 @@ class AccountService: return account + @classmethod + def is_account_in_freeze(cls, email: str) -> bool: + if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(email): + return True + return False + @staticmethod @redis_fallback(default_return=None) def add_login_error_rate_limit(email: str) -> None: @@ -828,6 +854,17 @@ class TenantService: db.session.add(tenant) db.session.commit() + plugin_upgrade_strategy = TenantPluginAutoUpgradeStrategy( + tenant_id=tenant.id, + strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY, + upgrade_time_of_day=0, + upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE, + exclude_plugins=[], + include_plugins=[], + ) + db.session.add(plugin_upgrade_strategy) + db.session.commit() + tenant.encrypt_public_key = generate_key_pair(tenant.id) db.session.commit() return tenant @@ -888,7 +925,7 @@ class TenantService: return ( db.session.query(Tenant) .join(TenantAccountJoin, Tenant.id == TenantAccountJoin.tenant_id) - .filter(TenantAccountJoin.account_id == account.id, Tenant.status == TenantStatus.NORMAL) + .where(TenantAccountJoin.account_id == account.id, Tenant.status == TenantStatus.NORMAL) .all() ) @@ -917,7 +954,7 @@ class TenantService: tenant_account_join = ( db.session.query(TenantAccountJoin) .join(Tenant, 
TenantAccountJoin.tenant_id == Tenant.id) - .filter( + .where( TenantAccountJoin.account_id == account.id, TenantAccountJoin.tenant_id == tenant_id, Tenant.status == TenantStatus.NORMAL, @@ -928,7 +965,7 @@ class TenantService: if not tenant_account_join: raise AccountNotLinkTenantError("Tenant not found or account is not a member of the tenant.") else: - db.session.query(TenantAccountJoin).filter( + db.session.query(TenantAccountJoin).where( TenantAccountJoin.account_id == account.id, TenantAccountJoin.tenant_id != tenant_id ).update({"current": False}) tenant_account_join.current = True @@ -943,7 +980,7 @@ class TenantService: db.session.query(Account, TenantAccountJoin.role) .select_from(Account) .join(TenantAccountJoin, Account.id == TenantAccountJoin.account_id) - .filter(TenantAccountJoin.tenant_id == tenant.id) + .where(TenantAccountJoin.tenant_id == tenant.id) ) # Initialize an empty list to store the updated accounts @@ -962,8 +999,8 @@ class TenantService: db.session.query(Account, TenantAccountJoin.role) .select_from(Account) .join(TenantAccountJoin, Account.id == TenantAccountJoin.account_id) - .filter(TenantAccountJoin.tenant_id == tenant.id) - .filter(TenantAccountJoin.role == "dataset_operator") + .where(TenantAccountJoin.tenant_id == tenant.id) + .where(TenantAccountJoin.role == "dataset_operator") ) # Initialize an empty list to store the updated accounts @@ -983,9 +1020,7 @@ class TenantService: return ( db.session.query(TenantAccountJoin) - .filter( - TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.role.in_([role.value for role in roles]) - ) + .where(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.role.in_([role.value for role in roles])) .first() is not None ) @@ -995,10 +1030,10 @@ class TenantService: """Get the role of the current account for a given tenant""" join = ( db.session.query(TenantAccountJoin) - .filter(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == account.id) + 
.where(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == account.id) .first() ) - return join.role if join else None + return TenantAccountRole(join.role) if join else None @staticmethod def get_tenant_count() -> int: @@ -1262,7 +1297,7 @@ class RegisterService: tenant = ( db.session.query(Tenant) - .filter(Tenant.id == invitation_data["workspace_id"], Tenant.status == "normal") + .where(Tenant.id == invitation_data["workspace_id"], Tenant.status == "normal") .first() ) @@ -1272,7 +1307,7 @@ class RegisterService: tenant_account = ( db.session.query(Account, TenantAccountJoin.role) .join(TenantAccountJoin, Account.id == TenantAccountJoin.account_id) - .filter(Account.email == invitation_data["email"], TenantAccountJoin.tenant_id == tenant.id) + .where(Account.email == invitation_data["email"], TenantAccountJoin.tenant_id == tenant.id) .first() ) diff --git a/api/services/agent_service.py b/api/services/agent_service.py index 503b31ede2..7c6df2428f 100644 --- a/api/services/agent_service.py +++ b/api/services/agent_service.py @@ -25,7 +25,7 @@ class AgentService: conversation: Conversation | None = ( db.session.query(Conversation) - .filter( + .where( Conversation.id == conversation_id, Conversation.app_id == app_model.id, ) @@ -37,7 +37,7 @@ class AgentService: message: Optional[Message] = ( db.session.query(Message) - .filter( + .where( Message.id == message_id, Message.conversation_id == conversation_id, ) @@ -52,12 +52,10 @@ class AgentService: if conversation.from_end_user_id: # only select name field executor = ( - db.session.query(EndUser, EndUser.name).filter(EndUser.id == conversation.from_end_user_id).first() + db.session.query(EndUser, EndUser.name).where(EndUser.id == conversation.from_end_user_id).first() ) else: - executor = ( - db.session.query(Account, Account.name).filter(Account.id == conversation.from_account_id).first() - ) + executor = db.session.query(Account, Account.name).where(Account.id == 
conversation.from_account_id).first() if executor: executor = executor.name diff --git a/api/services/annotation_service.py b/api/services/annotation_service.py index 8c950abc24..7cb0b46517 100644 --- a/api/services/annotation_service.py +++ b/api/services/annotation_service.py @@ -26,7 +26,7 @@ class AppAnnotationService: # get app info app = ( db.session.query(App) - .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") .first() ) @@ -35,7 +35,7 @@ class AppAnnotationService: if args.get("message_id"): message_id = str(args["message_id"]) # get message info - message = db.session.query(Message).filter(Message.id == message_id, Message.app_id == app.id).first() + message = db.session.query(Message).where(Message.id == message_id, Message.app_id == app.id).first() if not message: raise NotFound("Message Not Exists.") @@ -61,9 +61,7 @@ class AppAnnotationService: db.session.add(annotation) db.session.commit() # if annotation reply is enabled , add annotation to index - annotation_setting = ( - db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first() - ) + annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() if annotation_setting: add_annotation_to_index_task.delay( annotation.id, @@ -117,7 +115,7 @@ class AppAnnotationService: # get app info app = ( db.session.query(App) - .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") .first() ) @@ -126,8 +124,8 @@ class AppAnnotationService: if keyword: stmt = ( select(MessageAnnotation) - .filter(MessageAnnotation.app_id == app_id) - .filter( + .where(MessageAnnotation.app_id == app_id) + .where( or_( 
MessageAnnotation.question.ilike("%{}%".format(keyword)), MessageAnnotation.content.ilike("%{}%".format(keyword)), @@ -138,7 +136,7 @@ class AppAnnotationService: else: stmt = ( select(MessageAnnotation) - .filter(MessageAnnotation.app_id == app_id) + .where(MessageAnnotation.app_id == app_id) .order_by(MessageAnnotation.created_at.desc(), MessageAnnotation.id.desc()) ) annotations = db.paginate(select=stmt, page=page, per_page=limit, max_per_page=100, error_out=False) @@ -149,7 +147,7 @@ class AppAnnotationService: # get app info app = ( db.session.query(App) - .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") .first() ) @@ -157,7 +155,7 @@ class AppAnnotationService: raise NotFound("App not found") annotations = ( db.session.query(MessageAnnotation) - .filter(MessageAnnotation.app_id == app_id) + .where(MessageAnnotation.app_id == app_id) .order_by(MessageAnnotation.created_at.desc()) .all() ) @@ -168,7 +166,7 @@ class AppAnnotationService: # get app info app = ( db.session.query(App) - .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") .first() ) @@ -181,9 +179,7 @@ class AppAnnotationService: db.session.add(annotation) db.session.commit() # if annotation reply is enabled , add annotation to index - annotation_setting = ( - db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first() - ) + annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() if annotation_setting: add_annotation_to_index_task.delay( annotation.id, @@ -199,14 +195,14 @@ class AppAnnotationService: # get app info app = ( db.session.query(App) - .filter(App.id == app_id, App.tenant_id == 
current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") .first() ) if not app: raise NotFound("App not found") - annotation = db.session.query(MessageAnnotation).filter(MessageAnnotation.id == annotation_id).first() + annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).first() if not annotation: raise NotFound("Annotation not found") @@ -217,7 +213,7 @@ class AppAnnotationService: db.session.commit() # if annotation reply is enabled , add annotation to index app_annotation_setting = ( - db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first() + db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() ) if app_annotation_setting: @@ -236,14 +232,14 @@ class AppAnnotationService: # get app info app = ( db.session.query(App) - .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") .first() ) if not app: raise NotFound("App not found") - annotation = db.session.query(MessageAnnotation).filter(MessageAnnotation.id == annotation_id).first() + annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).first() if not annotation: raise NotFound("Annotation not found") @@ -252,7 +248,7 @@ class AppAnnotationService: annotation_hit_histories = ( db.session.query(AppAnnotationHitHistory) - .filter(AppAnnotationHitHistory.annotation_id == annotation_id) + .where(AppAnnotationHitHistory.annotation_id == annotation_id) .all() ) if annotation_hit_histories: @@ -262,7 +258,7 @@ class AppAnnotationService: db.session.commit() # if annotation reply is enabled , delete annotation index app_annotation_setting = ( - db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == 
app_id).first() + db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() ) if app_annotation_setting: @@ -275,7 +271,7 @@ class AppAnnotationService: # get app info app = ( db.session.query(App) - .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") .first() ) @@ -314,21 +310,21 @@ class AppAnnotationService: # get app info app = ( db.session.query(App) - .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") .first() ) if not app: raise NotFound("App not found") - annotation = db.session.query(MessageAnnotation).filter(MessageAnnotation.id == annotation_id).first() + annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).first() if not annotation: raise NotFound("Annotation not found") stmt = ( select(AppAnnotationHitHistory) - .filter( + .where( AppAnnotationHitHistory.app_id == app_id, AppAnnotationHitHistory.annotation_id == annotation_id, ) @@ -341,7 +337,7 @@ class AppAnnotationService: @classmethod def get_annotation_by_id(cls, annotation_id: str) -> MessageAnnotation | None: - annotation = db.session.query(MessageAnnotation).filter(MessageAnnotation.id == annotation_id).first() + annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).first() if not annotation: return None @@ -361,7 +357,7 @@ class AppAnnotationService: score: float, ): # add hit count to annotation - db.session.query(MessageAnnotation).filter(MessageAnnotation.id == annotation_id).update( + db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).update( {MessageAnnotation.hit_count: MessageAnnotation.hit_count + 1}, synchronize_session=False ) @@ -384,16 +380,14 @@ 
class AppAnnotationService: # get app info app = ( db.session.query(App) - .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") .first() ) if not app: raise NotFound("App not found") - annotation_setting = ( - db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first() - ) + annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() if annotation_setting: collection_binding_detail = annotation_setting.collection_binding_detail return { @@ -412,7 +406,7 @@ class AppAnnotationService: # get app info app = ( db.session.query(App) - .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") .first() ) @@ -421,7 +415,7 @@ class AppAnnotationService: annotation_setting = ( db.session.query(AppAnnotationSetting) - .filter( + .where( AppAnnotationSetting.app_id == app_id, AppAnnotationSetting.id == annotation_setting_id, ) diff --git a/api/services/api_based_extension_service.py b/api/services/api_based_extension_service.py index 601d67d2fb..457c91e5c0 100644 --- a/api/services/api_based_extension_service.py +++ b/api/services/api_based_extension_service.py @@ -73,7 +73,7 @@ class APIBasedExtensionService: db.session.query(APIBasedExtension) .filter_by(tenant_id=extension_data.tenant_id) .filter_by(name=extension_data.name) - .filter(APIBasedExtension.id != extension_data.id) + .where(APIBasedExtension.id != extension_data.id) .first() ) diff --git a/api/services/app_dsl_service.py b/api/services/app_dsl_service.py index 08e13c588e..fe0efd061d 100644 --- a/api/services/app_dsl_service.py +++ b/api/services/app_dsl_service.py @@ -41,7 +41,7 @@ IMPORT_INFO_REDIS_KEY_PREFIX = "app_import_info:" 
CHECK_DEPENDENCIES_REDIS_KEY_PREFIX = "app_check_dependencies:" IMPORT_INFO_REDIS_EXPIRY = 10 * 60 # 10 minutes DSL_MAX_SIZE = 10 * 1024 * 1024 # 10MB -CURRENT_DSL_VERSION = "0.3.0" +CURRENT_DSL_VERSION = "0.3.1" class ImportMode(StrEnum): diff --git a/api/services/app_generate_service.py b/api/services/app_generate_service.py index 245c123a04..6f7e705b52 100644 --- a/api/services/app_generate_service.py +++ b/api/services/app_generate_service.py @@ -129,11 +129,25 @@ class AppGenerateService: rate_limit.exit(request_id) @staticmethod - def _get_max_active_requests(app_model: App) -> int: - max_active_requests = app_model.max_active_requests - if max_active_requests is None: - max_active_requests = int(dify_config.APP_MAX_ACTIVE_REQUESTS) - return max_active_requests + def _get_max_active_requests(app: App) -> int: + """ + Get the maximum number of active requests allowed for an app. + + Returns the smaller value between app's custom limit and global config limit. + A value of 0 means infinite (no limit). 
+ + Args: + app: The App model instance + + Returns: + The maximum number of active requests allowed + """ + app_limit = app.max_active_requests or 0 + config_limit = dify_config.APP_MAX_ACTIVE_REQUESTS + + # Filter out infinite (0) values and return the minimum, or 0 if both are infinite + limits = [limit for limit in [app_limit, config_limit] if limit > 0] + return min(limits) if limits else 0 @classmethod def generate_single_iteration(cls, app_model: App, user: Account, node_id: str, args: Any, streaming: bool = True): diff --git a/api/services/app_service.py b/api/services/app_service.py index 3494b2796b..0b6b85bcb2 100644 --- a/api/services/app_service.py +++ b/api/services/app_service.py @@ -1,6 +1,6 @@ import json import logging -from typing import Optional, cast +from typing import Optional, TypedDict, cast from flask_login import current_user from flask_sqlalchemy.pagination import Pagination @@ -220,18 +220,27 @@ class AppService: return app - def update_app(self, app: App, args: dict) -> App: + class ArgsDict(TypedDict): + name: str + description: str + icon_type: str + icon: str + icon_background: str + use_icon_as_answer_icon: bool + max_active_requests: int + + def update_app(self, app: App, args: ArgsDict) -> App: """ Update app :param app: App instance :param args: request args :return: App instance """ - app.name = args.get("name") - app.description = args.get("description", "") - app.icon_type = args.get("icon_type", "emoji") - app.icon = args.get("icon") - app.icon_background = args.get("icon_background") + app.name = args["name"] + app.description = args["description"] + app.icon_type = args["icon_type"] + app.icon = args["icon"] + app.icon_background = args["icon_background"] app.use_icon_as_answer_icon = args.get("use_icon_as_answer_icon", False) app.max_active_requests = args.get("max_active_requests") app.updated_by = current_user.id @@ -373,7 +382,7 @@ class AppService: elif provider_type == "api": try: provider: Optional[ApiToolProvider] = 
( - db.session.query(ApiToolProvider).filter(ApiToolProvider.id == provider_id).first() + db.session.query(ApiToolProvider).where(ApiToolProvider.id == provider_id).first() ) if provider is None: raise ValueError(f"provider not found for tool {tool_name}") @@ -390,7 +399,7 @@ class AppService: :param app_id: app id :return: app code """ - site = db.session.query(Site).filter(Site.app_id == app_id).first() + site = db.session.query(Site).where(Site.app_id == app_id).first() if not site: raise ValueError(f"App with id {app_id} not found") return str(site.code) @@ -402,7 +411,7 @@ class AppService: :param app_code: app code :return: app id """ - site = db.session.query(Site).filter(Site.code == app_code).first() + site = db.session.query(Site).where(Site.code == app_code).first() if not site: raise ValueError(f"App with code {app_code} not found") return str(site.app_id) diff --git a/api/services/audio_service.py b/api/services/audio_service.py index e8923eb51b..0084eebb32 100644 --- a/api/services/audio_service.py +++ b/api/services/audio_service.py @@ -135,7 +135,7 @@ class AudioService: uuid.UUID(message_id) except ValueError: return None - message = db.session.query(Message).filter(Message.id == message_id).first() + message = db.session.query(Message).where(Message.id == message_id).first() if message is None: return None if message.answer == "" and message.status == MessageStatus.NORMAL: diff --git a/api/services/auth/api_key_auth_service.py b/api/services/auth/api_key_auth_service.py index e5f4a3ef6e..996e9187f3 100644 --- a/api/services/auth/api_key_auth_service.py +++ b/api/services/auth/api_key_auth_service.py @@ -11,7 +11,7 @@ class ApiKeyAuthService: def get_provider_auth_list(tenant_id: str) -> list: data_source_api_key_bindings = ( db.session.query(DataSourceApiKeyAuthBinding) - .filter(DataSourceApiKeyAuthBinding.tenant_id == tenant_id, DataSourceApiKeyAuthBinding.disabled.is_(False)) + .where(DataSourceApiKeyAuthBinding.tenant_id == tenant_id, 
DataSourceApiKeyAuthBinding.disabled.is_(False)) .all() ) return data_source_api_key_bindings @@ -36,7 +36,7 @@ class ApiKeyAuthService: def get_auth_credentials(tenant_id: str, category: str, provider: str): data_source_api_key_bindings = ( db.session.query(DataSourceApiKeyAuthBinding) - .filter( + .where( DataSourceApiKeyAuthBinding.tenant_id == tenant_id, DataSourceApiKeyAuthBinding.category == category, DataSourceApiKeyAuthBinding.provider == provider, @@ -53,7 +53,7 @@ class ApiKeyAuthService: def delete_provider_auth(tenant_id: str, binding_id: str): data_source_api_key_binding = ( db.session.query(DataSourceApiKeyAuthBinding) - .filter(DataSourceApiKeyAuthBinding.tenant_id == tenant_id, DataSourceApiKeyAuthBinding.id == binding_id) + .where(DataSourceApiKeyAuthBinding.tenant_id == tenant_id, DataSourceApiKeyAuthBinding.id == binding_id) .first() ) if data_source_api_key_binding: diff --git a/api/services/billing_service.py b/api/services/billing_service.py index d44483ad89..5a12aa2e54 100644 --- a/api/services/billing_service.py +++ b/api/services/billing_service.py @@ -75,14 +75,14 @@ class BillingService: join: Optional[TenantAccountJoin] = ( db.session.query(TenantAccountJoin) - .filter(TenantAccountJoin.tenant_id == tenant_id, TenantAccountJoin.account_id == current_user.id) + .where(TenantAccountJoin.tenant_id == tenant_id, TenantAccountJoin.account_id == current_user.id) .first() ) if not join: raise ValueError("Tenant account join not found") - if not TenantAccountRole.is_privileged_role(join.role): + if not TenantAccountRole.is_privileged_role(TenantAccountRole(join.role)): raise ValueError("Only team owner or team admin can perform this action") @classmethod diff --git a/api/services/clear_free_plan_tenant_expired_logs.py b/api/services/clear_free_plan_tenant_expired_logs.py index ddd16b2e0c..ad9b750d40 100644 --- a/api/services/clear_free_plan_tenant_expired_logs.py +++ b/api/services/clear_free_plan_tenant_expired_logs.py @@ -24,13 +24,13 @@ class 
ClearFreePlanTenantExpiredLogs: @classmethod def process_tenant(cls, flask_app: Flask, tenant_id: str, days: int, batch: int): with flask_app.app_context(): - apps = db.session.query(App).filter(App.tenant_id == tenant_id).all() + apps = db.session.query(App).where(App.tenant_id == tenant_id).all() app_ids = [app.id for app in apps] while True: with Session(db.engine).no_autoflush as session: messages = ( session.query(Message) - .filter( + .where( Message.app_id.in_(app_ids), Message.created_at < datetime.datetime.now() - datetime.timedelta(days=days), ) @@ -54,7 +54,7 @@ class ClearFreePlanTenantExpiredLogs: message_ids = [message.id for message in messages] # delete messages - session.query(Message).filter( + session.query(Message).where( Message.id.in_(message_ids), ).delete(synchronize_session=False) @@ -70,7 +70,7 @@ class ClearFreePlanTenantExpiredLogs: with Session(db.engine).no_autoflush as session: conversations = ( session.query(Conversation) - .filter( + .where( Conversation.app_id.in_(app_ids), Conversation.updated_at < datetime.datetime.now() - datetime.timedelta(days=days), ) @@ -93,7 +93,7 @@ class ClearFreePlanTenantExpiredLogs: ) conversation_ids = [conversation.id for conversation in conversations] - session.query(Conversation).filter( + session.query(Conversation).where( Conversation.id.in_(conversation_ids), ).delete(synchronize_session=False) session.commit() @@ -276,7 +276,7 @@ class ClearFreePlanTenantExpiredLogs: for test_interval in test_intervals: tenant_count = ( session.query(Tenant.id) - .filter(Tenant.created_at.between(current_time, current_time + test_interval)) + .where(Tenant.created_at.between(current_time, current_time + test_interval)) .count() ) if tenant_count <= 100: @@ -301,7 +301,7 @@ class ClearFreePlanTenantExpiredLogs: rs = ( session.query(Tenant.id) - .filter(Tenant.created_at.between(current_time, batch_end)) + .where(Tenant.created_at.between(current_time, batch_end)) .order_by(Tenant.created_at) ) diff --git 
a/api/services/conversation_service.py b/api/services/conversation_service.py index 40097d5ed5..525c87fe4a 100644 --- a/api/services/conversation_service.py +++ b/api/services/conversation_service.py @@ -123,7 +123,7 @@ class ConversationService: # get conversation first message message = ( db.session.query(Message) - .filter(Message.app_id == app_model.id, Message.conversation_id == conversation.id) + .where(Message.app_id == app_model.id, Message.conversation_id == conversation.id) .order_by(Message.created_at.asc()) .first() ) @@ -148,7 +148,7 @@ class ConversationService: def get_conversation(cls, app_model: App, conversation_id: str, user: Optional[Union[Account, EndUser]]): conversation = ( db.session.query(Conversation) - .filter( + .where( Conversation.id == conversation_id, Conversation.app_id == app_model.id, Conversation.from_source == ("api" if isinstance(user, EndUser) else "console"), diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index 924006e601..1183d77656 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -86,7 +86,7 @@ from tasks.sync_website_document_indexing_task import sync_website_document_inde class DatasetService: @staticmethod def get_datasets(page, per_page, tenant_id=None, user=None, search=None, tag_ids=None, include_all=False): - query = select(Dataset).filter(Dataset.tenant_id == tenant_id).order_by(Dataset.created_at.desc()) + query = select(Dataset).where(Dataset.tenant_id == tenant_id).order_by(Dataset.created_at.desc()) if user: # get permitted dataset ids @@ -98,14 +98,14 @@ class DatasetService: if user.current_role == TenantAccountRole.DATASET_OPERATOR: # only show datasets that the user has permission to access if permitted_dataset_ids: - query = query.filter(Dataset.id.in_(permitted_dataset_ids)) + query = query.where(Dataset.id.in_(permitted_dataset_ids)) else: return [], 0 else: if user.current_role != TenantAccountRole.OWNER or not include_all: # show all 
datasets that the user has permission to access if permitted_dataset_ids: - query = query.filter( + query = query.where( db.or_( Dataset.permission == DatasetPermissionEnum.ALL_TEAM, db.and_( @@ -118,7 +118,7 @@ class DatasetService: ) ) else: - query = query.filter( + query = query.where( db.or_( Dataset.permission == DatasetPermissionEnum.ALL_TEAM, db.and_( @@ -128,15 +128,15 @@ class DatasetService: ) else: # if no user, only show datasets that are shared with all team members - query = query.filter(Dataset.permission == DatasetPermissionEnum.ALL_TEAM) + query = query.where(Dataset.permission == DatasetPermissionEnum.ALL_TEAM) if search: - query = query.filter(Dataset.name.ilike(f"%{search}%")) + query = query.where(Dataset.name.ilike(f"%{search}%")) if tag_ids: target_ids = TagService.get_target_ids_by_tag_ids("knowledge", tenant_id, tag_ids) if target_ids: - query = query.filter(Dataset.id.in_(target_ids)) + query = query.where(Dataset.id.in_(target_ids)) else: return [], 0 @@ -149,7 +149,7 @@ class DatasetService: # get the latest process rule dataset_process_rule = ( db.session.query(DatasetProcessRule) - .filter(DatasetProcessRule.dataset_id == dataset_id) + .where(DatasetProcessRule.dataset_id == dataset_id) .order_by(DatasetProcessRule.created_at.desc()) .limit(1) .one_or_none() @@ -164,7 +164,7 @@ class DatasetService: @staticmethod def get_datasets_by_ids(ids, tenant_id): - stmt = select(Dataset).filter(Dataset.id.in_(ids), Dataset.tenant_id == tenant_id) + stmt = select(Dataset).where(Dataset.id.in_(ids), Dataset.tenant_id == tenant_id) datasets = db.paginate(select=stmt, page=1, per_page=len(ids), max_per_page=len(ids), error_out=False) @@ -221,9 +221,9 @@ class DatasetService: dataset.created_by = account.id dataset.updated_by = account.id dataset.tenant_id = tenant_id - dataset.embedding_model_provider = embedding_model.provider if embedding_model else None - dataset.embedding_model = embedding_model.model if embedding_model else None - 
dataset.retrieval_model = retrieval_model.model_dump() if retrieval_model else None + dataset.embedding_model_provider = embedding_model.provider if embedding_model else None # type: ignore + dataset.embedding_model = embedding_model.model if embedding_model else None # type: ignore + dataset.retrieval_model = retrieval_model.model_dump() if retrieval_model else None # type: ignore dataset.permission = permission or DatasetPermissionEnum.ONLY_ME dataset.provider = provider db.session.add(dataset) @@ -878,7 +878,7 @@ class DatasetService: def get_related_apps(dataset_id: str): return ( db.session.query(AppDatasetJoin) - .filter(AppDatasetJoin.dataset_id == dataset_id) + .where(AppDatasetJoin.dataset_id == dataset_id) .order_by(db.desc(AppDatasetJoin.created_at)) .all() ) @@ -895,7 +895,7 @@ class DatasetService: start_date = datetime.datetime.now() - datetime.timedelta(days=30) dataset_auto_disable_logs = ( db.session.query(DatasetAutoDisableLog) - .filter( + .where( DatasetAutoDisableLog.dataset_id == dataset_id, DatasetAutoDisableLog.created_at >= start_date, ) @@ -1024,7 +1024,7 @@ class DocumentService: def get_document(dataset_id: str, document_id: Optional[str] = None) -> Optional[Document]: if document_id: document = ( - db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first() + db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() ) return document else: @@ -1032,7 +1032,7 @@ class DocumentService: @staticmethod def get_document_by_id(document_id: str) -> Optional[Document]: - document = db.session.query(Document).filter(Document.id == document_id).first() + document = db.session.query(Document).where(Document.id == document_id).first() return document @@ -1040,7 +1040,7 @@ class DocumentService: def get_document_by_ids(document_ids: list[str]) -> list[Document]: documents = ( db.session.query(Document) - .filter( + .where( Document.id.in_(document_ids), 
Document.enabled == True, Document.indexing_status == "completed", @@ -1054,7 +1054,7 @@ class DocumentService: def get_document_by_dataset_id(dataset_id: str) -> list[Document]: documents = ( db.session.query(Document) - .filter( + .where( Document.dataset_id == dataset_id, Document.enabled == True, ) @@ -1067,7 +1067,7 @@ class DocumentService: def get_working_documents_by_dataset_id(dataset_id: str) -> list[Document]: documents = ( db.session.query(Document) - .filter( + .where( Document.dataset_id == dataset_id, Document.enabled == True, Document.indexing_status == "completed", @@ -1082,7 +1082,7 @@ class DocumentService: def get_error_documents_by_dataset_id(dataset_id: str) -> list[Document]: documents = ( db.session.query(Document) - .filter(Document.dataset_id == dataset_id, Document.indexing_status.in_(["error", "paused"])) + .where(Document.dataset_id == dataset_id, Document.indexing_status.in_(["error", "paused"])) .all() ) return documents @@ -1091,7 +1091,7 @@ class DocumentService: def get_batch_documents(dataset_id: str, batch: str) -> list[Document]: documents = ( db.session.query(Document) - .filter( + .where( Document.batch == batch, Document.dataset_id == dataset_id, Document.tenant_id == current_user.current_tenant_id, @@ -1103,7 +1103,7 @@ class DocumentService: @staticmethod def get_document_file_detail(file_id: str): - file_detail = db.session.query(UploadFile).filter(UploadFile.id == file_id).one_or_none() + file_detail = db.session.query(UploadFile).where(UploadFile.id == file_id).one_or_none() return file_detail @staticmethod @@ -1131,7 +1131,7 @@ class DocumentService: @staticmethod def delete_documents(dataset: Dataset, document_ids: list[str]): - documents = db.session.query(Document).filter(Document.id.in_(document_ids)).all() + documents = db.session.query(Document).where(Document.id.in_(document_ids)).all() file_ids = [ document.data_source_info_dict["upload_file_id"] for document in documents @@ -1370,7 +1370,7 @@ class 
DocumentService: for file_id in upload_file_list: file = ( db.session.query(UploadFile) - .filter(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id) + .where(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id) .first() ) @@ -1451,7 +1451,7 @@ class DocumentService: workspace_id = notion_info.workspace_id data_source_binding = ( db.session.query(DataSourceOauthBinding) - .filter( + .where( db.and_( DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, DataSourceOauthBinding.provider == "notion", @@ -1871,7 +1871,7 @@ class DocumentService: def get_tenant_documents_count(): documents_count = ( db.session.query(Document) - .filter( + .where( Document.completed_at.isnot(None), Document.enabled == True, Document.archived == False, @@ -1927,7 +1927,7 @@ class DocumentService: for file_id in upload_file_list: file = ( db.session.query(UploadFile) - .filter(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id) + .where(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id) .first() ) @@ -1947,7 +1947,7 @@ class DocumentService: workspace_id = notion_info.workspace_id data_source_binding = ( db.session.query(DataSourceOauthBinding) - .filter( + .where( db.and_( DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, DataSourceOauthBinding.provider == "notion", @@ -1998,8 +1998,10 @@ class DocumentService: db.session.add(document) db.session.commit() # update document segment - update_params = {DocumentSegment.status: "re_segment"} - db.session.query(DocumentSegment).filter_by(document_id=document.id).update(update_params) + + db.session.query(DocumentSegment).filter_by(document_id=document.id).update( + {DocumentSegment.status: "re_segment"} + ) # type: ignore db.session.commit() # trigger async task document_indexing_update_task.delay(document.dataset_id, document.id) @@ -2461,7 +2463,7 @@ class SegmentService: with redis_client.lock(lock_name, timeout=600): max_position = ( 
db.session.query(func.max(DocumentSegment.position)) - .filter(DocumentSegment.document_id == document.id) + .where(DocumentSegment.document_id == document.id) .scalar() ) segment_document = DocumentSegment( @@ -2499,7 +2501,7 @@ class SegmentService: segment_document.status = "error" segment_document.error = str(e) db.session.commit() - segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_document.id).first() + segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_document.id).first() return segment @classmethod @@ -2518,7 +2520,7 @@ class SegmentService: ) max_position = ( db.session.query(func.max(DocumentSegment.position)) - .filter(DocumentSegment.document_id == document.id) + .where(DocumentSegment.document_id == document.id) .scalar() ) pre_segment_data_list = [] @@ -2657,7 +2659,7 @@ class SegmentService: # get the process rule processing_rule = ( db.session.query(DatasetProcessRule) - .filter(DatasetProcessRule.id == document.dataset_process_rule_id) + .where(DatasetProcessRule.id == document.dataset_process_rule_id) .first() ) if not processing_rule: @@ -2684,7 +2686,7 @@ class SegmentService: # calc embedding use tokens if document.doc_form == "qa_model": segment.answer = args.answer - tokens = embedding_model.get_text_embedding_num_tokens(texts=[content + segment.answer])[0] + tokens = embedding_model.get_text_embedding_num_tokens(texts=[content + segment.answer])[0] # type: ignore else: tokens = embedding_model.get_text_embedding_num_tokens(texts=[content])[0] segment.content = content @@ -2732,7 +2734,7 @@ class SegmentService: # get the process rule processing_rule = ( db.session.query(DatasetProcessRule) - .filter(DatasetProcessRule.id == document.dataset_process_rule_id) + .where(DatasetProcessRule.id == document.dataset_process_rule_id) .first() ) if not processing_rule: @@ -2751,7 +2753,7 @@ class SegmentService: segment.status = "error" segment.error = str(e) db.session.commit() - new_segment 
= db.session.query(DocumentSegment).filter(DocumentSegment.id == segment.id).first() + new_segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment.id).first() return new_segment @classmethod @@ -2777,7 +2779,7 @@ class SegmentService: index_node_ids = ( db.session.query(DocumentSegment) .with_entities(DocumentSegment.index_node_id) - .filter( + .where( DocumentSegment.id.in_(segment_ids), DocumentSegment.dataset_id == dataset.id, DocumentSegment.document_id == document.id, @@ -2788,7 +2790,7 @@ class SegmentService: index_node_ids = [index_node_id[0] for index_node_id in index_node_ids] delete_segment_from_index_task.delay(index_node_ids, dataset.id, document.id) - db.session.query(DocumentSegment).filter(DocumentSegment.id.in_(segment_ids)).delete() + db.session.query(DocumentSegment).where(DocumentSegment.id.in_(segment_ids)).delete() db.session.commit() @classmethod @@ -2796,7 +2798,7 @@ class SegmentService: if action == "enable": segments = ( db.session.query(DocumentSegment) - .filter( + .where( DocumentSegment.id.in_(segment_ids), DocumentSegment.dataset_id == dataset.id, DocumentSegment.document_id == document.id, @@ -2823,7 +2825,7 @@ class SegmentService: elif action == "disable": segments = ( db.session.query(DocumentSegment) - .filter( + .where( DocumentSegment.id.in_(segment_ids), DocumentSegment.dataset_id == dataset.id, DocumentSegment.document_id == document.id, @@ -2860,7 +2862,7 @@ class SegmentService: index_node_hash = helper.generate_text_hash(content) child_chunk_count = ( db.session.query(ChildChunk) - .filter( + .where( ChildChunk.tenant_id == current_user.current_tenant_id, ChildChunk.dataset_id == dataset.id, ChildChunk.document_id == document.id, @@ -2870,7 +2872,7 @@ class SegmentService: ) max_position = ( db.session.query(func.max(ChildChunk.position)) - .filter( + .where( ChildChunk.tenant_id == current_user.current_tenant_id, ChildChunk.dataset_id == dataset.id, ChildChunk.document_id == document.id, @@ 
-2913,7 +2915,7 @@ class SegmentService: ) -> list[ChildChunk]: child_chunks = ( db.session.query(ChildChunk) - .filter( + .where( ChildChunk.dataset_id == dataset.id, ChildChunk.document_id == document.id, ChildChunk.segment_id == segment.id, @@ -3034,7 +3036,7 @@ class SegmentService: """Get a child chunk by its ID.""" result = ( db.session.query(ChildChunk) - .filter(ChildChunk.id == child_chunk_id, ChildChunk.tenant_id == tenant_id) + .where(ChildChunk.id == child_chunk_id, ChildChunk.tenant_id == tenant_id) .first() ) return result if isinstance(result, ChildChunk) else None @@ -3050,15 +3052,15 @@ class SegmentService: limit: int = 20, ): """Get segments for a document with optional filtering.""" - query = select(DocumentSegment).filter( + query = select(DocumentSegment).where( DocumentSegment.document_id == document_id, DocumentSegment.tenant_id == tenant_id ) if status_list: - query = query.filter(DocumentSegment.status.in_(status_list)) + query = query.where(DocumentSegment.status.in_(status_list)) if keyword: - query = query.filter(DocumentSegment.content.ilike(f"%{keyword}%")) + query = query.where(DocumentSegment.content.ilike(f"%{keyword}%")) query = query.order_by(DocumentSegment.position.asc()) paginated_segments = db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False) @@ -3071,7 +3073,7 @@ class SegmentService: ) -> tuple[DocumentSegment, Document]: """Update a segment by its ID with validation and checks.""" # check dataset - dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") @@ -3103,7 +3105,7 @@ class SegmentService: # check segment segment = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.id == segment_id, DocumentSegment.tenant_id == user_id) + .where(DocumentSegment.id == 
segment_id, DocumentSegment.tenant_id == user_id) .first() ) if not segment: @@ -3120,7 +3122,7 @@ class SegmentService: """Get a segment by its ID.""" result = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.id == segment_id, DocumentSegment.tenant_id == tenant_id) + .where(DocumentSegment.id == segment_id, DocumentSegment.tenant_id == tenant_id) .first() ) return result if isinstance(result, DocumentSegment) else None @@ -3133,7 +3135,7 @@ class DatasetCollectionBindingService: ) -> DatasetCollectionBinding: dataset_collection_binding = ( db.session.query(DatasetCollectionBinding) - .filter( + .where( DatasetCollectionBinding.provider_name == provider_name, DatasetCollectionBinding.model_name == model_name, DatasetCollectionBinding.type == collection_type, @@ -3159,7 +3161,7 @@ class DatasetCollectionBindingService: ) -> DatasetCollectionBinding: dataset_collection_binding = ( db.session.query(DatasetCollectionBinding) - .filter( + .where( DatasetCollectionBinding.id == collection_binding_id, DatasetCollectionBinding.type == collection_type ) .order_by(DatasetCollectionBinding.created_at) @@ -3178,7 +3180,7 @@ class DatasetPermissionService: db.session.query( DatasetPermission.account_id, ) - .filter(DatasetPermission.dataset_id == dataset_id) + .where(DatasetPermission.dataset_id == dataset_id) .all() ) @@ -3191,7 +3193,7 @@ class DatasetPermissionService: @classmethod def update_partial_member_list(cls, tenant_id, dataset_id, user_list): try: - db.session.query(DatasetPermission).filter(DatasetPermission.dataset_id == dataset_id).delete() + db.session.query(DatasetPermission).where(DatasetPermission.dataset_id == dataset_id).delete() permissions = [] for user in user_list: permission = DatasetPermission( @@ -3227,7 +3229,7 @@ class DatasetPermissionService: @classmethod def clear_partial_member_list(cls, dataset_id): try: - db.session.query(DatasetPermission).filter(DatasetPermission.dataset_id == dataset_id).delete() + 
db.session.query(DatasetPermission).where(DatasetPermission.dataset_id == dataset_id).delete() db.session.commit() except Exception as e: db.session.rollback() diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py index 06a4c22117..b7af03e91f 100644 --- a/api/services/external_knowledge_service.py +++ b/api/services/external_knowledge_service.py @@ -30,11 +30,11 @@ class ExternalDatasetService: ) -> tuple[list[ExternalKnowledgeApis], int | None]: query = ( select(ExternalKnowledgeApis) - .filter(ExternalKnowledgeApis.tenant_id == tenant_id) + .where(ExternalKnowledgeApis.tenant_id == tenant_id) .order_by(ExternalKnowledgeApis.created_at.desc()) ) if search: - query = query.filter(ExternalKnowledgeApis.name.ilike(f"%{search}%")) + query = query.where(ExternalKnowledgeApis.name.ilike(f"%{search}%")) external_knowledge_apis = db.paginate( select=query, page=page, per_page=per_page, max_per_page=100, error_out=False diff --git a/api/services/file_service.py b/api/services/file_service.py index 286535bd18..e234c2f325 100644 --- a/api/services/file_service.py +++ b/api/services/file_service.py @@ -144,7 +144,7 @@ class FileService: @staticmethod def get_file_preview(file_id: str): - upload_file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first() + upload_file = db.session.query(UploadFile).where(UploadFile.id == file_id).first() if not upload_file: raise NotFound("File not found") @@ -167,7 +167,7 @@ class FileService: if not result: raise NotFound("File not found or signature is invalid") - upload_file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first() + upload_file = db.session.query(UploadFile).where(UploadFile.id == file_id).first() if not upload_file: raise NotFound("File not found or signature is invalid") @@ -187,7 +187,7 @@ class FileService: if not result: raise NotFound("File not found or signature is invalid") - upload_file = 
db.session.query(UploadFile).filter(UploadFile.id == file_id).first() + upload_file = db.session.query(UploadFile).where(UploadFile.id == file_id).first() if not upload_file: raise NotFound("File not found or signature is invalid") @@ -198,7 +198,7 @@ class FileService: @staticmethod def get_public_image_preview(file_id: str): - upload_file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first() + upload_file = db.session.query(UploadFile).where(UploadFile.id == file_id).first() if not upload_file: raise NotFound("File not found or signature is invalid") diff --git a/api/services/message_service.py b/api/services/message_service.py index 51b070ece7..283b7b9b4b 100644 --- a/api/services/message_service.py +++ b/api/services/message_service.py @@ -50,7 +50,7 @@ class MessageService: if first_id: first_message = ( db.session.query(Message) - .filter(Message.conversation_id == conversation.id, Message.id == first_id) + .where(Message.conversation_id == conversation.id, Message.id == first_id) .first() ) @@ -59,7 +59,7 @@ class MessageService: history_messages = ( db.session.query(Message) - .filter( + .where( Message.conversation_id == conversation.id, Message.created_at < first_message.created_at, Message.id != first_message.id, @@ -71,7 +71,7 @@ class MessageService: else: history_messages = ( db.session.query(Message) - .filter(Message.conversation_id == conversation.id) + .where(Message.conversation_id == conversation.id) .order_by(Message.created_at.desc()) .limit(fetch_limit) .all() @@ -109,19 +109,19 @@ class MessageService: app_model=app_model, user=user, conversation_id=conversation_id ) - base_query = base_query.filter(Message.conversation_id == conversation.id) + base_query = base_query.where(Message.conversation_id == conversation.id) if include_ids is not None: - base_query = base_query.filter(Message.id.in_(include_ids)) + base_query = base_query.where(Message.id.in_(include_ids)) if last_id: - last_message = base_query.filter(Message.id 
== last_id).first() + last_message = base_query.where(Message.id == last_id).first() if not last_message: raise LastMessageNotExistsError() history_messages = ( - base_query.filter(Message.created_at < last_message.created_at, Message.id != last_message.id) + base_query.where(Message.created_at < last_message.created_at, Message.id != last_message.id) .order_by(Message.created_at.desc()) .limit(fetch_limit) .all() @@ -183,7 +183,7 @@ class MessageService: offset = (page - 1) * limit feedbacks = ( db.session.query(MessageFeedback) - .filter(MessageFeedback.app_id == app_model.id) + .where(MessageFeedback.app_id == app_model.id) .order_by(MessageFeedback.created_at.desc(), MessageFeedback.id.desc()) .limit(limit) .offset(offset) @@ -196,7 +196,7 @@ class MessageService: def get_message(cls, app_model: App, user: Optional[Union[Account, EndUser]], message_id: str): message = ( db.session.query(Message) - .filter( + .where( Message.id == message_id, Message.app_id == app_model.id, Message.from_source == ("api" if isinstance(user, EndUser) else "console"), @@ -248,9 +248,7 @@ class MessageService: if not conversation.override_model_configs: app_model_config = ( db.session.query(AppModelConfig) - .filter( - AppModelConfig.id == conversation.app_model_config_id, AppModelConfig.app_id == app_model.id - ) + .where(AppModelConfig.id == conversation.app_model_config_id, AppModelConfig.app_id == app_model.id) .first() ) else: diff --git a/api/services/model_load_balancing_service.py b/api/services/model_load_balancing_service.py index 26311a6377..a200cfa146 100644 --- a/api/services/model_load_balancing_service.py +++ b/api/services/model_load_balancing_service.py @@ -103,7 +103,7 @@ class ModelLoadBalancingService: # Get load balancing configurations load_balancing_configs = ( db.session.query(LoadBalancingModelConfig) - .filter( + .where( LoadBalancingModelConfig.tenant_id == tenant_id, LoadBalancingModelConfig.provider_name == provider_configuration.provider.provider, 
LoadBalancingModelConfig.model_type == model_type_enum.to_origin_model_type(), @@ -219,7 +219,7 @@ class ModelLoadBalancingService: # Get load balancing configurations load_balancing_model_config = ( db.session.query(LoadBalancingModelConfig) - .filter( + .where( LoadBalancingModelConfig.tenant_id == tenant_id, LoadBalancingModelConfig.provider_name == provider_configuration.provider.provider, LoadBalancingModelConfig.model_type == model_type_enum.to_origin_model_type(), @@ -307,7 +307,7 @@ class ModelLoadBalancingService: current_load_balancing_configs = ( db.session.query(LoadBalancingModelConfig) - .filter( + .where( LoadBalancingModelConfig.tenant_id == tenant_id, LoadBalancingModelConfig.provider_name == provider_configuration.provider.provider, LoadBalancingModelConfig.model_type == model_type_enum.to_origin_model_type(), @@ -457,7 +457,7 @@ class ModelLoadBalancingService: # Get load balancing config load_balancing_model_config = ( db.session.query(LoadBalancingModelConfig) - .filter( + .where( LoadBalancingModelConfig.tenant_id == tenant_id, LoadBalancingModelConfig.provider_name == provider, LoadBalancingModelConfig.model_type == model_type_enum.to_origin_model_type(), diff --git a/api/services/ops_service.py b/api/services/ops_service.py index dbeb4f1908..62f37c1588 100644 --- a/api/services/ops_service.py +++ b/api/services/ops_service.py @@ -17,7 +17,7 @@ class OpsService: """ trace_config_data: Optional[TraceAppConfig] = ( db.session.query(TraceAppConfig) - .filter(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider) + .where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider) .first() ) @@ -25,7 +25,7 @@ class OpsService: return None # decrypt_token and obfuscated_token - app = db.session.query(App).filter(App.id == app_id).first() + app = db.session.query(App).where(App.id == app_id).first() if not app: return None tenant_id = app.tenant_id @@ -148,7 +148,7 @@ class OpsService: # 
check if trace config already exists trace_config_data: Optional[TraceAppConfig] = ( db.session.query(TraceAppConfig) - .filter(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider) + .where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider) .first() ) @@ -156,7 +156,7 @@ class OpsService: return None # get tenant id - app = db.session.query(App).filter(App.id == app_id).first() + app = db.session.query(App).where(App.id == app_id).first() if not app: return None tenant_id = app.tenant_id @@ -190,7 +190,7 @@ class OpsService: # check if trace config already exists current_trace_config = ( db.session.query(TraceAppConfig) - .filter(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider) + .where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider) .first() ) @@ -198,7 +198,7 @@ class OpsService: return None # get tenant id - app = db.session.query(App).filter(App.id == app_id).first() + app = db.session.query(App).where(App.id == app_id).first() if not app: return None tenant_id = app.tenant_id @@ -227,7 +227,7 @@ class OpsService: """ trace_config = ( db.session.query(TraceAppConfig) - .filter(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider) + .where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider) .first() ) diff --git a/api/services/plugin/plugin_auto_upgrade_service.py b/api/services/plugin/plugin_auto_upgrade_service.py new file mode 100644 index 0000000000..3774050445 --- /dev/null +++ b/api/services/plugin/plugin_auto_upgrade_service.py @@ -0,0 +1,87 @@ +from sqlalchemy.orm import Session + +from extensions.ext_database import db +from models.account import TenantPluginAutoUpgradeStrategy + + +class PluginAutoUpgradeService: + @staticmethod + def get_strategy(tenant_id: str) -> TenantPluginAutoUpgradeStrategy | None: + with Session(db.engine) as session: + 
return ( + session.query(TenantPluginAutoUpgradeStrategy) + .filter(TenantPluginAutoUpgradeStrategy.tenant_id == tenant_id) + .first() + ) + + @staticmethod + def change_strategy( + tenant_id: str, + strategy_setting: TenantPluginAutoUpgradeStrategy.StrategySetting, + upgrade_time_of_day: int, + upgrade_mode: TenantPluginAutoUpgradeStrategy.UpgradeMode, + exclude_plugins: list[str], + include_plugins: list[str], + ) -> bool: + with Session(db.engine) as session: + exist_strategy = ( + session.query(TenantPluginAutoUpgradeStrategy) + .filter(TenantPluginAutoUpgradeStrategy.tenant_id == tenant_id) + .first() + ) + if not exist_strategy: + strategy = TenantPluginAutoUpgradeStrategy( + tenant_id=tenant_id, + strategy_setting=strategy_setting, + upgrade_time_of_day=upgrade_time_of_day, + upgrade_mode=upgrade_mode, + exclude_plugins=exclude_plugins, + include_plugins=include_plugins, + ) + session.add(strategy) + else: + exist_strategy.strategy_setting = strategy_setting + exist_strategy.upgrade_time_of_day = upgrade_time_of_day + exist_strategy.upgrade_mode = upgrade_mode + exist_strategy.exclude_plugins = exclude_plugins + exist_strategy.include_plugins = include_plugins + + session.commit() + return True + + @staticmethod + def exclude_plugin(tenant_id: str, plugin_id: str) -> bool: + with Session(db.engine) as session: + exist_strategy = ( + session.query(TenantPluginAutoUpgradeStrategy) + .filter(TenantPluginAutoUpgradeStrategy.tenant_id == tenant_id) + .first() + ) + if not exist_strategy: + # create for this tenant + PluginAutoUpgradeService.change_strategy( + tenant_id, + TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY, + 0, + TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE, + [plugin_id], + [], + ) + return True + else: + if exist_strategy.upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE: + if plugin_id not in exist_strategy.exclude_plugins: + new_exclude_plugins = exist_strategy.exclude_plugins.copy() + 
new_exclude_plugins.append(plugin_id) + exist_strategy.exclude_plugins = new_exclude_plugins + elif exist_strategy.upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL: + if plugin_id in exist_strategy.include_plugins: + new_include_plugins = exist_strategy.include_plugins.copy() + new_include_plugins.remove(plugin_id) + exist_strategy.include_plugins = new_include_plugins + elif exist_strategy.upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.ALL: + exist_strategy.upgrade_mode = TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE + exist_strategy.exclude_plugins = [plugin_id] + + session.commit() + return True diff --git a/api/services/plugin/plugin_migration.py b/api/services/plugin/plugin_migration.py index dbaaa7160e..1806fbcfd6 100644 --- a/api/services/plugin/plugin_migration.py +++ b/api/services/plugin/plugin_migration.py @@ -101,7 +101,7 @@ class PluginMigration: for test_interval in test_intervals: tenant_count = ( session.query(Tenant.id) - .filter(Tenant.created_at.between(current_time, current_time + test_interval)) + .where(Tenant.created_at.between(current_time, current_time + test_interval)) .count() ) if tenant_count <= 100: @@ -126,7 +126,7 @@ class PluginMigration: rs = ( session.query(Tenant.id) - .filter(Tenant.created_at.between(current_time, batch_end)) + .where(Tenant.created_at.between(current_time, batch_end)) .order_by(Tenant.created_at) ) @@ -212,7 +212,7 @@ class PluginMigration: Extract tool tables. 
""" with Session(db.engine) as session: - rs = session.query(BuiltinToolProvider).filter(BuiltinToolProvider.tenant_id == tenant_id).all() + rs = session.query(BuiltinToolProvider).where(BuiltinToolProvider.tenant_id == tenant_id).all() result = [] for row in rs: result.append(ToolProviderID(row.provider).plugin_id) @@ -226,7 +226,7 @@ class PluginMigration: """ with Session(db.engine) as session: - rs = session.query(Workflow).filter(Workflow.tenant_id == tenant_id).all() + rs = session.query(Workflow).where(Workflow.tenant_id == tenant_id).all() result = [] for row in rs: graph = row.graph_dict @@ -249,7 +249,7 @@ class PluginMigration: Extract app tables. """ with Session(db.engine) as session: - apps = session.query(App).filter(App.tenant_id == tenant_id).all() + apps = session.query(App).where(App.tenant_id == tenant_id).all() if not apps: return [] @@ -257,7 +257,7 @@ class PluginMigration: app.app_model_config_id for app in apps if app.is_agent or app.mode == AppMode.AGENT_CHAT.value ] - rs = session.query(AppModelConfig).filter(AppModelConfig.id.in_(agent_app_model_config_ids)).all() + rs = session.query(AppModelConfig).where(AppModelConfig.id.in_(agent_app_model_config_ids)).all() result = [] for row in rs: agent_config = row.agent_mode_dict diff --git a/api/services/plugin/plugin_parameter_service.py b/api/services/plugin/plugin_parameter_service.py index a1c5639e00..00b59dacb3 100644 --- a/api/services/plugin/plugin_parameter_service.py +++ b/api/services/plugin/plugin_parameter_service.py @@ -51,7 +51,7 @@ class PluginParameterService: with Session(db.engine) as session: db_record = ( session.query(BuiltinToolProvider) - .filter( + .where( BuiltinToolProvider.tenant_id == tenant_id, BuiltinToolProvider.provider == provider, ) diff --git a/api/services/plugin/plugin_permission_service.py b/api/services/plugin/plugin_permission_service.py index 275e496037..60fa269640 100644 --- a/api/services/plugin/plugin_permission_service.py +++ 
b/api/services/plugin/plugin_permission_service.py @@ -8,7 +8,7 @@ class PluginPermissionService: @staticmethod def get_permission(tenant_id: str) -> TenantPluginPermission | None: with Session(db.engine) as session: - return session.query(TenantPluginPermission).filter(TenantPluginPermission.tenant_id == tenant_id).first() + return session.query(TenantPluginPermission).where(TenantPluginPermission.tenant_id == tenant_id).first() @staticmethod def change_permission( @@ -18,7 +18,7 @@ class PluginPermissionService: ): with Session(db.engine) as session: permission = ( - session.query(TenantPluginPermission).filter(TenantPluginPermission.tenant_id == tenant_id).first() + session.query(TenantPluginPermission).where(TenantPluginPermission.tenant_id == tenant_id).first() ) if not permission: permission = TenantPluginPermission( diff --git a/api/services/recommend_app/database/database_retrieval.py b/api/services/recommend_app/database/database_retrieval.py index 3295516cce..b97d13d012 100644 --- a/api/services/recommend_app/database/database_retrieval.py +++ b/api/services/recommend_app/database/database_retrieval.py @@ -33,14 +33,14 @@ class DatabaseRecommendAppRetrieval(RecommendAppRetrievalBase): """ recommended_apps = ( db.session.query(RecommendedApp) - .filter(RecommendedApp.is_listed == True, RecommendedApp.language == language) + .where(RecommendedApp.is_listed == True, RecommendedApp.language == language) .all() ) if len(recommended_apps) == 0: recommended_apps = ( db.session.query(RecommendedApp) - .filter(RecommendedApp.is_listed == True, RecommendedApp.language == languages[0]) + .where(RecommendedApp.is_listed == True, RecommendedApp.language == languages[0]) .all() ) @@ -83,7 +83,7 @@ class DatabaseRecommendAppRetrieval(RecommendAppRetrievalBase): # is in public recommended list recommended_app = ( db.session.query(RecommendedApp) - .filter(RecommendedApp.is_listed == True, RecommendedApp.app_id == app_id) + .where(RecommendedApp.is_listed == True, 
RecommendedApp.app_id == app_id) .first() ) @@ -91,7 +91,7 @@ class DatabaseRecommendAppRetrieval(RecommendAppRetrievalBase): return None # get app detail - app_model = db.session.query(App).filter(App.id == app_id).first() + app_model = db.session.query(App).where(App.id == app_id).first() if not app_model or not app_model.is_public: return None diff --git a/api/services/saved_message_service.py b/api/services/saved_message_service.py index 4cb8700117..641e03c3cf 100644 --- a/api/services/saved_message_service.py +++ b/api/services/saved_message_service.py @@ -17,7 +17,7 @@ class SavedMessageService: raise ValueError("User is required") saved_messages = ( db.session.query(SavedMessage) - .filter( + .where( SavedMessage.app_id == app_model.id, SavedMessage.created_by_role == ("account" if isinstance(user, Account) else "end_user"), SavedMessage.created_by == user.id, @@ -37,7 +37,7 @@ class SavedMessageService: return saved_message = ( db.session.query(SavedMessage) - .filter( + .where( SavedMessage.app_id == app_model.id, SavedMessage.message_id == message_id, SavedMessage.created_by_role == ("account" if isinstance(user, Account) else "end_user"), @@ -67,7 +67,7 @@ class SavedMessageService: return saved_message = ( db.session.query(SavedMessage) - .filter( + .where( SavedMessage.app_id == app_model.id, SavedMessage.message_id == message_id, SavedMessage.created_by_role == ("account" if isinstance(user, Account) else "end_user"), diff --git a/api/services/tag_service.py b/api/services/tag_service.py index 74c6150b44..75fa52a75c 100644 --- a/api/services/tag_service.py +++ b/api/services/tag_service.py @@ -16,10 +16,10 @@ class TagService: query = ( db.session.query(Tag.id, Tag.type, Tag.name, func.count(TagBinding.id).label("binding_count")) .outerjoin(TagBinding, Tag.id == TagBinding.tag_id) - .filter(Tag.type == tag_type, Tag.tenant_id == current_tenant_id) + .where(Tag.type == tag_type, Tag.tenant_id == current_tenant_id) ) if keyword: - query = 
query.filter(db.and_(Tag.name.ilike(f"%{keyword}%"))) + query = query.where(db.and_(Tag.name.ilike(f"%{keyword}%"))) query = query.group_by(Tag.id, Tag.type, Tag.name, Tag.created_at) results: list = query.order_by(Tag.created_at.desc()).all() return results @@ -28,7 +28,7 @@ class TagService: def get_target_ids_by_tag_ids(tag_type: str, current_tenant_id: str, tag_ids: list) -> list: tags = ( db.session.query(Tag) - .filter(Tag.id.in_(tag_ids), Tag.tenant_id == current_tenant_id, Tag.type == tag_type) + .where(Tag.id.in_(tag_ids), Tag.tenant_id == current_tenant_id, Tag.type == tag_type) .all() ) if not tags: @@ -36,7 +36,7 @@ class TagService: tag_ids = [tag.id for tag in tags] tag_bindings = ( db.session.query(TagBinding.target_id) - .filter(TagBinding.tag_id.in_(tag_ids), TagBinding.tenant_id == current_tenant_id) + .where(TagBinding.tag_id.in_(tag_ids), TagBinding.tenant_id == current_tenant_id) .all() ) if not tag_bindings: @@ -50,7 +50,7 @@ class TagService: return [] tags = ( db.session.query(Tag) - .filter(Tag.name == tag_name, Tag.tenant_id == current_tenant_id, Tag.type == tag_type) + .where(Tag.name == tag_name, Tag.tenant_id == current_tenant_id, Tag.type == tag_type) .all() ) if not tags: @@ -62,7 +62,7 @@ class TagService: tags = ( db.session.query(Tag) .join(TagBinding, Tag.id == TagBinding.tag_id) - .filter( + .where( TagBinding.target_id == target_id, TagBinding.tenant_id == current_tenant_id, Tag.tenant_id == current_tenant_id, @@ -92,7 +92,7 @@ class TagService: def update_tags(args: dict, tag_id: str) -> Tag: if TagService.get_tag_by_tag_name(args.get("type", ""), current_user.current_tenant_id, args.get("name", "")): raise ValueError("Tag name already exists") - tag = db.session.query(Tag).filter(Tag.id == tag_id).first() + tag = db.session.query(Tag).where(Tag.id == tag_id).first() if not tag: raise NotFound("Tag not found") tag.name = args["name"] @@ -101,17 +101,17 @@ class TagService: @staticmethod def get_tag_binding_count(tag_id: str) -> 
int: - count = db.session.query(TagBinding).filter(TagBinding.tag_id == tag_id).count() + count = db.session.query(TagBinding).where(TagBinding.tag_id == tag_id).count() return count @staticmethod def delete_tag(tag_id: str): - tag = db.session.query(Tag).filter(Tag.id == tag_id).first() + tag = db.session.query(Tag).where(Tag.id == tag_id).first() if not tag: raise NotFound("Tag not found") db.session.delete(tag) # delete tag binding - tag_bindings = db.session.query(TagBinding).filter(TagBinding.tag_id == tag_id).all() + tag_bindings = db.session.query(TagBinding).where(TagBinding.tag_id == tag_id).all() if tag_bindings: for tag_binding in tag_bindings: db.session.delete(tag_binding) @@ -125,7 +125,7 @@ class TagService: for tag_id in args["tag_ids"]: tag_binding = ( db.session.query(TagBinding) - .filter(TagBinding.tag_id == tag_id, TagBinding.target_id == args["target_id"]) + .where(TagBinding.tag_id == tag_id, TagBinding.target_id == args["target_id"]) .first() ) if tag_binding: @@ -146,7 +146,7 @@ class TagService: # delete tag binding tag_bindings = ( db.session.query(TagBinding) - .filter(TagBinding.target_id == args["target_id"], TagBinding.tag_id == (args["tag_id"])) + .where(TagBinding.target_id == args["target_id"], TagBinding.tag_id == (args["tag_id"])) .first() ) if tag_bindings: @@ -158,7 +158,7 @@ class TagService: if type == "knowledge": dataset = ( db.session.query(Dataset) - .filter(Dataset.tenant_id == current_user.current_tenant_id, Dataset.id == target_id) + .where(Dataset.tenant_id == current_user.current_tenant_id, Dataset.id == target_id) .first() ) if not dataset: @@ -166,7 +166,7 @@ class TagService: elif type == "app": app = ( db.session.query(App) - .filter(App.tenant_id == current_user.current_tenant_id, App.id == target_id) + .where(App.tenant_id == current_user.current_tenant_id, App.id == target_id) .first() ) if not app: diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py 
index 80badf2335..78e587abee 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -119,7 +119,7 @@ class ApiToolManageService: # check if the provider exists provider = ( db.session.query(ApiToolProvider) - .filter( + .where( ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.name == provider_name, ) @@ -210,7 +210,7 @@ class ApiToolManageService: """ provider: ApiToolProvider | None = ( db.session.query(ApiToolProvider) - .filter( + .where( ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.name == provider_name, ) @@ -257,7 +257,7 @@ class ApiToolManageService: # check if the provider exists provider = ( db.session.query(ApiToolProvider) - .filter( + .where( ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.name == original_provider, ) @@ -326,7 +326,7 @@ class ApiToolManageService: """ provider = ( db.session.query(ApiToolProvider) - .filter( + .where( ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.name == provider_name, ) @@ -376,7 +376,7 @@ class ApiToolManageService: db_provider = ( db.session.query(ApiToolProvider) - .filter( + .where( ApiToolProvider.tenant_id == tenant_id, ApiToolProvider.name == provider_name, ) @@ -444,7 +444,7 @@ class ApiToolManageService: """ # get all api providers db_providers: list[ApiToolProvider] = ( - db.session.query(ApiToolProvider).filter(ApiToolProvider.tenant_id == tenant_id).all() or [] + db.session.query(ApiToolProvider).where(ApiToolProvider.tenant_id == tenant_id).all() or [] ) result: list[ToolProviderApiEntity] = [] diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py index 04c8af78c8..02c78a0833 100644 --- a/api/services/tools/builtin_tools_manage_service.py +++ b/api/services/tools/builtin_tools_manage_service.py @@ -38,6 +38,7 @@ logger = logging.getLogger(__name__) class BuiltinToolManageService: __MAX_BUILTIN_TOOL_PROVIDER_COUNT__ = 100 + __DEFAULT_EXPIRES_AT__ = 
2147483647 @staticmethod def delete_custom_oauth_client_params(tenant_id: str, provider: str): @@ -153,7 +154,7 @@ class BuiltinToolManageService: # get if the provider exists db_provider = ( session.query(BuiltinToolProvider) - .filter( + .where( BuiltinToolProvider.tenant_id == tenant_id, BuiltinToolProvider.id == credential_id, ) @@ -212,6 +213,7 @@ class BuiltinToolManageService: tenant_id: str, provider: str, credentials: dict, + expires_at: int = -1, name: str | None = None, ): """ @@ -269,6 +271,9 @@ class BuiltinToolManageService: encrypted_credentials=json.dumps(encrypter.encrypt(credentials)), credential_type=api_type.value, name=name, + expires_at=expires_at + if expires_at is not None + else BuiltinToolManageService.__DEFAULT_EXPIRES_AT__, ) session.add(db_provider) @@ -377,7 +382,7 @@ class BuiltinToolManageService: with Session(db.engine) as session: db_provider = ( session.query(BuiltinToolProvider) - .filter( + .where( BuiltinToolProvider.tenant_id == tenant_id, BuiltinToolProvider.id == credential_id, ) @@ -586,7 +591,7 @@ class BuiltinToolManageService: if provider_id_entity.organization != "langgenius": provider = ( session.query(BuiltinToolProvider) - .filter( + .where( BuiltinToolProvider.tenant_id == tenant_id, BuiltinToolProvider.provider == full_provider_name, ) @@ -599,7 +604,7 @@ class BuiltinToolManageService: else: provider = ( session.query(BuiltinToolProvider) - .filter( + .where( BuiltinToolProvider.tenant_id == tenant_id, (BuiltinToolProvider.provider == provider_name) | (BuiltinToolProvider.provider == full_provider_name), @@ -620,7 +625,7 @@ class BuiltinToolManageService: # it's an old provider without organization return ( session.query(BuiltinToolProvider) - .filter(BuiltinToolProvider.tenant_id == tenant_id, BuiltinToolProvider.provider == provider_name) + .where(BuiltinToolProvider.tenant_id == tenant_id, BuiltinToolProvider.provider == provider_name) .order_by( BuiltinToolProvider.is_default.desc(), # default=True first 
BuiltinToolProvider.created_at.asc(), # oldest first diff --git a/api/services/tools/mcp_tools_manage_service.py b/api/services/tools/mcp_tools_manage_service.py index e0e256912e..23be449a5a 100644 --- a/api/services/tools/mcp_tools_manage_service.py +++ b/api/services/tools/mcp_tools_manage_service.py @@ -31,7 +31,7 @@ class MCPToolManageService: def get_mcp_provider_by_provider_id(provider_id: str, tenant_id: str) -> MCPToolProvider: res = ( db.session.query(MCPToolProvider) - .filter(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.id == provider_id) + .where(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.id == provider_id) .first() ) if not res: @@ -42,7 +42,7 @@ class MCPToolManageService: def get_mcp_provider_by_server_identifier(server_identifier: str, tenant_id: str) -> MCPToolProvider: res = ( db.session.query(MCPToolProvider) - .filter(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.server_identifier == server_identifier) + .where(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.server_identifier == server_identifier) .first() ) if not res: @@ -63,7 +63,7 @@ class MCPToolManageService: server_url_hash = hashlib.sha256(server_url.encode()).hexdigest() existing_provider = ( db.session.query(MCPToolProvider) - .filter( + .where( MCPToolProvider.tenant_id == tenant_id, or_( MCPToolProvider.name == name, @@ -100,7 +100,7 @@ class MCPToolManageService: def retrieve_mcp_tools(tenant_id: str, for_list: bool = False) -> list[ToolProviderApiEntity]: mcp_providers = ( db.session.query(MCPToolProvider) - .filter(MCPToolProvider.tenant_id == tenant_id) + .where(MCPToolProvider.tenant_id == tenant_id) .order_by(MCPToolProvider.name) .all() ) @@ -112,19 +112,27 @@ class MCPToolManageService: @classmethod def list_mcp_tool_from_remote_server(cls, tenant_id: str, provider_id: str) -> ToolProviderApiEntity: mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id) + server_url = mcp_provider.decrypted_server_url + authed = 
mcp_provider.authed + try: - with MCPClient( - mcp_provider.decrypted_server_url, provider_id, tenant_id, authed=mcp_provider.authed, for_list=True - ) as mcp_client: + with MCPClient(server_url, provider_id, tenant_id, authed=authed, for_list=True) as mcp_client: tools = mcp_client.list_tools() except MCPAuthError: raise ValueError("Please auth the tool first") except MCPError as e: raise ValueError(f"Failed to connect to MCP server: {e}") - mcp_provider.tools = json.dumps([tool.model_dump() for tool in tools]) - mcp_provider.authed = True - mcp_provider.updated_at = datetime.now() - db.session.commit() + + try: + mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id) + mcp_provider.tools = json.dumps([tool.model_dump() for tool in tools]) + mcp_provider.authed = True + mcp_provider.updated_at = datetime.now() + db.session.commit() + except Exception: + db.session.rollback() + raise + user = mcp_provider.load_user() return ToolProviderApiEntity( id=mcp_provider.id, @@ -160,22 +168,35 @@ class MCPToolManageService: server_identifier: str, ): mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id) - mcp_provider.updated_at = datetime.now() - mcp_provider.name = name - mcp_provider.icon = ( - json.dumps({"content": icon, "background": icon_background}) if icon_type == "emoji" else icon - ) - mcp_provider.server_identifier = server_identifier + + reconnect_result = None + encrypted_server_url = None + server_url_hash = None if UNCHANGED_SERVER_URL_PLACEHOLDER not in server_url: encrypted_server_url = encrypter.encrypt_token(tenant_id, server_url) - mcp_provider.server_url = encrypted_server_url server_url_hash = hashlib.sha256(server_url.encode()).hexdigest() if server_url_hash != mcp_provider.server_url_hash: - cls._re_connect_mcp_provider(mcp_provider, provider_id, tenant_id) - mcp_provider.server_url_hash = server_url_hash + reconnect_result = cls._re_connect_mcp_provider(server_url, provider_id, tenant_id) + try: + 
mcp_provider.updated_at = datetime.now() + mcp_provider.name = name + mcp_provider.icon = ( + json.dumps({"content": icon, "background": icon_background}) if icon_type == "emoji" else icon + ) + mcp_provider.server_identifier = server_identifier + + if encrypted_server_url is not None and server_url_hash is not None: + mcp_provider.server_url = encrypted_server_url + mcp_provider.server_url_hash = server_url_hash + + if reconnect_result: + mcp_provider.authed = reconnect_result["authed"] + mcp_provider.tools = reconnect_result["tools"] + mcp_provider.encrypted_credentials = reconnect_result["encrypted_credentials"] + db.session.commit() except IntegrityError as e: db.session.rollback() @@ -187,6 +208,9 @@ class MCPToolManageService: if "unique_mcp_provider_server_identifier" in error_msg: raise ValueError(f"MCP tool {server_identifier} already exists") raise + except Exception: + db.session.rollback() + raise @classmethod def update_mcp_provider_credentials( @@ -207,23 +231,22 @@ class MCPToolManageService: db.session.commit() @classmethod - def _re_connect_mcp_provider(cls, mcp_provider: MCPToolProvider, provider_id: str, tenant_id: str): - """re-connect mcp provider""" + def _re_connect_mcp_provider(cls, server_url: str, provider_id: str, tenant_id: str): try: with MCPClient( - mcp_provider.decrypted_server_url, + server_url, provider_id, tenant_id, authed=False, for_list=True, ) as mcp_client: tools = mcp_client.list_tools() - mcp_provider.authed = True - mcp_provider.tools = json.dumps([tool.model_dump() for tool in tools]) + return { + "authed": True, + "tools": json.dumps([tool.model_dump() for tool in tools]), + "encrypted_credentials": "{}", + } except MCPAuthError: - mcp_provider.authed = False - mcp_provider.tools = "[]" + return {"authed": False, "tools": "[]", "encrypted_credentials": "{}"} except MCPError as e: raise ValueError(f"Failed to re-connect MCP server: {e}") from e - # reset credentials - mcp_provider.encrypted_credentials = "{}" diff --git 
a/api/services/tools/workflow_tools_manage_service.py b/api/services/tools/workflow_tools_manage_service.py index c6b205557a..75da5e5eaa 100644 --- a/api/services/tools/workflow_tools_manage_service.py +++ b/api/services/tools/workflow_tools_manage_service.py @@ -43,7 +43,7 @@ class WorkflowToolManageService: # check if the name is unique existing_workflow_tool_provider = ( db.session.query(WorkflowToolProvider) - .filter( + .where( WorkflowToolProvider.tenant_id == tenant_id, # name or app_id or_(WorkflowToolProvider.name == name, WorkflowToolProvider.app_id == workflow_app_id), @@ -54,7 +54,7 @@ class WorkflowToolManageService: if existing_workflow_tool_provider is not None: raise ValueError(f"Tool with name {name} or app_id {workflow_app_id} already exists") - app: App | None = db.session.query(App).filter(App.id == workflow_app_id, App.tenant_id == tenant_id).first() + app: App | None = db.session.query(App).where(App.id == workflow_app_id, App.tenant_id == tenant_id).first() if app is None: raise ValueError(f"App {workflow_app_id} not found") @@ -123,7 +123,7 @@ class WorkflowToolManageService: # check if the name is unique existing_workflow_tool_provider = ( db.session.query(WorkflowToolProvider) - .filter( + .where( WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.name == name, WorkflowToolProvider.id != workflow_tool_id, @@ -136,7 +136,7 @@ class WorkflowToolManageService: workflow_tool_provider: WorkflowToolProvider | None = ( db.session.query(WorkflowToolProvider) - .filter(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id) + .where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id) .first() ) @@ -144,7 +144,7 @@ class WorkflowToolManageService: raise ValueError(f"Tool {workflow_tool_id} not found") app: App | None = ( - db.session.query(App).filter(App.id == workflow_tool_provider.app_id, App.tenant_id == tenant_id).first() + db.session.query(App).where(App.id 
== workflow_tool_provider.app_id, App.tenant_id == tenant_id).first() ) if app is None: @@ -186,7 +186,7 @@ class WorkflowToolManageService: :param tenant_id: the tenant id :return: the list of tools """ - db_tools = db.session.query(WorkflowToolProvider).filter(WorkflowToolProvider.tenant_id == tenant_id).all() + db_tools = db.session.query(WorkflowToolProvider).where(WorkflowToolProvider.tenant_id == tenant_id).all() tools: list[WorkflowToolProviderController] = [] for provider in db_tools: @@ -224,7 +224,7 @@ class WorkflowToolManageService: :param tenant_id: the tenant id :param workflow_tool_id: the workflow tool id """ - db.session.query(WorkflowToolProvider).filter( + db.session.query(WorkflowToolProvider).where( WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id ).delete() @@ -243,7 +243,7 @@ class WorkflowToolManageService: """ db_tool: WorkflowToolProvider | None = ( db.session.query(WorkflowToolProvider) - .filter(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id) + .where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id) .first() ) return cls._get_workflow_tool(tenant_id, db_tool) @@ -259,7 +259,7 @@ class WorkflowToolManageService: """ db_tool: WorkflowToolProvider | None = ( db.session.query(WorkflowToolProvider) - .filter(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.app_id == workflow_app_id) + .where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.app_id == workflow_app_id) .first() ) return cls._get_workflow_tool(tenant_id, db_tool) @@ -275,7 +275,7 @@ class WorkflowToolManageService: raise ValueError("Tool not found") workflow_app: App | None = ( - db.session.query(App).filter(App.id == db_tool.app_id, App.tenant_id == db_tool.tenant_id).first() + db.session.query(App).where(App.id == db_tool.app_id, App.tenant_id == db_tool.tenant_id).first() ) if workflow_app is None: @@ -318,7 +318,7 @@ 
class WorkflowToolManageService: """ db_tool: WorkflowToolProvider | None = ( db.session.query(WorkflowToolProvider) - .filter(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id) + .where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id) .first() ) diff --git a/api/services/vector_service.py b/api/services/vector_service.py index 9165139193..f9ec054593 100644 --- a/api/services/vector_service.py +++ b/api/services/vector_service.py @@ -36,7 +36,7 @@ class VectorService: # get the process rule processing_rule = ( db.session.query(DatasetProcessRule) - .filter(DatasetProcessRule.id == dataset_document.dataset_process_rule_id) + .where(DatasetProcessRule.id == dataset_document.dataset_process_rule_id) .first() ) if not processing_rule: diff --git a/api/services/web_conversation_service.py b/api/services/web_conversation_service.py index f698ed3084..c48e24f244 100644 --- a/api/services/web_conversation_service.py +++ b/api/services/web_conversation_service.py @@ -65,7 +65,7 @@ class WebConversationService: return pinned_conversation = ( db.session.query(PinnedConversation) - .filter( + .where( PinnedConversation.app_id == app_model.id, PinnedConversation.conversation_id == conversation_id, PinnedConversation.created_by_role == ("account" if isinstance(user, Account) else "end_user"), @@ -97,7 +97,7 @@ class WebConversationService: return pinned_conversation = ( db.session.query(PinnedConversation) - .filter( + .where( PinnedConversation.app_id == app_model.id, PinnedConversation.conversation_id == conversation_id, PinnedConversation.created_by_role == ("account" if isinstance(user, Account) else "end_user"), diff --git a/api/services/webapp_auth_service.py b/api/services/webapp_auth_service.py index 8f92b3f070..a9df8d0d73 100644 --- a/api/services/webapp_auth_service.py +++ b/api/services/webapp_auth_service.py @@ -52,7 +52,7 @@ class WebAppAuthService: @classmethod def 
get_user_through_email(cls, email: str): - account = db.session.query(Account).filter(Account.email == email).first() + account = db.session.query(Account).where(Account.email == email).first() if not account: return None @@ -91,10 +91,10 @@ class WebAppAuthService: @classmethod def create_end_user(cls, app_code, email) -> EndUser: - site = db.session.query(Site).filter(Site.code == app_code).first() + site = db.session.query(Site).where(Site.code == app_code).first() if not site: raise NotFound("Site not found.") - app_model = db.session.query(App).filter(App.id == site.app_id).first() + app_model = db.session.query(App).where(App.id == site.app_id).first() if not app_model: raise NotFound("App not found.") end_user = EndUser( diff --git a/api/services/workflow/workflow_converter.py b/api/services/workflow/workflow_converter.py index 2b0d57bdfd..abf6824d73 100644 --- a/api/services/workflow/workflow_converter.py +++ b/api/services/workflow/workflow_converter.py @@ -620,7 +620,7 @@ class WorkflowConverter: """ api_based_extension = ( db.session.query(APIBasedExtension) - .filter(APIBasedExtension.tenant_id == tenant_id, APIBasedExtension.id == api_based_extension_id) + .where(APIBasedExtension.tenant_id == tenant_id, APIBasedExtension.id == api_based_extension_id) .first() ) diff --git a/api/services/workflow_draft_variable_service.py b/api/services/workflow_draft_variable_service.py index f306e1f062..3164e010b4 100644 --- a/api/services/workflow_draft_variable_service.py +++ b/api/services/workflow_draft_variable_service.py @@ -138,7 +138,7 @@ class WorkflowDraftVariableService: ) def get_variable(self, variable_id: str) -> WorkflowDraftVariable | None: - return self._session.query(WorkflowDraftVariable).filter(WorkflowDraftVariable.id == variable_id).first() + return self._session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.id == variable_id).first() def get_draft_variables_by_selectors( self, @@ -166,7 +166,7 @@ class WorkflowDraftVariableService: 
def list_variables_without_values(self, app_id: str, page: int, limit: int) -> WorkflowDraftVariableList: criteria = WorkflowDraftVariable.app_id == app_id total = None - query = self._session.query(WorkflowDraftVariable).filter(criteria) + query = self._session.query(WorkflowDraftVariable).where(criteria) if page == 1: total = query.count() variables = ( @@ -185,7 +185,7 @@ class WorkflowDraftVariableService: WorkflowDraftVariable.app_id == app_id, WorkflowDraftVariable.node_id == node_id, ) - query = self._session.query(WorkflowDraftVariable).filter(*criteria) + query = self._session.query(WorkflowDraftVariable).where(*criteria) variables = query.order_by(WorkflowDraftVariable.created_at.desc()).all() return WorkflowDraftVariableList(variables=variables) @@ -328,7 +328,7 @@ class WorkflowDraftVariableService: def delete_workflow_variables(self, app_id: str): ( self._session.query(WorkflowDraftVariable) - .filter(WorkflowDraftVariable.app_id == app_id) + .where(WorkflowDraftVariable.app_id == app_id) .delete(synchronize_session=False) ) @@ -379,7 +379,7 @@ class WorkflowDraftVariableService: if conv_id is not None: conversation = ( self._session.query(Conversation) - .filter( + .where( Conversation.id == conv_id, Conversation.app_id == workflow.app_id, ) diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index 89bb504437..24a421b734 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -89,7 +89,7 @@ class WorkflowService: def is_workflow_exist(self, app_model: App) -> bool: return ( db.session.query(Workflow) - .filter( + .where( Workflow.tenant_id == app_model.tenant_id, Workflow.app_id == app_model.id, Workflow.version == Workflow.VERSION_DRAFT, @@ -104,7 +104,7 @@ class WorkflowService: # fetch draft workflow by app_model workflow = ( db.session.query(Workflow) - .filter( + .where( Workflow.tenant_id == app_model.tenant_id, Workflow.app_id == app_model.id, Workflow.version == "draft" ) .first() @@ 
-117,7 +117,7 @@ class WorkflowService: # fetch published workflow by workflow_id workflow = ( db.session.query(Workflow) - .filter( + .where( Workflow.tenant_id == app_model.tenant_id, Workflow.app_id == app_model.id, Workflow.id == workflow_id, @@ -141,7 +141,7 @@ class WorkflowService: # fetch published workflow by workflow_id workflow = ( db.session.query(Workflow) - .filter( + .where( Workflow.tenant_id == app_model.tenant_id, Workflow.app_id == app_model.id, Workflow.id == app_model.workflow_id, @@ -659,7 +659,7 @@ class WorkflowService: # Check if there's a tool provider using this specific workflow version tool_provider = ( session.query(WorkflowToolProvider) - .filter( + .where( WorkflowToolProvider.tenant_id == workflow.tenant_id, WorkflowToolProvider.app_id == workflow.app_id, WorkflowToolProvider.version == workflow.version, diff --git a/api/services/workspace_service.py b/api/services/workspace_service.py index bb35645c50..d4fc68a084 100644 --- a/api/services/workspace_service.py +++ b/api/services/workspace_service.py @@ -25,7 +25,7 @@ class WorkspaceService: # Get role of user tenant_account_join = ( db.session.query(TenantAccountJoin) - .filter(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == current_user.id) + .where(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == current_user.id) .first() ) assert tenant_account_join is not None, "TenantAccountJoin not found" diff --git a/api/tasks/add_document_to_index_task.py b/api/tasks/add_document_to_index_task.py index 75d648e1b7..204c1a4f5b 100644 --- a/api/tasks/add_document_to_index_task.py +++ b/api/tasks/add_document_to_index_task.py @@ -25,7 +25,7 @@ def add_document_to_index_task(dataset_document_id: str): logging.info(click.style("Start add document to index: {}".format(dataset_document_id), fg="green")) start_at = time.perf_counter() - dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == dataset_document_id).first() + 
dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == dataset_document_id).first() if not dataset_document: logging.info(click.style("Document not found: {}".format(dataset_document_id), fg="red")) db.session.close() @@ -43,7 +43,7 @@ def add_document_to_index_task(dataset_document_id: str): segments = ( db.session.query(DocumentSegment) - .filter( + .where( DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == False, DocumentSegment.status == "completed", @@ -86,12 +86,10 @@ def add_document_to_index_task(dataset_document_id: str): index_processor.load(dataset, documents) # delete auto disable log - db.session.query(DatasetAutoDisableLog).filter( - DatasetAutoDisableLog.document_id == dataset_document.id - ).delete() + db.session.query(DatasetAutoDisableLog).where(DatasetAutoDisableLog.document_id == dataset_document.id).delete() # update segment to enable - db.session.query(DocumentSegment).filter(DocumentSegment.document_id == dataset_document.id).update( + db.session.query(DocumentSegment).where(DocumentSegment.document_id == dataset_document.id).update( { DocumentSegment.enabled: True, DocumentSegment.disabled_at: None, diff --git a/api/tasks/annotation/batch_import_annotations_task.py b/api/tasks/annotation/batch_import_annotations_task.py index 6144a4fe3e..6d48f5df89 100644 --- a/api/tasks/annotation/batch_import_annotations_task.py +++ b/api/tasks/annotation/batch_import_annotations_task.py @@ -29,7 +29,7 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id: start_at = time.perf_counter() indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id)) # get app info - app = db.session.query(App).filter(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first() + app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first() if app: try: @@ -48,7 +48,7 @@ def batch_import_annotations_task(job_id: str, 
content_list: list[dict], app_id: documents.append(document) # if annotation reply is enabled , batch add annotations' index app_annotation_setting = ( - db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first() + db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() ) if app_annotation_setting: diff --git a/api/tasks/annotation/disable_annotation_reply_task.py b/api/tasks/annotation/disable_annotation_reply_task.py index 747fce5784..5d5d1d3ad8 100644 --- a/api/tasks/annotation/disable_annotation_reply_task.py +++ b/api/tasks/annotation/disable_annotation_reply_task.py @@ -19,16 +19,14 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str): logging.info(click.style("Start delete app annotations index: {}".format(app_id), fg="green")) start_at = time.perf_counter() # get app info - app = db.session.query(App).filter(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first() - annotations_count = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_id).count() + app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first() + annotations_count = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_id).count() if not app: logging.info(click.style("App not found: {}".format(app_id), fg="red")) db.session.close() return - app_annotation_setting = ( - db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first() - ) + app_annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() if not app_annotation_setting: logging.info(click.style("App annotation setting not found: {}".format(app_id), fg="red")) diff --git a/api/tasks/annotation/enable_annotation_reply_task.py b/api/tasks/annotation/enable_annotation_reply_task.py index c04f1be845..12d10df442 100644 --- 
a/api/tasks/annotation/enable_annotation_reply_task.py +++ b/api/tasks/annotation/enable_annotation_reply_task.py @@ -30,14 +30,14 @@ def enable_annotation_reply_task( logging.info(click.style("Start add app annotation to index: {}".format(app_id), fg="green")) start_at = time.perf_counter() # get app info - app = db.session.query(App).filter(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first() + app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first() if not app: logging.info(click.style("App not found: {}".format(app_id), fg="red")) db.session.close() return - annotations = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_id).all() + annotations = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_id).all() enable_app_annotation_key = "enable_app_annotation_{}".format(str(app_id)) enable_app_annotation_job_key = "enable_app_annotation_job_{}".format(str(job_id)) @@ -46,9 +46,7 @@ def enable_annotation_reply_task( dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding( embedding_provider_name, embedding_model_name, "annotation" ) - annotation_setting = ( - db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first() - ) + annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() if annotation_setting: if dataset_collection_binding.id != annotation_setting.collection_binding_id: old_dataset_collection_binding = ( diff --git a/api/tasks/batch_clean_document_task.py b/api/tasks/batch_clean_document_task.py index 97efc47b33..49bff72a96 100644 --- a/api/tasks/batch_clean_document_task.py +++ b/api/tasks/batch_clean_document_task.py @@ -27,12 +27,12 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form start_at = time.perf_counter() try: - dataset = db.session.query(Dataset).filter(Dataset.id 
== dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: raise Exception("Document has no dataset") - segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id.in_(document_ids)).all() + segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids)).all() # check segment is exist if segments: index_node_ids = [segment.index_node_id for segment in segments] @@ -42,7 +42,7 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form for segment in segments: image_upload_file_ids = get_image_upload_file_ids(segment.content) for upload_file_id in image_upload_file_ids: - image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first() + image_file = db.session.query(UploadFile).where(UploadFile.id == upload_file_id).first() try: if image_file and image_file.key: storage.delete(image_file.key) @@ -56,7 +56,7 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form db.session.commit() if file_ids: - files = db.session.query(UploadFile).filter(UploadFile.id.in_(file_ids)).all() + files = db.session.query(UploadFile).where(UploadFile.id.in_(file_ids)).all() for file in files: try: storage.delete(file.key) diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py index 51b6343fdc..64df3175e1 100644 --- a/api/tasks/batch_create_segment_to_index_task.py +++ b/api/tasks/batch_create_segment_to_index_task.py @@ -81,7 +81,7 @@ def batch_create_segment_to_index_task( segment_hash = helper.generate_text_hash(content) # type: ignore max_position = ( db.session.query(func.max(DocumentSegment.position)) - .filter(DocumentSegment.document_id == dataset_document.id) + .where(DocumentSegment.document_id == dataset_document.id) .scalar() ) segment_document = DocumentSegment( diff --git a/api/tasks/clean_dataset_task.py 
b/api/tasks/clean_dataset_task.py index 6bac718395..fad090141a 100644 --- a/api/tasks/clean_dataset_task.py +++ b/api/tasks/clean_dataset_task.py @@ -53,8 +53,8 @@ def clean_dataset_task( index_struct=index_struct, collection_binding_id=collection_binding_id, ) - documents = db.session.query(Document).filter(Document.dataset_id == dataset_id).all() - segments = db.session.query(DocumentSegment).filter(DocumentSegment.dataset_id == dataset_id).all() + documents = db.session.query(Document).where(Document.dataset_id == dataset_id).all() + segments = db.session.query(DocumentSegment).where(DocumentSegment.dataset_id == dataset_id).all() if documents is None or len(documents) == 0: logging.info(click.style("No documents found for dataset: {}".format(dataset_id), fg="green")) @@ -72,7 +72,7 @@ def clean_dataset_task( for segment in segments: image_upload_file_ids = get_image_upload_file_ids(segment.content) for upload_file_id in image_upload_file_ids: - image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first() + image_file = db.session.query(UploadFile).where(UploadFile.id == upload_file_id).first() if image_file is None: continue try: @@ -85,12 +85,12 @@ def clean_dataset_task( db.session.delete(image_file) db.session.delete(segment) - db.session.query(DatasetProcessRule).filter(DatasetProcessRule.dataset_id == dataset_id).delete() - db.session.query(DatasetQuery).filter(DatasetQuery.dataset_id == dataset_id).delete() - db.session.query(AppDatasetJoin).filter(AppDatasetJoin.dataset_id == dataset_id).delete() + db.session.query(DatasetProcessRule).where(DatasetProcessRule.dataset_id == dataset_id).delete() + db.session.query(DatasetQuery).where(DatasetQuery.dataset_id == dataset_id).delete() + db.session.query(AppDatasetJoin).where(AppDatasetJoin.dataset_id == dataset_id).delete() # delete dataset metadata - db.session.query(DatasetMetadata).filter(DatasetMetadata.dataset_id == dataset_id).delete() - 
db.session.query(DatasetMetadataBinding).filter(DatasetMetadataBinding.dataset_id == dataset_id).delete() + db.session.query(DatasetMetadata).where(DatasetMetadata.dataset_id == dataset_id).delete() + db.session.query(DatasetMetadataBinding).where(DatasetMetadataBinding.dataset_id == dataset_id).delete() # delete files if documents: for document in documents: @@ -102,7 +102,7 @@ def clean_dataset_task( file_id = data_source_info["upload_file_id"] file = ( db.session.query(UploadFile) - .filter(UploadFile.tenant_id == document.tenant_id, UploadFile.id == file_id) + .where(UploadFile.tenant_id == document.tenant_id, UploadFile.id == file_id) .first() ) if not file: diff --git a/api/tasks/clean_document_task.py b/api/tasks/clean_document_task.py index c72a3319c1..dd7a544ff5 100644 --- a/api/tasks/clean_document_task.py +++ b/api/tasks/clean_document_task.py @@ -28,12 +28,12 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i start_at = time.perf_counter() try: - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: raise Exception("Document has no dataset") - segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all() + segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all() # check segment is exist if segments: index_node_ids = [segment.index_node_id for segment in segments] @@ -43,7 +43,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i for segment in segments: image_upload_file_ids = get_image_upload_file_ids(segment.content) for upload_file_id in image_upload_file_ids: - image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first() + image_file = db.session.query(UploadFile).where(UploadFile.id == upload_file_id).first() if image_file is None: continue try: @@ -58,7 
+58,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i db.session.commit() if file_id: - file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first() + file = db.session.query(UploadFile).where(UploadFile.id == file_id).first() if file: try: storage.delete(file.key) @@ -68,7 +68,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i db.session.commit() # delete dataset metadata binding - db.session.query(DatasetMetadataBinding).filter( + db.session.query(DatasetMetadataBinding).where( DatasetMetadataBinding.dataset_id == dataset_id, DatasetMetadataBinding.document_id == document_id, ).delete() diff --git a/api/tasks/clean_notion_document_task.py b/api/tasks/clean_notion_document_task.py index 1087a37761..0f72f87f15 100644 --- a/api/tasks/clean_notion_document_task.py +++ b/api/tasks/clean_notion_document_task.py @@ -24,17 +24,17 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str): start_at = time.perf_counter() try: - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: raise Exception("Document has no dataset") index_type = dataset.doc_form index_processor = IndexProcessorFactory(index_type).init_index_processor() for document_id in document_ids: - document = db.session.query(Document).filter(Document.id == document_id).first() + document = db.session.query(Document).where(Document.id == document_id).first() db.session.delete(document) - segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all() + segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all() index_node_ids = [segment.index_node_id for segment in segments] index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True) diff --git 
a/api/tasks/create_segment_to_index_task.py b/api/tasks/create_segment_to_index_task.py index a3f811faa1..5eda24674a 100644 --- a/api/tasks/create_segment_to_index_task.py +++ b/api/tasks/create_segment_to_index_task.py @@ -24,7 +24,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]] logging.info(click.style("Start create segment to index: {}".format(segment_id), fg="green")) start_at = time.perf_counter() - segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first() + segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first() if not segment: logging.info(click.style("Segment not found: {}".format(segment_id), fg="red")) db.session.close() @@ -37,11 +37,12 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]] try: # update segment status to indexing - update_params = { - DocumentSegment.status: "indexing", - DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), - } - db.session.query(DocumentSegment).filter_by(id=segment.id).update(update_params) + db.session.query(DocumentSegment).filter_by(id=segment.id).update( + { + DocumentSegment.status: "indexing", + DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + } + ) db.session.commit() document = Document( page_content=segment.content, @@ -74,11 +75,12 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]] index_processor.load(dataset, [document]) # update segment to completed - update_params = { - DocumentSegment.status: "completed", - DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), - } - db.session.query(DocumentSegment).filter_by(id=segment.id).update(update_params) + db.session.query(DocumentSegment).filter_by(id=segment.id).update( + { + DocumentSegment.status: "completed", + DocumentSegment.completed_at: 
datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + } + ) db.session.commit() end_at = time.perf_counter() diff --git a/api/tasks/deal_dataset_vector_index_task.py b/api/tasks/deal_dataset_vector_index_task.py index a27207f2f1..7478bf5a90 100644 --- a/api/tasks/deal_dataset_vector_index_task.py +++ b/api/tasks/deal_dataset_vector_index_task.py @@ -35,7 +35,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str): elif action == "add": dataset_documents = ( db.session.query(DatasetDocument) - .filter( + .where( DatasetDocument.dataset_id == dataset_id, DatasetDocument.indexing_status == "completed", DatasetDocument.enabled == True, @@ -46,7 +46,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str): if dataset_documents: dataset_documents_ids = [doc.id for doc in dataset_documents] - db.session.query(DatasetDocument).filter(DatasetDocument.id.in_(dataset_documents_ids)).update( + db.session.query(DatasetDocument).where(DatasetDocument.id.in_(dataset_documents_ids)).update( {"indexing_status": "indexing"}, synchronize_session=False ) db.session.commit() @@ -56,7 +56,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str): # add from vector index segments = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == True) + .where(DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == True) .order_by(DocumentSegment.position.asc()) .all() ) @@ -76,19 +76,19 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str): documents.append(document) # save vector index index_processor.load(dataset, documents, with_keywords=False) - db.session.query(DatasetDocument).filter(DatasetDocument.id == dataset_document.id).update( + db.session.query(DatasetDocument).where(DatasetDocument.id == dataset_document.id).update( {"indexing_status": "completed"}, synchronize_session=False ) db.session.commit() except Exception as e: - 
db.session.query(DatasetDocument).filter(DatasetDocument.id == dataset_document.id).update( + db.session.query(DatasetDocument).where(DatasetDocument.id == dataset_document.id).update( {"indexing_status": "error", "error": str(e)}, synchronize_session=False ) db.session.commit() elif action == "update": dataset_documents = ( db.session.query(DatasetDocument) - .filter( + .where( DatasetDocument.dataset_id == dataset_id, DatasetDocument.indexing_status == "completed", DatasetDocument.enabled == True, @@ -100,7 +100,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str): if dataset_documents: # update document status dataset_documents_ids = [doc.id for doc in dataset_documents] - db.session.query(DatasetDocument).filter(DatasetDocument.id.in_(dataset_documents_ids)).update( + db.session.query(DatasetDocument).where(DatasetDocument.id.in_(dataset_documents_ids)).update( {"indexing_status": "indexing"}, synchronize_session=False ) db.session.commit() @@ -113,7 +113,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str): try: segments = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == True) + .where(DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == True) .order_by(DocumentSegment.position.asc()) .all() ) @@ -148,12 +148,12 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str): documents.append(document) # save vector index index_processor.load(dataset, documents, with_keywords=False) - db.session.query(DatasetDocument).filter(DatasetDocument.id == dataset_document.id).update( + db.session.query(DatasetDocument).where(DatasetDocument.id == dataset_document.id).update( {"indexing_status": "completed"}, synchronize_session=False ) db.session.commit() except Exception as e: - db.session.query(DatasetDocument).filter(DatasetDocument.id == dataset_document.id).update( + db.session.query(DatasetDocument).where(DatasetDocument.id 
== dataset_document.id).update( {"indexing_status": "error", "error": str(e)}, synchronize_session=False ) db.session.commit() diff --git a/api/tasks/delete_account_task.py b/api/tasks/delete_account_task.py index 52c884ca29..d3b33e3052 100644 --- a/api/tasks/delete_account_task.py +++ b/api/tasks/delete_account_task.py @@ -12,7 +12,7 @@ logger = logging.getLogger(__name__) @shared_task(queue="dataset") def delete_account_task(account_id): - account = db.session.query(Account).filter(Account.id == account_id).first() + account = db.session.query(Account).where(Account.id == account_id).first() try: BillingService.delete_account(account_id) except Exception as e: diff --git a/api/tasks/delete_segment_from_index_task.py b/api/tasks/delete_segment_from_index_task.py index a93babc310..66ff0f9a0a 100644 --- a/api/tasks/delete_segment_from_index_task.py +++ b/api/tasks/delete_segment_from_index_task.py @@ -22,11 +22,11 @@ def delete_segment_from_index_task(index_node_ids: list, dataset_id: str, docume logging.info(click.style("Start delete segment from index", fg="green")) start_at = time.perf_counter() try: - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: return - dataset_document = db.session.query(Document).filter(Document.id == document_id).first() + dataset_document = db.session.query(Document).where(Document.id == document_id).first() if not dataset_document: return diff --git a/api/tasks/disable_segment_from_index_task.py b/api/tasks/disable_segment_from_index_task.py index 327eed4721..e67ba5c76e 100644 --- a/api/tasks/disable_segment_from_index_task.py +++ b/api/tasks/disable_segment_from_index_task.py @@ -21,7 +21,7 @@ def disable_segment_from_index_task(segment_id: str): logging.info(click.style("Start disable segment from index: {}".format(segment_id), fg="green")) start_at = time.perf_counter() - segment = 
db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first() + segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first() if not segment: logging.info(click.style("Segment not found: {}".format(segment_id), fg="red")) db.session.close() diff --git a/api/tasks/disable_segments_from_index_task.py b/api/tasks/disable_segments_from_index_task.py index 8b77b290c8..0c8b1aabc7 100644 --- a/api/tasks/disable_segments_from_index_task.py +++ b/api/tasks/disable_segments_from_index_task.py @@ -23,13 +23,13 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen """ start_at = time.perf_counter() - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: logging.info(click.style("Dataset {} not found, pass.".format(dataset_id), fg="cyan")) db.session.close() return - dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == document_id).first() + dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == document_id).first() if not dataset_document: logging.info(click.style("Document {} not found, pass.".format(document_id), fg="cyan")) @@ -44,7 +44,7 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen segments = ( db.session.query(DocumentSegment) - .filter( + .where( DocumentSegment.id.in_(segment_ids), DocumentSegment.dataset_id == dataset_id, DocumentSegment.document_id == document_id, @@ -64,7 +64,7 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen logging.info(click.style("Segments removed from index latency: {}".format(end_at - start_at), fg="green")) except Exception: # update segment error msg - db.session.query(DocumentSegment).filter( + db.session.query(DocumentSegment).where( DocumentSegment.id.in_(segment_ids), DocumentSegment.dataset_id == dataset_id, 
DocumentSegment.document_id == document_id, diff --git a/api/tasks/document_indexing_sync_task.py b/api/tasks/document_indexing_sync_task.py index b4848be192..dcc748ef18 100644 --- a/api/tasks/document_indexing_sync_task.py +++ b/api/tasks/document_indexing_sync_task.py @@ -25,7 +25,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str): logging.info(click.style("Start sync document: {}".format(document_id), fg="green")) start_at = time.perf_counter() - document = db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first() + document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() if not document: logging.info(click.style("Document not found: {}".format(document_id), fg="red")) @@ -46,7 +46,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str): page_edited_time = data_source_info["last_edited_time"] data_source_binding = ( db.session.query(DataSourceOauthBinding) - .filter( + .where( db.and_( DataSourceOauthBinding.tenant_id == document.tenant_id, DataSourceOauthBinding.provider == "notion", @@ -77,13 +77,13 @@ def document_indexing_sync_task(dataset_id: str, document_id: str): # delete all document segment and index try: - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: raise Exception("Dataset not found") index_type = document.doc_form index_processor = IndexProcessorFactory(index_type).init_index_processor() - segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all() + segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all() index_node_ids = [segment.index_node_id for segment in segments] # delete from vector index diff --git a/api/tasks/document_indexing_task.py b/api/tasks/document_indexing_task.py index 
a85aab0bb7..ec6d10d93b 100644 --- a/api/tasks/document_indexing_task.py +++ b/api/tasks/document_indexing_task.py @@ -24,7 +24,7 @@ def document_indexing_task(dataset_id: str, document_ids: list): documents = [] start_at = time.perf_counter() - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: logging.info(click.style("Dataset is not found: {}".format(dataset_id), fg="yellow")) db.session.close() @@ -48,7 +48,7 @@ def document_indexing_task(dataset_id: str, document_ids: list): except Exception as e: for document_id in document_ids: document = ( - db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first() + db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() ) if document: document.indexing_status = "error" @@ -63,7 +63,7 @@ def document_indexing_task(dataset_id: str, document_ids: list): logging.info(click.style("Start process document: {}".format(document_id), fg="green")) document = ( - db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first() + db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() ) if document: diff --git a/api/tasks/document_indexing_update_task.py b/api/tasks/document_indexing_update_task.py index 167b928f5d..e53c38ddc3 100644 --- a/api/tasks/document_indexing_update_task.py +++ b/api/tasks/document_indexing_update_task.py @@ -23,7 +23,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str): logging.info(click.style("Start update document: {}".format(document_id), fg="green")) start_at = time.perf_counter() - document = db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first() + document = db.session.query(Document).where(Document.id == document_id, 
Document.dataset_id == dataset_id).first() if not document: logging.info(click.style("Document not found: {}".format(document_id), fg="red")) @@ -36,14 +36,14 @@ def document_indexing_update_task(dataset_id: str, document_id: str): # delete all document segment and index try: - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: raise Exception("Dataset not found") index_type = document.doc_form index_processor = IndexProcessorFactory(index_type).init_index_processor() - segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all() + segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all() if segments: index_node_ids = [segment.index_node_id for segment in segments] diff --git a/api/tasks/duplicate_document_indexing_task.py b/api/tasks/duplicate_document_indexing_task.py index a6c93e110e..b3ddface59 100644 --- a/api/tasks/duplicate_document_indexing_task.py +++ b/api/tasks/duplicate_document_indexing_task.py @@ -25,7 +25,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list): documents = [] start_at = time.perf_counter() - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if dataset is None: logging.info(click.style("Dataset not found: {}".format(dataset_id), fg="red")) db.session.close() @@ -50,7 +50,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list): except Exception as e: for document_id in document_ids: document = ( - db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first() + db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() ) if document: document.indexing_status = "error" @@ -66,7 +66,7 @@ def 
duplicate_document_indexing_task(dataset_id: str, document_ids: list): logging.info(click.style("Start process document: {}".format(document_id), fg="green")) document = ( - db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first() + db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() ) if document: @@ -74,7 +74,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list): index_type = document.doc_form index_processor = IndexProcessorFactory(index_type).init_index_processor() - segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all() + segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all() if segments: index_node_ids = [segment.index_node_id for segment in segments] diff --git a/api/tasks/enable_segment_to_index_task.py b/api/tasks/enable_segment_to_index_task.py index 21f08f40a7..13822f078e 100644 --- a/api/tasks/enable_segment_to_index_task.py +++ b/api/tasks/enable_segment_to_index_task.py @@ -24,7 +24,7 @@ def enable_segment_to_index_task(segment_id: str): logging.info(click.style("Start enable segment to index: {}".format(segment_id), fg="green")) start_at = time.perf_counter() - segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first() + segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first() if not segment: logging.info(click.style("Segment not found: {}".format(segment_id), fg="red")) db.session.close() diff --git a/api/tasks/enable_segments_to_index_task.py b/api/tasks/enable_segments_to_index_task.py index 625a3b582e..e3fdf04d8c 100644 --- a/api/tasks/enable_segments_to_index_task.py +++ b/api/tasks/enable_segments_to_index_task.py @@ -25,12 +25,12 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i Usage: 
enable_segments_to_index_task.delay(segment_ids, dataset_id, document_id) """ start_at = time.perf_counter() - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: logging.info(click.style("Dataset {} not found, pass.".format(dataset_id), fg="cyan")) return - dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == document_id).first() + dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == document_id).first() if not dataset_document: logging.info(click.style("Document {} not found, pass.".format(document_id), fg="cyan")) @@ -45,7 +45,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i segments = ( db.session.query(DocumentSegment) - .filter( + .where( DocumentSegment.id.in_(segment_ids), DocumentSegment.dataset_id == dataset_id, DocumentSegment.document_id == document_id, @@ -95,7 +95,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i except Exception as e: logging.exception("enable segments to index failed") # update segment error msg - db.session.query(DocumentSegment).filter( + db.session.query(DocumentSegment).where( DocumentSegment.id.in_(segment_ids), DocumentSegment.dataset_id == dataset_id, DocumentSegment.document_id == document_id, diff --git a/api/tasks/mail_account_deletion_task.py b/api/tasks/mail_account_deletion_task.py index 0c60ae53d5..a6f8ce2f0b 100644 --- a/api/tasks/mail_account_deletion_task.py +++ b/api/tasks/mail_account_deletion_task.py @@ -3,14 +3,20 @@ import time import click from celery import shared_task # type: ignore -from flask import render_template from extensions.ext_mail import mail +from libs.email_i18n import EmailType, get_email_i18n_service @shared_task(queue="mail") -def send_deletion_success_task(to): - """Send email to user regarding account deletion.""" +def 
send_deletion_success_task(to: str, language: str = "en-US") -> None: + """ + Send account deletion success email with internationalization support. + + Args: + to: Recipient email address + language: Language code for email localization + """ if not mail.is_inited(): return @@ -18,12 +24,16 @@ def send_deletion_success_task(to): start_at = time.perf_counter() try: - html_content = render_template( - "delete_account_success_template_en-US.html", + email_service = get_email_i18n_service() + email_service.send_email( + email_type=EmailType.ACCOUNT_DELETION_SUCCESS, + language_code=language, to=to, - email=to, + template_context={ + "to": to, + "email": to, + }, ) - mail.send(to=to, subject="Your Dify.AI Account Has Been Successfully Deleted", html=html_content) end_at = time.perf_counter() logging.info( @@ -36,12 +46,14 @@ def send_deletion_success_task(to): @shared_task(queue="mail") -def send_account_deletion_verification_code(to, code): - """Send email to user regarding account deletion verification code. +def send_account_deletion_verification_code(to: str, code: str, language: str = "en-US") -> None: + """ + Send account deletion verification code email with internationalization support. 
Args: - to (str): Recipient email address - code (str): Verification code + to: Recipient email address + code: Verification code + language: Language code for email localization """ if not mail.is_inited(): return @@ -50,8 +62,16 @@ def send_account_deletion_verification_code(to, code): start_at = time.perf_counter() try: - html_content = render_template("delete_account_code_email_template_en-US.html", to=to, code=code) - mail.send(to=to, subject="Dify.AI Account Deletion and Verification", html=html_content) + email_service = get_email_i18n_service() + email_service.send_email( + email_type=EmailType.ACCOUNT_DELETION_VERIFICATION, + language_code=language, + to=to, + template_context={ + "to": to, + "code": code, + }, + ) end_at = time.perf_counter() logging.info( diff --git a/api/tasks/mail_change_mail_task.py b/api/tasks/mail_change_mail_task.py index da44040b7d..6334fb22de 100644 --- a/api/tasks/mail_change_mail_task.py +++ b/api/tasks/mail_change_mail_task.py @@ -3,20 +3,21 @@ import time import click from celery import shared_task # type: ignore -from flask import render_template from extensions.ext_mail import mail -from services.feature_service import FeatureService +from libs.email_i18n import EmailType, get_email_i18n_service @shared_task(queue="mail") -def send_change_mail_task(language: str, to: str, code: str, phase: str): +def send_change_mail_task(language: str, to: str, code: str, phase: str) -> None: """ - Async Send change email mail - :param language: Language in which the email should be sent (e.g., 'en', 'zh') - :param to: Recipient email address - :param code: Change email code - :param phase: Change email phase (new_email, old_email) + Send change email notification with internationalization support. 
+ + Args: + language: Language code for email localization + to: Recipient email address + code: Email verification code + phase: Change email phase ('old_email' or 'new_email') """ if not mail.is_inited(): return @@ -24,51 +25,14 @@ def send_change_mail_task(language: str, to: str, code: str, phase: str): logging.info(click.style("Start change email mail to {}".format(to), fg="green")) start_at = time.perf_counter() - email_config = { - "zh-Hans": { - "old_email": { - "subject": "检测您现在的邮箱", - "template_with_brand": "change_mail_confirm_old_template_zh-CN.html", - "template_without_brand": "without-brand/change_mail_confirm_old_template_zh-CN.html", - }, - "new_email": { - "subject": "确认您的邮箱地址变更", - "template_with_brand": "change_mail_confirm_new_template_zh-CN.html", - "template_without_brand": "without-brand/change_mail_confirm_new_template_zh-CN.html", - }, - }, - "en": { - "old_email": { - "subject": "Check your current email", - "template_with_brand": "change_mail_confirm_old_template_en-US.html", - "template_without_brand": "without-brand/change_mail_confirm_old_template_en-US.html", - }, - "new_email": { - "subject": "Confirm your new email address", - "template_with_brand": "change_mail_confirm_new_template_en-US.html", - "template_without_brand": "without-brand/change_mail_confirm_new_template_en-US.html", - }, - }, - } - - # send change email mail using different languages try: - system_features = FeatureService.get_system_features() - lang_key = "zh-Hans" if language == "zh-Hans" else "en" - - if phase not in ["old_email", "new_email"]: - raise ValueError("Invalid phase") - - config = email_config[lang_key][phase] - subject = config["subject"] - - if system_features.branding.enabled: - template = config["template_without_brand"] - else: - template = config["template_with_brand"] - - html_content = render_template(template, to=to, code=code) - mail.send(to=to, subject=subject, html=html_content) + email_service = get_email_i18n_service() + 
email_service.send_change_email( + language_code=language, + to=to, + code=code, + phase=phase, + ) end_at = time.perf_counter() logging.info( @@ -76,3 +40,41 @@ def send_change_mail_task(language: str, to: str, code: str, phase: str): ) except Exception: logging.exception("Send change email mail to {} failed".format(to)) + + +@shared_task(queue="mail") +def send_change_mail_completed_notification_task(language: str, to: str) -> None: + """ + Send change email completed notification with internationalization support. + + Args: + language: Language code for email localization + to: Recipient email address + """ + if not mail.is_inited(): + return + + logging.info(click.style("Start change email completed notify mail to {}".format(to), fg="green")) + start_at = time.perf_counter() + + try: + email_service = get_email_i18n_service() + email_service.send_email( + email_type=EmailType.CHANGE_EMAIL_COMPLETED, + language_code=language, + to=to, + template_context={ + "to": to, + "email": to, + }, + ) + + end_at = time.perf_counter() + logging.info( + click.style( + "Send change email completed mail to {} succeeded: latency: {}".format(to, end_at - start_at), + fg="green", + ) + ) + except Exception: + logging.exception("Send change email completed mail to {} failed".format(to)) diff --git a/api/tasks/mail_email_code_login.py b/api/tasks/mail_email_code_login.py index ddad331725..34220784e9 100644 --- a/api/tasks/mail_email_code_login.py +++ b/api/tasks/mail_email_code_login.py @@ -3,19 +3,20 @@ import time import click from celery import shared_task # type: ignore -from flask import render_template from extensions.ext_mail import mail -from services.feature_service import FeatureService +from libs.email_i18n import EmailType, get_email_i18n_service @shared_task(queue="mail") -def send_email_code_login_mail_task(language: str, to: str, code: str): +def send_email_code_login_mail_task(language: str, to: str, code: str) -> None: """ - Async Send email code login mail - 
:param language: Language in which the email should be sent (e.g., 'en', 'zh') - :param to: Recipient email address - :param code: Email code to be included in the email + Send email code login email with internationalization support. + + Args: + language: Language code for email localization + to: Recipient email address + code: Email verification code """ if not mail.is_inited(): return @@ -23,28 +24,17 @@ def send_email_code_login_mail_task(language: str, to: str, code: str): logging.info(click.style("Start email code login mail to {}".format(to), fg="green")) start_at = time.perf_counter() - # send email code login mail using different languages try: - if language == "zh-Hans": - template = "email_code_login_mail_template_zh-CN.html" - system_features = FeatureService.get_system_features() - if system_features.branding.enabled: - application_title = system_features.branding.application_title - template = "without-brand/email_code_login_mail_template_zh-CN.html" - html_content = render_template(template, to=to, code=code, application_title=application_title) - else: - html_content = render_template(template, to=to, code=code) - mail.send(to=to, subject="邮箱验证码", html=html_content) - else: - template = "email_code_login_mail_template_en-US.html" - system_features = FeatureService.get_system_features() - if system_features.branding.enabled: - application_title = system_features.branding.application_title - template = "without-brand/email_code_login_mail_template_en-US.html" - html_content = render_template(template, to=to, code=code, application_title=application_title) - else: - html_content = render_template(template, to=to, code=code) - mail.send(to=to, subject="Email Code", html=html_content) + email_service = get_email_i18n_service() + email_service.send_email( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=language, + to=to, + template_context={ + "to": to, + "code": code, + }, + ) end_at = time.perf_counter() logging.info( diff --git 
a/api/tasks/mail_enterprise_task.py b/api/tasks/mail_enterprise_task.py index b9d8fd55df..a1c2908624 100644 --- a/api/tasks/mail_enterprise_task.py +++ b/api/tasks/mail_enterprise_task.py @@ -1,15 +1,17 @@ import logging import time +from collections.abc import Mapping import click from celery import shared_task # type: ignore from flask import render_template_string from extensions.ext_mail import mail +from libs.email_i18n import get_email_i18n_service @shared_task(queue="mail") -def send_enterprise_email_task(to, subject, body, substitutions): +def send_enterprise_email_task(to: list[str], subject: str, body: str, substitutions: Mapping[str, str]): if not mail.is_inited(): return @@ -19,11 +21,8 @@ def send_enterprise_email_task(to, subject, body, substitutions): try: html_content = render_template_string(body, **substitutions) - if isinstance(to, list): - for t in to: - mail.send(to=t, subject=subject, html=html_content) - else: - mail.send(to=to, subject=subject, html=html_content) + email_service = get_email_i18n_service() + email_service.send_raw_email(to=to, subject=subject, html_content=html_content) end_at = time.perf_counter() logging.info( diff --git a/api/tasks/mail_invite_member_task.py b/api/tasks/mail_invite_member_task.py index 7ca85c7f2d..8c73de0111 100644 --- a/api/tasks/mail_invite_member_task.py +++ b/api/tasks/mail_invite_member_task.py @@ -3,24 +3,23 @@ import time import click from celery import shared_task # type: ignore -from flask import render_template from configs import dify_config from extensions.ext_mail import mail -from services.feature_service import FeatureService +from libs.email_i18n import EmailType, get_email_i18n_service @shared_task(queue="mail") -def send_invite_member_mail_task(language: str, to: str, token: str, inviter_name: str, workspace_name: str): +def send_invite_member_mail_task(language: str, to: str, token: str, inviter_name: str, workspace_name: str) -> None: """ - Async Send invite member mail - :param 
language - :param to - :param token - :param inviter_name - :param workspace_name + Send invite member email with internationalization support. - Usage: send_invite_member_mail_task.delay(language, to, token, inviter_name, workspace_name) + Args: + language: Language code for email localization + to: Recipient email address + token: Invitation token + inviter_name: Name of the person sending the invitation + workspace_name: Name of the workspace """ if not mail.is_inited(): return @@ -30,49 +29,20 @@ def send_invite_member_mail_task(language: str, to: str, token: str, inviter_nam ) start_at = time.perf_counter() - # send invite member mail using different languages try: url = f"{dify_config.CONSOLE_WEB_URL}/activate?token={token}" - if language == "zh-Hans": - template = "invite_member_mail_template_zh-CN.html" - system_features = FeatureService.get_system_features() - if system_features.branding.enabled: - application_title = system_features.branding.application_title - template = "without-brand/invite_member_mail_template_zh-CN.html" - html_content = render_template( - template, - to=to, - inviter_name=inviter_name, - workspace_name=workspace_name, - url=url, - application_title=application_title, - ) - mail.send(to=to, subject=f"立即加入 {application_title} 工作空间", html=html_content) - else: - html_content = render_template( - template, to=to, inviter_name=inviter_name, workspace_name=workspace_name, url=url - ) - mail.send(to=to, subject="立即加入 Dify 工作空间", html=html_content) - else: - template = "invite_member_mail_template_en-US.html" - system_features = FeatureService.get_system_features() - if system_features.branding.enabled: - application_title = system_features.branding.application_title - template = "without-brand/invite_member_mail_template_en-US.html" - html_content = render_template( - template, - to=to, - inviter_name=inviter_name, - workspace_name=workspace_name, - url=url, - application_title=application_title, - ) - mail.send(to=to, subject=f"Join 
{application_title} Workspace Now", html=html_content) - else: - html_content = render_template( - template, to=to, inviter_name=inviter_name, workspace_name=workspace_name, url=url - ) - mail.send(to=to, subject="Join Dify Workspace Now", html=html_content) + email_service = get_email_i18n_service() + email_service.send_email( + email_type=EmailType.INVITE_MEMBER, + language_code=language, + to=to, + template_context={ + "to": to, + "inviter_name": inviter_name, + "workspace_name": workspace_name, + "url": url, + }, + ) end_at = time.perf_counter() logging.info( diff --git a/api/tasks/mail_owner_transfer_task.py b/api/tasks/mail_owner_transfer_task.py index 8d05c6dc0f..e566a6bc56 100644 --- a/api/tasks/mail_owner_transfer_task.py +++ b/api/tasks/mail_owner_transfer_task.py @@ -3,47 +3,40 @@ import time import click from celery import shared_task # type: ignore -from flask import render_template from extensions.ext_mail import mail -from services.feature_service import FeatureService +from libs.email_i18n import EmailType, get_email_i18n_service @shared_task(queue="mail") -def send_owner_transfer_confirm_task(language: str, to: str, code: str, workspace: str): +def send_owner_transfer_confirm_task(language: str, to: str, code: str, workspace: str) -> None: """ - Async Send owner transfer confirm mail - :param language: Language in which the email should be sent (e.g., 'en', 'zh') - :param to: Recipient email address - :param workspace: Workspace name + Send owner transfer confirmation email with internationalization support. 
+ + Args: + language: Language code for email localization + to: Recipient email address + code: Verification code + workspace: Workspace name """ if not mail.is_inited(): return - logging.info(click.style("Start change email mail to {}".format(to), fg="green")) + logging.info(click.style("Start owner transfer confirm mail to {}".format(to), fg="green")) start_at = time.perf_counter() - # send change email mail using different languages + try: - if language == "zh-Hans": - template = "transfer_workspace_owner_confirm_template_zh-CN.html" - system_features = FeatureService.get_system_features() - if system_features.branding.enabled: - template = "without-brand/transfer_workspace_owner_confirm_template_zh-CN.html" - html_content = render_template(template, to=to, code=code, WorkspaceName=workspace) - mail.send(to=to, subject="验证您转移工作空间所有权的请求", html=html_content) - else: - html_content = render_template(template, to=to, code=code, WorkspaceName=workspace) - mail.send(to=to, subject="验证您转移工作空间所有权的请求", html=html_content) - else: - template = "transfer_workspace_owner_confirm_template_en-US.html" - system_features = FeatureService.get_system_features() - if system_features.branding.enabled: - template = "without-brand/transfer_workspace_owner_confirm_template_en-US.html" - html_content = render_template(template, to=to, code=code, WorkspaceName=workspace) - mail.send(to=to, subject="Verify Your Request to Transfer Workspace Ownership", html=html_content) - else: - html_content = render_template(template, to=to, code=code, WorkspaceName=workspace) - mail.send(to=to, subject="Verify Your Request to Transfer Workspace Ownership", html=html_content) + email_service = get_email_i18n_service() + email_service.send_email( + email_type=EmailType.OWNER_TRANSFER_CONFIRM, + language_code=language, + to=to, + template_context={ + "to": to, + "code": code, + "WorkspaceName": workspace, + }, + ) end_at = time.perf_counter() logging.info( @@ -57,96 +50,80 @@ def 
send_owner_transfer_confirm_task(language: str, to: str, code: str, workspac @shared_task(queue="mail") -def send_old_owner_transfer_notify_email_task(language: str, to: str, workspace: str, new_owner_email: str): +def send_old_owner_transfer_notify_email_task(language: str, to: str, workspace: str, new_owner_email: str) -> None: """ - Async Send owner transfer confirm mail - :param language: Language in which the email should be sent (e.g., 'en', 'zh') - :param to: Recipient email address - :param workspace: Workspace name - :param new_owner_email: New owner email + Send old owner transfer notification email with internationalization support. + + Args: + language: Language code for email localization + to: Recipient email address + workspace: Workspace name + new_owner_email: New owner email address """ if not mail.is_inited(): return - logging.info(click.style("Start change email mail to {}".format(to), fg="green")) + logging.info(click.style("Start old owner transfer notify mail to {}".format(to), fg="green")) start_at = time.perf_counter() - # send change email mail using different languages + try: - if language == "zh-Hans": - template = "transfer_workspace_old_owner_notify_template_zh-CN.html" - system_features = FeatureService.get_system_features() - if system_features.branding.enabled: - template = "without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html" - html_content = render_template(template, to=to, WorkspaceName=workspace, NewOwnerEmail=new_owner_email) - mail.send(to=to, subject="工作区所有权已转移", html=html_content) - else: - html_content = render_template(template, to=to, WorkspaceName=workspace, NewOwnerEmail=new_owner_email) - mail.send(to=to, subject="工作区所有权已转移", html=html_content) - else: - template = "transfer_workspace_old_owner_notify_template_en-US.html" - system_features = FeatureService.get_system_features() - if system_features.branding.enabled: - template = "without-brand/transfer_workspace_old_owner_notify_template_en-US.html" - 
html_content = render_template(template, to=to, WorkspaceName=workspace, NewOwnerEmail=new_owner_email) - mail.send(to=to, subject="Workspace ownership has been transferred", html=html_content) - else: - html_content = render_template(template, to=to, WorkspaceName=workspace, NewOwnerEmail=new_owner_email) - mail.send(to=to, subject="Workspace ownership has been transferred", html=html_content) + email_service = get_email_i18n_service() + email_service.send_email( + email_type=EmailType.OWNER_TRANSFER_OLD_NOTIFY, + language_code=language, + to=to, + template_context={ + "to": to, + "WorkspaceName": workspace, + "NewOwnerEmail": new_owner_email, + }, + ) end_at = time.perf_counter() logging.info( click.style( - "Send owner transfer confirm mail to {} succeeded: latency: {}".format(to, end_at - start_at), + "Send old owner transfer notify mail to {} succeeded: latency: {}".format(to, end_at - start_at), fg="green", ) ) except Exception: - logging.exception("owner transfer confirm email mail to {} failed".format(to)) + logging.exception("old owner transfer notify email mail to {} failed".format(to)) @shared_task(queue="mail") -def send_new_owner_transfer_notify_email_task(language: str, to: str, workspace: str): +def send_new_owner_transfer_notify_email_task(language: str, to: str, workspace: str) -> None: """ - Async Send owner transfer confirm mail - :param language: Language in which the email should be sent (e.g., 'en', 'zh') - :param to: Recipient email address - :param code: Change email code - :param workspace: Workspace name + Send new owner transfer notification email with internationalization support. 
+ + Args: + language: Language code for email localization + to: Recipient email address + workspace: Workspace name """ if not mail.is_inited(): return - logging.info(click.style("Start change email mail to {}".format(to), fg="green")) + logging.info(click.style("Start new owner transfer notify mail to {}".format(to), fg="green")) start_at = time.perf_counter() - # send change email mail using different languages + try: - if language == "zh-Hans": - template = "transfer_workspace_new_owner_notify_template_zh-CN.html" - system_features = FeatureService.get_system_features() - if system_features.branding.enabled: - template = "without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html" - html_content = render_template(template, to=to, WorkspaceName=workspace) - mail.send(to=to, subject=f"您现在是 {workspace} 的所有者", html=html_content) - else: - html_content = render_template(template, to=to, WorkspaceName=workspace) - mail.send(to=to, subject=f"您现在是 {workspace} 的所有者", html=html_content) - else: - template = "transfer_workspace_new_owner_notify_template_en-US.html" - system_features = FeatureService.get_system_features() - if system_features.branding.enabled: - template = "without-brand/transfer_workspace_new_owner_notify_template_en-US.html" - html_content = render_template(template, to=to, WorkspaceName=workspace) - mail.send(to=to, subject=f"You are now the owner of {workspace}", html=html_content) - else: - html_content = render_template(template, to=to, WorkspaceName=workspace) - mail.send(to=to, subject=f"You are now the owner of {workspace}", html=html_content) + email_service = get_email_i18n_service() + email_service.send_email( + email_type=EmailType.OWNER_TRANSFER_NEW_NOTIFY, + language_code=language, + to=to, + template_context={ + "to": to, + "WorkspaceName": workspace, + }, + ) end_at = time.perf_counter() logging.info( click.style( - "Send owner transfer confirm mail to {} succeeded: latency: {}".format(to, end_at - start_at), + "Send new owner 
transfer notify mail to {} succeeded: latency: {}".format(to, end_at - start_at), fg="green", ) ) except Exception: - logging.exception("owner transfer confirm email mail to {} failed".format(to)) + logging.exception("new owner transfer notify email mail to {} failed".format(to)) diff --git a/api/tasks/mail_reset_password_task.py b/api/tasks/mail_reset_password_task.py index d4f4482a48..e2482f2101 100644 --- a/api/tasks/mail_reset_password_task.py +++ b/api/tasks/mail_reset_password_task.py @@ -3,19 +3,20 @@ import time import click from celery import shared_task # type: ignore -from flask import render_template from extensions.ext_mail import mail -from services.feature_service import FeatureService +from libs.email_i18n import EmailType, get_email_i18n_service @shared_task(queue="mail") -def send_reset_password_mail_task(language: str, to: str, code: str): +def send_reset_password_mail_task(language: str, to: str, code: str) -> None: """ - Async Send reset password mail - :param language: Language in which the email should be sent (e.g., 'en', 'zh') - :param to: Recipient email address - :param code: Reset password code + Send reset password email with internationalization support. 
+ + Args: + language: Language code for email localization + to: Recipient email address + code: Reset password code """ if not mail.is_inited(): return @@ -23,30 +24,17 @@ def send_reset_password_mail_task(language: str, to: str, code: str): logging.info(click.style("Start password reset mail to {}".format(to), fg="green")) start_at = time.perf_counter() - # send reset password mail using different languages try: - if language == "zh-Hans": - template = "reset_password_mail_template_zh-CN.html" - system_features = FeatureService.get_system_features() - if system_features.branding.enabled: - application_title = system_features.branding.application_title - template = "without-brand/reset_password_mail_template_zh-CN.html" - html_content = render_template(template, to=to, code=code, application_title=application_title) - mail.send(to=to, subject=f"设置您的 {application_title} 密码", html=html_content) - else: - html_content = render_template(template, to=to, code=code) - mail.send(to=to, subject="设置您的 Dify 密码", html=html_content) - else: - template = "reset_password_mail_template_en-US.html" - system_features = FeatureService.get_system_features() - if system_features.branding.enabled: - application_title = system_features.branding.application_title - template = "without-brand/reset_password_mail_template_en-US.html" - html_content = render_template(template, to=to, code=code, application_title=application_title) - mail.send(to=to, subject=f"Set Your {application_title} Password", html=html_content) - else: - html_content = render_template(template, to=to, code=code) - mail.send(to=to, subject="Set Your Dify Password", html=html_content) + email_service = get_email_i18n_service() + email_service.send_email( + email_type=EmailType.RESET_PASSWORD, + language_code=language, + to=to, + template_context={ + "to": to, + "code": code, + }, + ) end_at = time.perf_counter() logging.info( diff --git a/api/tasks/process_tenant_plugin_autoupgrade_check_task.py 
b/api/tasks/process_tenant_plugin_autoupgrade_check_task.py new file mode 100644 index 0000000000..6fcdad0525 --- /dev/null +++ b/api/tasks/process_tenant_plugin_autoupgrade_check_task.py @@ -0,0 +1,166 @@ +import traceback +import typing + +import click +from celery import shared_task # type: ignore + +from core.helper import marketplace +from core.helper.marketplace import MarketplacePluginDeclaration +from core.plugin.entities.plugin import PluginInstallationSource +from core.plugin.impl.plugin import PluginInstaller +from models.account import TenantPluginAutoUpgradeStrategy + +RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3 + + +cached_plugin_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {} + + +def marketplace_batch_fetch_plugin_manifests( + plugin_ids_plain_list: list[str], +) -> list[MarketplacePluginDeclaration]: + global cached_plugin_manifests + # return marketplace.batch_fetch_plugin_manifests(plugin_ids_plain_list) + not_included_plugin_ids = [ + plugin_id for plugin_id in plugin_ids_plain_list if plugin_id not in cached_plugin_manifests + ] + if not_included_plugin_ids: + manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_included_plugin_ids) + for manifest in manifests: + cached_plugin_manifests[manifest.plugin_id] = manifest + + if ( + len(manifests) == 0 + ): # this indicates that the plugin not found in marketplace, should set None in cache to prevent future check + for plugin_id in not_included_plugin_ids: + cached_plugin_manifests[plugin_id] = None + + result: list[MarketplacePluginDeclaration] = [] + for plugin_id in plugin_ids_plain_list: + final_manifest = cached_plugin_manifests.get(plugin_id) + if final_manifest is not None: + result.append(final_manifest) + + return result + + +@shared_task(queue="plugin") +def process_tenant_plugin_autoupgrade_check_task( + tenant_id: str, + strategy_setting: TenantPluginAutoUpgradeStrategy.StrategySetting, + upgrade_time_of_day: int, + 
upgrade_mode: TenantPluginAutoUpgradeStrategy.UpgradeMode, + exclude_plugins: list[str], + include_plugins: list[str], +): + try: + manager = PluginInstaller() + + click.echo( + click.style( + "Checking upgradable plugin for tenant: {}".format(tenant_id), + fg="green", + ) + ) + + if strategy_setting == TenantPluginAutoUpgradeStrategy.StrategySetting.DISABLED: + return + + # get plugin_ids to check + plugin_ids: list[tuple[str, str, str]] = [] # plugin_id, version, unique_identifier + click.echo(click.style("Upgrade mode: {}".format(upgrade_mode), fg="green")) + + if upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL and include_plugins: + all_plugins = manager.list_plugins(tenant_id) + + for plugin in all_plugins: + if plugin.source == PluginInstallationSource.Marketplace and plugin.plugin_id in include_plugins: + plugin_ids.append( + ( + plugin.plugin_id, + plugin.version, + plugin.plugin_unique_identifier, + ) + ) + + elif upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE: + # get all plugins and remove excluded plugins + all_plugins = manager.list_plugins(tenant_id) + plugin_ids = [ + (plugin.plugin_id, plugin.version, plugin.plugin_unique_identifier) + for plugin in all_plugins + if plugin.source == PluginInstallationSource.Marketplace and plugin.plugin_id not in exclude_plugins + ] + elif upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.ALL: + all_plugins = manager.list_plugins(tenant_id) + plugin_ids = [ + (plugin.plugin_id, plugin.version, plugin.plugin_unique_identifier) + for plugin in all_plugins + if plugin.source == PluginInstallationSource.Marketplace + ] + + if not plugin_ids: + return + + plugin_ids_plain_list = [plugin_id for plugin_id, _, _ in plugin_ids] + + manifests = marketplace_batch_fetch_plugin_manifests(plugin_ids_plain_list) + + if not manifests: + return + + for manifest in manifests: + for plugin_id, version, original_unique_identifier in plugin_ids: + if manifest.plugin_id != plugin_id: + 
continue + + try: + current_version = version + latest_version = manifest.latest_version + + def fix_only_checker(latest_version, current_version): + latest_version_tuple = tuple(int(val) for val in latest_version.split(".")) + current_version_tuple = tuple(int(val) for val in current_version.split(".")) + + if ( + latest_version_tuple[0] == current_version_tuple[0] + and latest_version_tuple[1] == current_version_tuple[1] + ): + return latest_version_tuple[2] != current_version_tuple[2] + return False + + version_checker = { + TenantPluginAutoUpgradeStrategy.StrategySetting.LATEST: lambda latest_version, + current_version: latest_version != current_version, + TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY: fix_only_checker, + } + + if version_checker[strategy_setting](latest_version, current_version): + # execute upgrade + new_unique_identifier = manifest.latest_package_identifier + + marketplace.record_install_plugin_event(new_unique_identifier) + click.echo( + click.style( + "Upgrade plugin: {} -> {}".format(original_unique_identifier, new_unique_identifier), + fg="green", + ) + ) + task_start_resp = manager.upgrade_plugin( + tenant_id, + original_unique_identifier, + new_unique_identifier, + PluginInstallationSource.Marketplace, + { + "plugin_unique_identifier": new_unique_identifier, + }, + ) + except Exception as e: + click.echo(click.style("Error when upgrading plugin: {}".format(e), fg="red")) + traceback.print_exc() + break + + except Exception as e: + click.echo(click.style("Error when checking upgradable plugin: {}".format(e), fg="red")) + traceback.print_exc() + return diff --git a/api/tasks/recover_document_indexing_task.py b/api/tasks/recover_document_indexing_task.py index e7d49c78dc..dfb2389579 100644 --- a/api/tasks/recover_document_indexing_task.py +++ b/api/tasks/recover_document_indexing_task.py @@ -21,7 +21,7 @@ def recover_document_indexing_task(dataset_id: str, document_id: str): logging.info(click.style("Recover document: 
{}".format(document_id), fg="green")) start_at = time.perf_counter() - document = db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first() + document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() if not document: logging.info(click.style("Document not found: {}".format(document_id), fg="red")) diff --git a/api/tasks/remove_app_and_related_data_task.py b/api/tasks/remove_app_and_related_data_task.py index 179adcbd6e..1619f8c546 100644 --- a/api/tasks/remove_app_and_related_data_task.py +++ b/api/tasks/remove_app_and_related_data_task.py @@ -76,7 +76,7 @@ def remove_app_and_related_data_task(self, tenant_id: str, app_id: str): def _delete_app_model_configs(tenant_id: str, app_id: str): def del_model_config(model_config_id: str): - db.session.query(AppModelConfig).filter(AppModelConfig.id == model_config_id).delete(synchronize_session=False) + db.session.query(AppModelConfig).where(AppModelConfig.id == model_config_id).delete(synchronize_session=False) _delete_records( """select id from app_model_configs where app_id=:app_id limit 1000""", @@ -88,14 +88,14 @@ def _delete_app_model_configs(tenant_id: str, app_id: str): def _delete_app_site(tenant_id: str, app_id: str): def del_site(site_id: str): - db.session.query(Site).filter(Site.id == site_id).delete(synchronize_session=False) + db.session.query(Site).where(Site.id == site_id).delete(synchronize_session=False) _delete_records("""select id from sites where app_id=:app_id limit 1000""", {"app_id": app_id}, del_site, "site") def _delete_app_mcp_servers(tenant_id: str, app_id: str): def del_mcp_server(mcp_server_id: str): - db.session.query(AppMCPServer).filter(AppMCPServer.id == mcp_server_id).delete(synchronize_session=False) + db.session.query(AppMCPServer).where(AppMCPServer.id == mcp_server_id).delete(synchronize_session=False) _delete_records( """select id from app_mcp_servers where app_id=:app_id limit 
1000""", @@ -107,7 +107,7 @@ def _delete_app_mcp_servers(tenant_id: str, app_id: str): def _delete_app_api_tokens(tenant_id: str, app_id: str): def del_api_token(api_token_id: str): - db.session.query(ApiToken).filter(ApiToken.id == api_token_id).delete(synchronize_session=False) + db.session.query(ApiToken).where(ApiToken.id == api_token_id).delete(synchronize_session=False) _delete_records( """select id from api_tokens where app_id=:app_id limit 1000""", {"app_id": app_id}, del_api_token, "api token" @@ -116,7 +116,7 @@ def _delete_app_api_tokens(tenant_id: str, app_id: str): def _delete_installed_apps(tenant_id: str, app_id: str): def del_installed_app(installed_app_id: str): - db.session.query(InstalledApp).filter(InstalledApp.id == installed_app_id).delete(synchronize_session=False) + db.session.query(InstalledApp).where(InstalledApp.id == installed_app_id).delete(synchronize_session=False) _delete_records( """select id from installed_apps where tenant_id=:tenant_id and app_id=:app_id limit 1000""", @@ -128,7 +128,7 @@ def _delete_installed_apps(tenant_id: str, app_id: str): def _delete_recommended_apps(tenant_id: str, app_id: str): def del_recommended_app(recommended_app_id: str): - db.session.query(RecommendedApp).filter(RecommendedApp.id == recommended_app_id).delete( + db.session.query(RecommendedApp).where(RecommendedApp.id == recommended_app_id).delete( synchronize_session=False ) @@ -142,9 +142,9 @@ def _delete_recommended_apps(tenant_id: str, app_id: str): def _delete_app_annotation_data(tenant_id: str, app_id: str): def del_annotation_hit_history(annotation_hit_history_id: str): - db.session.query(AppAnnotationHitHistory).filter( - AppAnnotationHitHistory.id == annotation_hit_history_id - ).delete(synchronize_session=False) + db.session.query(AppAnnotationHitHistory).where(AppAnnotationHitHistory.id == annotation_hit_history_id).delete( + synchronize_session=False + ) _delete_records( """select id from app_annotation_hit_histories where app_id=:app_id 
limit 1000""", @@ -154,7 +154,7 @@ def _delete_app_annotation_data(tenant_id: str, app_id: str): ) def del_annotation_setting(annotation_setting_id: str): - db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.id == annotation_setting_id).delete( + db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.id == annotation_setting_id).delete( synchronize_session=False ) @@ -168,7 +168,7 @@ def _delete_app_annotation_data(tenant_id: str, app_id: str): def _delete_app_dataset_joins(tenant_id: str, app_id: str): def del_dataset_join(dataset_join_id: str): - db.session.query(AppDatasetJoin).filter(AppDatasetJoin.id == dataset_join_id).delete(synchronize_session=False) + db.session.query(AppDatasetJoin).where(AppDatasetJoin.id == dataset_join_id).delete(synchronize_session=False) _delete_records( """select id from app_dataset_joins where app_id=:app_id limit 1000""", @@ -180,7 +180,7 @@ def _delete_app_dataset_joins(tenant_id: str, app_id: str): def _delete_app_workflows(tenant_id: str, app_id: str): def del_workflow(workflow_id: str): - db.session.query(Workflow).filter(Workflow.id == workflow_id).delete(synchronize_session=False) + db.session.query(Workflow).where(Workflow.id == workflow_id).delete(synchronize_session=False) _delete_records( """select id from workflows where tenant_id=:tenant_id and app_id=:app_id limit 1000""", @@ -220,7 +220,7 @@ def _delete_app_workflow_node_executions(tenant_id: str, app_id: str): def _delete_app_workflow_app_logs(tenant_id: str, app_id: str): def del_workflow_app_log(workflow_app_log_id: str): - db.session.query(WorkflowAppLog).filter(WorkflowAppLog.id == workflow_app_log_id).delete( + db.session.query(WorkflowAppLog).where(WorkflowAppLog.id == workflow_app_log_id).delete( synchronize_session=False ) @@ -234,10 +234,10 @@ def _delete_app_workflow_app_logs(tenant_id: str, app_id: str): def _delete_app_conversations(tenant_id: str, app_id: str): def del_conversation(conversation_id: str): - 
db.session.query(PinnedConversation).filter(PinnedConversation.conversation_id == conversation_id).delete( + db.session.query(PinnedConversation).where(PinnedConversation.conversation_id == conversation_id).delete( synchronize_session=False ) - db.session.query(Conversation).filter(Conversation.id == conversation_id).delete(synchronize_session=False) + db.session.query(Conversation).where(Conversation.id == conversation_id).delete(synchronize_session=False) _delete_records( """select id from conversations where app_id=:app_id limit 1000""", @@ -257,19 +257,19 @@ def _delete_conversation_variables(*, app_id: str): def _delete_app_messages(tenant_id: str, app_id: str): def del_message(message_id: str): - db.session.query(MessageFeedback).filter(MessageFeedback.message_id == message_id).delete( + db.session.query(MessageFeedback).where(MessageFeedback.message_id == message_id).delete( synchronize_session=False ) - db.session.query(MessageAnnotation).filter(MessageAnnotation.message_id == message_id).delete( + db.session.query(MessageAnnotation).where(MessageAnnotation.message_id == message_id).delete( synchronize_session=False ) - db.session.query(MessageChain).filter(MessageChain.message_id == message_id).delete(synchronize_session=False) - db.session.query(MessageAgentThought).filter(MessageAgentThought.message_id == message_id).delete( + db.session.query(MessageChain).where(MessageChain.message_id == message_id).delete(synchronize_session=False) + db.session.query(MessageAgentThought).where(MessageAgentThought.message_id == message_id).delete( synchronize_session=False ) - db.session.query(MessageFile).filter(MessageFile.message_id == message_id).delete(synchronize_session=False) - db.session.query(SavedMessage).filter(SavedMessage.message_id == message_id).delete(synchronize_session=False) - db.session.query(Message).filter(Message.id == message_id).delete() + db.session.query(MessageFile).where(MessageFile.message_id == 
message_id).delete(synchronize_session=False) + db.session.query(SavedMessage).where(SavedMessage.message_id == message_id).delete(synchronize_session=False) + db.session.query(Message).where(Message.id == message_id).delete() _delete_records( """select id from messages where app_id=:app_id limit 1000""", {"app_id": app_id}, del_message, "message" @@ -278,7 +278,7 @@ def _delete_app_messages(tenant_id: str, app_id: str): def _delete_workflow_tool_providers(tenant_id: str, app_id: str): def del_tool_provider(tool_provider_id: str): - db.session.query(WorkflowToolProvider).filter(WorkflowToolProvider.id == tool_provider_id).delete( + db.session.query(WorkflowToolProvider).where(WorkflowToolProvider.id == tool_provider_id).delete( synchronize_session=False ) @@ -292,7 +292,7 @@ def _delete_workflow_tool_providers(tenant_id: str, app_id: str): def _delete_app_tag_bindings(tenant_id: str, app_id: str): def del_tag_binding(tag_binding_id: str): - db.session.query(TagBinding).filter(TagBinding.id == tag_binding_id).delete(synchronize_session=False) + db.session.query(TagBinding).where(TagBinding.id == tag_binding_id).delete(synchronize_session=False) _delete_records( """select id from tag_bindings where tenant_id=:tenant_id and target_id=:app_id limit 1000""", @@ -304,7 +304,7 @@ def _delete_app_tag_bindings(tenant_id: str, app_id: str): def _delete_end_users(tenant_id: str, app_id: str): def del_end_user(end_user_id: str): - db.session.query(EndUser).filter(EndUser.id == end_user_id).delete(synchronize_session=False) + db.session.query(EndUser).where(EndUser.id == end_user_id).delete(synchronize_session=False) _delete_records( """select id from end_users where tenant_id=:tenant_id and app_id=:app_id limit 1000""", @@ -316,7 +316,7 @@ def _delete_end_users(tenant_id: str, app_id: str): def _delete_trace_app_configs(tenant_id: str, app_id: str): def del_trace_app_config(trace_app_config_id: str): - db.session.query(TraceAppConfig).filter(TraceAppConfig.id == 
trace_app_config_id).delete( + db.session.query(TraceAppConfig).where(TraceAppConfig.id == trace_app_config_id).delete( synchronize_session=False ) diff --git a/api/tasks/remove_document_from_index_task.py b/api/tasks/remove_document_from_index_task.py index 0e2960788d..3f73cc7b40 100644 --- a/api/tasks/remove_document_from_index_task.py +++ b/api/tasks/remove_document_from_index_task.py @@ -22,7 +22,7 @@ def remove_document_from_index_task(document_id: str): logging.info(click.style("Start remove document segments from index: {}".format(document_id), fg="green")) start_at = time.perf_counter() - document = db.session.query(Document).filter(Document.id == document_id).first() + document = db.session.query(Document).where(Document.id == document_id).first() if not document: logging.info(click.style("Document not found: {}".format(document_id), fg="red")) db.session.close() @@ -43,7 +43,7 @@ def remove_document_from_index_task(document_id: str): index_processor = IndexProcessorFactory(document.doc_form).init_index_processor() - segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).all() + segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document.id).all() index_node_ids = [segment.index_node_id for segment in segments] if index_node_ids: try: @@ -51,7 +51,7 @@ def remove_document_from_index_task(document_id: str): except Exception: logging.exception(f"clean dataset {dataset.id} from index failed") # update segment to disable - db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).update( + db.session.query(DocumentSegment).where(DocumentSegment.document_id == document.id).update( { DocumentSegment.enabled: False, DocumentSegment.disabled_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), diff --git a/api/tasks/retry_document_indexing_task.py b/api/tasks/retry_document_indexing_task.py index 8f8c3f9d81..58f0156afb 100644 --- 
a/api/tasks/retry_document_indexing_task.py +++ b/api/tasks/retry_document_indexing_task.py @@ -25,7 +25,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]): documents: list[Document] = [] start_at = time.perf_counter() - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: logging.info(click.style("Dataset not found: {}".format(dataset_id), fg="red")) db.session.close() @@ -45,7 +45,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]): ) except Exception as e: document = ( - db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first() + db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() ) if document: document.indexing_status = "error" @@ -59,7 +59,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]): logging.info(click.style("Start retry document: {}".format(document_id), fg="green")) document = ( - db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first() + db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() ) if not document: logging.info(click.style("Document not found: {}".format(document_id), fg="yellow")) @@ -69,7 +69,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]): # clean old data index_processor = IndexProcessorFactory(document.doc_form).init_index_processor() - segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all() + segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all() if segments: index_node_ids = [segment.index_node_id for segment in segments] # delete from vector index diff --git a/api/tasks/sync_website_document_indexing_task.py 
b/api/tasks/sync_website_document_indexing_task.py index dba0a39c2d..539c2db80f 100644 --- a/api/tasks/sync_website_document_indexing_task.py +++ b/api/tasks/sync_website_document_indexing_task.py @@ -24,7 +24,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str): """ start_at = time.perf_counter() - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if dataset is None: raise ValueError("Dataset not found") @@ -41,7 +41,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str): ) except Exception as e: document = ( - db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first() + db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() ) if document: document.indexing_status = "error" @@ -53,7 +53,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str): return logging.info(click.style("Start sync website document: {}".format(document_id), fg="green")) - document = db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first() + document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() if not document: logging.info(click.style("Document not found: {}".format(document_id), fg="yellow")) return @@ -61,7 +61,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str): # clean old data index_processor = IndexProcessorFactory(document.doc_form).init_index_processor() - segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all() + segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all() if segments: index_node_ids = [segment.index_node_id for segment in segments] # delete from vector index diff --git 
a/api/templates/change_mail_completed_template_en-US.html b/api/templates/change_mail_completed_template_en-US.html new file mode 100644 index 0000000000..ecaf35868d --- /dev/null +++ b/api/templates/change_mail_completed_template_en-US.html @@ -0,0 +1,135 @@ + + + + + + + + +
+
+ + Dify Logo +
+

Your login email has been changed

+
+

You can now log into Dify with your new email address:

+
+
+ {{email}} +
+

If you did not make this change, email support@dify.ai.

+
+ + + + diff --git a/api/templates/change_mail_completed_template_zh-CN.html b/api/templates/change_mail_completed_template_zh-CN.html new file mode 100644 index 0000000000..b4fdb4b9ab --- /dev/null +++ b/api/templates/change_mail_completed_template_zh-CN.html @@ -0,0 +1,135 @@ + + + + + + + + +
+
+ + Dify Logo +
+

您的登录邮箱已更改

+
+

您现在可以使用新的电子邮件地址登录 Dify:

+
+
+ {{email}} +
+

如果您没有进行此更改,请发送电子邮件至 support@dify.ai

+
+ + + + diff --git a/api/templates/clean_document_job_mail_template-US.html b/api/templates/clean_document_job_mail_template-US.html index b26e494f80..97f3997c93 100644 --- a/api/templates/clean_document_job_mail_template-US.html +++ b/api/templates/clean_document_job_mail_template-US.html @@ -45,8 +45,7 @@ line-height: 120%; /* 28.8px */ } .button { - display: inline-block; - width: 480px; + display: block; padding: 8px 12px; color: white; text-decoration: none; diff --git a/api/templates/invite_member_mail_template_en-US.html b/api/templates/invite_member_mail_template_en-US.html index da29242869..a07c5f4b16 100644 --- a/api/templates/invite_member_mail_template_en-US.html +++ b/api/templates/invite_member_mail_template_en-US.html @@ -12,7 +12,7 @@ } .container { width: 504px; - height: 444px; + min-height: 444px; margin: 40px auto; padding: 0 48px; background-color: #fcfcfd; @@ -31,8 +31,7 @@ height: auto; } .button { - display: inline-block; - width: 480px; + display: block; padding: 8px 12px; color: white; text-decoration: none; diff --git a/api/templates/invite_member_mail_template_zh-CN.html b/api/templates/invite_member_mail_template_zh-CN.html index c7c321bf6f..27709a3c6d 100644 --- a/api/templates/invite_member_mail_template_zh-CN.html +++ b/api/templates/invite_member_mail_template_zh-CN.html @@ -28,11 +28,10 @@ .header img { max-width: 63px; - height: auto; + height: auto; } .button { - display: inline-block; - width: 480px; + display: block; padding: 8px 12px; color: white; text-decoration: none; diff --git a/api/templates/without-brand/change_mail_completed_template_en-US.html b/api/templates/without-brand/change_mail_completed_template_en-US.html new file mode 100644 index 0000000000..f211cc74d9 --- /dev/null +++ b/api/templates/without-brand/change_mail_completed_template_en-US.html @@ -0,0 +1,132 @@ + + + + + + + + +
+
+

Your login email has been changed

+
+

You can now log into {{application_title}} with your new email address:

+
+
+ {{email}} +
+

If you did not make this change, please contact support immediately.

+
+ + + + diff --git a/api/templates/without-brand/change_mail_completed_template_zh-CN.html b/api/templates/without-brand/change_mail_completed_template_zh-CN.html new file mode 100644 index 0000000000..c96604f0e5 --- /dev/null +++ b/api/templates/without-brand/change_mail_completed_template_zh-CN.html @@ -0,0 +1,132 @@ + + + + + + + + +
+
+

您的登录邮箱已更改

+
+

您现在可以使用新的电子邮件地址登录 {{application_title}}:

+
+
+ {{email}} +
+

如果您没有进行此更改,请立即联系支持人员。

+
+ + + + diff --git a/api/templates/without-brand/invite_member_mail_template_en-US.html b/api/templates/without-brand/invite_member_mail_template_en-US.html index f6b1966c52..fc7f3679ba 100644 --- a/api/templates/without-brand/invite_member_mail_template_en-US.html +++ b/api/templates/without-brand/invite_member_mail_template_en-US.html @@ -12,7 +12,7 @@ } .container { width: 504px; - height: 444px; + min-height: 444px; margin: 40px auto; padding: 0 48px; background-color: #fcfcfd; @@ -31,8 +31,7 @@ height: auto; } .button { - display: inline-block; - width: 480px; + display: block; padding: 8px 12px; color: white; text-decoration: none; diff --git a/api/templates/without-brand/invite_member_mail_template_zh-CN.html b/api/templates/without-brand/invite_member_mail_template_zh-CN.html index fd2d6b873f..e787c90914 100644 --- a/api/templates/without-brand/invite_member_mail_template_zh-CN.html +++ b/api/templates/without-brand/invite_member_mail_template_zh-CN.html @@ -12,7 +12,7 @@ } .container { width: 504px; - height: 444px; + min-height: 444px; margin: 40px auto; padding: 0 48px; background-color: #fcfcfd; @@ -31,8 +31,7 @@ height: auto; } .button { - display: inline-block; - width: 480px; + display: block; padding: 8px 12px; color: white; text-decoration: none; diff --git a/api/tests/integration_tests/services/test_workflow_draft_variable_service.py b/api/tests/integration_tests/services/test_workflow_draft_variable_service.py index 9d6d2cc33d..8711a7dd4e 100644 --- a/api/tests/integration_tests/services/test_workflow_draft_variable_service.py +++ b/api/tests/integration_tests/services/test_workflow_draft_variable_service.py @@ -214,7 +214,7 @@ class TestDraftVariableLoader(unittest.TestCase): def tearDown(self): with Session(bind=db.engine, expire_on_commit=False) as session: - session.query(WorkflowDraftVariable).filter(WorkflowDraftVariable.app_id == self._test_app_id).delete( + session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.app_id == 
self._test_app_id).delete( synchronize_session=False ) session.commit() diff --git a/api/tests/integration_tests/vdb/tablestore/test_tablestore.py b/api/tests/integration_tests/vdb/tablestore/test_tablestore.py index da890d0b7c..da549af1b6 100644 --- a/api/tests/integration_tests/vdb/tablestore/test_tablestore.py +++ b/api/tests/integration_tests/vdb/tablestore/test_tablestore.py @@ -1,4 +1,7 @@ import os +import uuid + +import tablestore from core.rag.datasource.vdb.tablestore.tablestore_vector import ( TableStoreConfig, @@ -6,6 +9,8 @@ from core.rag.datasource.vdb.tablestore.tablestore_vector import ( ) from tests.integration_tests.vdb.test_vector_store import ( AbstractVectorTest, + get_example_document, + get_example_text, setup_mock_redis, ) @@ -29,6 +34,49 @@ class TableStoreVectorTest(AbstractVectorTest): assert len(ids) == 1 assert ids[0] == self.example_doc_id + def create_vector(self): + self.vector.create( + texts=[get_example_document(doc_id=self.example_doc_id)], + embeddings=[self.example_embedding], + ) + while True: + search_response = self.vector._tablestore_client.search( + table_name=self.vector._table_name, + index_name=self.vector._index_name, + search_query=tablestore.SearchQuery(query=tablestore.MatchAllQuery(), get_total_count=True, limit=0), + columns_to_get=tablestore.ColumnsToGet(return_type=tablestore.ColumnReturnType.ALL_FROM_INDEX), + ) + if search_response.total_count == 1: + break + + def search_by_vector(self): + super().search_by_vector() + docs = self.vector.search_by_vector(self.example_embedding, document_ids_filter=[self.example_doc_id]) + assert len(docs) == 1 + assert docs[0].metadata["doc_id"] == self.example_doc_id + assert docs[0].metadata["score"] > 0 + + docs = self.vector.search_by_vector(self.example_embedding, document_ids_filter=[str(uuid.uuid4())]) + assert len(docs) == 0 + + def search_by_full_text(self): + super().search_by_full_text() + docs = self.vector.search_by_full_text(get_example_text(), 
document_ids_filter=[self.example_doc_id]) + assert len(docs) == 1 + assert docs[0].metadata["doc_id"] == self.example_doc_id + assert not hasattr(docs[0], "score") + + docs = self.vector.search_by_full_text(get_example_text(), document_ids_filter=[str(uuid.uuid4())]) + assert len(docs) == 0 + + def run_all_tests(self): + try: + self.vector.delete() + except Exception: + pass + + return super().run_all_tests() + def test_tablestore_vector(setup_mock_redis): TableStoreVectorTest().run_all_tests() diff --git a/api/tests/unit_tests/core/helper/test_encrypter.py b/api/tests/unit_tests/core/helper/test_encrypter.py index 61cf8f255d..5890009742 100644 --- a/api/tests/unit_tests/core/helper/test_encrypter.py +++ b/api/tests/unit_tests/core/helper/test_encrypter.py @@ -44,7 +44,7 @@ class TestEncryptToken: """Test successful token encryption""" mock_tenant = MagicMock() mock_tenant.encrypt_public_key = "mock_public_key" - mock_query.return_value.filter.return_value.first.return_value = mock_tenant + mock_query.return_value.where.return_value.first.return_value = mock_tenant mock_encrypt.return_value = b"encrypted_data" result = encrypt_token("tenant-123", "test_token") @@ -55,7 +55,7 @@ class TestEncryptToken: @patch("models.engine.db.session.query") def test_tenant_not_found(self, mock_query): """Test error when tenant doesn't exist""" - mock_query.return_value.filter.return_value.first.return_value = None + mock_query.return_value.where.return_value.first.return_value = None with pytest.raises(ValueError) as exc_info: encrypt_token("invalid-tenant", "test_token") @@ -127,7 +127,7 @@ class TestEncryptDecryptIntegration: # Setup mock tenant mock_tenant = MagicMock() mock_tenant.encrypt_public_key = "mock_public_key" - mock_query.return_value.filter.return_value.first.return_value = mock_tenant + mock_query.return_value.where.return_value.first.return_value = mock_tenant # Setup mock encryption/decryption original_token = "test_token_123" @@ -153,7 +153,7 @@ class 
TestSecurity: # Setup mock tenant mock_tenant = MagicMock() mock_tenant.encrypt_public_key = "tenant1_public_key" - mock_query.return_value.filter.return_value.first.return_value = mock_tenant + mock_query.return_value.where.return_value.first.return_value = mock_tenant mock_encrypt.return_value = b"encrypted_for_tenant1" # Encrypt token for tenant1 @@ -186,7 +186,7 @@ class TestSecurity: def test_encryption_randomness(self, mock_encrypt, mock_query): """Ensure same plaintext produces different ciphertext""" mock_tenant = MagicMock(encrypt_public_key="key") - mock_query.return_value.filter.return_value.first.return_value = mock_tenant + mock_query.return_value.where.return_value.first.return_value = mock_tenant # Different outputs for same input mock_encrypt.side_effect = [b"enc1", b"enc2", b"enc3"] @@ -211,7 +211,7 @@ class TestEdgeCases: """Test encryption of empty token""" mock_tenant = MagicMock() mock_tenant.encrypt_public_key = "mock_public_key" - mock_query.return_value.filter.return_value.first.return_value = mock_tenant + mock_query.return_value.where.return_value.first.return_value = mock_tenant mock_encrypt.return_value = b"encrypted_empty" result = encrypt_token("tenant-123", "") @@ -225,7 +225,7 @@ class TestEdgeCases: """Test tokens containing special/unicode characters""" mock_tenant = MagicMock() mock_tenant.encrypt_public_key = "mock_public_key" - mock_query.return_value.filter.return_value.first.return_value = mock_tenant + mock_query.return_value.where.return_value.first.return_value = mock_tenant mock_encrypt.return_value = b"encrypted_special" # Test various special characters @@ -248,7 +248,7 @@ class TestEdgeCases: """Test behavior when token exceeds RSA encryption limits""" mock_tenant = MagicMock() mock_tenant.encrypt_public_key = "mock_public_key" - mock_query.return_value.filter.return_value.first.return_value = mock_tenant + mock_query.return_value.where.return_value.first.return_value = mock_tenant # RSA 2048-bit can only encrypt ~245 
bytes # The actual limit depends on padding scheme diff --git a/api/tests/unit_tests/core/helper/test_trace_id_helper.py b/api/tests/unit_tests/core/helper/test_trace_id_helper.py new file mode 100644 index 0000000000..27bfe1af05 --- /dev/null +++ b/api/tests/unit_tests/core/helper/test_trace_id_helper.py @@ -0,0 +1,86 @@ +import pytest + +from core.helper.trace_id_helper import extract_external_trace_id_from_args, get_external_trace_id, is_valid_trace_id + + +class DummyRequest: + def __init__(self, headers=None, args=None, json=None, is_json=False): + self.headers = headers or {} + self.args = args or {} + self.json = json + self.is_json = is_json + + +class TestTraceIdHelper: + """Test cases for trace_id_helper.py""" + + @pytest.mark.parametrize( + ("trace_id", "expected"), + [ + ("abc123", True), + ("A-B_C-123", True), + ("a" * 128, True), + ("", False), + ("a" * 129, False), + ("abc!@#", False), + ("空格", False), + ("with space", False), + ], + ) + def test_is_valid_trace_id(self, trace_id, expected): + """Test trace_id validation for various cases""" + assert is_valid_trace_id(trace_id) is expected + + def test_get_external_trace_id_from_header(self): + """Should extract valid trace_id from header""" + req = DummyRequest(headers={"X-Trace-Id": "abc123"}) + assert get_external_trace_id(req) == "abc123" + + def test_get_external_trace_id_from_args(self): + """Should extract valid trace_id from args if header missing""" + req = DummyRequest(args={"trace_id": "abc123"}) + assert get_external_trace_id(req) == "abc123" + + def test_get_external_trace_id_from_json(self): + """Should extract valid trace_id from JSON body if header and args missing""" + req = DummyRequest(is_json=True, json={"trace_id": "abc123"}) + assert get_external_trace_id(req) == "abc123" + + def test_get_external_trace_id_priority(self): + """Header > args > json priority""" + req = DummyRequest( + headers={"X-Trace-Id": "header_id"}, + args={"trace_id": "args_id"}, + is_json=True, + 
json={"trace_id": "json_id"}, + ) + assert get_external_trace_id(req) == "header_id" + req2 = DummyRequest(args={"trace_id": "args_id"}, is_json=True, json={"trace_id": "json_id"}) + assert get_external_trace_id(req2) == "args_id" + req3 = DummyRequest(is_json=True, json={"trace_id": "json_id"}) + assert get_external_trace_id(req3) == "json_id" + + @pytest.mark.parametrize( + "req", + [ + DummyRequest(headers={"X-Trace-Id": "!!!"}), + DummyRequest(args={"trace_id": "!!!"}), + DummyRequest(is_json=True, json={"trace_id": "!!!"}), + DummyRequest(), + ], + ) + def test_get_external_trace_id_invalid(self, req): + """Should return None for invalid or missing trace_id""" + assert get_external_trace_id(req) is None + + @pytest.mark.parametrize( + ("args", "expected"), + [ + ({"external_trace_id": "abc123"}, {"external_trace_id": "abc123"}), + ({"other": "value"}, {}), + ({}, {}), + ], + ) + def test_extract_external_trace_id_from_args(self, args, expected): + """Test extraction of external_trace_id from args mapping""" + assert extract_external_trace_id_from_args(args) == expected diff --git a/api/tests/unit_tests/factories/test_build_from_mapping.py b/api/tests/unit_tests/factories/test_build_from_mapping.py index 48463a369e..d42c4412f5 100644 --- a/api/tests/unit_tests/factories/test_build_from_mapping.py +++ b/api/tests/unit_tests/factories/test_build_from_mapping.py @@ -54,8 +54,7 @@ def mock_tool_file(): mock.mimetype = "application/pdf" mock.original_url = "http://example.com/tool.pdf" mock.size = 2048 - with patch("factories.file_factory.db.session.query") as mock_query: - mock_query.return_value.filter.return_value.first.return_value = mock + with patch("factories.file_factory.db.session.scalar", return_value=mock): yield mock @@ -153,8 +152,7 @@ def test_build_from_remote_url(mock_http_head): def test_tool_file_not_found(): """Test ToolFile not found in database.""" - with patch("factories.file_factory.db.session.query") as mock_query: - 
mock_query.return_value.filter.return_value.first.return_value = None + with patch("factories.file_factory.db.session.scalar", return_value=None): mapping = tool_file_mapping() with pytest.raises(ValueError, match=f"ToolFile {TEST_TOOL_FILE_ID} not found"): build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID) diff --git a/api/tests/unit_tests/libs/test_email_i18n.py b/api/tests/unit_tests/libs/test_email_i18n.py new file mode 100644 index 0000000000..aeb30438e0 --- /dev/null +++ b/api/tests/unit_tests/libs/test_email_i18n.py @@ -0,0 +1,539 @@ +""" +Unit tests for EmailI18nService + +Tests the email internationalization service with mocked dependencies +following Domain-Driven Design principles. +""" + +from typing import Any +from unittest.mock import MagicMock + +import pytest + +from libs.email_i18n import ( + EmailI18nConfig, + EmailI18nService, + EmailLanguage, + EmailTemplate, + EmailType, + FlaskEmailRenderer, + FlaskMailSender, + create_default_email_config, + get_email_i18n_service, +) +from services.feature_service import BrandingModel + + +class MockEmailRenderer: + """Mock implementation of EmailRenderer protocol""" + + def __init__(self) -> None: + self.rendered_templates: list[tuple[str, dict[str, Any]]] = [] + + def render_template(self, template_path: str, **context: Any) -> str: + """Mock render_template that returns a formatted string""" + self.rendered_templates.append((template_path, context)) + return f"Rendered {template_path} with {context}" + + +class MockBrandingService: + """Mock implementation of BrandingService protocol""" + + def __init__(self, enabled: bool = False, application_title: str = "Dify") -> None: + self.enabled = enabled + self.application_title = application_title + + def get_branding_config(self) -> BrandingModel: + """Return mock branding configuration""" + branding_model = MagicMock(spec=BrandingModel) + branding_model.enabled = self.enabled + branding_model.application_title = self.application_title + return 
branding_model + + +class MockEmailSender: + """Mock implementation of EmailSender protocol""" + + def __init__(self) -> None: + self.sent_emails: list[dict[str, str]] = [] + + def send_email(self, to: str, subject: str, html_content: str) -> None: + """Mock send_email that records sent emails""" + self.sent_emails.append( + { + "to": to, + "subject": subject, + "html_content": html_content, + } + ) + + +class TestEmailI18nService: + """Test cases for EmailI18nService""" + + @pytest.fixture + def email_config(self) -> EmailI18nConfig: + """Create test email configuration""" + return EmailI18nConfig( + templates={ + EmailType.RESET_PASSWORD: { + EmailLanguage.EN_US: EmailTemplate( + subject="Reset Your {application_title} Password", + template_path="reset_password_en.html", + branded_template_path="branded/reset_password_en.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="重置您的 {application_title} 密码", + template_path="reset_password_zh.html", + branded_template_path="branded/reset_password_zh.html", + ), + }, + EmailType.INVITE_MEMBER: { + EmailLanguage.EN_US: EmailTemplate( + subject="Join {application_title} Workspace", + template_path="invite_member_en.html", + branded_template_path="branded/invite_member_en.html", + ), + }, + } + ) + + @pytest.fixture + def mock_renderer(self) -> MockEmailRenderer: + """Create mock email renderer""" + return MockEmailRenderer() + + @pytest.fixture + def mock_branding_service(self) -> MockBrandingService: + """Create mock branding service""" + return MockBrandingService() + + @pytest.fixture + def mock_sender(self) -> MockEmailSender: + """Create mock email sender""" + return MockEmailSender() + + @pytest.fixture + def email_service( + self, + email_config: EmailI18nConfig, + mock_renderer: MockEmailRenderer, + mock_branding_service: MockBrandingService, + mock_sender: MockEmailSender, + ) -> EmailI18nService: + """Create EmailI18nService with mocked dependencies""" + return EmailI18nService( + config=email_config, 
+ renderer=mock_renderer, + branding_service=mock_branding_service, + sender=mock_sender, + ) + + def test_send_email_with_english_language( + self, + email_service: EmailI18nService, + mock_renderer: MockEmailRenderer, + mock_sender: MockEmailSender, + ) -> None: + """Test sending email with English language""" + email_service.send_email( + email_type=EmailType.RESET_PASSWORD, + language_code="en-US", + to="test@example.com", + template_context={"reset_link": "https://example.com/reset"}, + ) + + # Verify renderer was called with correct template + assert len(mock_renderer.rendered_templates) == 1 + template_path, context = mock_renderer.rendered_templates[0] + assert template_path == "reset_password_en.html" + assert context["reset_link"] == "https://example.com/reset" + assert context["branding_enabled"] is False + assert context["application_title"] == "Dify" + + # Verify email was sent + assert len(mock_sender.sent_emails) == 1 + sent_email = mock_sender.sent_emails[0] + assert sent_email["to"] == "test@example.com" + assert sent_email["subject"] == "Reset Your Dify Password" + assert "reset_password_en.html" in sent_email["html_content"] + + def test_send_email_with_chinese_language( + self, + email_service: EmailI18nService, + mock_sender: MockEmailSender, + ) -> None: + """Test sending email with Chinese language""" + email_service.send_email( + email_type=EmailType.RESET_PASSWORD, + language_code="zh-Hans", + to="test@example.com", + template_context={"reset_link": "https://example.com/reset"}, + ) + + # Verify email was sent with Chinese subject + assert len(mock_sender.sent_emails) == 1 + sent_email = mock_sender.sent_emails[0] + assert sent_email["subject"] == "重置您的 Dify 密码" + + def test_send_email_with_branding_enabled( + self, + email_config: EmailI18nConfig, + mock_renderer: MockEmailRenderer, + mock_sender: MockEmailSender, + ) -> None: + """Test sending email with branding enabled""" + # Create branding service with branding enabled + 
branding_service = MockBrandingService(enabled=True, application_title="MyApp") + + email_service = EmailI18nService( + config=email_config, + renderer=mock_renderer, + branding_service=branding_service, + sender=mock_sender, + ) + + email_service.send_email( + email_type=EmailType.RESET_PASSWORD, + language_code="en-US", + to="test@example.com", + ) + + # Verify branded template was used + assert len(mock_renderer.rendered_templates) == 1 + template_path, context = mock_renderer.rendered_templates[0] + assert template_path == "branded/reset_password_en.html" + assert context["branding_enabled"] is True + assert context["application_title"] == "MyApp" + + # Verify subject includes custom application title + assert len(mock_sender.sent_emails) == 1 + sent_email = mock_sender.sent_emails[0] + assert sent_email["subject"] == "Reset Your MyApp Password" + + def test_send_email_with_language_fallback( + self, + email_service: EmailI18nService, + mock_sender: MockEmailSender, + ) -> None: + """Test language fallback to English when requested language not available""" + # Request invite member in Chinese (not configured) + email_service.send_email( + email_type=EmailType.INVITE_MEMBER, + language_code="zh-Hans", + to="test@example.com", + ) + + # Should fall back to English + assert len(mock_sender.sent_emails) == 1 + sent_email = mock_sender.sent_emails[0] + assert sent_email["subject"] == "Join Dify Workspace" + + def test_send_email_with_unknown_language_code( + self, + email_service: EmailI18nService, + mock_sender: MockEmailSender, + ) -> None: + """Test unknown language code falls back to English""" + email_service.send_email( + email_type=EmailType.RESET_PASSWORD, + language_code="fr-FR", # French not configured + to="test@example.com", + ) + + # Should use English + assert len(mock_sender.sent_emails) == 1 + sent_email = mock_sender.sent_emails[0] + assert sent_email["subject"] == "Reset Your Dify Password" + + def test_send_change_email_old_phase( + self, + 
email_config: EmailI18nConfig, + mock_renderer: MockEmailRenderer, + mock_sender: MockEmailSender, + mock_branding_service: MockBrandingService, + ) -> None: + """Test sending change email for old email verification""" + # Add change email templates to config + email_config.templates[EmailType.CHANGE_EMAIL_OLD] = { + EmailLanguage.EN_US: EmailTemplate( + subject="Verify your current email", + template_path="change_email_old_en.html", + branded_template_path="branded/change_email_old_en.html", + ), + } + + email_service = EmailI18nService( + config=email_config, + renderer=mock_renderer, + branding_service=mock_branding_service, + sender=mock_sender, + ) + + email_service.send_change_email( + language_code="en-US", + to="old@example.com", + code="123456", + phase="old_email", + ) + + # Verify correct template and context + assert len(mock_renderer.rendered_templates) == 1 + template_path, context = mock_renderer.rendered_templates[0] + assert template_path == "change_email_old_en.html" + assert context["to"] == "old@example.com" + assert context["code"] == "123456" + + def test_send_change_email_new_phase( + self, + email_config: EmailI18nConfig, + mock_renderer: MockEmailRenderer, + mock_sender: MockEmailSender, + mock_branding_service: MockBrandingService, + ) -> None: + """Test sending change email for new email verification""" + # Add change email templates to config + email_config.templates[EmailType.CHANGE_EMAIL_NEW] = { + EmailLanguage.EN_US: EmailTemplate( + subject="Verify your new email", + template_path="change_email_new_en.html", + branded_template_path="branded/change_email_new_en.html", + ), + } + + email_service = EmailI18nService( + config=email_config, + renderer=mock_renderer, + branding_service=mock_branding_service, + sender=mock_sender, + ) + + email_service.send_change_email( + language_code="en-US", + to="new@example.com", + code="654321", + phase="new_email", + ) + + # Verify correct template and context + assert 
len(mock_renderer.rendered_templates) == 1 + template_path, context = mock_renderer.rendered_templates[0] + assert template_path == "change_email_new_en.html" + assert context["to"] == "new@example.com" + assert context["code"] == "654321" + + def test_send_change_email_invalid_phase( + self, + email_service: EmailI18nService, + ) -> None: + """Test sending change email with invalid phase raises error""" + with pytest.raises(ValueError, match="Invalid phase: invalid_phase"): + email_service.send_change_email( + language_code="en-US", + to="test@example.com", + code="123456", + phase="invalid_phase", + ) + + def test_send_raw_email_single_recipient( + self, + email_service: EmailI18nService, + mock_sender: MockEmailSender, + ) -> None: + """Test sending raw email to single recipient""" + email_service.send_raw_email( + to="test@example.com", + subject="Test Subject", + html_content="Test Content", + ) + + assert len(mock_sender.sent_emails) == 1 + sent_email = mock_sender.sent_emails[0] + assert sent_email["to"] == "test@example.com" + assert sent_email["subject"] == "Test Subject" + assert sent_email["html_content"] == "Test Content" + + def test_send_raw_email_multiple_recipients( + self, + email_service: EmailI18nService, + mock_sender: MockEmailSender, + ) -> None: + """Test sending raw email to multiple recipients""" + recipients = ["user1@example.com", "user2@example.com", "user3@example.com"] + + email_service.send_raw_email( + to=recipients, + subject="Test Subject", + html_content="Test Content", + ) + + # Should send individual emails to each recipient + assert len(mock_sender.sent_emails) == 3 + for i, recipient in enumerate(recipients): + sent_email = mock_sender.sent_emails[i] + assert sent_email["to"] == recipient + assert sent_email["subject"] == "Test Subject" + assert sent_email["html_content"] == "Test Content" + + def test_get_template_missing_email_type( + self, + email_config: EmailI18nConfig, + ) -> None: + """Test getting template for missing 
email type raises error""" + with pytest.raises(ValueError, match="No templates configured for email type"): + email_config.get_template(EmailType.EMAIL_CODE_LOGIN, EmailLanguage.EN_US) + + def test_get_template_missing_language_and_english( + self, + email_config: EmailI18nConfig, + ) -> None: + """Test error when neither requested language nor English fallback exists""" + # Add template without English fallback + email_config.templates[EmailType.EMAIL_CODE_LOGIN] = { + EmailLanguage.ZH_HANS: EmailTemplate( + subject="Test", + template_path="test.html", + branded_template_path="branded/test.html", + ), + } + + with pytest.raises(ValueError, match="No template found for"): + # Request a language that doesn't exist and no English fallback + email_config.get_template(EmailType.EMAIL_CODE_LOGIN, EmailLanguage.EN_US) + + def test_subject_templating_with_variables( + self, + email_config: EmailI18nConfig, + mock_renderer: MockEmailRenderer, + mock_sender: MockEmailSender, + mock_branding_service: MockBrandingService, + ) -> None: + """Test subject templating with custom variables""" + # Add template with variable in subject + email_config.templates[EmailType.OWNER_TRANSFER_NEW_NOTIFY] = { + EmailLanguage.EN_US: EmailTemplate( + subject="You are now the owner of {WorkspaceName}", + template_path="owner_transfer_en.html", + branded_template_path="branded/owner_transfer_en.html", + ), + } + + email_service = EmailI18nService( + config=email_config, + renderer=mock_renderer, + branding_service=mock_branding_service, + sender=mock_sender, + ) + + email_service.send_email( + email_type=EmailType.OWNER_TRANSFER_NEW_NOTIFY, + language_code="en-US", + to="test@example.com", + template_context={"WorkspaceName": "My Workspace"}, + ) + + # Verify subject was templated correctly + assert len(mock_sender.sent_emails) == 1 + sent_email = mock_sender.sent_emails[0] + assert sent_email["subject"] == "You are now the owner of My Workspace" + + def 
test_email_language_from_language_code(self) -> None: + """Test EmailLanguage.from_language_code method""" + assert EmailLanguage.from_language_code("zh-Hans") == EmailLanguage.ZH_HANS + assert EmailLanguage.from_language_code("en-US") == EmailLanguage.EN_US + assert EmailLanguage.from_language_code("fr-FR") == EmailLanguage.EN_US # Fallback + assert EmailLanguage.from_language_code("unknown") == EmailLanguage.EN_US # Fallback + + +class TestEmailI18nIntegration: + """Integration tests for email i18n components""" + + def test_create_default_email_config(self) -> None: + """Test creating default email configuration""" + config = create_default_email_config() + + # Verify key email types have at least English template + expected_types = [ + EmailType.RESET_PASSWORD, + EmailType.INVITE_MEMBER, + EmailType.EMAIL_CODE_LOGIN, + EmailType.CHANGE_EMAIL_OLD, + EmailType.CHANGE_EMAIL_NEW, + EmailType.OWNER_TRANSFER_CONFIRM, + EmailType.OWNER_TRANSFER_OLD_NOTIFY, + EmailType.OWNER_TRANSFER_NEW_NOTIFY, + EmailType.ACCOUNT_DELETION_SUCCESS, + EmailType.ACCOUNT_DELETION_VERIFICATION, + EmailType.QUEUE_MONITOR_ALERT, + EmailType.DOCUMENT_CLEAN_NOTIFY, + ] + + for email_type in expected_types: + assert email_type in config.templates + assert EmailLanguage.EN_US in config.templates[email_type] + + # Verify some have Chinese translations + assert EmailLanguage.ZH_HANS in config.templates[EmailType.RESET_PASSWORD] + assert EmailLanguage.ZH_HANS in config.templates[EmailType.INVITE_MEMBER] + + def test_get_email_i18n_service(self) -> None: + """Test getting global email i18n service instance""" + service1 = get_email_i18n_service() + service2 = get_email_i18n_service() + + # Should return the same instance + assert service1 is service2 + + def test_flask_email_renderer(self) -> None: + """Test FlaskEmailRenderer implementation""" + renderer = FlaskEmailRenderer() + + # Should raise TemplateNotFound when template doesn't exist + from jinja2.exceptions import TemplateNotFound + + with 
pytest.raises(TemplateNotFound): + renderer.render_template("test.html", foo="bar") + + def test_flask_mail_sender_not_initialized(self) -> None: + """Test FlaskMailSender when mail is not initialized""" + sender = FlaskMailSender() + + # Mock mail.is_inited() to return False + import libs.email_i18n + + original_mail = libs.email_i18n.mail + mock_mail = MagicMock() + mock_mail.is_inited.return_value = False + libs.email_i18n.mail = mock_mail + + try: + # Should not send email when mail is not initialized + sender.send_email("test@example.com", "Subject", "Content") + mock_mail.send.assert_not_called() + finally: + # Restore original mail + libs.email_i18n.mail = original_mail + + def test_flask_mail_sender_initialized(self) -> None: + """Test FlaskMailSender when mail is initialized""" + sender = FlaskMailSender() + + # Mock mail.is_inited() to return True + import libs.email_i18n + + original_mail = libs.email_i18n.mail + mock_mail = MagicMock() + mock_mail.is_inited.return_value = True + libs.email_i18n.mail = mock_mail + + try: + # Should send email when mail is initialized + sender.send_email("test@example.com", "Subject", "Content") + mock_mail.send.assert_called_once_with( + to="test@example.com", + subject="Subject", + html="Content", + ) + finally: + # Restore original mail + libs.email_i18n.mail = original_mail diff --git a/api/tests/unit_tests/models/test_types_enum_text.py b/api/tests/unit_tests/models/test_types_enum_text.py index 3afa0f17a0..e4061b72c7 100644 --- a/api/tests/unit_tests/models/test_types_enum_text.py +++ b/api/tests/unit_tests/models/test_types_enum_text.py @@ -6,7 +6,7 @@ import pytest import sqlalchemy as sa from sqlalchemy import exc as sa_exc from sqlalchemy import insert -from sqlalchemy.orm import DeclarativeBase, Mapped, Session +from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column from sqlalchemy.sql.sqltypes import VARCHAR from models.types import EnumText @@ -32,22 +32,26 @@ class 
_EnumWithLongValue(StrEnum): class _User(_Base): __tablename__ = "users" - id: Mapped[int] = sa.Column(sa.Integer, primary_key=True) - name: Mapped[str] = sa.Column(sa.String(length=255), nullable=False) - user_type: Mapped[_UserType] = sa.Column(EnumText(enum_class=_UserType), nullable=False, default=_UserType.normal) - user_type_nullable: Mapped[_UserType | None] = sa.Column(EnumText(enum_class=_UserType), nullable=True) + id: Mapped[int] = mapped_column(sa.Integer, primary_key=True) + name: Mapped[str] = mapped_column(sa.String(length=255), nullable=False) + user_type: Mapped[_UserType] = mapped_column( + EnumText(enum_class=_UserType), nullable=False, default=_UserType.normal + ) + user_type_nullable: Mapped[_UserType | None] = mapped_column(EnumText(enum_class=_UserType), nullable=True) class _ColumnTest(_Base): __tablename__ = "column_test" - id: Mapped[int] = sa.Column(sa.Integer, primary_key=True) + id: Mapped[int] = mapped_column(sa.Integer, primary_key=True) - user_type: Mapped[_UserType] = sa.Column(EnumText(enum_class=_UserType), nullable=False, default=_UserType.normal) - explicit_length: Mapped[_UserType | None] = sa.Column( + user_type: Mapped[_UserType] = mapped_column( + EnumText(enum_class=_UserType), nullable=False, default=_UserType.normal + ) + explicit_length: Mapped[_UserType | None] = mapped_column( EnumText(_UserType, length=50), nullable=True, default=_UserType.normal ) - long_value: Mapped[_EnumWithLongValue] = sa.Column(EnumText(enum_class=_EnumWithLongValue), nullable=False) + long_value: Mapped[_EnumWithLongValue] = mapped_column(EnumText(enum_class=_EnumWithLongValue), nullable=False) _T = TypeVar("_T") @@ -110,12 +114,12 @@ class TestEnumText: session.commit() with Session(engine) as session: - user = session.query(_User).filter(_User.id == admin_user_id).first() + user = session.query(_User).where(_User.id == admin_user_id).first() assert user.user_type == _UserType.admin assert user.user_type_nullable is None with Session(engine) 
as session: - user = session.query(_User).filter(_User.id == normal_user_id).first() + user = session.query(_User).where(_User.id == normal_user_id).first() assert user.user_type == _UserType.normal assert user.user_type_nullable == _UserType.normal @@ -184,4 +188,4 @@ class TestEnumText: with pytest.raises(ValueError) as exc: with Session(engine) as session: - _user = session.query(_User).filter(_User.id == 1).first() + _user = session.query(_User).where(_User.id == 1).first() diff --git a/api/tests/unit_tests/services/auth/test_api_key_auth_service.py b/api/tests/unit_tests/services/auth/test_api_key_auth_service.py index f0e425e742..dc42a04cf3 100644 --- a/api/tests/unit_tests/services/auth/test_api_key_auth_service.py +++ b/api/tests/unit_tests/services/auth/test_api_key_auth_service.py @@ -28,7 +28,7 @@ class TestApiKeyAuthService: mock_binding.provider = self.provider mock_binding.disabled = False - mock_session.query.return_value.filter.return_value.all.return_value = [mock_binding] + mock_session.query.return_value.where.return_value.all.return_value = [mock_binding] result = ApiKeyAuthService.get_provider_auth_list(self.tenant_id) @@ -39,7 +39,7 @@ class TestApiKeyAuthService: @patch("services.auth.api_key_auth_service.db.session") def test_get_provider_auth_list_empty(self, mock_session): """Test get provider auth list - empty result""" - mock_session.query.return_value.filter.return_value.all.return_value = [] + mock_session.query.return_value.where.return_value.all.return_value = [] result = ApiKeyAuthService.get_provider_auth_list(self.tenant_id) @@ -48,13 +48,13 @@ class TestApiKeyAuthService: @patch("services.auth.api_key_auth_service.db.session") def test_get_provider_auth_list_filters_disabled(self, mock_session): """Test get provider auth list - filters disabled items""" - mock_session.query.return_value.filter.return_value.all.return_value = [] + mock_session.query.return_value.where.return_value.all.return_value = [] 
ApiKeyAuthService.get_provider_auth_list(self.tenant_id)

-        # Verify filter conditions include disabled.is_(False)
-        filter_call = mock_session.query.return_value.filter.call_args[0]
-        assert len(filter_call) == 2  # tenant_id and disabled filter conditions
+        # Verify where conditions include disabled.is_(False)
+        where_call = mock_session.query.return_value.where.call_args[0]
+        assert len(where_call) == 2  # tenant_id and disabled filter conditions

     @patch("services.auth.api_key_auth_service.db.session")
     @patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
@@ -138,7 +138,7 @@ class TestApiKeyAuthService:
         # Mock database query result
         mock_binding = Mock()
         mock_binding.credentials = json.dumps(self.mock_credentials)
-        mock_session.query.return_value.filter.return_value.first.return_value = mock_binding
+        mock_session.query.return_value.where.return_value.first.return_value = mock_binding

         result = ApiKeyAuthService.get_auth_credentials(self.tenant_id, self.category, self.provider)

@@ -148,7 +148,7 @@ class TestApiKeyAuthService:
     @patch("services.auth.api_key_auth_service.db.session")
     def test_get_auth_credentials_not_found(self, mock_session):
         """Test get auth credentials - not found"""
-        mock_session.query.return_value.filter.return_value.first.return_value = None
+        mock_session.query.return_value.where.return_value.first.return_value = None

         result = ApiKeyAuthService.get_auth_credentials(self.tenant_id, self.category, self.provider)

@@ -157,13 +157,13 @@ class TestApiKeyAuthService:
     @patch("services.auth.api_key_auth_service.db.session")
     def test_get_auth_credentials_filters_correctly(self, mock_session):
         """Test get auth credentials - applies correct filters"""
-        mock_session.query.return_value.filter.return_value.first.return_value = None
+        mock_session.query.return_value.where.return_value.first.return_value = None

         ApiKeyAuthService.get_auth_credentials(self.tenant_id,
self.category, self.provider) - # Verify filter conditions are correct - filter_call = mock_session.query.return_value.filter.call_args[0] - assert len(filter_call) == 4 # tenant_id, category, provider, disabled + # Verify where conditions are correct + where_call = mock_session.query.return_value.where.call_args[0] + assert len(where_call) == 4 # tenant_id, category, provider, disabled @patch("services.auth.api_key_auth_service.db.session") def test_get_auth_credentials_json_parsing(self, mock_session): @@ -173,7 +174,7 @@ class TestApiKeyAuthService: mock_binding = Mock() mock_binding.credentials = json.dumps(special_credentials, ensure_ascii=False) - mock_session.query.return_value.filter.return_value.first.return_value = mock_binding + mock_session.query.return_value.where.return_value.first.return_value = mock_binding result = ApiKeyAuthService.get_auth_credentials(self.tenant_id, self.category, self.provider) @@ -185,7 +186,7 @@ class TestApiKeyAuthService: """Test delete provider auth - success scenario""" # Mock database query result mock_binding = Mock() - mock_session.query.return_value.filter.return_value.first.return_value = mock_binding + mock_session.query.return_value.where.return_value.first.return_value = mock_binding ApiKeyAuthService.delete_provider_auth(self.tenant_id, self.binding_id) @@ -196,7 +197,7 @@ class TestApiKeyAuthService: @patch("services.auth.api_key_auth_service.db.session") def test_delete_provider_auth_not_found(self, mock_session): """Test delete provider auth - not found""" - mock_session.query.return_value.filter.return_value.first.return_value = None + mock_session.query.return_value.where.return_value.first.return_value = None ApiKeyAuthService.delete_provider_auth(self.tenant_id, self.binding_id) @@ -207,13 +208,13 @@ class TestApiKeyAuthService: @patch("services.auth.api_key_auth_service.db.session") def test_delete_provider_auth_filters_by_tenant(self, mock_session): """Test delete provider auth - filters by tenant""" - 
mock_session.query.return_value.filter.return_value.first.return_value = None + mock_session.query.return_value.where.return_value.first.return_value = None ApiKeyAuthService.delete_provider_auth(self.tenant_id, self.binding_id) - # Verify filter conditions include tenant_id and binding_id - filter_call = mock_session.query.return_value.filter.call_args[0] - assert len(filter_call) == 2 + # Verify where conditions include tenant_id and binding_id + where_call = mock_session.query.return_value.where.call_args[0] + assert len(where_call) == 2 def test_validate_api_key_auth_args_success(self): """Test API key auth args validation - success scenario""" @@ -336,7 +337,7 @@ class TestApiKeyAuthService: # Mock database returning invalid JSON mock_binding = Mock() mock_binding.credentials = "invalid json content" - mock_session.query.return_value.filter.return_value.first.return_value = mock_binding + mock_session.query.return_value.where.return_value.first.return_value = mock_binding with pytest.raises(json.JSONDecodeError): ApiKeyAuthService.get_auth_credentials(self.tenant_id, self.category, self.provider) diff --git a/api/tests/unit_tests/services/auth/test_auth_integration.py b/api/tests/unit_tests/services/auth/test_auth_integration.py new file mode 100644 index 0000000000..4ce5525942 --- /dev/null +++ b/api/tests/unit_tests/services/auth/test_auth_integration.py @@ -0,0 +1,234 @@ +""" +API Key Authentication System Integration Tests +""" + +import json +from concurrent.futures import ThreadPoolExecutor +from unittest.mock import Mock, patch + +import pytest +import requests + +from services.auth.api_key_auth_factory import ApiKeyAuthFactory +from services.auth.api_key_auth_service import ApiKeyAuthService +from services.auth.auth_type import AuthType + + +class TestAuthIntegration: + def setup_method(self): + self.tenant_id_1 = "tenant_123" + self.tenant_id_2 = "tenant_456" # For multi-tenant isolation testing + self.category = "search" + + # Realistic 
authentication configurations + self.firecrawl_credentials = {"auth_type": "bearer", "config": {"api_key": "fc_test_key_123"}} + self.jina_credentials = {"auth_type": "bearer", "config": {"api_key": "jina_test_key_456"}} + self.watercrawl_credentials = {"auth_type": "x-api-key", "config": {"api_key": "wc_test_key_789"}} + + @patch("services.auth.api_key_auth_service.db.session") + @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.api_key_auth_service.encrypter.encrypt_token") + def test_end_to_end_auth_flow(self, mock_encrypt, mock_http, mock_session): + """Test complete authentication flow: request → validation → encryption → storage""" + mock_http.return_value = self._create_success_response() + mock_encrypt.return_value = "encrypted_fc_test_key_123" + mock_session.add = Mock() + mock_session.commit = Mock() + + args = {"category": self.category, "provider": AuthType.FIRECRAWL, "credentials": self.firecrawl_credentials} + ApiKeyAuthService.create_provider_auth(self.tenant_id_1, args) + + mock_http.assert_called_once() + call_args = mock_http.call_args + assert "https://api.firecrawl.dev/v1/crawl" in call_args[0][0] + assert call_args[1]["headers"]["Authorization"] == "Bearer fc_test_key_123" + + mock_encrypt.assert_called_once_with(self.tenant_id_1, "fc_test_key_123") + mock_session.add.assert_called_once() + mock_session.commit.assert_called_once() + + @patch("services.auth.firecrawl.firecrawl.requests.post") + def test_cross_component_integration(self, mock_http): + """Test factory → provider → HTTP call integration""" + mock_http.return_value = self._create_success_response() + factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, self.firecrawl_credentials) + result = factory.validate_credentials() + + assert result is True + mock_http.assert_called_once() + + @patch("services.auth.api_key_auth_service.db.session") + def test_multi_tenant_isolation(self, mock_session): + """Ensure complete tenant data isolation""" + tenant1_binding = 
self._create_mock_binding(self.tenant_id_1, AuthType.FIRECRAWL, self.firecrawl_credentials) + tenant2_binding = self._create_mock_binding(self.tenant_id_2, AuthType.JINA, self.jina_credentials) + + mock_session.query.return_value.where.return_value.all.return_value = [tenant1_binding] + result1 = ApiKeyAuthService.get_provider_auth_list(self.tenant_id_1) + + mock_session.query.return_value.where.return_value.all.return_value = [tenant2_binding] + result2 = ApiKeyAuthService.get_provider_auth_list(self.tenant_id_2) + + assert len(result1) == 1 + assert result1[0].tenant_id == self.tenant_id_1 + assert len(result2) == 1 + assert result2[0].tenant_id == self.tenant_id_2 + + @patch("services.auth.api_key_auth_service.db.session") + def test_cross_tenant_access_prevention(self, mock_session): + """Test prevention of cross-tenant credential access""" + mock_session.query.return_value.where.return_value.first.return_value = None + + result = ApiKeyAuthService.get_auth_credentials(self.tenant_id_2, self.category, AuthType.FIRECRAWL) + + assert result is None + + def test_sensitive_data_protection(self): + """Ensure API keys don't leak to logs""" + credentials_with_secrets = { + "auth_type": "bearer", + "config": {"api_key": "super_secret_key_do_not_log", "secret": "another_secret"}, + } + + factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, credentials_with_secrets) + factory_str = str(factory) + + assert "super_secret_key_do_not_log" not in factory_str + assert "another_secret" not in factory_str + + @patch("services.auth.api_key_auth_service.db.session") + @patch("services.auth.firecrawl.firecrawl.requests.post") + @patch("services.auth.api_key_auth_service.encrypter.encrypt_token") + def test_concurrent_creation_safety(self, mock_encrypt, mock_http, mock_session): + """Test concurrent authentication creation safety""" + mock_http.return_value = self._create_success_response() + mock_encrypt.return_value = "encrypted_key" + mock_session.add = Mock() + mock_session.commit = 
Mock() + + args = {"category": self.category, "provider": AuthType.FIRECRAWL, "credentials": self.firecrawl_credentials} + + results = [] + exceptions = [] + + def create_auth(): + try: + ApiKeyAuthService.create_provider_auth(self.tenant_id_1, args) + results.append("success") + except Exception as e: + exceptions.append(e) + + with ThreadPoolExecutor(max_workers=5) as executor: + futures = [executor.submit(create_auth) for _ in range(5)] + for future in futures: + future.result() + + assert len(results) == 5 + assert len(exceptions) == 0 + assert mock_session.add.call_count == 5 + assert mock_session.commit.call_count == 5 + + @pytest.mark.parametrize( + "invalid_input", + [ + None, # Null input + {}, # Empty dictionary - missing required fields + {"auth_type": "bearer"}, # Missing config section + {"auth_type": "bearer", "config": {}}, # Missing api_key + ], + ) + def test_invalid_input_boundary(self, invalid_input): + """Test boundary handling for invalid inputs""" + with pytest.raises((ValueError, KeyError, TypeError, AttributeError)): + ApiKeyAuthFactory(AuthType.FIRECRAWL, invalid_input) + + @patch("services.auth.firecrawl.firecrawl.requests.post") + def test_http_error_handling(self, mock_http): + """Test proper HTTP error handling""" + mock_response = Mock() + mock_response.status_code = 401 + mock_response.text = '{"error": "Unauthorized"}' + mock_response.raise_for_status.side_effect = requests.exceptions.HTTPError("Unauthorized") + mock_http.return_value = mock_response + + # PT012: Split into single statement for pytest.raises + factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, self.firecrawl_credentials) + with pytest.raises((requests.exceptions.HTTPError, Exception)): + factory.validate_credentials() + + @patch("services.auth.api_key_auth_service.db.session") + @patch("services.auth.firecrawl.firecrawl.requests.post") + def test_network_failure_recovery(self, mock_http, mock_session): + """Test system recovery from network failures""" + 
mock_http.side_effect = requests.exceptions.RequestException("Network timeout") + mock_session.add = Mock() + mock_session.commit = Mock() + + args = {"category": self.category, "provider": AuthType.FIRECRAWL, "credentials": self.firecrawl_credentials} + + with pytest.raises(requests.exceptions.RequestException): + ApiKeyAuthService.create_provider_auth(self.tenant_id_1, args) + + mock_session.commit.assert_not_called() + + @pytest.mark.parametrize( + ("provider", "credentials"), + [ + (AuthType.FIRECRAWL, {"auth_type": "bearer", "config": {"api_key": "fc_key"}}), + (AuthType.JINA, {"auth_type": "bearer", "config": {"api_key": "jina_key"}}), + (AuthType.WATERCRAWL, {"auth_type": "x-api-key", "config": {"api_key": "wc_key"}}), + ], + ) + def test_all_providers_factory_creation(self, provider, credentials): + """Test factory creation for all supported providers""" + try: + auth_class = ApiKeyAuthFactory.get_apikey_auth_factory(provider) + assert auth_class is not None + + factory = ApiKeyAuthFactory(provider, credentials) + assert factory.auth is not None + except ImportError: + pytest.skip(f"Provider {provider} not implemented yet") + + def _create_success_response(self, status_code=200): + """Create successful HTTP response mock""" + mock_response = Mock() + mock_response.status_code = status_code + mock_response.json.return_value = {"status": "success"} + mock_response.raise_for_status.return_value = None + return mock_response + + def _create_mock_binding(self, tenant_id: str, provider: str, credentials: dict) -> Mock: + """Create realistic database binding mock""" + mock_binding = Mock() + mock_binding.id = f"binding_{provider}_{tenant_id}" + mock_binding.tenant_id = tenant_id + mock_binding.category = self.category + mock_binding.provider = provider + mock_binding.credentials = json.dumps(credentials, ensure_ascii=False) + mock_binding.disabled = False + + mock_binding.created_at = Mock() + mock_binding.created_at.timestamp.return_value = 1640995200 + 
mock_binding.updated_at = Mock() + mock_binding.updated_at.timestamp.return_value = 1640995200 + + return mock_binding + + def test_integration_coverage_validation(self): + """Validate integration test coverage meets quality standards""" + core_scenarios = { + "business_logic": ["end_to_end_auth_flow", "cross_component_integration"], + "security": ["multi_tenant_isolation", "cross_tenant_access_prevention", "sensitive_data_protection"], + "reliability": ["concurrent_creation_safety", "network_failure_recovery"], + "compatibility": ["all_providers_factory_creation"], + "boundaries": ["invalid_input_boundary", "http_error_handling"], + } + + total_scenarios = sum(len(scenarios) for scenarios in core_scenarios.values()) + assert total_scenarios >= 10 + + security_tests = core_scenarios["security"] + assert "multi_tenant_isolation" in security_tests + assert "sensitive_data_protection" in security_tests + assert True diff --git a/api/tests/unit_tests/services/test_account_service.py b/api/tests/unit_tests/services/test_account_service.py index 13900ab6d1..442839e44e 100644 --- a/api/tests/unit_tests/services/test_account_service.py +++ b/api/tests/unit_tests/services/test_account_service.py @@ -708,9 +708,9 @@ class TestTenantService: with patch("services.account_service.db") as mock_db: # Mock the join query that returns the tenant_account_join mock_query = MagicMock() - mock_filter = MagicMock() - mock_filter.first.return_value = mock_tenant_join - mock_query.filter.return_value = mock_filter + mock_where = MagicMock() + mock_where.first.return_value = mock_tenant_join + mock_query.where.return_value = mock_where mock_query.join.return_value = mock_query mock_db.session.query.return_value = mock_query @@ -1381,10 +1381,10 @@ class TestRegisterService: # Mock database queries - complex query mocking mock_query1 = MagicMock() - mock_query1.filter.return_value.first.return_value = mock_tenant + mock_query1.where.return_value.first.return_value = mock_tenant mock_query2 
= MagicMock() - mock_query2.join.return_value.filter.return_value.first.return_value = (mock_account, "normal") + mock_query2.join.return_value.where.return_value.first.return_value = (mock_account, "normal") mock_db_dependencies["db"].session.query.side_effect = [mock_query1, mock_query2] @@ -1449,7 +1449,7 @@ class TestRegisterService: mock_query1.filter.return_value.first.return_value = mock_tenant mock_query2 = MagicMock() - mock_query2.join.return_value.filter.return_value.first.return_value = None # No account found + mock_query2.join.return_value.where.return_value.first.return_value = None # No account found mock_db_dependencies["db"].session.query.side_effect = [mock_query1, mock_query2] @@ -1482,7 +1482,7 @@ class TestRegisterService: mock_query1.filter.return_value.first.return_value = mock_tenant mock_query2 = MagicMock() - mock_query2.join.return_value.filter.return_value.first.return_value = (mock_account, "normal") + mock_query2.join.return_value.where.return_value.first.return_value = (mock_account, "normal") mock_db_dependencies["db"].session.query.side_effect = [mock_query1, mock_query2] diff --git a/api/tests/unit_tests/services/workflow/test_workflow_deletion.py b/api/tests/unit_tests/services/workflow/test_workflow_deletion.py index 2c87eaf805..dfe325648d 100644 --- a/api/tests/unit_tests/services/workflow/test_workflow_deletion.py +++ b/api/tests/unit_tests/services/workflow/test_workflow_deletion.py @@ -43,7 +43,7 @@ def test_delete_workflow_success(workflow_setup): # Setup mocks # Mock the tool provider query to return None (not published as a tool) - workflow_setup["session"].query.return_value.filter.return_value.first.return_value = None + workflow_setup["session"].query.return_value.where.return_value.first.return_value = None workflow_setup["session"].scalar = MagicMock( side_effect=[workflow_setup["workflow"], None] @@ -106,7 +106,7 @@ def test_delete_workflow_published_as_tool_error(workflow_setup): # Mock the tool provider query 
mock_tool_provider = MagicMock(spec=WorkflowToolProvider) - workflow_setup["session"].query.return_value.filter.return_value.first.return_value = mock_tool_provider + workflow_setup["session"].query.return_value.where.return_value.first.return_value = mock_tool_provider workflow_setup["session"].scalar = MagicMock( side_effect=[workflow_setup["workflow"], None] diff --git a/api/tests/unit_tests/utils/position_helper/test_position_helper.py b/api/tests/unit_tests/utils/position_helper/test_position_helper.py index 29558a93c2..dbd8f05098 100644 --- a/api/tests/unit_tests/utils/position_helper/test_position_helper.py +++ b/api/tests/unit_tests/utils/position_helper/test_position_helper.py @@ -95,7 +95,7 @@ def test_included_position_data(prepare_example_positions_yaml): position_map = get_position_map(folder_path=prepare_example_positions_yaml, file_name="example_positions.yaml") pin_list = ["forth", "first"] include_set = {"forth", "first"} - exclude_set = {} + exclude_set = set() position_map = pin_position_map(original_position_map=position_map, pin_list=pin_list) diff --git a/api/uv.lock b/api/uv.lock index 21b6b20f53..623b125ab3 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1217,7 +1217,7 @@ wheels = [ [[package]] name = "dify-api" -version = "1.6.0" +version = "1.7.0" source = { virtual = "." 
} dependencies = [ { name = "arize-phoenix-otel" }, diff --git a/docker/.env.example b/docker/.env.example index 6149f63165..88cc544730 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -1168,3 +1168,13 @@ QUEUE_MONITOR_THRESHOLD=200 QUEUE_MONITOR_ALERT_EMAILS= # Monitor interval in minutes, default is 30 minutes QUEUE_MONITOR_INTERVAL=30 + +# Celery schedule tasks configuration +ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false +ENABLE_CLEAN_UNUSED_DATASETS_TASK=false +ENABLE_CREATE_TIDB_SERVERLESS_TASK=false +ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false +ENABLE_CLEAN_MESSAGES=false +ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false +ENABLE_DATASETS_QUEUE_MONITOR=false +ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 7c1544acb9..394a068200 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.6.0 + image: langgenius/dify-api:1.7.0 restart: always environment: # Use the shared environment variables. @@ -31,7 +31,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.6.0 + image: langgenius/dify-api:1.7.0 restart: always environment: # Use the shared environment variables. @@ -55,9 +55,28 @@ services: - ssrf_proxy_network - default + # worker_beat service + # Celery beat for scheduling periodic tasks. + worker_beat: + image: langgenius/dify-api:1.7.0 + restart: always + environment: + # Use the shared environment variables. + <<: *shared-api-worker-env + # Startup mode, 'worker_beat' starts the Celery beat for scheduling periodic tasks. + MODE: beat + depends_on: + db: + condition: service_healthy + redis: + condition: service_started + networks: + - ssrf_proxy_network + - default + # Frontend web application. 
web: - image: langgenius/dify-web:1.6.0 + image: langgenius/dify-web:1.7.0 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -143,7 +162,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.1.3-local + image: langgenius/dify-plugin-daemon:0.2.0-local restart: always environment: # Use the shared environment variables. diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 1271d6d464..c2ef2ff723 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -527,11 +527,19 @@ x-shared-env: &shared-api-worker-env QUEUE_MONITOR_THRESHOLD: ${QUEUE_MONITOR_THRESHOLD:-200} QUEUE_MONITOR_ALERT_EMAILS: ${QUEUE_MONITOR_ALERT_EMAILS:-} QUEUE_MONITOR_INTERVAL: ${QUEUE_MONITOR_INTERVAL:-30} + ENABLE_CLEAN_EMBEDDING_CACHE_TASK: ${ENABLE_CLEAN_EMBEDDING_CACHE_TASK:-false} + ENABLE_CLEAN_UNUSED_DATASETS_TASK: ${ENABLE_CLEAN_UNUSED_DATASETS_TASK:-false} + ENABLE_CREATE_TIDB_SERVERLESS_TASK: ${ENABLE_CREATE_TIDB_SERVERLESS_TASK:-false} + ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK: ${ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK:-false} + ENABLE_CLEAN_MESSAGES: ${ENABLE_CLEAN_MESSAGES:-false} + ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: ${ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK:-false} + ENABLE_DATASETS_QUEUE_MONITOR: ${ENABLE_DATASETS_QUEUE_MONITOR:-false} + ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: ${ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK:-true} services: # API service api: - image: langgenius/dify-api:1.6.0 + image: langgenius/dify-api:1.7.0 restart: always environment: # Use the shared environment variables. @@ -560,7 +568,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.6.0 + image: langgenius/dify-api:1.7.0 restart: always environment: # Use the shared environment variables. @@ -584,9 +592,28 @@ services: - ssrf_proxy_network - default + # worker_beat service + # Celery beat for scheduling periodic tasks. 
+ worker_beat: + image: langgenius/dify-api:1.7.0 + restart: always + environment: + # Use the shared environment variables. + <<: *shared-api-worker-env + # Startup mode, 'worker_beat' starts the Celery beat for scheduling periodic tasks. + MODE: beat + depends_on: + db: + condition: service_healthy + redis: + condition: service_started + networks: + - ssrf_proxy_network + - default + # Frontend web application. web: - image: langgenius/dify-web:1.6.0 + image: langgenius/dify-web:1.7.0 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -672,7 +699,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.1.3-local + image: langgenius/dify-plugin-daemon:0.2.0-local restart: always environment: # Use the shared environment variables. diff --git a/tests/unit_tests/events/test_provider_update_deadlock_prevention.py b/tests/unit_tests/events/test_provider_update_deadlock_prevention.py deleted file mode 100644 index 47c175acd7..0000000000 --- a/tests/unit_tests/events/test_provider_update_deadlock_prevention.py +++ /dev/null @@ -1,248 +0,0 @@ -import threading -from unittest.mock import Mock, patch - -from core.app.entities.app_invoke_entities import ChatAppGenerateEntity -from core.entities.provider_entities import QuotaUnit -from events.event_handlers.update_provider_when_message_created import ( - handle, - get_update_stats, -) -from models.provider import ProviderType -from sqlalchemy.exc import OperationalError - - -class TestProviderUpdateDeadlockPrevention: - """Test suite for deadlock prevention in Provider updates.""" - - def setup_method(self): - """Setup test fixtures.""" - self.mock_message = Mock() - self.mock_message.answer_tokens = 100 - - self.mock_app_config = Mock() - self.mock_app_config.tenant_id = "test-tenant-123" - - self.mock_model_conf = Mock() - self.mock_model_conf.provider = "openai" - - self.mock_system_config = Mock() - self.mock_system_config.current_quota_type = QuotaUnit.TOKENS - - 
self.mock_provider_config = Mock() - self.mock_provider_config.using_provider_type = ProviderType.SYSTEM - self.mock_provider_config.system_configuration = self.mock_system_config - - self.mock_provider_bundle = Mock() - self.mock_provider_bundle.configuration = self.mock_provider_config - - self.mock_model_conf.provider_model_bundle = self.mock_provider_bundle - - self.mock_generate_entity = Mock(spec=ChatAppGenerateEntity) - self.mock_generate_entity.app_config = self.mock_app_config - self.mock_generate_entity.model_conf = self.mock_model_conf - - @patch("events.event_handlers.update_provider_when_message_created.db") - def test_consolidated_handler_basic_functionality(self, mock_db): - """Test that the consolidated handler performs both updates correctly.""" - # Setup mock query chain - mock_query = Mock() - mock_db.session.query.return_value = mock_query - mock_query.filter.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.update.return_value = 1 # 1 row affected - - # Call the handler - handle(self.mock_message, application_generate_entity=self.mock_generate_entity) - - # Verify db.session.query was called - assert mock_db.session.query.called - - # Verify commit was called - mock_db.session.commit.assert_called_once() - - # Verify no rollback was called - assert not mock_db.session.rollback.called - - @patch("events.event_handlers.update_provider_when_message_created.db") - def test_deadlock_retry_mechanism(self, mock_db): - """Test that deadlock errors trigger retry logic.""" - # Setup mock to raise deadlock error on first attempt, succeed on second - mock_query = Mock() - mock_db.session.query.return_value = mock_query - mock_query.filter.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.update.return_value = 1 - - # First call raises deadlock, second succeeds - mock_db.session.commit.side_effect = [ - OperationalError("deadlock detected", None, None), - None, # Success on retry - ] 
- - # Call the handler - handle(self.mock_message, application_generate_entity=self.mock_generate_entity) - - # Verify commit was called twice (original + retry) - assert mock_db.session.commit.call_count == 2 - - # Verify rollback was called once (after first failure) - mock_db.session.rollback.assert_called_once() - - @patch("events.event_handlers.update_provider_when_message_created.db") - @patch("events.event_handlers.update_provider_when_message_created.time.sleep") - def test_exponential_backoff_timing(self, mock_sleep, mock_db): - """Test that retry delays follow exponential backoff pattern.""" - # Setup mock to fail twice, succeed on third attempt - mock_query = Mock() - mock_db.session.query.return_value = mock_query - mock_query.filter.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.update.return_value = 1 - - mock_db.session.commit.side_effect = [ - OperationalError("deadlock detected", None, None), - OperationalError("deadlock detected", None, None), - None, # Success on third attempt - ] - - # Call the handler - handle(self.mock_message, application_generate_entity=self.mock_generate_entity) - - # Verify sleep was called twice with increasing delays - assert mock_sleep.call_count == 2 - - # First delay should be around 0.1s + jitter - first_delay = mock_sleep.call_args_list[0][0][0] - assert 0.1 <= first_delay <= 0.3 - - # Second delay should be around 0.2s + jitter - second_delay = mock_sleep.call_args_list[1][0][0] - assert 0.2 <= second_delay <= 0.4 - - def test_concurrent_handler_execution(self): - """Test that multiple handlers can run concurrently without deadlock.""" - results = [] - errors = [] - - def run_handler(): - try: - with patch( - "events.event_handlers.update_provider_when_message_created.db" - ) as mock_db: - mock_query = Mock() - mock_db.session.query.return_value = mock_query - mock_query.filter.return_value = mock_query - mock_query.order_by.return_value = mock_query - 
mock_query.update.return_value = 1 - - handle( - self.mock_message, - application_generate_entity=self.mock_generate_entity, - ) - results.append("success") - except Exception as e: - errors.append(str(e)) - - # Run multiple handlers concurrently - threads = [] - for _ in range(5): - thread = threading.Thread(target=run_handler) - threads.append(thread) - thread.start() - - # Wait for all threads to complete - for thread in threads: - thread.join(timeout=5) - - # Verify all handlers completed successfully - assert len(results) == 5 - assert len(errors) == 0 - - def test_performance_stats_tracking(self): - """Test that performance statistics are tracked correctly.""" - # Reset stats - stats = get_update_stats() - initial_total = stats["total_updates"] - - with patch( - "events.event_handlers.update_provider_when_message_created.db" - ) as mock_db: - mock_query = Mock() - mock_db.session.query.return_value = mock_query - mock_query.filter.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.update.return_value = 1 - - # Call handler - handle( - self.mock_message, application_generate_entity=self.mock_generate_entity - ) - - # Check that stats were updated - updated_stats = get_update_stats() - assert updated_stats["total_updates"] == initial_total + 1 - assert updated_stats["successful_updates"] >= initial_total + 1 - - def test_non_chat_entity_ignored(self): - """Test that non-chat entities are ignored by the handler.""" - # Create a non-chat entity - mock_non_chat_entity = Mock() - mock_non_chat_entity.__class__.__name__ = "NonChatEntity" - - with patch( - "events.event_handlers.update_provider_when_message_created.db" - ) as mock_db: - # Call handler with non-chat entity - handle(self.mock_message, application_generate_entity=mock_non_chat_entity) - - # Verify no database operations were performed - assert not mock_db.session.query.called - assert not mock_db.session.commit.called - - 
@patch("events.event_handlers.update_provider_when_message_created.db") - def test_quota_calculation_tokens(self, mock_db): - """Test quota calculation for token-based quotas.""" - # Setup token-based quota - self.mock_system_config.current_quota_type = QuotaUnit.TOKENS - self.mock_message.answer_tokens = 150 - - mock_query = Mock() - mock_db.session.query.return_value = mock_query - mock_query.filter.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.update.return_value = 1 - - # Call handler - handle(self.mock_message, application_generate_entity=self.mock_generate_entity) - - # Verify update was called with token count - update_calls = mock_query.update.call_args_list - - # Should have at least one call with quota_used update - quota_update_found = False - for call in update_calls: - values = call[0][0] # First argument to update() - if "quota_used" in values: - quota_update_found = True - break - - assert quota_update_found - - @patch("events.event_handlers.update_provider_when_message_created.db") - def test_quota_calculation_times(self, mock_db): - """Test quota calculation for times-based quotas.""" - # Setup times-based quota - self.mock_system_config.current_quota_type = QuotaUnit.TIMES - - mock_query = Mock() - mock_db.session.query.return_value = mock_query - mock_query.filter.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.update.return_value = 1 - - # Call handler - handle(self.mock_message, application_generate_entity=self.mock_generate_entity) - - # Verify update was called - assert mock_query.update.called - assert mock_db.session.commit.called diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx index 2afe451fe1..907c270017 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx +++ 
b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx @@ -1,5 +1,5 @@ 'use client' -import type { FC } from 'react' +import type { FC, JSX } from 'react' import React, { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx index 8bf18904be..d082523222 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx @@ -83,27 +83,50 @@ const Panel: FC = () => { const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig || aliyunConfig) const fetchTracingConfig = async () => { - const { tracing_config: arizeConfig, has_not_configured: arizeHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.arize }) - if (!arizeHasNotConfig) - setArizeConfig(arizeConfig as ArizeConfig) - const { tracing_config: phoenixConfig, has_not_configured: phoenixHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.phoenix }) - if (!phoenixHasNotConfig) - setPhoenixConfig(phoenixConfig as PhoenixConfig) - const { tracing_config: langSmithConfig, has_not_configured: langSmithHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.langSmith }) - if (!langSmithHasNotConfig) - setLangSmithConfig(langSmithConfig as LangSmithConfig) - const { tracing_config: langFuseConfig, has_not_configured: langFuseHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.langfuse }) - if (!langFuseHasNotConfig) - setLangFuseConfig(langFuseConfig as LangFuseConfig) - const { tracing_config: opikConfig, has_not_configured: OpikHasNotConfig } = await 
doFetchTracingConfig({ appId, provider: TracingProvider.opik }) - if (!OpikHasNotConfig) - setOpikConfig(opikConfig as OpikConfig) - const { tracing_config: weaveConfig, has_not_configured: weaveHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.weave }) - if (!weaveHasNotConfig) - setWeaveConfig(weaveConfig as WeaveConfig) - const { tracing_config: aliyunConfig, has_not_configured: aliyunHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.aliyun }) - if (!aliyunHasNotConfig) - setAliyunConfig(aliyunConfig as AliyunConfig) + const getArizeConfig = async () => { + const { tracing_config: arizeConfig, has_not_configured: arizeHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.arize }) + if (!arizeHasNotConfig) + setArizeConfig(arizeConfig as ArizeConfig) + } + const getPhoenixConfig = async () => { + const { tracing_config: phoenixConfig, has_not_configured: phoenixHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.phoenix }) + if (!phoenixHasNotConfig) + setPhoenixConfig(phoenixConfig as PhoenixConfig) + } + const getLangSmithConfig = async () => { + const { tracing_config: langSmithConfig, has_not_configured: langSmithHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.langSmith }) + if (!langSmithHasNotConfig) + setLangSmithConfig(langSmithConfig as LangSmithConfig) + } + const getLangFuseConfig = async () => { + const { tracing_config: langFuseConfig, has_not_configured: langFuseHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.langfuse }) + if (!langFuseHasNotConfig) + setLangFuseConfig(langFuseConfig as LangFuseConfig) + } + const getOpikConfig = async () => { + const { tracing_config: opikConfig, has_not_configured: OpikHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.opik }) + if (!OpikHasNotConfig) + setOpikConfig(opikConfig as OpikConfig) + } + const 
getWeaveConfig = async () => { + const { tracing_config: weaveConfig, has_not_configured: weaveHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.weave }) + if (!weaveHasNotConfig) + setWeaveConfig(weaveConfig as WeaveConfig) + } + const getAliyunConfig = async () => { + const { tracing_config: aliyunConfig, has_not_configured: aliyunHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.aliyun }) + if (!aliyunHasNotConfig) + setAliyunConfig(aliyunConfig as AliyunConfig) + } + Promise.all([ + getArizeConfig(), + getPhoenixConfig(), + getLangSmithConfig(), + getLangFuseConfig(), + getOpikConfig(), + getWeaveConfig(), + getAliyunConfig(), + ]) } const handleTracingConfigUpdated = async (provider: TracingProvider) => { @@ -155,7 +178,6 @@ const Panel: FC = () => { await fetchTracingConfig() setLoaded() })() - // eslint-disable-next-line react-hooks/exhaustive-deps }, []) const [controlShowPopup, setControlShowPopup] = useState(0) diff --git a/web/app/components/app-sidebar/app-operations.tsx b/web/app/components/app-sidebar/app-operations.tsx index 49cad71573..79c460419d 100644 --- a/web/app/components/app-sidebar/app-operations.tsx +++ b/web/app/components/app-sidebar/app-operations.tsx @@ -1,4 +1,4 @@ -import type { ReactElement } from 'react' +import type { JSX } from 'react' import { cloneElement, useCallback } from 'react' import { useEffect, useRef, useState } from 'react' import { useTranslation } from 'react-i18next' @@ -7,7 +7,7 @@ import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigge import { RiMoreLine } from '@remixicon/react' export type Operation = { - id: string; title: string; icon: ReactElement; onClick: () => void + id: string; title: string; icon: JSX.Element; onClick: () => void } const AppOperations = ({ operations, gap }: { @@ -47,7 +47,7 @@ const AppOperations = ({ operations, gap }: { updatedEntries[id] = true width += gap + childWidth } - else { + else { if (i === 
childrens.length - 1 && width + childWidth <= containerWidth) updatedEntries[id] = true else diff --git a/web/app/components/app/app-publisher/index.tsx b/web/app/components/app/app-publisher/index.tsx index c7a4117e42..53cceb8020 100644 --- a/web/app/components/app/app-publisher/index.tsx +++ b/web/app/components/app/app-publisher/index.tsx @@ -5,8 +5,6 @@ import { useState, } from 'react' import { useTranslation } from 'react-i18next' -import dayjs from 'dayjs' -import relativeTime from 'dayjs/plugin/relativeTime' import { RiArrowDownSLine, RiArrowRightSLine, @@ -39,7 +37,6 @@ import { basePath } from '@/utils/var' import { fetchInstalledAppList } from '@/service/explore' import EmbeddedModal from '@/app/components/app/overview/embedded' import { useStore as useAppStore } from '@/app/components/app/store' -import { useGetLanguage } from '@/context/i18n' import { CodeBrowser } from '@/app/components/base/icons/src/vender/line/development' import WorkflowToolConfigureButton from '@/app/components/tools/workflow-tool/configure-button' import type { InputVar } from '@/app/components/workflow/types' @@ -49,7 +46,7 @@ import { useAppWhiteListSubjects, useGetUserCanAccessApp } from '@/service/acces import { AccessMode } from '@/models/access-control' import { fetchAppDetail } from '@/service/apps' import { useGlobalPublicStore } from '@/context/global-public-context' -dayjs.extend(relativeTime) +import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now' export type AppPublisherProps = { disabled?: boolean @@ -92,6 +89,7 @@ const AppPublisher = ({ const appDetail = useAppStore(state => state.appDetail) const setAppDetail = useAppStore(s => s.setAppDetail) const systemFeatures = useGlobalPublicStore(s => s.systemFeatures) + const { formatTimeFromNow } = useFormatTimeFromNow() const { app_base_url: appBaseURL = '', access_token: accessToken = '' } = appDetail?.site ?? {} const appMode = (appDetail?.mode !== 'completion' && appDetail?.mode !== 'workflow') ? 
'chat' : appDetail.mode const appURL = `${appBaseURL}${basePath}/${appMode}/${accessToken}` @@ -117,11 +115,6 @@ const AppPublisher = ({ setIsAppAccessSet(true) } }, [appAccessSubjects, appDetail]) - const language = useGetLanguage() - - const formatTimeFromNow = useCallback((time: number) => { - return dayjs(time).locale(language === 'zh_Hans' ? 'zh-cn' : language.replace('_', '-')).fromNow() - }, [language]) const handlePublish = useCallback(async (params?: ModelAndParameter | PublishWorkflowParams) => { try { diff --git a/web/app/components/base/chat/chat-with-history/chat-wrapper.tsx b/web/app/components/base/chat/chat-with-history/chat-wrapper.tsx index cbe4826c14..f3768e80c0 100644 --- a/web/app/components/base/chat/chat-with-history/chat-wrapper.tsx +++ b/web/app/components/base/chat/chat-with-history/chat-wrapper.tsx @@ -23,6 +23,7 @@ import SuggestedQuestions from '@/app/components/base/chat/chat/answer/suggested import { Markdown } from '@/app/components/base/markdown' import cn from '@/utils/classnames' import type { FileEntity } from '../../file-uploader/types' +import Avatar from '../../avatar' const ChatWrapper = () => { const { @@ -48,6 +49,7 @@ const ChatWrapper = () => { setClearChatList, setIsResponding, allInputsHidden, + initUserVariables, } = useChatWithHistoryContext() const appConfig = useMemo(() => { const config = appParams || {} @@ -119,7 +121,6 @@ const ChatWrapper = () => { useEffect(() => { if (currentChatInstanceRef.current) currentChatInstanceRef.current.handleStop = handleStop - // eslint-disable-next-line react-hooks/exhaustive-deps }, []) useEffect(() => { @@ -238,7 +239,7 @@ const ChatWrapper = () => { className='h-full overflow-hidden bg-chatbot-bg' > { inputDisabled={inputDisabled} isMobile={isMobile} sidebarCollapseState={sidebarCollapseState} + questionIcon={ + initUserVariables?.avatar_url + ? 
: undefined + } /> ) diff --git a/web/app/components/base/chat/chat-with-history/context.tsx b/web/app/components/base/chat/chat-with-history/context.tsx index 3a5dc793d6..03a0399137 100644 --- a/web/app/components/base/chat/chat-with-history/context.tsx +++ b/web/app/components/base/chat/chat-with-history/context.tsx @@ -56,6 +56,10 @@ export type ChatWithHistoryContextValue = { currentConversationInputs: Record | null, setCurrentConversationInputs: (v: Record) => void, allInputsHidden: boolean, + initUserVariables?: { + name?: string + avatar_url?: string + } } export const ChatWithHistoryContext = createContext({ @@ -90,5 +94,6 @@ export const ChatWithHistoryContext = createContext currentConversationInputs: {}, setCurrentConversationInputs: noop, allInputsHidden: false, + initUserVariables: {}, }) export const useChatWithHistoryContext = () => useContext(ChatWithHistoryContext) diff --git a/web/app/components/base/chat/chat-with-history/hooks.tsx b/web/app/components/base/chat/chat-with-history/hooks.tsx index be935a70ba..248c161e6c 100644 --- a/web/app/components/base/chat/chat-with-history/hooks.tsx +++ b/web/app/components/base/chat/chat-with-history/hooks.tsx @@ -16,7 +16,7 @@ import type { Feedback, } from '../types' import { CONVERSATION_ID_INFO } from '../constants' -import { buildChatItemTree, getProcessedSystemVariablesFromUrlParams, getRawInputsFromUrlParams } from '../utils' +import { buildChatItemTree, getProcessedSystemVariablesFromUrlParams, getRawInputsFromUrlParams, getRawUserVariablesFromUrlParams } from '../utils' import { addFileInfos, sortAgentSorts } from '../../../tools/utils' import { getProcessedFilesFromResponse } from '@/app/components/base/file-uploader/utils' import { @@ -181,6 +181,7 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => { const newConversationInputsRef = useRef>({}) const [newConversationInputs, setNewConversationInputs] = useState>({}) const [initInputs, setInitInputs] = useState>({}) + const 
[initUserVariables, setInitUserVariables] = useState>({}) const handleNewConversationInputsChange = useCallback((newInputs: Record) => { newConversationInputsRef.current = newInputs setNewConversationInputs(newInputs) @@ -249,7 +250,9 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => { // init inputs from url params (async () => { const inputs = await getRawInputsFromUrlParams() + const userVariables = await getRawUserVariablesFromUrlParams() setInitInputs(inputs) + setInitUserVariables(userVariables) })() }, []) @@ -520,5 +523,6 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => { currentConversationInputs, setCurrentConversationInputs, allInputsHidden, + initUserVariables, } } diff --git a/web/app/components/base/chat/chat-with-history/index.tsx b/web/app/components/base/chat/chat-with-history/index.tsx index cceb21b295..cfde517a61 100644 --- a/web/app/components/base/chat/chat-with-history/index.tsx +++ b/web/app/components/base/chat/chat-with-history/index.tsx @@ -142,6 +142,7 @@ const ChatWithHistoryWrap: FC = ({ currentConversationInputs, setCurrentConversationInputs, allInputsHidden, + initUserVariables, } = useChatWithHistory(installedAppInfo) return ( @@ -184,6 +185,7 @@ const ChatWithHistoryWrap: FC = ({ currentConversationInputs, setCurrentConversationInputs, allInputsHidden, + initUserVariables, }}> diff --git a/web/app/components/base/chat/utils.ts b/web/app/components/base/chat/utils.ts index fb7ac93a4b..1c478747c5 100644 --- a/web/app/components/base/chat/utils.ts +++ b/web/app/components/base/chat/utils.ts @@ -20,7 +20,8 @@ async function getRawInputsFromUrlParams(): Promise> { const inputs: Record = {} const entriesArray = Array.from(urlParams.entries()) entriesArray.forEach(([key, value]) => { - if (!key.startsWith('sys.')) + const prefixArray = ['sys.', 'user.'] + if (!prefixArray.some(prefix => key.startsWith(prefix))) inputs[key] = decodeURIComponent(value) }) return inputs @@ -66,6 +67,17 @@ async 
function getProcessedUserVariablesFromUrlParams(): Promise> { + const urlParams = new URLSearchParams(window.location.search) + const userVariables: Record = {} + const entriesArray = Array.from(urlParams.entries()) + entriesArray.forEach(([key, value]) => { + if (key.startsWith('user.')) + userVariables[key.slice(5)] = decodeURIComponent(value) + }) + return userVariables +} + function isValidGeneratedAnswer(item?: ChatItem | ChatItemInTree): boolean { return !!item && item.isAnswer && !item.id.startsWith('answer-placeholder-') && !item.isOpeningStatement } @@ -213,6 +225,7 @@ export { getProcessedInputsFromUrlParams, getProcessedSystemVariablesFromUrlParams, getProcessedUserVariablesFromUrlParams, + getRawUserVariablesFromUrlParams, isValidGeneratedAnswer, getLastAnswer, buildChatItemTree, diff --git a/web/app/components/base/date-and-time-picker/common/option-list-item.tsx b/web/app/components/base/date-and-time-picker/common/option-list-item.tsx index d11a6e9e86..0144a7c6ec 100644 --- a/web/app/components/base/date-and-time-picker/common/option-list-item.tsx +++ b/web/app/components/base/date-and-time-picker/common/option-list-item.tsx @@ -4,17 +4,19 @@ import cn from '@/utils/classnames' type OptionListItemProps = { isSelected: boolean onClick: () => void + noAutoScroll?: boolean } & React.LiHTMLAttributes const OptionListItem: FC = ({ isSelected, onClick, + noAutoScroll, children, }) => { const listItemRef = useRef(null) useEffect(() => { - if (isSelected) + if (isSelected && !noAutoScroll) listItemRef.current?.scrollIntoView({ behavior: 'instant' }) }, []) diff --git a/web/app/components/base/date-and-time-picker/time-picker/header.tsx b/web/app/components/base/date-and-time-picker/time-picker/header.tsx index 3d85b2ea40..dc6b56f744 100644 --- a/web/app/components/base/date-and-time-picker/time-picker/header.tsx +++ b/web/app/components/base/date-and-time-picker/time-picker/header.tsx @@ -1,13 +1,18 @@ import React from 'react' import { useTranslation } from 
'react-i18next' -const Header = () => { +type Props = { + title?: string +} +const Header = ({ + title, +}: Props) => { const { t } = useTranslation() return (
- {t('time.title.pickTime')} + {title || t('time.title.pickTime')}
) diff --git a/web/app/components/base/date-and-time-picker/time-picker/index.tsx b/web/app/components/base/date-and-time-picker/time-picker/index.tsx index a5e666d631..8ef10abc2e 100644 --- a/web/app/components/base/date-and-time-picker/time-picker/index.tsx +++ b/web/app/components/base/date-and-time-picker/time-picker/index.tsx @@ -20,6 +20,9 @@ const TimePicker = ({ onChange, onClear, renderTrigger, + title, + minuteFilter, + popupClassName, }: TimePickerProps) => { const { t } = useTranslation() const [isOpen, setIsOpen] = useState(false) @@ -49,7 +52,6 @@ const TimePicker = ({ else { setSelectedTime(prev => prev ? getDateWithTimezone({ date: prev, timezone }) : undefined) } - // eslint-disable-next-line react-hooks/exhaustive-deps }, [timezone]) const handleClickTrigger = (e: React.MouseEvent) => { @@ -108,6 +110,15 @@ const TimePicker = ({ const displayValue = value?.format(timeFormat) || '' const placeholderDate = isOpen && selectedTime ? selectedTime.format(timeFormat) : (placeholder || t('time.defaultPlaceholder')) + const inputElem = ( + + ) return ( - {renderTrigger ? (renderTrigger()) : ( + {renderTrigger ? (renderTrigger({ + inputElem, + onClick: handleClickTrigger, + isOpen, + })) : (
- + {inputElem} )} - +
{/* Header */} -
+
{/* Time Options */} = ({ selectedTime, + minuteFilter, handleSelectHour, handleSelectMinute, handleSelectPeriod, @@ -33,7 +34,7 @@ const Options: FC = ({ {/* Minute */}
    { - minuteOptions.map((minute) => { + (minuteFilter ? minuteFilter(minuteOptions) : minuteOptions).map((minute) => { const isSelected = selectedTime?.format('mm') === minute return ( = ({ key={period} isSelected={isSelected} onClick={handleSelectPeriod.bind(null, period)} + noAutoScroll // if choose PM which would hide(scrolled) AM that may make user confused that there's no am. > {period} diff --git a/web/app/components/base/date-and-time-picker/types.ts b/web/app/components/base/date-and-time-picker/types.ts index 214c0f011b..4ac01c142a 100644 --- a/web/app/components/base/date-and-time-picker/types.ts +++ b/web/app/components/base/date-and-time-picker/types.ts @@ -28,6 +28,7 @@ export type DatePickerProps = { onClear: () => void triggerWrapClassName?: string renderTrigger?: (props: TriggerProps) => React.ReactNode + minuteFilter?: (minutes: string[]) => string[] popupZIndexClassname?: string } @@ -47,13 +48,21 @@ export type DatePickerFooterProps = { handleConfirmDate: () => void } +export type TriggerParams = { + isOpen: boolean + inputElem: React.ReactNode + onClick: (e: React.MouseEvent) => void +} export type TimePickerProps = { value: Dayjs | undefined timezone?: string placeholder?: string onChange: (date: Dayjs | undefined) => void onClear: () => void - renderTrigger?: () => React.ReactNode + renderTrigger?: (props: TriggerParams) => React.ReactNode + title?: string + minuteFilter?: (minutes: string[]) => string[] + popupClassName?: string } export type TimePickerFooterProps = { @@ -81,6 +90,7 @@ export type CalendarItemProps = { export type TimeOptionsProps = { selectedTime: Dayjs | undefined + minuteFilter?: (minutes: string[]) => string[] handleSelectHour: (hour: string) => void handleSelectMinute: (minute: string) => void handleSelectPeriod: (period: Period) => void diff --git a/web/app/components/base/date-and-time-picker/utils/dayjs.ts b/web/app/components/base/date-and-time-picker/utils/dayjs.ts index 0928fa5d58..cdc3924194 100644 --- 
a/web/app/components/base/date-and-time-picker/utils/dayjs.ts +++ b/web/app/components/base/date-and-time-picker/utils/dayjs.ts @@ -2,6 +2,7 @@ import dayjs, { type Dayjs } from 'dayjs' import type { Day } from '../types' import utc from 'dayjs/plugin/utc' import timezone from 'dayjs/plugin/timezone' +import tz from '@/utils/timezone.json' dayjs.extend(utc) dayjs.extend(timezone) @@ -78,3 +79,14 @@ export const getHourIn12Hour = (date: Dayjs) => { export const getDateWithTimezone = (props: { date?: Dayjs, timezone?: string }) => { return props.date ? dayjs.tz(props.date, props.timezone) : dayjs().tz(props.timezone) } + +// Asia/Shanghai -> UTC+8 +const DEFAULT_OFFSET_STR = 'UTC+0' +export const convertTimezoneToOffsetStr = (timezone?: string) => { + if (!timezone) + return DEFAULT_OFFSET_STR + const tzItem = tz.find(item => item.value === timezone) + if(!tzItem) + return DEFAULT_OFFSET_STR + return `UTC${tzItem.name.charAt(0)}${tzItem.name.charAt(2)}` +} diff --git a/web/app/components/base/icons/assets/vender/line/general/search-menu.svg b/web/app/components/base/icons/assets/vender/line/general/search-menu.svg new file mode 100644 index 0000000000..f61f69f4ba --- /dev/null +++ b/web/app/components/base/icons/assets/vender/line/general/search-menu.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/web/app/components/base/icons/assets/vender/system/auto-update-line.svg b/web/app/components/base/icons/assets/vender/system/auto-update-line.svg new file mode 100644 index 0000000000..c6bff78400 --- /dev/null +++ b/web/app/components/base/icons/assets/vender/system/auto-update-line.svg @@ -0,0 +1,4 @@ + + + + diff --git a/web/app/components/base/icons/script.mjs b/web/app/components/base/icons/script.mjs index 7f9d7b73a0..1b5994edef 100644 --- a/web/app/components/base/icons/script.mjs +++ b/web/app/components/base/icons/script.mjs @@ -75,7 +75,7 @@ Icon.displayName = '<%= svgName %>' export default Icon `.trim()) - await writeFile(path.resolve(currentPath, 
`${fileName}.json`), JSON.stringify(svgData, '', '\t')) + await writeFile(path.resolve(currentPath, `${fileName}.json`), `${JSON.stringify(svgData, '', '\t')}\n`) await writeFile(path.resolve(currentPath, `${fileName}.tsx`), `${componentRender({ svgName: fileName })}\n`) const indexingRender = template(` diff --git a/web/app/components/base/icons/src/public/tracing/AliyunIcon.json b/web/app/components/base/icons/src/public/tracing/AliyunIcon.json index ef71df24c0..9a0b89f20a 100644 --- a/web/app/components/base/icons/src/public/tracing/AliyunIcon.json +++ b/web/app/components/base/icons/src/public/tracing/AliyunIcon.json @@ -128,4 +128,4 @@ ] }, "name": "AliyunIcon" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/public/tracing/AliyunIconBig.json b/web/app/components/base/icons/src/public/tracing/AliyunIconBig.json index 177f349f2b..c8093ba660 100644 --- a/web/app/components/base/icons/src/public/tracing/AliyunIconBig.json +++ b/web/app/components/base/icons/src/public/tracing/AliyunIconBig.json @@ -114,4 +114,4 @@ ] }, "name": "AliyunIconBig" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/features/Citations.json b/web/app/components/base/icons/src/vender/features/Citations.json index 1b0b6250de..24a77db601 100644 --- a/web/app/components/base/icons/src/vender/features/Citations.json +++ b/web/app/components/base/icons/src/vender/features/Citations.json @@ -23,4 +23,4 @@ ] }, "name": "Citations" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/features/ContentModeration.json b/web/app/components/base/icons/src/vender/features/ContentModeration.json index 4f5c47acd2..fc609e0434 100644 --- a/web/app/components/base/icons/src/vender/features/ContentModeration.json +++ b/web/app/components/base/icons/src/vender/features/ContentModeration.json @@ -25,4 +25,4 @@ ] }, "name": "ContentModeration" -} \ No newline at end of file +} diff --git 
a/web/app/components/base/icons/src/vender/features/Document.json b/web/app/components/base/icons/src/vender/features/Document.json index fdd08d5254..f0638eecf7 100644 --- a/web/app/components/base/icons/src/vender/features/Document.json +++ b/web/app/components/base/icons/src/vender/features/Document.json @@ -20,4 +20,4 @@ ] }, "name": "Document" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/features/FolderUpload.json b/web/app/components/base/icons/src/vender/features/FolderUpload.json index 2180127e3d..c113da043b 100644 --- a/web/app/components/base/icons/src/vender/features/FolderUpload.json +++ b/web/app/components/base/icons/src/vender/features/FolderUpload.json @@ -23,4 +23,4 @@ ] }, "name": "FolderUpload" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/features/LoveMessage.json b/web/app/components/base/icons/src/vender/features/LoveMessage.json index 7dbc062662..4874b94944 100644 --- a/web/app/components/base/icons/src/vender/features/LoveMessage.json +++ b/web/app/components/base/icons/src/vender/features/LoveMessage.json @@ -23,4 +23,4 @@ ] }, "name": "LoveMessage" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/features/MessageFast.json b/web/app/components/base/icons/src/vender/features/MessageFast.json index 4580398f31..b859b1f3f0 100644 --- a/web/app/components/base/icons/src/vender/features/MessageFast.json +++ b/web/app/components/base/icons/src/vender/features/MessageFast.json @@ -25,4 +25,4 @@ ] }, "name": "MessageFast" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/features/Microphone01.json b/web/app/components/base/icons/src/vender/features/Microphone01.json index a4ba1bc23f..57545716cf 100644 --- a/web/app/components/base/icons/src/vender/features/Microphone01.json +++ b/web/app/components/base/icons/src/vender/features/Microphone01.json @@ -34,4 +34,4 @@ ] }, "name": "Microphone01" 
-} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/features/TextToAudio.json b/web/app/components/base/icons/src/vender/features/TextToAudio.json index 1d824f72cc..4369e0b6f1 100644 --- a/web/app/components/base/icons/src/vender/features/TextToAudio.json +++ b/web/app/components/base/icons/src/vender/features/TextToAudio.json @@ -74,4 +74,4 @@ ] }, "name": "TextToAudio" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/features/VirtualAssistant.json b/web/app/components/base/icons/src/vender/features/VirtualAssistant.json index b426eb4b0b..3cbeba0ea5 100644 --- a/web/app/components/base/icons/src/vender/features/VirtualAssistant.json +++ b/web/app/components/base/icons/src/vender/features/VirtualAssistant.json @@ -32,4 +32,4 @@ ] }, "name": "VirtualAssistant" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/features/Vision.json b/web/app/components/base/icons/src/vender/features/Vision.json index e9b5b4df85..6d60e32a29 100644 --- a/web/app/components/base/icons/src/vender/features/Vision.json +++ b/web/app/components/base/icons/src/vender/features/Vision.json @@ -25,4 +25,4 @@ ] }, "name": "Vision" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/alertsAndFeedback/AlertTriangle.json b/web/app/components/base/icons/src/vender/line/alertsAndFeedback/AlertTriangle.json index a200e6035e..057ecbdfc2 100644 --- a/web/app/components/base/icons/src/vender/line/alertsAndFeedback/AlertTriangle.json +++ b/web/app/components/base/icons/src/vender/line/alertsAndFeedback/AlertTriangle.json @@ -36,4 +36,4 @@ ] }, "name": "AlertTriangle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/alertsAndFeedback/ThumbsDown.json b/web/app/components/base/icons/src/vender/line/alertsAndFeedback/ThumbsDown.json index b9ccbef3ec..41877c74bd 100644 --- 
a/web/app/components/base/icons/src/vender/line/alertsAndFeedback/ThumbsDown.json +++ b/web/app/components/base/icons/src/vender/line/alertsAndFeedback/ThumbsDown.json @@ -63,4 +63,4 @@ ] }, "name": "ThumbsDown" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/alertsAndFeedback/ThumbsUp.json b/web/app/components/base/icons/src/vender/line/alertsAndFeedback/ThumbsUp.json index 674516b1c5..0ee442871f 100644 --- a/web/app/components/base/icons/src/vender/line/alertsAndFeedback/ThumbsUp.json +++ b/web/app/components/base/icons/src/vender/line/alertsAndFeedback/ThumbsUp.json @@ -63,4 +63,4 @@ ] }, "name": "ThumbsUp" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/arrows/ArrowNarrowLeft.json b/web/app/components/base/icons/src/vender/line/arrows/ArrowNarrowLeft.json index 73d6708c51..e3f7b5c674 100644 --- a/web/app/components/base/icons/src/vender/line/arrows/ArrowNarrowLeft.json +++ b/web/app/components/base/icons/src/vender/line/arrows/ArrowNarrowLeft.json @@ -26,4 +26,4 @@ ] }, "name": "ArrowNarrowLeft" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/arrows/ArrowUpRight.json b/web/app/components/base/icons/src/vender/line/arrows/ArrowUpRight.json index 9ab1e6e0d0..621a37afdf 100644 --- a/web/app/components/base/icons/src/vender/line/arrows/ArrowUpRight.json +++ b/web/app/components/base/icons/src/vender/line/arrows/ArrowUpRight.json @@ -36,4 +36,4 @@ ] }, "name": "ArrowUpRight" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/arrows/ChevronDownDouble.json b/web/app/components/base/icons/src/vender/line/arrows/ChevronDownDouble.json index cfae43931c..706e13533c 100644 --- a/web/app/components/base/icons/src/vender/line/arrows/ChevronDownDouble.json +++ b/web/app/components/base/icons/src/vender/line/arrows/ChevronDownDouble.json @@ -36,4 +36,4 @@ ] }, "name": "ChevronDownDouble" -} \ No newline 
at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/arrows/ChevronRight.json b/web/app/components/base/icons/src/vender/line/arrows/ChevronRight.json index c144e678bb..e03a2c8dd0 100644 --- a/web/app/components/base/icons/src/vender/line/arrows/ChevronRight.json +++ b/web/app/components/base/icons/src/vender/line/arrows/ChevronRight.json @@ -36,4 +36,4 @@ ] }, "name": "ChevronRight" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/arrows/ChevronSelectorVertical.json b/web/app/components/base/icons/src/vender/line/arrows/ChevronSelectorVertical.json index 84da1f3dbb..67ff40298d 100644 --- a/web/app/components/base/icons/src/vender/line/arrows/ChevronSelectorVertical.json +++ b/web/app/components/base/icons/src/vender/line/arrows/ChevronSelectorVertical.json @@ -26,4 +26,4 @@ ] }, "name": "ChevronSelectorVertical" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/arrows/RefreshCcw01.json b/web/app/components/base/icons/src/vender/line/arrows/RefreshCcw01.json index 30033b41bd..8b3cb0d5a7 100644 --- a/web/app/components/base/icons/src/vender/line/arrows/RefreshCcw01.json +++ b/web/app/components/base/icons/src/vender/line/arrows/RefreshCcw01.json @@ -26,4 +26,4 @@ ] }, "name": "RefreshCcw01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/arrows/RefreshCw05.json b/web/app/components/base/icons/src/vender/line/arrows/RefreshCw05.json index 5468171fe0..1ba0cedfd3 100644 --- a/web/app/components/base/icons/src/vender/line/arrows/RefreshCw05.json +++ b/web/app/components/base/icons/src/vender/line/arrows/RefreshCw05.json @@ -26,4 +26,4 @@ ] }, "name": "RefreshCw05" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/arrows/ReverseLeft.json b/web/app/components/base/icons/src/vender/line/arrows/ReverseLeft.json index 48c6d1fbd6..b5173968c8 100644 --- 
a/web/app/components/base/icons/src/vender/line/arrows/ReverseLeft.json +++ b/web/app/components/base/icons/src/vender/line/arrows/ReverseLeft.json @@ -36,4 +36,4 @@ ] }, "name": "ReverseLeft" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/communication/AiText.json b/web/app/components/base/icons/src/vender/line/communication/AiText.json index 0f5ff57837..2473c64c22 100644 --- a/web/app/components/base/icons/src/vender/line/communication/AiText.json +++ b/web/app/components/base/icons/src/vender/line/communication/AiText.json @@ -36,4 +36,4 @@ ] }, "name": "AiText" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/communication/ChatBot.json b/web/app/components/base/icons/src/vender/line/communication/ChatBot.json index 69547f9353..0e7382d741 100644 --- a/web/app/components/base/icons/src/vender/line/communication/ChatBot.json +++ b/web/app/components/base/icons/src/vender/line/communication/ChatBot.json @@ -90,4 +90,4 @@ ] }, "name": "ChatBot" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/communication/ChatBotSlim.json b/web/app/components/base/icons/src/vender/line/communication/ChatBotSlim.json index 07f6cda56b..9be716acd6 100644 --- a/web/app/components/base/icons/src/vender/line/communication/ChatBotSlim.json +++ b/web/app/components/base/icons/src/vender/line/communication/ChatBotSlim.json @@ -65,4 +65,4 @@ ] }, "name": "ChatBotSlim" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/communication/CuteRobot.json b/web/app/components/base/icons/src/vender/line/communication/CuteRobot.json index 4ae74d2a77..35596bac9a 100644 --- a/web/app/components/base/icons/src/vender/line/communication/CuteRobot.json +++ b/web/app/components/base/icons/src/vender/line/communication/CuteRobot.json @@ -36,4 +36,4 @@ ] }, "name": "CuteRobot" -} \ No newline at end of file +} diff --git 
a/web/app/components/base/icons/src/vender/line/communication/MessageCheckRemove.json b/web/app/components/base/icons/src/vender/line/communication/MessageCheckRemove.json index a536c9f341..5d4202b2e1 100644 --- a/web/app/components/base/icons/src/vender/line/communication/MessageCheckRemove.json +++ b/web/app/components/base/icons/src/vender/line/communication/MessageCheckRemove.json @@ -36,4 +36,4 @@ ] }, "name": "MessageCheckRemove" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/communication/MessageFastPlus.json b/web/app/components/base/icons/src/vender/line/communication/MessageFastPlus.json index 7d40cc7425..988e278325 100644 --- a/web/app/components/base/icons/src/vender/line/communication/MessageFastPlus.json +++ b/web/app/components/base/icons/src/vender/line/communication/MessageFastPlus.json @@ -26,4 +26,4 @@ ] }, "name": "MessageFastPlus" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/ArtificialBrain.json b/web/app/components/base/icons/src/vender/line/development/ArtificialBrain.json index 7015ee281a..8bc500ea9b 100644 --- a/web/app/components/base/icons/src/vender/line/development/ArtificialBrain.json +++ b/web/app/components/base/icons/src/vender/line/development/ArtificialBrain.json @@ -26,4 +26,4 @@ ] }, "name": "ArtificialBrain" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/BarChartSquare02.json b/web/app/components/base/icons/src/vender/line/development/BarChartSquare02.json index 5b695a7e79..ef51cbec7f 100644 --- a/web/app/components/base/icons/src/vender/line/development/BarChartSquare02.json +++ b/web/app/components/base/icons/src/vender/line/development/BarChartSquare02.json @@ -36,4 +36,4 @@ ] }, "name": "BarChartSquare02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/BracketsX.json 
b/web/app/components/base/icons/src/vender/line/development/BracketsX.json index 08935cc7ff..2287a51073 100644 --- a/web/app/components/base/icons/src/vender/line/development/BracketsX.json +++ b/web/app/components/base/icons/src/vender/line/development/BracketsX.json @@ -26,4 +26,4 @@ ] }, "name": "BracketsX" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/CodeBrowser.json b/web/app/components/base/icons/src/vender/line/development/CodeBrowser.json index 1d0254d846..7234b42cea 100644 --- a/web/app/components/base/icons/src/vender/line/development/CodeBrowser.json +++ b/web/app/components/base/icons/src/vender/line/development/CodeBrowser.json @@ -36,4 +36,4 @@ ] }, "name": "CodeBrowser" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/Container.json b/web/app/components/base/icons/src/vender/line/development/Container.json index 3b15cd8f88..dbedb8aff3 100644 --- a/web/app/components/base/icons/src/vender/line/development/Container.json +++ b/web/app/components/base/icons/src/vender/line/development/Container.json @@ -26,4 +26,4 @@ ] }, "name": "Container" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/Database01.json b/web/app/components/base/icons/src/vender/line/development/Database01.json index e25b3e7cef..2be1974840 100644 --- a/web/app/components/base/icons/src/vender/line/development/Database01.json +++ b/web/app/components/base/icons/src/vender/line/development/Database01.json @@ -26,4 +26,4 @@ ] }, "name": "Database01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/Database03.json b/web/app/components/base/icons/src/vender/line/development/Database03.json index 5acf4bf1f9..24a004afed 100644 --- a/web/app/components/base/icons/src/vender/line/development/Database03.json +++ 
b/web/app/components/base/icons/src/vender/line/development/Database03.json @@ -26,4 +26,4 @@ ] }, "name": "Database03" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/FileHeart02.json b/web/app/components/base/icons/src/vender/line/development/FileHeart02.json index ef9343dfc0..163e64b570 100644 --- a/web/app/components/base/icons/src/vender/line/development/FileHeart02.json +++ b/web/app/components/base/icons/src/vender/line/development/FileHeart02.json @@ -49,4 +49,4 @@ ] }, "name": "FileHeart02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/GitBranch01.json b/web/app/components/base/icons/src/vender/line/development/GitBranch01.json index 04205e57c6..f9d9d00e59 100644 --- a/web/app/components/base/icons/src/vender/line/development/GitBranch01.json +++ b/web/app/components/base/icons/src/vender/line/development/GitBranch01.json @@ -36,4 +36,4 @@ ] }, "name": "GitBranch01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/PromptEngineering.json b/web/app/components/base/icons/src/vender/line/development/PromptEngineering.json index c55bde8f57..97ca2e9353 100644 --- a/web/app/components/base/icons/src/vender/line/development/PromptEngineering.json +++ b/web/app/components/base/icons/src/vender/line/development/PromptEngineering.json @@ -62,4 +62,4 @@ ] }, "name": "PromptEngineering" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/PuzzlePiece01.json b/web/app/components/base/icons/src/vender/line/development/PuzzlePiece01.json index ce06d6125f..672e405ffa 100644 --- a/web/app/components/base/icons/src/vender/line/development/PuzzlePiece01.json +++ b/web/app/components/base/icons/src/vender/line/development/PuzzlePiece01.json @@ -63,4 +63,4 @@ ] }, "name": "PuzzlePiece01" -} \ No newline at end of file +} diff --git 
a/web/app/components/base/icons/src/vender/line/development/TerminalSquare.json b/web/app/components/base/icons/src/vender/line/development/TerminalSquare.json index 7a78b7b934..48fb6ce248 100644 --- a/web/app/components/base/icons/src/vender/line/development/TerminalSquare.json +++ b/web/app/components/base/icons/src/vender/line/development/TerminalSquare.json @@ -36,4 +36,4 @@ ] }, "name": "TerminalSquare" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/Variable.json b/web/app/components/base/icons/src/vender/line/development/Variable.json index b7545fe8ae..b366f11a06 100644 --- a/web/app/components/base/icons/src/vender/line/development/Variable.json +++ b/web/app/components/base/icons/src/vender/line/development/Variable.json @@ -59,4 +59,4 @@ ] }, "name": "Variable" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/development/Webhooks.json b/web/app/components/base/icons/src/vender/line/development/Webhooks.json index 452194deb3..bb9ccf1059 100644 --- a/web/app/components/base/icons/src/vender/line/development/Webhooks.json +++ b/web/app/components/base/icons/src/vender/line/development/Webhooks.json @@ -86,4 +86,4 @@ ] }, "name": "Webhooks" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/editor/AlignLeft.json b/web/app/components/base/icons/src/vender/line/editor/AlignLeft.json index ae8b150447..7ddf4d24c1 100644 --- a/web/app/components/base/icons/src/vender/line/editor/AlignLeft.json +++ b/web/app/components/base/icons/src/vender/line/editor/AlignLeft.json @@ -36,4 +36,4 @@ ] }, "name": "AlignLeft" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/editor/BezierCurve03.json b/web/app/components/base/icons/src/vender/line/editor/BezierCurve03.json index bc87f9b00d..5f76ff1ac3 100644 --- a/web/app/components/base/icons/src/vender/line/editor/BezierCurve03.json +++ 
b/web/app/components/base/icons/src/vender/line/editor/BezierCurve03.json @@ -35,4 +35,4 @@ ] }, "name": "BezierCurve03" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/editor/Collapse.json b/web/app/components/base/icons/src/vender/line/editor/Collapse.json index 5e3cf08ce0..224133e0b6 100644 --- a/web/app/components/base/icons/src/vender/line/editor/Collapse.json +++ b/web/app/components/base/icons/src/vender/line/editor/Collapse.json @@ -59,4 +59,4 @@ ] }, "name": "Collapse" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/editor/Colors.json b/web/app/components/base/icons/src/vender/line/editor/Colors.json index baee8ee347..0508092598 100644 --- a/web/app/components/base/icons/src/vender/line/editor/Colors.json +++ b/web/app/components/base/icons/src/vender/line/editor/Colors.json @@ -36,4 +36,4 @@ ] }, "name": "Colors" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/editor/ImageIndentLeft.json b/web/app/components/base/icons/src/vender/line/editor/ImageIndentLeft.json index 603696d969..33ba61f1c7 100644 --- a/web/app/components/base/icons/src/vender/line/editor/ImageIndentLeft.json +++ b/web/app/components/base/icons/src/vender/line/editor/ImageIndentLeft.json @@ -36,4 +36,4 @@ ] }, "name": "ImageIndentLeft" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/editor/LeftIndent02.json b/web/app/components/base/icons/src/vender/line/editor/LeftIndent02.json index 447ae887a9..cb77ee97ea 100644 --- a/web/app/components/base/icons/src/vender/line/editor/LeftIndent02.json +++ b/web/app/components/base/icons/src/vender/line/editor/LeftIndent02.json @@ -26,4 +26,4 @@ ] }, "name": "LeftIndent02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/editor/LetterSpacing01.json b/web/app/components/base/icons/src/vender/line/editor/LetterSpacing01.json index 
98b3cd6617..e322926aa0 100644 --- a/web/app/components/base/icons/src/vender/line/editor/LetterSpacing01.json +++ b/web/app/components/base/icons/src/vender/line/editor/LetterSpacing01.json @@ -36,4 +36,4 @@ ] }, "name": "LetterSpacing01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/editor/TypeSquare.json b/web/app/components/base/icons/src/vender/line/editor/TypeSquare.json index 195b047746..b381c51420 100644 --- a/web/app/components/base/icons/src/vender/line/editor/TypeSquare.json +++ b/web/app/components/base/icons/src/vender/line/editor/TypeSquare.json @@ -35,4 +35,4 @@ ] }, "name": "TypeSquare" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/education/BookOpen01.json b/web/app/components/base/icons/src/vender/line/education/BookOpen01.json index bfa7941345..1c6f46b700 100644 --- a/web/app/components/base/icons/src/vender/line/education/BookOpen01.json +++ b/web/app/components/base/icons/src/vender/line/education/BookOpen01.json @@ -46,4 +46,4 @@ ] }, "name": "BookOpen01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/files/File02.json b/web/app/components/base/icons/src/vender/line/files/File02.json index 110765adeb..6c0cf176c1 100644 --- a/web/app/components/base/icons/src/vender/line/files/File02.json +++ b/web/app/components/base/icons/src/vender/line/files/File02.json @@ -36,4 +36,4 @@ ] }, "name": "File02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/files/FileArrow01.json b/web/app/components/base/icons/src/vender/line/files/FileArrow01.json index 189f0814df..ce13dd0f5e 100644 --- a/web/app/components/base/icons/src/vender/line/files/FileArrow01.json +++ b/web/app/components/base/icons/src/vender/line/files/FileArrow01.json @@ -36,4 +36,4 @@ ] }, "name": "FileArrow01" -} \ No newline at end of file +} diff --git 
a/web/app/components/base/icons/src/vender/line/files/FileCheck02.json b/web/app/components/base/icons/src/vender/line/files/FileCheck02.json index 9a2e063c0a..0b08e61b30 100644 --- a/web/app/components/base/icons/src/vender/line/files/FileCheck02.json +++ b/web/app/components/base/icons/src/vender/line/files/FileCheck02.json @@ -36,4 +36,4 @@ ] }, "name": "FileCheck02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/files/FileDownload02.json b/web/app/components/base/icons/src/vender/line/files/FileDownload02.json index a0dccc280f..2c439adb31 100644 --- a/web/app/components/base/icons/src/vender/line/files/FileDownload02.json +++ b/web/app/components/base/icons/src/vender/line/files/FileDownload02.json @@ -26,4 +26,4 @@ ] }, "name": "FileDownload02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/files/FilePlus01.json b/web/app/components/base/icons/src/vender/line/files/FilePlus01.json index 67d8784494..470703abe1 100644 --- a/web/app/components/base/icons/src/vender/line/files/FilePlus01.json +++ b/web/app/components/base/icons/src/vender/line/files/FilePlus01.json @@ -36,4 +36,4 @@ ] }, "name": "FilePlus01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/files/FilePlus02.json b/web/app/components/base/icons/src/vender/line/files/FilePlus02.json index 447b1e91ba..cd55cad950 100644 --- a/web/app/components/base/icons/src/vender/line/files/FilePlus02.json +++ b/web/app/components/base/icons/src/vender/line/files/FilePlus02.json @@ -26,4 +26,4 @@ ] }, "name": "FilePlus02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/files/FileText.json b/web/app/components/base/icons/src/vender/line/files/FileText.json index 536bc45852..12335ec1c2 100644 --- a/web/app/components/base/icons/src/vender/line/files/FileText.json +++ b/web/app/components/base/icons/src/vender/line/files/FileText.json @@ 
-36,4 +36,4 @@ ] }, "name": "FileText" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/files/FileUpload.json b/web/app/components/base/icons/src/vender/line/files/FileUpload.json index 5dc2ec115e..6dfa30a350 100644 --- a/web/app/components/base/icons/src/vender/line/files/FileUpload.json +++ b/web/app/components/base/icons/src/vender/line/files/FileUpload.json @@ -49,4 +49,4 @@ ] }, "name": "FileUpload" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/files/Folder.json b/web/app/components/base/icons/src/vender/line/files/Folder.json index 6bbc4380ae..84e3cb9763 100644 --- a/web/app/components/base/icons/src/vender/line/files/Folder.json +++ b/web/app/components/base/icons/src/vender/line/files/Folder.json @@ -36,4 +36,4 @@ ] }, "name": "Folder" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/financeAndECommerce/Balance.json b/web/app/components/base/icons/src/vender/line/financeAndECommerce/Balance.json index c04fcda517..79bdc7024d 100644 --- a/web/app/components/base/icons/src/vender/line/financeAndECommerce/Balance.json +++ b/web/app/components/base/icons/src/vender/line/financeAndECommerce/Balance.json @@ -26,4 +26,4 @@ ] }, "name": "Balance" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/financeAndECommerce/CoinsStacked01.json b/web/app/components/base/icons/src/vender/line/financeAndECommerce/CoinsStacked01.json index 8a971909c8..328e38a1ce 100644 --- a/web/app/components/base/icons/src/vender/line/financeAndECommerce/CoinsStacked01.json +++ b/web/app/components/base/icons/src/vender/line/financeAndECommerce/CoinsStacked01.json @@ -36,4 +36,4 @@ ] }, "name": "CoinsStacked01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/financeAndECommerce/GoldCoin.json b/web/app/components/base/icons/src/vender/line/financeAndECommerce/GoldCoin.json 
index f10b5fa7cf..7a748bda5a 100644 --- a/web/app/components/base/icons/src/vender/line/financeAndECommerce/GoldCoin.json +++ b/web/app/components/base/icons/src/vender/line/financeAndECommerce/GoldCoin.json @@ -117,4 +117,4 @@ ] }, "name": "GoldCoin" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/financeAndECommerce/ReceiptList.json b/web/app/components/base/icons/src/vender/line/financeAndECommerce/ReceiptList.json index 8e9c070875..ac3d3bdfe2 100644 --- a/web/app/components/base/icons/src/vender/line/financeAndECommerce/ReceiptList.json +++ b/web/app/components/base/icons/src/vender/line/financeAndECommerce/ReceiptList.json @@ -26,4 +26,4 @@ ] }, "name": "ReceiptList" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/financeAndECommerce/Tag01.json b/web/app/components/base/icons/src/vender/line/financeAndECommerce/Tag01.json index b6f838d72f..82cd5af5f9 100644 --- a/web/app/components/base/icons/src/vender/line/financeAndECommerce/Tag01.json +++ b/web/app/components/base/icons/src/vender/line/financeAndECommerce/Tag01.json @@ -63,4 +63,4 @@ ] }, "name": "Tag01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/financeAndECommerce/Tag03.json b/web/app/components/base/icons/src/vender/line/financeAndECommerce/Tag03.json index ef0753b8d3..2fa00f62f7 100644 --- a/web/app/components/base/icons/src/vender/line/financeAndECommerce/Tag03.json +++ b/web/app/components/base/icons/src/vender/line/financeAndECommerce/Tag03.json @@ -36,4 +36,4 @@ ] }, "name": "Tag03" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/AtSign.json b/web/app/components/base/icons/src/vender/line/general/AtSign.json index 0722d8ff34..dd6d9cbbc7 100644 --- a/web/app/components/base/icons/src/vender/line/general/AtSign.json +++ b/web/app/components/base/icons/src/vender/line/general/AtSign.json @@ -63,4 +63,4 @@ ] }, 
"name": "AtSign" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Bookmark.json b/web/app/components/base/icons/src/vender/line/general/Bookmark.json index 1b6e517be7..378bc76be5 100644 --- a/web/app/components/base/icons/src/vender/line/general/Bookmark.json +++ b/web/app/components/base/icons/src/vender/line/general/Bookmark.json @@ -26,4 +26,4 @@ ] }, "name": "Bookmark" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Check.json b/web/app/components/base/icons/src/vender/line/general/Check.json index eae343816a..e3265f8138 100644 --- a/web/app/components/base/icons/src/vender/line/general/Check.json +++ b/web/app/components/base/icons/src/vender/line/general/Check.json @@ -36,4 +36,4 @@ ] }, "name": "Check" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/CheckDone01.json b/web/app/components/base/icons/src/vender/line/general/CheckDone01.json index 85355f93fd..ec3894aa00 100644 --- a/web/app/components/base/icons/src/vender/line/general/CheckDone01.json +++ b/web/app/components/base/icons/src/vender/line/general/CheckDone01.json @@ -36,4 +36,4 @@ ] }, "name": "CheckDone01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/ChecklistSquare.json b/web/app/components/base/icons/src/vender/line/general/ChecklistSquare.json index 737c69623d..6cf330994a 100644 --- a/web/app/components/base/icons/src/vender/line/general/ChecklistSquare.json +++ b/web/app/components/base/icons/src/vender/line/general/ChecklistSquare.json @@ -33,4 +33,4 @@ ] }, "name": "ChecklistSquare" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/DotsGrid.json b/web/app/components/base/icons/src/vender/line/general/DotsGrid.json index 9aafed2f7b..85a24bb14c 100644 --- a/web/app/components/base/icons/src/vender/line/general/DotsGrid.json +++ 
b/web/app/components/base/icons/src/vender/line/general/DotsGrid.json @@ -131,4 +131,4 @@ ] }, "name": "DotsGrid" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Edit02.json b/web/app/components/base/icons/src/vender/line/general/Edit02.json index 38798fecf1..2d32d1da94 100644 --- a/web/app/components/base/icons/src/vender/line/general/Edit02.json +++ b/web/app/components/base/icons/src/vender/line/general/Edit02.json @@ -63,4 +63,4 @@ ] }, "name": "Edit02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Edit04.json b/web/app/components/base/icons/src/vender/line/general/Edit04.json index 73f275b732..34be957072 100644 --- a/web/app/components/base/icons/src/vender/line/general/Edit04.json +++ b/web/app/components/base/icons/src/vender/line/general/Edit04.json @@ -26,4 +26,4 @@ ] }, "name": "Edit04" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Edit05.json b/web/app/components/base/icons/src/vender/line/general/Edit05.json index 321336bc2f..f1bbf7138e 100644 --- a/web/app/components/base/icons/src/vender/line/general/Edit05.json +++ b/web/app/components/base/icons/src/vender/line/general/Edit05.json @@ -63,4 +63,4 @@ ] }, "name": "Edit05" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Hash02.json b/web/app/components/base/icons/src/vender/line/general/Hash02.json index 41b639f938..bd140198a1 100644 --- a/web/app/components/base/icons/src/vender/line/general/Hash02.json +++ b/web/app/components/base/icons/src/vender/line/general/Hash02.json @@ -35,4 +35,4 @@ ] }, "name": "Hash02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/InfoCircle.json b/web/app/components/base/icons/src/vender/line/general/InfoCircle.json index 4017e85ce1..6bc285c86b 100644 --- 
a/web/app/components/base/icons/src/vender/line/general/InfoCircle.json +++ b/web/app/components/base/icons/src/vender/line/general/InfoCircle.json @@ -63,4 +63,4 @@ ] }, "name": "InfoCircle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Link03.json b/web/app/components/base/icons/src/vender/line/general/Link03.json index ccd608f643..4728221b60 100644 --- a/web/app/components/base/icons/src/vender/line/general/Link03.json +++ b/web/app/components/base/icons/src/vender/line/general/Link03.json @@ -54,4 +54,4 @@ ] }, "name": "Link03" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/LinkExternal02.json b/web/app/components/base/icons/src/vender/line/general/LinkExternal02.json index af445595c8..7016dd896f 100644 --- a/web/app/components/base/icons/src/vender/line/general/LinkExternal02.json +++ b/web/app/components/base/icons/src/vender/line/general/LinkExternal02.json @@ -35,4 +35,4 @@ ] }, "name": "LinkExternal02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/LogIn04.json b/web/app/components/base/icons/src/vender/line/general/LogIn04.json index a8316e9c27..27808a36a9 100644 --- a/web/app/components/base/icons/src/vender/line/general/LogIn04.json +++ b/web/app/components/base/icons/src/vender/line/general/LogIn04.json @@ -50,4 +50,4 @@ ] }, "name": "LogIn04" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/LogOut01.json b/web/app/components/base/icons/src/vender/line/general/LogOut01.json index bd2cb3e18c..d5c89394ef 100644 --- a/web/app/components/base/icons/src/vender/line/general/LogOut01.json +++ b/web/app/components/base/icons/src/vender/line/general/LogOut01.json @@ -36,4 +36,4 @@ ] }, "name": "LogOut01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/LogOut04.json 
b/web/app/components/base/icons/src/vender/line/general/LogOut04.json index a19bedfe4c..80a27ecf31 100644 --- a/web/app/components/base/icons/src/vender/line/general/LogOut04.json +++ b/web/app/components/base/icons/src/vender/line/general/LogOut04.json @@ -50,4 +50,4 @@ ] }, "name": "LogOut04" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Menu01.json b/web/app/components/base/icons/src/vender/line/general/Menu01.json index 5b32928738..5dfdebf7cc 100644 --- a/web/app/components/base/icons/src/vender/line/general/Menu01.json +++ b/web/app/components/base/icons/src/vender/line/general/Menu01.json @@ -36,4 +36,4 @@ ] }, "name": "Menu01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Pin01.json b/web/app/components/base/icons/src/vender/line/general/Pin01.json index b0e61a2373..3ad6bd8799 100644 --- a/web/app/components/base/icons/src/vender/line/general/Pin01.json +++ b/web/app/components/base/icons/src/vender/line/general/Pin01.json @@ -36,4 +36,4 @@ ] }, "name": "Pin01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Pin02.json b/web/app/components/base/icons/src/vender/line/general/Pin02.json index c5b51a5f33..474e7e102f 100644 --- a/web/app/components/base/icons/src/vender/line/general/Pin02.json +++ b/web/app/components/base/icons/src/vender/line/general/Pin02.json @@ -26,4 +26,4 @@ ] }, "name": "Pin02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Plus02.json b/web/app/components/base/icons/src/vender/line/general/Plus02.json index 8a9516f1ae..84b07b4251 100644 --- a/web/app/components/base/icons/src/vender/line/general/Plus02.json +++ b/web/app/components/base/icons/src/vender/line/general/Plus02.json @@ -36,4 +36,4 @@ ] }, "name": "Plus02" -} \ No newline at end of file +} diff --git 
a/web/app/components/base/icons/src/vender/line/general/Refresh.json b/web/app/components/base/icons/src/vender/line/general/Refresh.json index 128dcb7d4d..693b9ab4f0 100644 --- a/web/app/components/base/icons/src/vender/line/general/Refresh.json +++ b/web/app/components/base/icons/src/vender/line/general/Refresh.json @@ -20,4 +20,4 @@ ] }, "name": "Refresh" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/SearchMenu.json b/web/app/components/base/icons/src/vender/line/general/SearchMenu.json new file mode 100644 index 0000000000..5854f25339 --- /dev/null +++ b/web/app/components/base/icons/src/vender/line/general/SearchMenu.json @@ -0,0 +1,77 @@ +{ + "icon": { + "type": "element", + "isRootNode": true, + "name": "svg", + "attributes": { + "width": "32", + "height": "32", + "viewBox": "0 0 32 32", + "fill": "none", + "xmlns": "http://www.w3.org/2000/svg" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "d": "M28.0049 16C28.0049 20.4183 24.4231 24 20.0049 24C15.5866 24 12.0049 20.4183 12.0049 16C12.0049 11.5817 15.5866 8 20.0049 8C24.4231 8 28.0049 11.5817 28.0049 16Z", + "stroke": "currentColor", + "stroke-width": "2", + "stroke-linecap": "round", + "stroke-linejoin": "round" + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "d": "M4.00488 16H6.67155", + "stroke": "currentColor", + "stroke-width": "2", + "stroke-linecap": "round", + "stroke-linejoin": "round" + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "d": "M4.00488 9.33334H8.00488", + "stroke": "currentColor", + "stroke-width": "2", + "stroke-linecap": "round", + "stroke-linejoin": "round" + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "d": "M4.00488 22.6667H8.00488", + "stroke": "currentColor", + "stroke-width": "2", + "stroke-linecap": "round", + "stroke-linejoin": "round" + }, + "children": [] + }, 
+ { + "type": "element", + "name": "path", + "attributes": { + "d": "M26 22L29.3333 25.3333", + "stroke": "currentColor", + "stroke-width": "2", + "stroke-linecap": "round", + "stroke-linejoin": "round" + }, + "children": [] + } + ] + }, + "name": "SearchMenu" +} diff --git a/web/app/components/base/icons/src/vender/line/general/SearchMenu.tsx b/web/app/components/base/icons/src/vender/line/general/SearchMenu.tsx new file mode 100644 index 0000000000..4826abb20f --- /dev/null +++ b/web/app/components/base/icons/src/vender/line/general/SearchMenu.tsx @@ -0,0 +1,20 @@ +// GENERATE BY script +// DON NOT EDIT IT MANUALLY + +import * as React from 'react' +import data from './SearchMenu.json' +import IconBase from '@/app/components/base/icons/IconBase' +import type { IconData } from '@/app/components/base/icons/IconBase' + +const Icon = ( + { + ref, + ...props + }: React.SVGProps & { + ref?: React.RefObject>; + }, +) => + +Icon.displayName = 'SearchMenu' + +export default Icon diff --git a/web/app/components/base/icons/src/vender/line/general/Settings01.json b/web/app/components/base/icons/src/vender/line/general/Settings01.json index 8734e9f947..ca337d9b20 100644 --- a/web/app/components/base/icons/src/vender/line/general/Settings01.json +++ b/web/app/components/base/icons/src/vender/line/general/Settings01.json @@ -83,4 +83,4 @@ ] }, "name": "Settings01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Settings04.json b/web/app/components/base/icons/src/vender/line/general/Settings04.json index e46a0548ed..4dd34e68a4 100644 --- a/web/app/components/base/icons/src/vender/line/general/Settings04.json +++ b/web/app/components/base/icons/src/vender/line/general/Settings04.json @@ -36,4 +36,4 @@ ] }, "name": "Settings04" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Target04.json b/web/app/components/base/icons/src/vender/line/general/Target04.json index 
5c07628bae..731fcc208c 100644 --- a/web/app/components/base/icons/src/vender/line/general/Target04.json +++ b/web/app/components/base/icons/src/vender/line/general/Target04.json @@ -62,4 +62,4 @@ ] }, "name": "Target04" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/Upload03.json b/web/app/components/base/icons/src/vender/line/general/Upload03.json index c3490f3cff..bda73041a9 100644 --- a/web/app/components/base/icons/src/vender/line/general/Upload03.json +++ b/web/app/components/base/icons/src/vender/line/general/Upload03.json @@ -63,4 +63,4 @@ ] }, "name": "Upload03" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/UploadCloud01.json b/web/app/components/base/icons/src/vender/line/general/UploadCloud01.json index 03e448d7ad..5e5411c1cf 100644 --- a/web/app/components/base/icons/src/vender/line/general/UploadCloud01.json +++ b/web/app/components/base/icons/src/vender/line/general/UploadCloud01.json @@ -39,4 +39,4 @@ ] }, "name": "UploadCloud01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/X.json b/web/app/components/base/icons/src/vender/line/general/X.json index 5c2fde5df6..9c482caa25 100644 --- a/web/app/components/base/icons/src/vender/line/general/X.json +++ b/web/app/components/base/icons/src/vender/line/general/X.json @@ -36,4 +36,4 @@ ] }, "name": "X" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/general/index.ts b/web/app/components/base/icons/src/vender/line/general/index.ts index b5c7a7bbc1..1b6c7e7303 100644 --- a/web/app/components/base/icons/src/vender/line/general/index.ts +++ b/web/app/components/base/icons/src/vender/line/general/index.ts @@ -19,6 +19,7 @@ export { default as Pin01 } from './Pin01' export { default as Pin02 } from './Pin02' export { default as Plus02 } from './Plus02' export { default as Refresh } from './Refresh' +export { 
default as SearchMenu } from './SearchMenu' export { default as Settings01 } from './Settings01' export { default as Settings04 } from './Settings04' export { default as Target04 } from './Target04' diff --git a/web/app/components/base/icons/src/vender/line/images/ImagePlus.json b/web/app/components/base/icons/src/vender/line/images/ImagePlus.json index 127b04659a..ce3073f1c3 100644 --- a/web/app/components/base/icons/src/vender/line/images/ImagePlus.json +++ b/web/app/components/base/icons/src/vender/line/images/ImagePlus.json @@ -36,4 +36,4 @@ ] }, "name": "ImagePlus" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/layout/AlignLeft01.json b/web/app/components/base/icons/src/vender/line/layout/AlignLeft01.json index 5ed5add0d7..9450fd2403 100644 --- a/web/app/components/base/icons/src/vender/line/layout/AlignLeft01.json +++ b/web/app/components/base/icons/src/vender/line/layout/AlignLeft01.json @@ -36,4 +36,4 @@ ] }, "name": "AlignLeft01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/layout/AlignRight01.json b/web/app/components/base/icons/src/vender/line/layout/AlignRight01.json index 6690e6d474..05ecc93716 100644 --- a/web/app/components/base/icons/src/vender/line/layout/AlignRight01.json +++ b/web/app/components/base/icons/src/vender/line/layout/AlignRight01.json @@ -36,4 +36,4 @@ ] }, "name": "AlignRight01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/layout/Grid01.json b/web/app/components/base/icons/src/vender/line/layout/Grid01.json index 43a385c770..edc374a9cc 100644 --- a/web/app/components/base/icons/src/vender/line/layout/Grid01.json +++ b/web/app/components/base/icons/src/vender/line/layout/Grid01.json @@ -80,4 +80,4 @@ ] }, "name": "Grid01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/layout/LayoutGrid02.json 
b/web/app/components/base/icons/src/vender/line/layout/LayoutGrid02.json index d71e981723..a5e5b2479d 100644 --- a/web/app/components/base/icons/src/vender/line/layout/LayoutGrid02.json +++ b/web/app/components/base/icons/src/vender/line/layout/LayoutGrid02.json @@ -26,4 +26,4 @@ ] }, "name": "LayoutGrid02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/mapsAndTravel/Globe01.json b/web/app/components/base/icons/src/vender/line/mapsAndTravel/Globe01.json index 1e0896672b..9ccee71b02 100644 --- a/web/app/components/base/icons/src/vender/line/mapsAndTravel/Globe01.json +++ b/web/app/components/base/icons/src/vender/line/mapsAndTravel/Globe01.json @@ -63,4 +63,4 @@ ] }, "name": "Globe01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/mapsAndTravel/Route.json b/web/app/components/base/icons/src/vender/line/mapsAndTravel/Route.json index 19cb837362..cb0b7f01a9 100644 --- a/web/app/components/base/icons/src/vender/line/mapsAndTravel/Route.json +++ b/web/app/components/base/icons/src/vender/line/mapsAndTravel/Route.json @@ -63,4 +63,4 @@ ] }, "name": "Route" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/mediaAndDevices/Microphone01.json b/web/app/components/base/icons/src/vender/line/mediaAndDevices/Microphone01.json index 8f273d0a75..193aee5c3b 100644 --- a/web/app/components/base/icons/src/vender/line/mediaAndDevices/Microphone01.json +++ b/web/app/components/base/icons/src/vender/line/mediaAndDevices/Microphone01.json @@ -36,4 +36,4 @@ ] }, "name": "Microphone01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/mediaAndDevices/PlayCircle.json b/web/app/components/base/icons/src/vender/line/mediaAndDevices/PlayCircle.json index 278512534f..db313deb88 100644 --- a/web/app/components/base/icons/src/vender/line/mediaAndDevices/PlayCircle.json +++ 
b/web/app/components/base/icons/src/vender/line/mediaAndDevices/PlayCircle.json @@ -83,4 +83,4 @@ ] }, "name": "PlayCircle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/mediaAndDevices/SlidersH.json b/web/app/components/base/icons/src/vender/line/mediaAndDevices/SlidersH.json index fc138eecbc..4620cb9178 100644 --- a/web/app/components/base/icons/src/vender/line/mediaAndDevices/SlidersH.json +++ b/web/app/components/base/icons/src/vender/line/mediaAndDevices/SlidersH.json @@ -26,4 +26,4 @@ ] }, "name": "SlidersH" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/mediaAndDevices/Speaker.json b/web/app/components/base/icons/src/vender/line/mediaAndDevices/Speaker.json index 3e5cbe171b..2cb1df48f8 100644 --- a/web/app/components/base/icons/src/vender/line/mediaAndDevices/Speaker.json +++ b/web/app/components/base/icons/src/vender/line/mediaAndDevices/Speaker.json @@ -109,4 +109,4 @@ ] }, "name": "Speaker" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/mediaAndDevices/Stop.json b/web/app/components/base/icons/src/vender/line/mediaAndDevices/Stop.json index 7d25397087..b0860433ee 100644 --- a/web/app/components/base/icons/src/vender/line/mediaAndDevices/Stop.json +++ b/web/app/components/base/icons/src/vender/line/mediaAndDevices/Stop.json @@ -63,4 +63,4 @@ ] }, "name": "Stop" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/mediaAndDevices/StopCircle.json b/web/app/components/base/icons/src/vender/line/mediaAndDevices/StopCircle.json index 2d456014b8..3a211c78ce 100644 --- a/web/app/components/base/icons/src/vender/line/mediaAndDevices/StopCircle.json +++ b/web/app/components/base/icons/src/vender/line/mediaAndDevices/StopCircle.json @@ -56,4 +56,4 @@ ] }, "name": "StopCircle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/Apps02.json 
b/web/app/components/base/icons/src/vender/line/others/Apps02.json index 2ff128f24c..31378e175d 100644 --- a/web/app/components/base/icons/src/vender/line/others/Apps02.json +++ b/web/app/components/base/icons/src/vender/line/others/Apps02.json @@ -33,4 +33,4 @@ ] }, "name": "Apps02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/BubbleX.json b/web/app/components/base/icons/src/vender/line/others/BubbleX.json index 0cb5702c1f..7991ed4981 100644 --- a/web/app/components/base/icons/src/vender/line/others/BubbleX.json +++ b/web/app/components/base/icons/src/vender/line/others/BubbleX.json @@ -54,4 +54,4 @@ ] }, "name": "BubbleX" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/Colors.json b/web/app/components/base/icons/src/vender/line/others/Colors.json index b1832c2fe8..904e39fd18 100644 --- a/web/app/components/base/icons/src/vender/line/others/Colors.json +++ b/web/app/components/base/icons/src/vender/line/others/Colors.json @@ -63,4 +63,4 @@ ] }, "name": "Colors" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/DragHandle.json b/web/app/components/base/icons/src/vender/line/others/DragHandle.json index c1364aff18..ee1803c15c 100644 --- a/web/app/components/base/icons/src/vender/line/others/DragHandle.json +++ b/web/app/components/base/icons/src/vender/line/others/DragHandle.json @@ -35,4 +35,4 @@ ] }, "name": "DragHandle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/Env.json b/web/app/components/base/icons/src/vender/line/others/Env.json index 87a88edf3f..0cca4da4c4 100644 --- a/web/app/components/base/icons/src/vender/line/others/Env.json +++ b/web/app/components/base/icons/src/vender/line/others/Env.json @@ -87,4 +87,4 @@ ] }, "name": "Env" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/Exchange02.json 
b/web/app/components/base/icons/src/vender/line/others/Exchange02.json index 808a9ff644..3672d8b88b 100644 --- a/web/app/components/base/icons/src/vender/line/others/Exchange02.json +++ b/web/app/components/base/icons/src/vender/line/others/Exchange02.json @@ -23,4 +23,4 @@ ] }, "name": "Exchange02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/FileCode.json b/web/app/components/base/icons/src/vender/line/others/FileCode.json index 41050a559b..d61af3fdb3 100644 --- a/web/app/components/base/icons/src/vender/line/others/FileCode.json +++ b/web/app/components/base/icons/src/vender/line/others/FileCode.json @@ -23,4 +23,4 @@ ] }, "name": "FileCode" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/GlobalVariable.json b/web/app/components/base/icons/src/vender/line/others/GlobalVariable.json index d5fce59b4a..600c803f32 100644 --- a/web/app/components/base/icons/src/vender/line/others/GlobalVariable.json +++ b/web/app/components/base/icons/src/vender/line/others/GlobalVariable.json @@ -25,4 +25,4 @@ ] }, "name": "GlobalVariable" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/Icon3Dots.json b/web/app/components/base/icons/src/vender/line/others/Icon3Dots.json index 0942222f39..cd56eea903 100644 --- a/web/app/components/base/icons/src/vender/line/others/Icon3Dots.json +++ b/web/app/components/base/icons/src/vender/line/others/Icon3Dots.json @@ -36,4 +36,4 @@ ] }, "name": "Icon3Dots" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/LongArrowLeft.json b/web/app/components/base/icons/src/vender/line/others/LongArrowLeft.json index d2646b1090..43074803fe 100644 --- a/web/app/components/base/icons/src/vender/line/others/LongArrowLeft.json +++ b/web/app/components/base/icons/src/vender/line/others/LongArrowLeft.json @@ -24,4 +24,4 @@ ] }, "name": "LongArrowLeft" -} \ No 
newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/LongArrowRight.json b/web/app/components/base/icons/src/vender/line/others/LongArrowRight.json index 7582b81568..df05126c9a 100644 --- a/web/app/components/base/icons/src/vender/line/others/LongArrowRight.json +++ b/web/app/components/base/icons/src/vender/line/others/LongArrowRight.json @@ -24,4 +24,4 @@ ] }, "name": "LongArrowRight" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/SearchMenu.json b/web/app/components/base/icons/src/vender/line/others/SearchMenu.json index 5222574040..5854f25339 100644 --- a/web/app/components/base/icons/src/vender/line/others/SearchMenu.json +++ b/web/app/components/base/icons/src/vender/line/others/SearchMenu.json @@ -74,4 +74,4 @@ ] }, "name": "SearchMenu" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/others/Tools.json b/web/app/components/base/icons/src/vender/line/others/Tools.json index 0ab6857b09..12068ada07 100644 --- a/web/app/components/base/icons/src/vender/line/others/Tools.json +++ b/web/app/components/base/icons/src/vender/line/others/Tools.json @@ -116,4 +116,4 @@ ] }, "name": "Tools" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/shapes/CubeOutline.json b/web/app/components/base/icons/src/vender/line/shapes/CubeOutline.json index 4091004b72..bfeea58e92 100644 --- a/web/app/components/base/icons/src/vender/line/shapes/CubeOutline.json +++ b/web/app/components/base/icons/src/vender/line/shapes/CubeOutline.json @@ -95,4 +95,4 @@ ] }, "name": "CubeOutline" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/time/ClockFastForward.json b/web/app/components/base/icons/src/vender/line/time/ClockFastForward.json index 26b72084bf..72f2478958 100644 --- a/web/app/components/base/icons/src/vender/line/time/ClockFastForward.json +++ 
b/web/app/components/base/icons/src/vender/line/time/ClockFastForward.json @@ -26,4 +26,4 @@ ] }, "name": "ClockFastForward" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/time/ClockPlay.json b/web/app/components/base/icons/src/vender/line/time/ClockPlay.json index 7d3cc48b09..4f6739241c 100644 --- a/web/app/components/base/icons/src/vender/line/time/ClockPlay.json +++ b/web/app/components/base/icons/src/vender/line/time/ClockPlay.json @@ -63,4 +63,4 @@ ] }, "name": "ClockPlay" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/time/ClockPlaySlim.json b/web/app/components/base/icons/src/vender/line/time/ClockPlaySlim.json index 348694eeee..6790781864 100644 --- a/web/app/components/base/icons/src/vender/line/time/ClockPlaySlim.json +++ b/web/app/components/base/icons/src/vender/line/time/ClockPlaySlim.json @@ -36,4 +36,4 @@ ] }, "name": "ClockPlaySlim" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/time/ClockRefresh.json b/web/app/components/base/icons/src/vender/line/time/ClockRefresh.json index 925907ab8c..f0fda2c829 100644 --- a/web/app/components/base/icons/src/vender/line/time/ClockRefresh.json +++ b/web/app/components/base/icons/src/vender/line/time/ClockRefresh.json @@ -59,4 +59,4 @@ ] }, "name": "ClockRefresh" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/users/User01.json b/web/app/components/base/icons/src/vender/line/users/User01.json index 55353030f9..5b878f8deb 100644 --- a/web/app/components/base/icons/src/vender/line/users/User01.json +++ b/web/app/components/base/icons/src/vender/line/users/User01.json @@ -36,4 +36,4 @@ ] }, "name": "User01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/users/Users01.json b/web/app/components/base/icons/src/vender/line/users/Users01.json index 96dbeb30ec..497c258bc5 100644 --- 
a/web/app/components/base/icons/src/vender/line/users/Users01.json +++ b/web/app/components/base/icons/src/vender/line/users/Users01.json @@ -36,4 +36,4 @@ ] }, "name": "Users01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/line/weather/Stars02.json b/web/app/components/base/icons/src/vender/line/weather/Stars02.json index 54f6a42ecf..fdb25e6238 100644 --- a/web/app/components/base/icons/src/vender/line/weather/Stars02.json +++ b/web/app/components/base/icons/src/vender/line/weather/Stars02.json @@ -26,4 +26,4 @@ ] }, "name": "Stars02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/other/AnthropicText.json b/web/app/components/base/icons/src/vender/other/AnthropicText.json index a65ef47747..df844dec27 100644 --- a/web/app/components/base/icons/src/vender/other/AnthropicText.json +++ b/web/app/components/base/icons/src/vender/other/AnthropicText.json @@ -536,4 +536,4 @@ ] }, "name": "AnthropicText" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/other/Generator.json b/web/app/components/base/icons/src/vender/other/Generator.json index 3f24cfe18b..a72489d190 100644 --- a/web/app/components/base/icons/src/vender/other/Generator.json +++ b/web/app/components/base/icons/src/vender/other/Generator.json @@ -34,4 +34,4 @@ ] }, "name": "Generator" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/other/Group.json b/web/app/components/base/icons/src/vender/other/Group.json index 078febbc80..5f95dfc364 100644 --- a/web/app/components/base/icons/src/vender/other/Group.json +++ b/web/app/components/base/icons/src/vender/other/Group.json @@ -63,4 +63,4 @@ ] }, "name": "Group" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/other/Mcp.json b/web/app/components/base/icons/src/vender/other/Mcp.json index 7caa70b16b..c1162e64a1 100644 --- 
a/web/app/components/base/icons/src/vender/other/Mcp.json +++ b/web/app/components/base/icons/src/vender/other/Mcp.json @@ -32,4 +32,4 @@ ] }, "name": "Mcp" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/other/NoToolPlaceholder.json b/web/app/components/base/icons/src/vender/other/NoToolPlaceholder.json index d33d62d344..db2c952b1d 100644 --- a/web/app/components/base/icons/src/vender/other/NoToolPlaceholder.json +++ b/web/app/components/base/icons/src/vender/other/NoToolPlaceholder.json @@ -276,4 +276,4 @@ ] }, "name": "NoToolPlaceholder" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/other/Openai.json b/web/app/components/base/icons/src/vender/other/Openai.json index 236f66fcf2..ddf1243254 100644 --- a/web/app/components/base/icons/src/vender/other/Openai.json +++ b/web/app/components/base/icons/src/vender/other/Openai.json @@ -77,4 +77,4 @@ ] }, "name": "Openai" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/other/ReplayLine.json b/web/app/components/base/icons/src/vender/other/ReplayLine.json index 0fffbc98f5..2cc78753fd 100644 --- a/web/app/components/base/icons/src/vender/other/ReplayLine.json +++ b/web/app/components/base/icons/src/vender/other/ReplayLine.json @@ -33,4 +33,4 @@ ] }, "name": "ReplayLine" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/plugin/BoxSparkleFill.json b/web/app/components/base/icons/src/vender/plugin/BoxSparkleFill.json index 3733f98afd..55d7c64620 100644 --- a/web/app/components/base/icons/src/vender/plugin/BoxSparkleFill.json +++ b/web/app/components/base/icons/src/vender/plugin/BoxSparkleFill.json @@ -63,4 +63,4 @@ ] }, "name": "BoxSparkleFill" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/plugin/LeftCorner.json b/web/app/components/base/icons/src/vender/plugin/LeftCorner.json index d4cd0cd0ec..2374b1cfd8 100644 --- 
a/web/app/components/base/icons/src/vender/plugin/LeftCorner.json +++ b/web/app/components/base/icons/src/vender/plugin/LeftCorner.json @@ -24,4 +24,4 @@ ] }, "name": "LeftCorner" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/FinanceAndECommerce/GoldCoin.json b/web/app/components/base/icons/src/vender/solid/FinanceAndECommerce/GoldCoin.json index dac0e567f6..878c1f9f1f 100644 --- a/web/app/components/base/icons/src/vender/solid/FinanceAndECommerce/GoldCoin.json +++ b/web/app/components/base/icons/src/vender/solid/FinanceAndECommerce/GoldCoin.json @@ -23,4 +23,4 @@ ] }, "name": "GoldCoin" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/FinanceAndECommerce/Scales02.json b/web/app/components/base/icons/src/vender/solid/FinanceAndECommerce/Scales02.json index 9a781bd62d..3dd4e8908c 100644 --- a/web/app/components/base/icons/src/vender/solid/FinanceAndECommerce/Scales02.json +++ b/web/app/components/base/icons/src/vender/solid/FinanceAndECommerce/Scales02.json @@ -45,4 +45,4 @@ ] }, "name": "Scales02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/alertsAndFeedback/AlertTriangle.json b/web/app/components/base/icons/src/vender/solid/alertsAndFeedback/AlertTriangle.json index c73fbc5855..1aa7089b6d 100644 --- a/web/app/components/base/icons/src/vender/solid/alertsAndFeedback/AlertTriangle.json +++ b/web/app/components/base/icons/src/vender/solid/alertsAndFeedback/AlertTriangle.json @@ -35,4 +35,4 @@ ] }, "name": "AlertTriangle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/arrows/ChevronDown.json b/web/app/components/base/icons/src/vender/solid/arrows/ChevronDown.json index ef9a33dc03..b4b6429d9c 100644 --- a/web/app/components/base/icons/src/vender/solid/arrows/ChevronDown.json +++ b/web/app/components/base/icons/src/vender/solid/arrows/ChevronDown.json @@ -36,4 +36,4 @@ ] }, "name": 
"ChevronDown" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/arrows/HighPriority.json b/web/app/components/base/icons/src/vender/solid/arrows/HighPriority.json index 6710fd8109..5b66153647 100644 --- a/web/app/components/base/icons/src/vender/solid/arrows/HighPriority.json +++ b/web/app/components/base/icons/src/vender/solid/arrows/HighPriority.json @@ -50,4 +50,4 @@ ] }, "name": "HighPriority" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/communication/AiText.json b/web/app/components/base/icons/src/vender/solid/communication/AiText.json index c6e30fbf01..65860e58b9 100644 --- a/web/app/components/base/icons/src/vender/solid/communication/AiText.json +++ b/web/app/components/base/icons/src/vender/solid/communication/AiText.json @@ -50,4 +50,4 @@ ] }, "name": "AiText" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/communication/BubbleTextMod.json b/web/app/components/base/icons/src/vender/solid/communication/BubbleTextMod.json index fceddcc729..7b2e964a61 100644 --- a/web/app/components/base/icons/src/vender/solid/communication/BubbleTextMod.json +++ b/web/app/components/base/icons/src/vender/solid/communication/BubbleTextMod.json @@ -25,4 +25,4 @@ ] }, "name": "BubbleTextMod" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/communication/ChatBot.json b/web/app/components/base/icons/src/vender/solid/communication/ChatBot.json index 024b0edbeb..0378d60a53 100644 --- a/web/app/components/base/icons/src/vender/solid/communication/ChatBot.json +++ b/web/app/components/base/icons/src/vender/solid/communication/ChatBot.json @@ -55,4 +55,4 @@ ] }, "name": "ChatBot" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/communication/CuteRobot.json b/web/app/components/base/icons/src/vender/solid/communication/CuteRobot.json index 
5b36575f56..fa9786473c 100644 --- a/web/app/components/base/icons/src/vender/solid/communication/CuteRobot.json +++ b/web/app/components/base/icons/src/vender/solid/communication/CuteRobot.json @@ -35,4 +35,4 @@ ] }, "name": "CuteRobot" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/communication/EditList.json b/web/app/components/base/icons/src/vender/solid/communication/EditList.json index 436f0be9f3..51278466c5 100644 --- a/web/app/components/base/icons/src/vender/solid/communication/EditList.json +++ b/web/app/components/base/icons/src/vender/solid/communication/EditList.json @@ -50,4 +50,4 @@ ] }, "name": "EditList" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/communication/ListSparkle.json b/web/app/components/base/icons/src/vender/solid/communication/ListSparkle.json index 2e348e4b8f..160172c88c 100644 --- a/web/app/components/base/icons/src/vender/solid/communication/ListSparkle.json +++ b/web/app/components/base/icons/src/vender/solid/communication/ListSparkle.json @@ -50,4 +50,4 @@ ] }, "name": "ListSparkle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/communication/Logic.json b/web/app/components/base/icons/src/vender/solid/communication/Logic.json index 57f86f4dd8..fa55d1c35b 100644 --- a/web/app/components/base/icons/src/vender/solid/communication/Logic.json +++ b/web/app/components/base/icons/src/vender/solid/communication/Logic.json @@ -50,4 +50,4 @@ ] }, "name": "Logic" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/communication/MessageDotsCircle.json b/web/app/components/base/icons/src/vender/solid/communication/MessageDotsCircle.json index dca92bf5d9..e4f41f22b4 100644 --- a/web/app/components/base/icons/src/vender/solid/communication/MessageDotsCircle.json +++ b/web/app/components/base/icons/src/vender/solid/communication/MessageDotsCircle.json @@ -35,4 
+35,4 @@ ] }, "name": "MessageDotsCircle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/communication/MessageFast.json b/web/app/components/base/icons/src/vender/solid/communication/MessageFast.json index 4580398f31..b859b1f3f0 100644 --- a/web/app/components/base/icons/src/vender/solid/communication/MessageFast.json +++ b/web/app/components/base/icons/src/vender/solid/communication/MessageFast.json @@ -25,4 +25,4 @@ ] }, "name": "MessageFast" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/communication/MessageHeartCircle.json b/web/app/components/base/icons/src/vender/solid/communication/MessageHeartCircle.json index 84769ba909..ede7ecdb8b 100644 --- a/web/app/components/base/icons/src/vender/solid/communication/MessageHeartCircle.json +++ b/web/app/components/base/icons/src/vender/solid/communication/MessageHeartCircle.json @@ -35,4 +35,4 @@ ] }, "name": "MessageHeartCircle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/communication/MessageSmileSquare.json b/web/app/components/base/icons/src/vender/solid/communication/MessageSmileSquare.json index 7810d9043b..466f1d0207 100644 --- a/web/app/components/base/icons/src/vender/solid/communication/MessageSmileSquare.json +++ b/web/app/components/base/icons/src/vender/solid/communication/MessageSmileSquare.json @@ -35,4 +35,4 @@ ] }, "name": "MessageSmileSquare" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/communication/Send03.json b/web/app/components/base/icons/src/vender/solid/communication/Send03.json index c6ff534838..8d0373ba7a 100644 --- a/web/app/components/base/icons/src/vender/solid/communication/Send03.json +++ b/web/app/components/base/icons/src/vender/solid/communication/Send03.json @@ -33,4 +33,4 @@ ] }, "name": "Send03" -} \ No newline at end of file +} diff --git 
a/web/app/components/base/icons/src/vender/solid/development/ApiConnection.json b/web/app/components/base/icons/src/vender/solid/development/ApiConnection.json index 6aafba9630..54a052241c 100644 --- a/web/app/components/base/icons/src/vender/solid/development/ApiConnection.json +++ b/web/app/components/base/icons/src/vender/solid/development/ApiConnection.json @@ -50,4 +50,4 @@ ] }, "name": "ApiConnection" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/development/ApiConnectionMod.json b/web/app/components/base/icons/src/vender/solid/development/ApiConnectionMod.json index e8ebcc7448..21efdaa13e 100644 --- a/web/app/components/base/icons/src/vender/solid/development/ApiConnectionMod.json +++ b/web/app/components/base/icons/src/vender/solid/development/ApiConnectionMod.json @@ -35,4 +35,4 @@ ] }, "name": "ApiConnectionMod" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/development/BarChartSquare02.json b/web/app/components/base/icons/src/vender/solid/development/BarChartSquare02.json index 14b274eef7..8ae42ed951 100644 --- a/web/app/components/base/icons/src/vender/solid/development/BarChartSquare02.json +++ b/web/app/components/base/icons/src/vender/solid/development/BarChartSquare02.json @@ -35,4 +35,4 @@ ] }, "name": "BarChartSquare02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/development/Container.json b/web/app/components/base/icons/src/vender/solid/development/Container.json index c2c3701b4c..b0d23fef72 100644 --- a/web/app/components/base/icons/src/vender/solid/development/Container.json +++ b/web/app/components/base/icons/src/vender/solid/development/Container.json @@ -41,4 +41,4 @@ ] }, "name": "Container" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/development/Database02.json b/web/app/components/base/icons/src/vender/solid/development/Database02.json index 
a1c5230612..b18d20eaea 100644 --- a/web/app/components/base/icons/src/vender/solid/development/Database02.json +++ b/web/app/components/base/icons/src/vender/solid/development/Database02.json @@ -43,4 +43,4 @@ ] }, "name": "Database02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/development/Database03.json b/web/app/components/base/icons/src/vender/solid/development/Database03.json index fa0c7ce94f..b00726139c 100644 --- a/web/app/components/base/icons/src/vender/solid/development/Database03.json +++ b/web/app/components/base/icons/src/vender/solid/development/Database03.json @@ -25,4 +25,4 @@ ] }, "name": "Database03" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/development/FileHeart02.json b/web/app/components/base/icons/src/vender/solid/development/FileHeart02.json index 08df0f27dd..681806b16f 100644 --- a/web/app/components/base/icons/src/vender/solid/development/FileHeart02.json +++ b/web/app/components/base/icons/src/vender/solid/development/FileHeart02.json @@ -47,4 +47,4 @@ ] }, "name": "FileHeart02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/development/PatternRecognition.json b/web/app/components/base/icons/src/vender/solid/development/PatternRecognition.json index 3d13c32b87..3b05d5ba8c 100644 --- a/web/app/components/base/icons/src/vender/solid/development/PatternRecognition.json +++ b/web/app/components/base/icons/src/vender/solid/development/PatternRecognition.json @@ -95,4 +95,4 @@ ] }, "name": "PatternRecognition" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/development/PromptEngineering.json b/web/app/components/base/icons/src/vender/solid/development/PromptEngineering.json index 01fbac5e93..a34a3f4fe8 100644 --- a/web/app/components/base/icons/src/vender/solid/development/PromptEngineering.json +++ 
b/web/app/components/base/icons/src/vender/solid/development/PromptEngineering.json @@ -50,4 +50,4 @@ ] }, "name": "PromptEngineering" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/development/PuzzlePiece01.json b/web/app/components/base/icons/src/vender/solid/development/PuzzlePiece01.json index f4008c81e2..20e6719fbe 100644 --- a/web/app/components/base/icons/src/vender/solid/development/PuzzlePiece01.json +++ b/web/app/components/base/icons/src/vender/solid/development/PuzzlePiece01.json @@ -35,4 +35,4 @@ ] }, "name": "PuzzlePiece01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/development/Semantic.json b/web/app/components/base/icons/src/vender/solid/development/Semantic.json index 333b3fa1c1..d9f6eeeb7e 100644 --- a/web/app/components/base/icons/src/vender/solid/development/Semantic.json +++ b/web/app/components/base/icons/src/vender/solid/development/Semantic.json @@ -50,4 +50,4 @@ ] }, "name": "Semantic" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/development/TerminalSquare.json b/web/app/components/base/icons/src/vender/solid/development/TerminalSquare.json index 7716cfd242..bf3c05a3b9 100644 --- a/web/app/components/base/icons/src/vender/solid/development/TerminalSquare.json +++ b/web/app/components/base/icons/src/vender/solid/development/TerminalSquare.json @@ -35,4 +35,4 @@ ] }, "name": "TerminalSquare" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/development/Variable02.json b/web/app/components/base/icons/src/vender/solid/development/Variable02.json index f506afd8bb..d957d38d4e 100644 --- a/web/app/components/base/icons/src/vender/solid/development/Variable02.json +++ b/web/app/components/base/icons/src/vender/solid/development/Variable02.json @@ -59,4 +59,4 @@ ] }, "name": "Variable02" -} \ No newline at end of file +} diff --git 
a/web/app/components/base/icons/src/vender/solid/editor/Brush01.json b/web/app/components/base/icons/src/vender/solid/editor/Brush01.json index 049e5f2924..4087091bc0 100644 --- a/web/app/components/base/icons/src/vender/solid/editor/Brush01.json +++ b/web/app/components/base/icons/src/vender/solid/editor/Brush01.json @@ -32,4 +32,4 @@ ] }, "name": "Brush01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/editor/Citations.json b/web/app/components/base/icons/src/vender/solid/editor/Citations.json index 79d56b1a6c..1f7aa93794 100644 --- a/web/app/components/base/icons/src/vender/solid/editor/Citations.json +++ b/web/app/components/base/icons/src/vender/solid/editor/Citations.json @@ -33,4 +33,4 @@ ] }, "name": "Citations" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/editor/Colors.json b/web/app/components/base/icons/src/vender/solid/editor/Colors.json index 6e5dc69049..6fc4010c27 100644 --- a/web/app/components/base/icons/src/vender/solid/editor/Colors.json +++ b/web/app/components/base/icons/src/vender/solid/editor/Colors.json @@ -59,4 +59,4 @@ ] }, "name": "Colors" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/editor/Paragraph.json b/web/app/components/base/icons/src/vender/solid/editor/Paragraph.json index a16f076073..747f8e50b7 100644 --- a/web/app/components/base/icons/src/vender/solid/editor/Paragraph.json +++ b/web/app/components/base/icons/src/vender/solid/editor/Paragraph.json @@ -41,4 +41,4 @@ ] }, "name": "Paragraph" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/editor/TypeSquare.json b/web/app/components/base/icons/src/vender/solid/editor/TypeSquare.json index f901b0737f..9e6c72cf67 100644 --- a/web/app/components/base/icons/src/vender/solid/editor/TypeSquare.json +++ b/web/app/components/base/icons/src/vender/solid/editor/TypeSquare.json @@ -25,4 +25,4 @@ ] }, 
"name": "TypeSquare" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/education/Beaker02.json b/web/app/components/base/icons/src/vender/solid/education/Beaker02.json index 2f7830084e..b6dfd318ef 100644 --- a/web/app/components/base/icons/src/vender/solid/education/Beaker02.json +++ b/web/app/components/base/icons/src/vender/solid/education/Beaker02.json @@ -35,4 +35,4 @@ ] }, "name": "Beaker02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/education/BubbleText.json b/web/app/components/base/icons/src/vender/solid/education/BubbleText.json index 999f0db97e..e1b4b54fd2 100644 --- a/web/app/components/base/icons/src/vender/solid/education/BubbleText.json +++ b/web/app/components/base/icons/src/vender/solid/education/BubbleText.json @@ -35,4 +35,4 @@ ] }, "name": "BubbleText" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/education/Heart02.json b/web/app/components/base/icons/src/vender/solid/education/Heart02.json index 8cecaaee84..58ffe0c6f7 100644 --- a/web/app/components/base/icons/src/vender/solid/education/Heart02.json +++ b/web/app/components/base/icons/src/vender/solid/education/Heart02.json @@ -23,4 +23,4 @@ ] }, "name": "Heart02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/education/Unblur.json b/web/app/components/base/icons/src/vender/solid/education/Unblur.json index 13b8bb36f5..33c43170fe 100644 --- a/web/app/components/base/icons/src/vender/solid/education/Unblur.json +++ b/web/app/components/base/icons/src/vender/solid/education/Unblur.json @@ -149,4 +149,4 @@ ] }, "name": "Unblur" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/files/File05.json b/web/app/components/base/icons/src/vender/solid/files/File05.json index 17b9629741..ead7649bcf 100644 --- a/web/app/components/base/icons/src/vender/solid/files/File05.json +++ 
b/web/app/components/base/icons/src/vender/solid/files/File05.json @@ -52,4 +52,4 @@ ] }, "name": "File05" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/files/FileSearch02.json b/web/app/components/base/icons/src/vender/solid/files/FileSearch02.json index 7f8b0e8c78..e1decb16ac 100644 --- a/web/app/components/base/icons/src/vender/solid/files/FileSearch02.json +++ b/web/app/components/base/icons/src/vender/solid/files/FileSearch02.json @@ -54,4 +54,4 @@ ] }, "name": "FileSearch02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/files/FileZip.json b/web/app/components/base/icons/src/vender/solid/files/FileZip.json index 11fe823916..3d58745fa8 100644 --- a/web/app/components/base/icons/src/vender/solid/files/FileZip.json +++ b/web/app/components/base/icons/src/vender/solid/files/FileZip.json @@ -44,4 +44,4 @@ ] }, "name": "FileZip" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/files/Folder.json b/web/app/components/base/icons/src/vender/solid/files/Folder.json index 4fc5e5f51f..50c483bc92 100644 --- a/web/app/components/base/icons/src/vender/solid/files/Folder.json +++ b/web/app/components/base/icons/src/vender/solid/files/Folder.json @@ -35,4 +35,4 @@ ] }, "name": "Folder" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/AnswerTriangle.json b/web/app/components/base/icons/src/vender/solid/general/AnswerTriangle.json index a4b6283830..030e1efedf 100644 --- a/web/app/components/base/icons/src/vender/solid/general/AnswerTriangle.json +++ b/web/app/components/base/icons/src/vender/solid/general/AnswerTriangle.json @@ -24,4 +24,4 @@ ] }, "name": "AnswerTriangle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/ArrowDownRoundFill.json b/web/app/components/base/icons/src/vender/solid/general/ArrowDownRoundFill.json index 
4e7da3c801..8367c942cb 100644 --- a/web/app/components/base/icons/src/vender/solid/general/ArrowDownRoundFill.json +++ b/web/app/components/base/icons/src/vender/solid/general/ArrowDownRoundFill.json @@ -33,4 +33,4 @@ ] }, "name": "ArrowDownRoundFill" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/CheckCircle.json b/web/app/components/base/icons/src/vender/solid/general/CheckCircle.json index 1b567e859e..403a5fe1c4 100644 --- a/web/app/components/base/icons/src/vender/solid/general/CheckCircle.json +++ b/web/app/components/base/icons/src/vender/solid/general/CheckCircle.json @@ -35,4 +35,4 @@ ] }, "name": "CheckCircle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/CheckDone01.json b/web/app/components/base/icons/src/vender/solid/general/CheckDone01.json index b4d5530b06..57a48650f1 100644 --- a/web/app/components/base/icons/src/vender/solid/general/CheckDone01.json +++ b/web/app/components/base/icons/src/vender/solid/general/CheckDone01.json @@ -34,4 +34,4 @@ ] }, "name": "CheckDone01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/Download02.json b/web/app/components/base/icons/src/vender/solid/general/Download02.json index 5854e64301..8afe46c93e 100644 --- a/web/app/components/base/icons/src/vender/solid/general/Download02.json +++ b/web/app/components/base/icons/src/vender/solid/general/Download02.json @@ -26,4 +26,4 @@ ] }, "name": "Download02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/Edit03.json b/web/app/components/base/icons/src/vender/solid/general/Edit03.json index f736ef56dd..ddf3a0d234 100644 --- a/web/app/components/base/icons/src/vender/solid/general/Edit03.json +++ b/web/app/components/base/icons/src/vender/solid/general/Edit03.json @@ -54,4 +54,4 @@ ] }, "name": "Edit03" -} \ No newline at end of file +} diff --git 
a/web/app/components/base/icons/src/vender/solid/general/Edit04.json b/web/app/components/base/icons/src/vender/solid/general/Edit04.json index aa923c2862..c254f600a4 100644 --- a/web/app/components/base/icons/src/vender/solid/general/Edit04.json +++ b/web/app/components/base/icons/src/vender/solid/general/Edit04.json @@ -36,4 +36,4 @@ ] }, "name": "Edit04" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/Eye.json b/web/app/components/base/icons/src/vender/solid/general/Eye.json index a7e63484da..a76bb81725 100644 --- a/web/app/components/base/icons/src/vender/solid/general/Eye.json +++ b/web/app/components/base/icons/src/vender/solid/general/Eye.json @@ -34,4 +34,4 @@ ] }, "name": "Eye" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/Github.json b/web/app/components/base/icons/src/vender/solid/general/Github.json index 46e694215b..a46c81bfef 100644 --- a/web/app/components/base/icons/src/vender/solid/general/Github.json +++ b/web/app/components/base/icons/src/vender/solid/general/Github.json @@ -33,4 +33,4 @@ ] }, "name": "Github" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/MessageClockCircle.json b/web/app/components/base/icons/src/vender/solid/general/MessageClockCircle.json index 4307f82ef8..d52347f6d9 100644 --- a/web/app/components/base/icons/src/vender/solid/general/MessageClockCircle.json +++ b/web/app/components/base/icons/src/vender/solid/general/MessageClockCircle.json @@ -33,4 +33,4 @@ ] }, "name": "MessageClockCircle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/PlusCircle.json b/web/app/components/base/icons/src/vender/solid/general/PlusCircle.json index 005a7ba5bf..e3a86132ec 100644 --- a/web/app/components/base/icons/src/vender/solid/general/PlusCircle.json +++ 
b/web/app/components/base/icons/src/vender/solid/general/PlusCircle.json @@ -35,4 +35,4 @@ ] }, "name": "PlusCircle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/QuestionTriangle.json b/web/app/components/base/icons/src/vender/solid/general/QuestionTriangle.json index 8830ee5837..32df4b0cd5 100644 --- a/web/app/components/base/icons/src/vender/solid/general/QuestionTriangle.json +++ b/web/app/components/base/icons/src/vender/solid/general/QuestionTriangle.json @@ -42,4 +42,4 @@ ] }, "name": "QuestionTriangle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/SearchMd.json b/web/app/components/base/icons/src/vender/solid/general/SearchMd.json index 808195f1fa..89cf471ca5 100644 --- a/web/app/components/base/icons/src/vender/solid/general/SearchMd.json +++ b/web/app/components/base/icons/src/vender/solid/general/SearchMd.json @@ -35,4 +35,4 @@ ] }, "name": "SearchMd" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/Target04.json b/web/app/components/base/icons/src/vender/solid/general/Target04.json index 6b22fab885..70895ccc00 100644 --- a/web/app/components/base/icons/src/vender/solid/general/Target04.json +++ b/web/app/components/base/icons/src/vender/solid/general/Target04.json @@ -43,4 +43,4 @@ ] }, "name": "Target04" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/Tool03.json b/web/app/components/base/icons/src/vender/solid/general/Tool03.json index 0a7f1ab96c..843fb36fba 100644 --- a/web/app/components/base/icons/src/vender/solid/general/Tool03.json +++ b/web/app/components/base/icons/src/vender/solid/general/Tool03.json @@ -59,4 +59,4 @@ ] }, "name": "Tool03" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/XCircle.json b/web/app/components/base/icons/src/vender/solid/general/XCircle.json index 
dd269fafcc..5e09c5a9e1 100644 --- a/web/app/components/base/icons/src/vender/solid/general/XCircle.json +++ b/web/app/components/base/icons/src/vender/solid/general/XCircle.json @@ -26,4 +26,4 @@ ] }, "name": "XCircle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/ZapFast.json b/web/app/components/base/icons/src/vender/solid/general/ZapFast.json index 865a48ee65..418a5c0019 100644 --- a/web/app/components/base/icons/src/vender/solid/general/ZapFast.json +++ b/web/app/components/base/icons/src/vender/solid/general/ZapFast.json @@ -76,4 +76,4 @@ ] }, "name": "ZapFast" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/general/ZapNarrow.json b/web/app/components/base/icons/src/vender/solid/general/ZapNarrow.json index 740c823f6e..27a8059fc8 100644 --- a/web/app/components/base/icons/src/vender/solid/general/ZapNarrow.json +++ b/web/app/components/base/icons/src/vender/solid/general/ZapNarrow.json @@ -35,4 +35,4 @@ ] }, "name": "ZapNarrow" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/layout/Grid01.json b/web/app/components/base/icons/src/vender/solid/layout/Grid01.json index 722cdecef6..35d4190237 100644 --- a/web/app/components/base/icons/src/vender/solid/layout/Grid01.json +++ b/web/app/components/base/icons/src/vender/solid/layout/Grid01.json @@ -76,4 +76,4 @@ ] }, "name": "Grid01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mapsAndTravel/Globe06.json b/web/app/components/base/icons/src/vender/solid/mapsAndTravel/Globe06.json index 6cc565ffdf..b86197ae7e 100644 --- a/web/app/components/base/icons/src/vender/solid/mapsAndTravel/Globe06.json +++ b/web/app/components/base/icons/src/vender/solid/mapsAndTravel/Globe06.json @@ -54,4 +54,4 @@ ] }, "name": "Globe06" -} \ No newline at end of file +} diff --git 
a/web/app/components/base/icons/src/vender/solid/mapsAndTravel/Route.json b/web/app/components/base/icons/src/vender/solid/mapsAndTravel/Route.json index 24d3f35954..ac94bf2109 100644 --- a/web/app/components/base/icons/src/vender/solid/mapsAndTravel/Route.json +++ b/web/app/components/base/icons/src/vender/solid/mapsAndTravel/Route.json @@ -55,4 +55,4 @@ ] }, "name": "Route" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/AudioSupportIcon.json b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/AudioSupportIcon.json index cd3006b76d..aaab128672 100644 --- a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/AudioSupportIcon.json +++ b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/AudioSupportIcon.json @@ -23,4 +23,4 @@ ] }, "name": "AudioSupportIcon" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/DocumentSupportIcon.json b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/DocumentSupportIcon.json index 49cb6a521c..1047de09e0 100644 --- a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/DocumentSupportIcon.json +++ b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/DocumentSupportIcon.json @@ -23,4 +23,4 @@ ] }, "name": "DocumentSupportIcon" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/MagicBox.json b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/MagicBox.json index 4668e9eba8..ee079108c6 100644 --- a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/MagicBox.json +++ b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/MagicBox.json @@ -61,4 +61,4 @@ ] }, "name": "MagicBox" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/MagicEyes.json 
b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/MagicEyes.json index 00e16960a6..e7c0b53bf2 100644 --- a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/MagicEyes.json +++ b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/MagicEyes.json @@ -35,4 +35,4 @@ ] }, "name": "MagicEyes" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/MagicWand.json b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/MagicWand.json index bf13ab9e00..d9852c839f 100644 --- a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/MagicWand.json +++ b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/MagicWand.json @@ -70,4 +70,4 @@ ] }, "name": "MagicWand" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Microphone01.json b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Microphone01.json index 36aad43649..95ff504b07 100644 --- a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Microphone01.json +++ b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Microphone01.json @@ -52,4 +52,4 @@ ] }, "name": "Microphone01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Play.json b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Play.json index b32d786e4e..dc3cb6b4c0 100644 --- a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Play.json +++ b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Play.json @@ -35,4 +35,4 @@ ] }, "name": "Play" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Robot.json b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Robot.json index 650ca36528..616821956e 100644 --- a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Robot.json +++ 
b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Robot.json @@ -35,4 +35,4 @@ ] }, "name": "Robot" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Sliders02.json b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Sliders02.json index d72b99aa57..015509fca5 100644 --- a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Sliders02.json +++ b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Sliders02.json @@ -74,4 +74,4 @@ ] }, "name": "Sliders02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Speaker.json b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Speaker.json index 3e5cbe171b..2cb1df48f8 100644 --- a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Speaker.json +++ b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/Speaker.json @@ -109,4 +109,4 @@ ] }, "name": "Speaker" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/StopCircle.json b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/StopCircle.json index 67e02fca63..62ae331783 100644 --- a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/StopCircle.json +++ b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/StopCircle.json @@ -35,4 +35,4 @@ ] }, "name": "StopCircle" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/VideoSupportIcon.json b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/VideoSupportIcon.json index 4bc6881a5d..c801c12c4a 100644 --- a/web/app/components/base/icons/src/vender/solid/mediaAndDevices/VideoSupportIcon.json +++ b/web/app/components/base/icons/src/vender/solid/mediaAndDevices/VideoSupportIcon.json @@ -23,4 +23,4 @@ ] }, "name": "VideoSupportIcon" -} \ No newline at end of file +} diff --git 
a/web/app/components/base/icons/src/vender/solid/security/Lock01.json b/web/app/components/base/icons/src/vender/solid/security/Lock01.json index aa01bc574f..080a309f4d 100644 --- a/web/app/components/base/icons/src/vender/solid/security/Lock01.json +++ b/web/app/components/base/icons/src/vender/solid/security/Lock01.json @@ -35,4 +35,4 @@ ] }, "name": "Lock01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/shapes/Corner.json b/web/app/components/base/icons/src/vender/solid/shapes/Corner.json index 2f35483a66..23e1194c73 100644 --- a/web/app/components/base/icons/src/vender/solid/shapes/Corner.json +++ b/web/app/components/base/icons/src/vender/solid/shapes/Corner.json @@ -24,4 +24,4 @@ ] }, "name": "Corner" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/shapes/Star04.json b/web/app/components/base/icons/src/vender/solid/shapes/Star04.json index 5e5393a9a4..eba208cca0 100644 --- a/web/app/components/base/icons/src/vender/solid/shapes/Star04.json +++ b/web/app/components/base/icons/src/vender/solid/shapes/Star04.json @@ -33,4 +33,4 @@ ] }, "name": "Star04" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/shapes/Star06.json b/web/app/components/base/icons/src/vender/solid/shapes/Star06.json index 0466602376..5baa4d9a02 100644 --- a/web/app/components/base/icons/src/vender/solid/shapes/Star06.json +++ b/web/app/components/base/icons/src/vender/solid/shapes/Star06.json @@ -59,4 +59,4 @@ ] }, "name": "Star06" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/users/User01.json b/web/app/components/base/icons/src/vender/solid/users/User01.json index c9b8ea90d2..110cb2c020 100644 --- a/web/app/components/base/icons/src/vender/solid/users/User01.json +++ b/web/app/components/base/icons/src/vender/solid/users/User01.json @@ -54,4 +54,4 @@ ] }, "name": "User01" -} \ No newline at end of file +} 
diff --git a/web/app/components/base/icons/src/vender/solid/users/UserEdit02.json b/web/app/components/base/icons/src/vender/solid/users/UserEdit02.json index f4451ea16f..7040d17ccb 100644 --- a/web/app/components/base/icons/src/vender/solid/users/UserEdit02.json +++ b/web/app/components/base/icons/src/vender/solid/users/UserEdit02.json @@ -89,4 +89,4 @@ ] }, "name": "UserEdit02" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/users/Users01.json b/web/app/components/base/icons/src/vender/solid/users/Users01.json index c18d59a00f..9c78dd5a09 100644 --- a/web/app/components/base/icons/src/vender/solid/users/Users01.json +++ b/web/app/components/base/icons/src/vender/solid/users/Users01.json @@ -76,4 +76,4 @@ ] }, "name": "Users01" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/solid/users/UsersPlus.json b/web/app/components/base/icons/src/vender/solid/users/UsersPlus.json index a70117f655..af8c22709f 100644 --- a/web/app/components/base/icons/src/vender/solid/users/UsersPlus.json +++ b/web/app/components/base/icons/src/vender/solid/users/UsersPlus.json @@ -74,4 +74,4 @@ ] }, "name": "UsersPlus" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/system/AutoUpdateLine.json b/web/app/components/base/icons/src/vender/system/AutoUpdateLine.json new file mode 100644 index 0000000000..5acc316bdd --- /dev/null +++ b/web/app/components/base/icons/src/vender/system/AutoUpdateLine.json @@ -0,0 +1,37 @@ +{ + "icon": { + "type": "element", + "isRootNode": true, + "name": "svg", + "attributes": { + "width": "24", + "height": "24", + "viewBox": "0 0 24 24", + "fill": "none", + "xmlns": "http://www.w3.org/2000/svg" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "d": "M5.46257 4.43262C7.21556 2.91688 9.5007 2 12 2C17.5228 2 22 6.47715 22 12C22 14.1361 21.3302 16.1158 20.1892 17.7406L17 12H20C20 7.58172 16.4183 4 12 4C9.84982 
4 7.89777 4.84827 6.46023 6.22842L5.46257 4.43262ZM18.5374 19.5674C16.7844 21.0831 14.4993 22 12 22C6.47715 22 2 17.5228 2 12C2 9.86386 2.66979 7.88416 3.8108 6.25944L7 12H4C4 16.4183 7.58172 20 12 20C14.1502 20 16.1022 19.1517 17.5398 17.7716L18.5374 19.5674Z", + "fill": "currentColor" + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "fill-rule": "evenodd", + "clip-rule": "evenodd", + "d": "M16.3308 16H14.2915L13.6249 13.9476H10.3761L9.70846 16H7.66918L10.7759 7H13.2281L16.3308 16ZM10.8595 12.4622H13.1435L12.0378 9.05639H11.9673L10.8595 12.4622Z", + "fill": "currentColor" + }, + "children": [] + } + ] + }, + "name": "AutoUpdateLine" +} diff --git a/web/app/components/base/icons/src/vender/system/AutoUpdateLine.tsx b/web/app/components/base/icons/src/vender/system/AutoUpdateLine.tsx new file mode 100644 index 0000000000..d162edaa5a --- /dev/null +++ b/web/app/components/base/icons/src/vender/system/AutoUpdateLine.tsx @@ -0,0 +1,20 @@ +// GENERATE BY script +// DON NOT EDIT IT MANUALLY + +import * as React from 'react' +import data from './AutoUpdateLine.json' +import IconBase from '@/app/components/base/icons/IconBase' +import type { IconData } from '@/app/components/base/icons/IconBase' + +const Icon = ( + { + ref, + ...props + }: React.SVGProps & { + ref?: React.RefObject>; + }, +) => + +Icon.displayName = 'AutoUpdateLine' + +export default Icon diff --git a/web/app/components/base/icons/src/vender/system/index.ts b/web/app/components/base/icons/src/vender/system/index.ts new file mode 100644 index 0000000000..01553789b8 --- /dev/null +++ b/web/app/components/base/icons/src/vender/system/index.ts @@ -0,0 +1 @@ +export { default as AutoUpdateLine } from './AutoUpdateLine' diff --git a/web/app/components/base/icons/src/vender/workflow/Agent.json b/web/app/components/base/icons/src/vender/workflow/Agent.json index e7ed19369b..200475fc44 100644 --- a/web/app/components/base/icons/src/vender/workflow/Agent.json +++ 
b/web/app/components/base/icons/src/vender/workflow/Agent.json @@ -50,4 +50,4 @@ ] }, "name": "Agent" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/Answer.json b/web/app/components/base/icons/src/vender/workflow/Answer.json index 4f15b339bb..b0ad3f87ea 100644 --- a/web/app/components/base/icons/src/vender/workflow/Answer.json +++ b/web/app/components/base/icons/src/vender/workflow/Answer.json @@ -35,4 +35,4 @@ ] }, "name": "Answer" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/Assigner.json b/web/app/components/base/icons/src/vender/workflow/Assigner.json index 7106e5ad43..89e76a9cbe 100644 --- a/web/app/components/base/icons/src/vender/workflow/Assigner.json +++ b/web/app/components/base/icons/src/vender/workflow/Assigner.json @@ -65,4 +65,4 @@ ] }, "name": "Assigner" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/Code.json b/web/app/components/base/icons/src/vender/workflow/Code.json index d94f12ab3a..d72f02e289 100644 --- a/web/app/components/base/icons/src/vender/workflow/Code.json +++ b/web/app/components/base/icons/src/vender/workflow/Code.json @@ -35,4 +35,4 @@ ] }, "name": "Code" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/DocsExtractor.json b/web/app/components/base/icons/src/vender/workflow/DocsExtractor.json index 5b454590be..4e43c6c208 100644 --- a/web/app/components/base/icons/src/vender/workflow/DocsExtractor.json +++ b/web/app/components/base/icons/src/vender/workflow/DocsExtractor.json @@ -61,4 +61,4 @@ ] }, "name": "DocsExtractor" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/End.json b/web/app/components/base/icons/src/vender/workflow/End.json index 3e281cb575..56336efad1 100644 --- a/web/app/components/base/icons/src/vender/workflow/End.json +++ 
b/web/app/components/base/icons/src/vender/workflow/End.json @@ -35,4 +35,4 @@ ] }, "name": "End" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/Home.json b/web/app/components/base/icons/src/vender/workflow/Home.json index fd3096f658..982a9db222 100644 --- a/web/app/components/base/icons/src/vender/workflow/Home.json +++ b/web/app/components/base/icons/src/vender/workflow/Home.json @@ -35,4 +35,4 @@ ] }, "name": "Home" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/Http.json b/web/app/components/base/icons/src/vender/workflow/Http.json index 53b5c3a9fd..4affe7a5b0 100644 --- a/web/app/components/base/icons/src/vender/workflow/Http.json +++ b/web/app/components/base/icons/src/vender/workflow/Http.json @@ -68,4 +68,4 @@ ] }, "name": "Http" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/IfElse.json b/web/app/components/base/icons/src/vender/workflow/IfElse.json index 0ff778bc24..359f695c26 100644 --- a/web/app/components/base/icons/src/vender/workflow/IfElse.json +++ b/web/app/components/base/icons/src/vender/workflow/IfElse.json @@ -35,4 +35,4 @@ ] }, "name": "IfElse" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/Iteration.json b/web/app/components/base/icons/src/vender/workflow/Iteration.json index ee5748d1f1..84148e7bac 100644 --- a/web/app/components/base/icons/src/vender/workflow/Iteration.json +++ b/web/app/components/base/icons/src/vender/workflow/Iteration.json @@ -33,4 +33,4 @@ ] }, "name": "Iteration" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/IterationStart.json b/web/app/components/base/icons/src/vender/workflow/IterationStart.json index 2941cdb65d..cb574b7ea5 100644 --- a/web/app/components/base/icons/src/vender/workflow/IterationStart.json +++ 
b/web/app/components/base/icons/src/vender/workflow/IterationStart.json @@ -33,4 +33,4 @@ ] }, "name": "IterationStart" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/Jinja.json b/web/app/components/base/icons/src/vender/workflow/Jinja.json index ba46cb9ca6..91eee1534b 100644 --- a/web/app/components/base/icons/src/vender/workflow/Jinja.json +++ b/web/app/components/base/icons/src/vender/workflow/Jinja.json @@ -95,4 +95,4 @@ ] }, "name": "Jinja" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/KnowledgeRetrieval.json b/web/app/components/base/icons/src/vender/workflow/KnowledgeRetrieval.json index 4bdc83f868..69eb10eb83 100644 --- a/web/app/components/base/icons/src/vender/workflow/KnowledgeRetrieval.json +++ b/web/app/components/base/icons/src/vender/workflow/KnowledgeRetrieval.json @@ -35,4 +35,4 @@ ] }, "name": "KnowledgeRetrieval" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/ListFilter.json b/web/app/components/base/icons/src/vender/workflow/ListFilter.json index 568020f4a6..6ed383c315 100644 --- a/web/app/components/base/icons/src/vender/workflow/ListFilter.json +++ b/web/app/components/base/icons/src/vender/workflow/ListFilter.json @@ -35,4 +35,4 @@ ] }, "name": "ListFilter" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/Llm.json b/web/app/components/base/icons/src/vender/workflow/Llm.json index d900a67041..cdf114a490 100644 --- a/web/app/components/base/icons/src/vender/workflow/Llm.json +++ b/web/app/components/base/icons/src/vender/workflow/Llm.json @@ -35,4 +35,4 @@ ] }, "name": "Llm" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/Loop.json b/web/app/components/base/icons/src/vender/workflow/Loop.json index 65a70d82a1..b6abd13dfa 100644 --- a/web/app/components/base/icons/src/vender/workflow/Loop.json +++ 
b/web/app/components/base/icons/src/vender/workflow/Loop.json @@ -35,4 +35,4 @@ ] }, "name": "Loop" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/LoopEnd.json b/web/app/components/base/icons/src/vender/workflow/LoopEnd.json index 1427dfdcc5..eea9e717ca 100644 --- a/web/app/components/base/icons/src/vender/workflow/LoopEnd.json +++ b/web/app/components/base/icons/src/vender/workflow/LoopEnd.json @@ -35,4 +35,4 @@ ] }, "name": "LoopEnd" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/ParameterExtractor.json b/web/app/components/base/icons/src/vender/workflow/ParameterExtractor.json index 7d4fa6424a..eb66f4846b 100644 --- a/web/app/components/base/icons/src/vender/workflow/ParameterExtractor.json +++ b/web/app/components/base/icons/src/vender/workflow/ParameterExtractor.json @@ -263,4 +263,4 @@ ] }, "name": "ParameterExtractor" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/QuestionClassifier.json b/web/app/components/base/icons/src/vender/workflow/QuestionClassifier.json index a50ee6c410..6bd3dbf096 100644 --- a/web/app/components/base/icons/src/vender/workflow/QuestionClassifier.json +++ b/web/app/components/base/icons/src/vender/workflow/QuestionClassifier.json @@ -35,4 +35,4 @@ ] }, "name": "QuestionClassifier" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/TemplatingTransform.json b/web/app/components/base/icons/src/vender/workflow/TemplatingTransform.json index 69ee236611..6399208a2d 100644 --- a/web/app/components/base/icons/src/vender/workflow/TemplatingTransform.json +++ b/web/app/components/base/icons/src/vender/workflow/TemplatingTransform.json @@ -151,4 +151,4 @@ ] }, "name": "TemplatingTransform" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/VariableX.json 
b/web/app/components/base/icons/src/vender/workflow/VariableX.json index 1560684e55..a87b000587 100644 --- a/web/app/components/base/icons/src/vender/workflow/VariableX.json +++ b/web/app/components/base/icons/src/vender/workflow/VariableX.json @@ -35,4 +35,4 @@ ] }, "name": "VariableX" -} \ No newline at end of file +} diff --git a/web/app/components/base/icons/src/vender/workflow/WindowCursor.json b/web/app/components/base/icons/src/vender/workflow/WindowCursor.json index b64ba912bb..66591c1116 100644 --- a/web/app/components/base/icons/src/vender/workflow/WindowCursor.json +++ b/web/app/components/base/icons/src/vender/workflow/WindowCursor.json @@ -59,4 +59,4 @@ ] }, "name": "WindowCursor" -} \ No newline at end of file +} diff --git a/web/app/components/base/markdown/index.tsx b/web/app/components/base/markdown/index.tsx index 1e50e6745b..bab5ac8eba 100644 --- a/web/app/components/base/markdown/index.tsx +++ b/web/app/components/base/markdown/index.tsx @@ -28,8 +28,15 @@ import { * Further refactoring candidates (custom block components not fitting general categories) * are noted in their respective files if applicable. */ +export type MarkdownProps = { + content: string + className?: string + customDisallowedElements?: string[] + customComponents?: Record> +} -export function Markdown(props: { content: string; className?: string; customDisallowedElements?: string[] }) { +export const Markdown = (props: MarkdownProps) => { + const { customComponents = {} } = props const latexContent = flow([ preprocessThinkTag, preprocessLaTeX, @@ -78,6 +85,7 @@ export function Markdown(props: { content: string; className?: string; customDis form: MarkdownForm, script: ScriptBlock as any, details: ThinkBlock, + ...customComponents, }} > {/* Markdown detect has problem. 
*/} diff --git a/web/app/components/base/select/pure.tsx b/web/app/components/base/select/pure.tsx index 300d4ed47d..f80f67b06c 100644 --- a/web/app/components/base/select/pure.tsx +++ b/web/app/components/base/select/pure.tsx @@ -91,7 +91,7 @@ const PureSelect = ({ triggerPopupSameWidth={triggerPopupSameWidth} > handleOpenChange(!mergedOpen)} + onClick={() => !disabled && handleOpenChange(!mergedOpen)} asChild >
    = ({ datasetId }: Props) => { const [isShowModifyRetrievalModal, setIsShowModifyRetrievalModal] = useState(false) const [isShowRightPanel, { setTrue: showRightPanel, setFalse: hideRightPanel, set: setShowRightPanel }] = useBoolean(!isMobile) const renderHitResults = (results: HitTesting[] | ExternalKnowledgeBaseHitTesting[]) => ( -
    +
    {t('datasetHitTesting.hit.title', { num: results.length })}
    @@ -93,7 +93,7 @@ const HitTestingPage: FC = ({ datasetId }: Props) => { ) const renderEmptyState = () => ( -
    +
    {t('datasetHitTesting.hit.emptyTip')} @@ -180,7 +180,7 @@ const HitTestingPage: FC = ({ datasetId }: Props) => {
    {/* {renderHitResults(generalResultData)} */} {submitLoading - ?
    + ?
    : ( diff --git a/web/app/components/develop/template/template_advanced_chat.en.mdx b/web/app/components/develop/template/template_advanced_chat.en.mdx index adba404a64..bafcb1f99a 100644 --- a/web/app/components/develop/template/template_advanced_chat.en.mdx +++ b/web/app/components/develop/template/template_advanced_chat.en.mdx @@ -80,6 +80,12 @@ Chat applications support session persistence, allowing previous chat history to Auto-generate title, default is `true`. If set to `false`, can achieve async title generation by calling the conversation rename API and setting `auto_generate` to `true`. + + (Optional) Trace ID. Used for integration with existing business trace components to achieve end-to-end distributed tracing. If not provided, the system will automatically generate a trace_id. Supports the following three ways to pass, in order of priority:
    + - Header: via HTTP Header X-Trace-Id, highest priority.
    + - Query parameter: via URL query parameter trace_id.
    + - Request Body: via request body field trace_id (i.e., this field).
    +
    ### Response diff --git a/web/app/components/develop/template/template_advanced_chat.ja.mdx b/web/app/components/develop/template/template_advanced_chat.ja.mdx index 2e57d5e20c..d8c5464ed5 100644 --- a/web/app/components/develop/template/template_advanced_chat.ja.mdx +++ b/web/app/components/develop/template/template_advanced_chat.ja.mdx @@ -80,6 +80,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from タイトルを自動生成、デフォルトは`true`。 `false`に設定すると、会話のリネームAPIを呼び出し、`auto_generate`を`true`に設定することで非同期タイトル生成を実現できます。 + + (オプション)トレースID。既存の業務システムのトレースコンポーネントと連携し、エンドツーエンドの分散トレーシングを実現するために使用します。指定がない場合、システムが自動的に trace_id を生成します。以下の3つの方法で渡すことができ、優先順位は次のとおりです:
    + - Header:HTTPヘッダー X-Trace-Id で渡す(最優先)。
    + - クエリパラメータ:URLクエリパラメータ trace_id で渡す。
    + - リクエストボディ:リクエストボディの trace_id フィールドで渡す(本フィールド)。
    +
    ### 応答 diff --git a/web/app/components/develop/template/template_advanced_chat.zh.mdx b/web/app/components/develop/template/template_advanced_chat.zh.mdx index 8955396ad9..30068d93a2 100755 --- a/web/app/components/develop/template/template_advanced_chat.zh.mdx +++ b/web/app/components/develop/template/template_advanced_chat.zh.mdx @@ -78,6 +78,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx' (选填)自动生成标题,默认 `true`。 若设置为 `false`,则可通过调用会话重命名接口并设置 `auto_generate` 为 `true` 实现异步生成标题。 + + (选填)链路追踪ID。适用于与业务系统已有的trace组件打通,实现端到端分布式追踪等场景。如果未指定,系统会自动生成trace_id。支持以下三种方式传递,具体优先级依次为:
    + - Header:通过 HTTP Header X-Trace-Id 传递,优先级最高。
    + - Query 参数:通过 URL 查询参数 trace_id 传递。
    + - Request Body:通过请求体字段 trace_id 传递(即本字段)。
    +
    ### Response diff --git a/web/app/components/develop/template/template_chat.en.mdx b/web/app/components/develop/template/template_chat.en.mdx index 73d1fa1b41..f1bb1de206 100644 --- a/web/app/components/develop/template/template_chat.en.mdx +++ b/web/app/components/develop/template/template_chat.en.mdx @@ -74,6 +74,12 @@ Chat applications support session persistence, allowing previous chat history to Auto-generate title, default is `true`. If set to `false`, can achieve async title generation by calling the conversation rename API and setting `auto_generate` to `true`. + + (Optional) Trace ID. Used for integration with existing business trace components to achieve end-to-end distributed tracing. If not provided, the system will automatically generate a trace_id. Supports the following three ways to pass, in order of priority:
    + - Header: via HTTP Header X-Trace-Id, highest priority.
    + - Query parameter: via URL query parameter trace_id.
    + - Request Body: via request body field trace_id (i.e., this field).
    +
    ### Response diff --git a/web/app/components/develop/template/template_chat.ja.mdx b/web/app/components/develop/template/template_chat.ja.mdx index 45c970a9f2..06e88782d9 100644 --- a/web/app/components/develop/template/template_chat.ja.mdx +++ b/web/app/components/develop/template/template_chat.ja.mdx @@ -74,6 +74,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from タイトルを自動生成します。デフォルトは`true`です。 `false`に設定すると、会話のリネームAPIを呼び出し、`auto_generate`を`true`に設定することで非同期タイトル生成を実現できます。 + + (オプション)トレースID。既存の業務システムのトレースコンポーネントと連携し、エンドツーエンドの分散トレーシングを実現するために使用します。指定がない場合、システムが自動的に trace_id を生成します。以下の3つの方法で渡すことができ、優先順位は次のとおりです:
    + - Header:HTTPヘッダー X-Trace-Id で渡す(最優先)。
    + - クエリパラメータ:URLクエリパラメータ trace_id で渡す。
    + - リクエストボディ:リクエストボディの trace_id フィールドで渡す(本フィールド)。
    +
    ### 応答 diff --git a/web/app/components/develop/template/template_chat.zh.mdx b/web/app/components/develop/template/template_chat.zh.mdx index 8573408c36..a7127d614b 100644 --- a/web/app/components/develop/template/template_chat.zh.mdx +++ b/web/app/components/develop/template/template_chat.zh.mdx @@ -73,6 +73,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx' (选填)自动生成标题,默认 `true`。 若设置为 `false`,则可通过调用会话重命名接口并设置 `auto_generate` 为 `true` 实现异步生成标题。 + + (选填)链路追踪ID。适用于与业务系统已有的trace组件打通,实现端到端分布式追踪等场景。如果未指定,系统会自动生成trace_id。支持以下三种方式传递,具体优先级依次为:
    + - Header:通过 HTTP Header X-Trace-Id 传递,优先级最高。
    + - Query 参数:通过 URL 查询参数 trace_id 传递。
    + - Request Body:通过请求体字段 trace_id 传递(即本字段)。
    +
    ### Response diff --git a/web/app/components/develop/template/template_workflow.en.mdx b/web/app/components/develop/template/template_workflow.en.mdx index 23ff2bbb55..8ac1db3287 100644 --- a/web/app/components/develop/template/template_workflow.en.mdx +++ b/web/app/components/develop/template/template_workflow.en.mdx @@ -66,6 +66,12 @@ Workflow applications offers non-session support and is ideal for translation, a Should be uniquely defined by the developer within the application.
    The user identifier should be consistent with the user passed in the message sending interface. The Service API does not share conversations created by the WebApp. + - `files` (array[object]) Optional + - `trace_id` (string) Optional + Trace ID. Used for integration with existing business trace components to achieve end-to-end distributed tracing. If not provided, the system will automatically generate a trace_id. Supports the following three ways to pass, in order of priority: + 1. Header: via HTTP Header `X-Trace-Id`, highest priority. + 2. Query parameter: via URL query parameter `trace_id`. + 3. Request Body: via request body field `trace_id` (i.e., this field). ### Response When `response_mode` is `blocking`, return a CompletionResponse object. diff --git a/web/app/components/develop/template/template_workflow.ja.mdx b/web/app/components/develop/template/template_workflow.ja.mdx index 287eb87f45..0c32467ce8 100644 --- a/web/app/components/develop/template/template_workflow.ja.mdx +++ b/web/app/components/develop/template/template_workflow.ja.mdx @@ -66,6 +66,11 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from ユーザー識別子、エンドユーザーのアイデンティティを定義するために使用されます。 アプリケーション内で開発者によって一意に定義される必要があります。 - `files` (array[object]) オプション + - `trace_id` (string) オプション + トレースID。既存の業務システムのトレースコンポーネントと連携し、エンドツーエンドの分散トレーシングを実現するために使用します。指定がない場合、システムが自動的に trace_id を生成します。以下の3つの方法で渡すことができ、優先順位は次のとおりです: + 1. Header:HTTPヘッダー `X-Trace-Id` で渡す(最優先)。 + 2. クエリパラメータ:URLクエリパラメータ `trace_id` で渡す。 + 3. 
リクエストボディ:リクエストボディの `trace_id` フィールドで渡す(本フィールド)。 ### 応答 diff --git a/web/app/components/develop/template/template_workflow.zh.mdx b/web/app/components/develop/template/template_workflow.zh.mdx index 105eca0700..236da62e88 100644 --- a/web/app/components/develop/template/template_workflow.zh.mdx +++ b/web/app/components/develop/template/template_workflow.zh.mdx @@ -60,7 +60,12 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等 - `user` (string) Required 用户标识,用于定义终端用户的身份,方便检索、统计。 由开发者定义规则,需保证用户标识在应用内唯一。API 无法访问 WebApp 创建的会话。 - + - `files` (array[object]) 可选 + - `trace_id` (string) Optional + 链路追踪ID。适用于与业务系统已有的trace组件打通,实现端到端分布式追踪等场景。如果未指定,系统将自动生成 `trace_id`。支持以下三种方式传递,具体优先级依次为: + 1. Header:推荐通过 HTTP Header `X-Trace-Id` 传递,优先级最高。 + 2. Query 参数:通过 URL 查询参数 `trace_id` 传递。 + 3. Request Body:通过请求体字段 `trace_id` 传递(即本字段)。 ### Response 当 `response_mode` 为 `blocking` 时,返回 CompletionResponse object。 diff --git a/web/app/components/header/account-setting/members-page/index.tsx b/web/app/components/header/account-setting/members-page/index.tsx index 4ff527627b..8b536cbe43 100644 --- a/web/app/components/header/account-setting/members-page/index.tsx +++ b/web/app/components/header/account-setting/members-page/index.tsx @@ -1,9 +1,6 @@ 'use client' import { useState } from 'react' import useSWR from 'swr' -import dayjs from 'dayjs' -import 'dayjs/locale/zh-cn' -import relativeTime from 'dayjs/plugin/relativeTime' import { useContext } from 'use-context-selector' import { RiUserAddLine } from '@remixicon/react' import { useTranslation } from 'react-i18next' @@ -28,7 +25,7 @@ import cn from '@/utils/classnames' import Tooltip from '@/app/components/base/tooltip' import { RiPencilLine } from '@remixicon/react' import { useGlobalPublicStore } from '@/context/global-public-context' -dayjs.extend(relativeTime) +import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now' const MembersPage = () => { const { t } = useTranslation() @@ -50,6 +47,7 @@ const MembersPage = () => { fetchMembers, 
) const { systemFeatures } = useGlobalPublicStore() + const { formatTimeFromNow } = useFormatTimeFromNow() const [inviteModalVisible, setInviteModalVisible] = useState(false) const [invitationResults, setInvitationResults] = useState([]) const [invitedModalVisible, setInvitedModalVisible] = useState(false) @@ -133,7 +131,7 @@ const MembersPage = () => {
    {account.email}
    -
    {dayjs(Number((account.last_active_at || account.created_at)) * 1000).locale(locale === 'zh-Hans' ? 'zh-cn' : 'en').fromNow()}
    +
    {formatTimeFromNow(Number((account.last_active_at || account.created_at)) * 1000)}
    {isCurrentWorkspaceOwner && account.role === 'owner' && isAllowTransferWorkspace && ( setShowTransferOwnershipModal(true)}> diff --git a/web/app/components/i18n.tsx b/web/app/components/i18n.tsx index f04f8d6cbe..374b1f608f 100644 --- a/web/app/components/i18n.tsx +++ b/web/app/components/i18n.tsx @@ -1,10 +1,13 @@ 'use client' import type { FC } from 'react' -import React, { useEffect } from 'react' +import React, { useEffect, useState } from 'react' import I18NContext from '@/context/i18n' import type { Locale } from '@/i18n' import { setLocaleOnClient } from '@/i18n' +import Loading from './base/loading' +import { usePrefetchQuery } from '@tanstack/react-query' +import { getSystemFeatures } from '@/service/common' export type II18nProps = { locale: Locale @@ -14,10 +17,22 @@ const I18n: FC = ({ locale, children, }) => { + const [loading, setLoading] = useState(true) + + usePrefetchQuery({ + queryKey: ['systemFeatures'], + queryFn: getSystemFeatures, + }) + useEffect(() => { - setLocaleOnClient(locale, false) + setLocaleOnClient(locale, false).then(() => { + setLoading(false) + }) }, [locale]) + if (loading) + return
    + return ( = ({ const deprecatedReasonKey = useMemo(() => { if (!deprecatedReason) return '' - return snakeCase2CamelCase(deprecatedReason) + return camelCase(deprecatedReason) }, [deprecatedReason]) // Check if the deprecatedReasonKey exists in i18n diff --git a/web/app/components/plugins/install-plugin/install-bundle/steps/install.tsx b/web/app/components/plugins/install-plugin/install-bundle/steps/install.tsx index 2d8bdcd3d9..fabad62397 100644 --- a/web/app/components/plugins/install-plugin/install-bundle/steps/install.tsx +++ b/web/app/components/plugins/install-plugin/install-bundle/steps/install.tsx @@ -10,7 +10,7 @@ import type { ExposeRefs } from './install-multi' import InstallMulti from './install-multi' import { useInstallOrUpdate } from '@/service/use-plugins' import useRefreshPluginList from '../../hooks/use-refresh-plugin-list' -import { useCanInstallPluginFromMarketplace } from '@/app/components/plugins/plugin-page/use-permission' +import { useCanInstallPluginFromMarketplace } from '@/app/components/plugins/plugin-page/use-reference-setting' import { useMittContextSelector } from '@/context/mitt-context' import Checkbox from '@/app/components/base/checkbox' const i18nPrefix = 'plugin.installModal' diff --git a/web/app/components/plugins/marketplace/list/index.tsx b/web/app/components/plugins/marketplace/list/index.tsx index 5651512f8e..2072e3feed 100644 --- a/web/app/components/plugins/marketplace/list/index.tsx +++ b/web/app/components/plugins/marketplace/list/index.tsx @@ -56,7 +56,7 @@ const List = ({ return ( { const { t } = useTranslation() + const { userProfile: { timezone } } = useAppContext() + const { theme } = useTheme() const locale = useGetLanguage() const { locale: currentLocale } = useI18N() @@ -112,8 +119,24 @@ const DetailHeader = ({ setFalse: hideUpdateModal, }] = useBoolean(false) - const handleUpdate = async () => { + const { referenceSetting } = useReferenceSetting() + const { auto_upgrade: autoUpgradeInfo } = 
referenceSetting || {} + const isAutoUpgradeEnabled = useMemo(() => { + if (!autoUpgradeInfo || !isFromMarketplace) + return false + if(autoUpgradeInfo.upgrade_mode === AUTO_UPDATE_MODE.update_all) + return true + if(autoUpgradeInfo.upgrade_mode === AUTO_UPDATE_MODE.partial && autoUpgradeInfo.include_plugins.includes(plugin_id)) + return true + if(autoUpgradeInfo.upgrade_mode === AUTO_UPDATE_MODE.exclude && !autoUpgradeInfo.exclude_plugins.includes(plugin_id)) + return true + return false + }, [autoUpgradeInfo, plugin_id, isFromMarketplace]) + + const [isDowngrade, setIsDowngrade] = useState(false) + const handleUpdate = async (isDowngrade?: boolean) => { if (isFromMarketplace) { + setIsDowngrade(!!isDowngrade) showUpdateModal() return } @@ -180,9 +203,6 @@ const DetailHeader = ({ } }, [showDeleting, installation_id, hideDeleting, hideDeleteConfirm, onUpdate, category, refreshModelProviders, invalidateAllToolProviders]) - // #plugin TODO# used in apps - // const usedInApps = 3 - return (
    @@ -201,7 +221,7 @@ const DetailHeader = ({ currentVersion={version} onSelect={(state) => { setTargetVersion(state) - handleUpdate() + handleUpdate(state.isDowngrade) }} trigger={ } /> + {/* Auto update info */} + {isAutoUpgradeEnabled && ( + + {/* add a a div to fix tooltip hover not show problem */} +
    + + + +
    +
    + )} + {(hasNewVersion || isFromGitHub) && ( + } + value={value} + onChange={onChange} + isShow={isShowToolPicker} + onShowChange={setToolPicker} + /> +
    + ) +} +export default React.memo(PluginsPicker) diff --git a/web/app/components/plugins/reference-setting-modal/auto-update-setting/plugins-selected.tsx b/web/app/components/plugins/reference-setting-modal/auto-update-setting/plugins-selected.tsx new file mode 100644 index 0000000000..42c2a34ee8 --- /dev/null +++ b/web/app/components/plugins/reference-setting-modal/auto-update-setting/plugins-selected.tsx @@ -0,0 +1,29 @@ +'use client' +import type { FC } from 'react' +import React from 'react' +import cn from '@/utils/classnames' +import { MARKETPLACE_API_PREFIX } from '@/config' +import Icon from '@/app/components/plugins/card/base/card-icon' + +const MAX_DISPLAY_COUNT = 14 +type Props = { + className?: string + plugins: string[] +} + +const PluginsSelected: FC = ({ + className, + plugins, +}) => { + const isShowAll = plugins.length < MAX_DISPLAY_COUNT + const displayPlugins = plugins.slice(0, MAX_DISPLAY_COUNT) + return ( +
    + {displayPlugins.map(plugin => ( + + ))} + {!isShowAll &&
    +{plugins.length - MAX_DISPLAY_COUNT}
    } +
    + ) +} +export default React.memo(PluginsSelected) diff --git a/web/app/components/plugins/reference-setting-modal/auto-update-setting/strategy-picker.tsx b/web/app/components/plugins/reference-setting-modal/auto-update-setting/strategy-picker.tsx new file mode 100644 index 0000000000..c8227520f3 --- /dev/null +++ b/web/app/components/plugins/reference-setting-modal/auto-update-setting/strategy-picker.tsx @@ -0,0 +1,98 @@ +import { useState } from 'react' +import { useTranslation } from 'react-i18next' +import { + RiArrowDownSLine, + RiCheckLine, +} from '@remixicon/react' +import { AUTO_UPDATE_STRATEGY } from './types' +import { + PortalToFollowElem, + PortalToFollowElemContent, + PortalToFollowElemTrigger, +} from '@/app/components/base/portal-to-follow-elem' +import Button from '@/app/components/base/button' +const i18nPrefix = 'plugin.autoUpdate.strategy' + +type Props = { + value: AUTO_UPDATE_STRATEGY + onChange: (value: AUTO_UPDATE_STRATEGY) => void +} +const StrategyPicker = ({ + value, + onChange, +}: Props) => { + const { t } = useTranslation() + const [open, setOpen] = useState(false) + const options = [ + { + value: AUTO_UPDATE_STRATEGY.disabled, + label: t(`${i18nPrefix}.disabled.name`), + description: t(`${i18nPrefix}.disabled.description`), + }, + { + value: AUTO_UPDATE_STRATEGY.fixOnly, + label: t(`${i18nPrefix}.fixOnly.name`), + description: t(`${i18nPrefix}.fixOnly.description`), + }, + { + value: AUTO_UPDATE_STRATEGY.latest, + label: t(`${i18nPrefix}.latest.name`), + description: t(`${i18nPrefix}.latest.description`), + }, + ] + const selectedOption = options.find(option => option.value === value) + + return ( + + { + e.stopPropagation() + e.nativeEvent.stopImmediatePropagation() + setOpen(v => !v) + }}> + + + +
    + { + options.map(option => ( +
    { + e.stopPropagation() + e.nativeEvent.stopImmediatePropagation() + onChange(option.value) + setOpen(false) + }} + > +
    + { + value === option.value && ( + + ) + } +
    +
    +
    {option.label}
    +
    {option.description}
    +
    +
    + )) + } +
    +
    +
    + ) +} + +export default StrategyPicker diff --git a/web/app/components/plugins/reference-setting-modal/auto-update-setting/tool-item.tsx b/web/app/components/plugins/reference-setting-modal/auto-update-setting/tool-item.tsx new file mode 100644 index 0000000000..99a01bcd0f --- /dev/null +++ b/web/app/components/plugins/reference-setting-modal/auto-update-setting/tool-item.tsx @@ -0,0 +1,45 @@ +'use client' +import type { FC } from 'react' +import React from 'react' +import type { PluginDetail } from '@/app/components/plugins/types' +import Icon from '@/app/components/plugins/card/base/card-icon' +import { renderI18nObject } from '@/i18n' +import { useGetLanguage } from '@/context/i18n' +import { MARKETPLACE_API_PREFIX } from '@/config' +import Checkbox from '@/app/components/base/checkbox' + +type Props = { + payload: PluginDetail + isChecked?: boolean + onCheckChange: () => void +} + +const ToolItem: FC = ({ + payload, + isChecked, + onCheckChange, +}) => { + const language = useGetLanguage() + + const { plugin_id, declaration } = payload + const { label, author: org } = declaration + return ( +
    +
    +
    + +
    {renderI18nObject(label, language)}
    +
    {org}
    +
    + +
    +
    + ) +} +export default React.memo(ToolItem) diff --git a/web/app/components/plugins/reference-setting-modal/auto-update-setting/tool-picker.tsx b/web/app/components/plugins/reference-setting-modal/auto-update-setting/tool-picker.tsx new file mode 100644 index 0000000000..02c1ecaf35 --- /dev/null +++ b/web/app/components/plugins/reference-setting-modal/auto-update-setting/tool-picker.tsx @@ -0,0 +1,167 @@ +'use client' +import type { FC } from 'react' +import React, { useCallback, useMemo, useState } from 'react' +import { + PortalToFollowElem, + PortalToFollowElemContent, + PortalToFollowElemTrigger, +} from '@/app/components/base/portal-to-follow-elem' +import { useInstalledPluginList } from '@/service/use-plugins' +import { PLUGIN_TYPE_SEARCH_MAP } from '../../marketplace/plugin-type-switch' +import SearchBox from '@/app/components/plugins/marketplace/search-box' +import { useTranslation } from 'react-i18next' +import cn from '@/utils/classnames' +import ToolItem from './tool-item' +import Loading from '@/app/components/base/loading' +import NoDataPlaceholder from './no-data-placeholder' +import { PluginSource } from '../../types' + +type Props = { + trigger: React.ReactNode + value: string[] + onChange: (value: string[]) => void + isShow: boolean + onShowChange: (isShow: boolean) => void + +} + +const ToolPicker: FC = ({ + trigger, + value, + onChange, + isShow, + onShowChange, +}) => { + const { t } = useTranslation() + const toggleShowPopup = useCallback(() => { + onShowChange(!isShow) + }, [onShowChange, isShow]) + + const tabs = [ + { + key: PLUGIN_TYPE_SEARCH_MAP.all, + name: t('plugin.category.all'), + }, + { + key: PLUGIN_TYPE_SEARCH_MAP.model, + name: t('plugin.category.models'), + }, + { + key: PLUGIN_TYPE_SEARCH_MAP.tool, + name: t('plugin.category.tools'), + }, + { + key: PLUGIN_TYPE_SEARCH_MAP.agent, + name: t('plugin.category.agents'), + }, + { + key: PLUGIN_TYPE_SEARCH_MAP.extension, + name: t('plugin.category.extensions'), + }, + { + key: 
PLUGIN_TYPE_SEARCH_MAP.bundle, + name: t('plugin.category.bundles'), + }, + ] + + const [pluginType, setPluginType] = useState(PLUGIN_TYPE_SEARCH_MAP.all) + const [query, setQuery] = useState('') + const [tags, setTags] = useState([]) + const { data, isLoading } = useInstalledPluginList() + const filteredList = useMemo(() => { + const list = data ? data.plugins : [] + return list.filter((plugin) => { + const isFromMarketPlace = plugin.source === PluginSource.marketplace + return ( + isFromMarketPlace && (pluginType === PLUGIN_TYPE_SEARCH_MAP.all || plugin.declaration.category === pluginType) + && (tags.length === 0 || tags.some(tag => plugin.declaration.tags.includes(tag))) + && (query === '' || plugin.plugin_id.toLowerCase().includes(query.toLowerCase())) + ) + }) + }, [data, pluginType, query, tags]) + const handleCheckChange = useCallback((pluginId: string) => { + return () => { + const newValue = value.includes(pluginId) + ? value.filter(id => id !== pluginId) + : [...value, pluginId] + onChange(newValue) + } + }, [onChange, value]) + + const listContent = ( +
    + {filteredList.map(item => ( + + ))} +
    + ) + + const loadingContent = ( +
    + +
    + ) + + const noData = ( + + ) + + return ( + + + {trigger} + + +
    +
    + +
    +
    +
    + { + tabs.map(tab => ( +
    setPluginType(tab.key)} + > + {tab.name} +
    + )) + } +
    +
    + {!isLoading && filteredList.length > 0 && listContent} + {!isLoading && filteredList.length === 0 && noData} + {isLoading && loadingContent} +
    +
    +
    + ) +} + +export default React.memo(ToolPicker) diff --git a/web/app/components/plugins/reference-setting-modal/auto-update-setting/types.ts b/web/app/components/plugins/reference-setting-modal/auto-update-setting/types.ts new file mode 100644 index 0000000000..b734150b49 --- /dev/null +++ b/web/app/components/plugins/reference-setting-modal/auto-update-setting/types.ts @@ -0,0 +1,19 @@ +export enum AUTO_UPDATE_STRATEGY { + fixOnly = 'fix_only', + disabled = 'disabled', + latest = 'latest', +} + +export enum AUTO_UPDATE_MODE { + partial = 'partial', + exclude = 'exclude', + update_all = 'all', +} + +export type AutoUpdateConfig = { + strategy_setting: AUTO_UPDATE_STRATEGY + upgrade_time_of_day: number + upgrade_mode: AUTO_UPDATE_MODE + exclude_plugins: string[] + include_plugins: string[] +} diff --git a/web/app/components/plugins/reference-setting-modal/auto-update-setting/utils.spec.ts b/web/app/components/plugins/reference-setting-modal/auto-update-setting/utils.spec.ts new file mode 100644 index 0000000000..f813338c98 --- /dev/null +++ b/web/app/components/plugins/reference-setting-modal/auto-update-setting/utils.spec.ts @@ -0,0 +1,14 @@ +import { convertLocalSecondsToUTCDaySeconds, convertUTCDaySecondsToLocalSeconds } from './utils' + +describe('convertLocalSecondsToUTCDaySeconds', () => { + it('should convert local seconds to UTC day seconds correctly', () => { + const localTimezone = 'Asia/Shanghai' + const utcSeconds = convertLocalSecondsToUTCDaySeconds(0, localTimezone) + expect(utcSeconds).toBe((24 - 8) * 3600) + }) + + it('should convert local seconds to UTC day seconds for a specific time', () => { + const localTimezone = 'Asia/Shanghai' + expect(convertUTCDaySecondsToLocalSeconds(convertLocalSecondsToUTCDaySeconds(0, localTimezone), localTimezone)).toBe(0) + }) +}) diff --git a/web/app/components/plugins/reference-setting-modal/auto-update-setting/utils.ts b/web/app/components/plugins/reference-setting-modal/auto-update-setting/utils.ts new file 
mode 100644 index 0000000000..23c067285f --- /dev/null +++ b/web/app/components/plugins/reference-setting-modal/auto-update-setting/utils.ts @@ -0,0 +1,37 @@ +import type { Dayjs } from 'dayjs' +import dayjs from 'dayjs' +import utc from 'dayjs/plugin/utc' +import timezone from 'dayjs/plugin/timezone' + +dayjs.extend(utc) +dayjs.extend(timezone) + +export const timeOfDayToDayjs = (timeOfDay: number): Dayjs => { + const hours = Math.floor(timeOfDay / 3600) + const minutes = (timeOfDay - hours * 3600) / 60 + const res = dayjs().startOf('day').hour(hours).minute(minutes) + return res +} + +export const convertLocalSecondsToUTCDaySeconds = (secondsInDay: number, localTimezone: string): number => { + const localDayStart = dayjs().tz(localTimezone).startOf('day') + const localTargetTime = localDayStart.add(secondsInDay, 'second') + const utcTargetTime = localTargetTime.utc() + const utcDayStart = utcTargetTime.startOf('day') + const secondsFromUTCMidnight = utcTargetTime.diff(utcDayStart, 'second') + return secondsFromUTCMidnight +} + +export const dayjsToTimeOfDay = (date?: Dayjs): number => { + if (!date) return 0 + return date.hour() * 3600 + date.minute() * 60 +} + +export const convertUTCDaySecondsToLocalSeconds = (utcDaySeconds: number, localTimezone: string): number => { + const utcDayStart = dayjs().utc().startOf('day') + const utcTargetTime = utcDayStart.add(utcDaySeconds, 'second') + const localTargetTime = utcTargetTime.tz(localTimezone) + const localDayStart = localTargetTime.startOf('day') + const secondsInLocalDay = localTargetTime.diff(localDayStart, 'second') + return secondsInLocalDay +} diff --git a/web/app/components/plugins/reference-setting-modal/label.tsx b/web/app/components/plugins/reference-setting-modal/label.tsx new file mode 100644 index 0000000000..6444bf801d --- /dev/null +++ b/web/app/components/plugins/reference-setting-modal/label.tsx @@ -0,0 +1,28 @@ +'use client' +import type { FC } from 'react' +import React from 'react' +import cn 
from '@/utils/classnames' + +type Props = { + label: string + description?: string +} + +const Label: FC = ({ + label, + description, +}) => { + return ( +
    +
    + {label} +
    + {description && ( +
    + {description} +
    + )} +
    + ) +} +export default React.memo(Label) diff --git a/web/app/components/plugins/permission-setting-modal/modal.tsx b/web/app/components/plugins/reference-setting-modal/modal.tsx similarity index 74% rename from web/app/components/plugins/permission-setting-modal/modal.tsx rename to web/app/components/plugins/reference-setting-modal/modal.tsx index 6fd4d8c2dc..9fefbdbb55 100644 --- a/web/app/components/plugins/permission-setting-modal/modal.tsx +++ b/web/app/components/plugins/reference-setting-modal/modal.tsx @@ -5,14 +5,18 @@ import { useTranslation } from 'react-i18next' import Modal from '@/app/components/base/modal' import OptionCard from '@/app/components/workflow/nodes/_base/components/option-card' import Button from '@/app/components/base/button' -import type { Permissions } from '@/app/components/plugins/types' +import type { Permissions, ReferenceSetting } from '@/app/components/plugins/types' import { PermissionType } from '@/app/components/plugins/types' +import type { AutoUpdateConfig } from './auto-update-setting/types' +import AutoUpdateSetting from './auto-update-setting' +import { defaultValue as autoUpdateDefaultValue } from './auto-update-setting/config' +import Label from './label' const i18nPrefix = 'plugin.privilege' type Props = { - payload: Permissions + payload: ReferenceSetting onHide: () => void - onSave: (payload: Permissions) => void + onSave: (payload: ReferenceSetting) => void } const PluginSettingModal: FC = ({ @@ -21,7 +25,9 @@ const PluginSettingModal: FC = ({ onSave, }) => { const { t } = useTranslation() - const [tempPrivilege, setTempPrivilege] = useState(payload) + const { auto_upgrade: autoUpdateConfig, permission: privilege } = payload || {} + const [tempPrivilege, setTempPrivilege] = useState(privilege) + const [tempAutoUpdateConfig, setTempAutoUpdateConfig] = useState(autoUpdateConfig || autoUpdateDefaultValue) const handlePrivilegeChange = useCallback((key: string) => { return (value: PermissionType) => { 
setTempPrivilege({ @@ -32,18 +38,21 @@ const PluginSettingModal: FC = ({ }, [tempPrivilege]) const handleSave = useCallback(async () => { - await onSave(tempPrivilege) + await onSave({ + permission: tempPrivilege, + auto_upgrade: tempAutoUpdateConfig, + }) onHide() - }, [onHide, onSave, tempPrivilege]) + }, [onHide, onSave, tempAutoUpdateConfig, tempPrivilege]) return ( -
    +
    {t(`${i18nPrefix}.title`)}
    @@ -53,9 +62,7 @@ const PluginSettingModal: FC = ({ { title: t(`${i18nPrefix}.whoCanDebug`), key: 'debug_permission', value: tempPrivilege?.debug_permission || PermissionType.noOne }, ].map(({ title, key, value }) => (
    -
    - {title} -
    +
    + +
    + + +
    + + ) +} + +export default DowngradeWarningModal diff --git a/web/app/components/plugins/update-plugin/from-market-place.tsx b/web/app/components/plugins/update-plugin/from-market-place.tsx index 98994d9b9c..70bc7399f5 100644 --- a/web/app/components/plugins/update-plugin/from-market-place.tsx +++ b/web/app/components/plugins/update-plugin/from-market-place.tsx @@ -13,13 +13,18 @@ import { updateFromMarketPlace } from '@/service/plugins' import checkTaskStatus from '@/app/components/plugins/install-plugin/base/check-task-status' import { usePluginTaskList } from '@/service/use-plugins' import Toast from '../../base/toast' +import DowngradeWarningModal from './downgrade-warning' +import { useInvalidateReferenceSettings, useRemoveAutoUpgrade } from '@/service/use-plugins' +import cn from '@/utils/classnames' const i18nPrefix = 'plugin.upgrade' type Props = { payload: UpdateFromMarketPlacePayload + pluginId: string onSave: () => void onCancel: () => void + isShowDowngradeWarningModal?: boolean } enum UploadStep { @@ -30,8 +35,10 @@ enum UploadStep { const UpdatePluginModal: FC = ({ payload, + pluginId, onSave, onCancel, + isShowDowngradeWarningModal, }) => { const { originalPackageInfo, @@ -103,51 +110,74 @@ const UpdatePluginModal: FC = ({ onSave() }, [onSave, uploadStep, check, originalPackageInfo.id, handleRefetch, targetPackageInfo.id]) + const { mutateAsync } = useRemoveAutoUpgrade() + const invalidateReferenceSettings = useInvalidateReferenceSettings() + const handleExcludeAndDownload = async () => { + await mutateAsync({ + plugin_id: pluginId, + }) + invalidateReferenceSettings() + handleConfirm() + } + const doShowDowngradeWarningModal = isShowDowngradeWarningModal && uploadStep === UploadStep.notStarted + return ( -
    - {t(`${i18nPrefix}.description`)} -
    -
    - - - {`${originalPackageInfo.payload.version} -> ${targetPackageInfo.version}`} - - - } + {doShowDowngradeWarningModal && ( + -
    -
    - {uploadStep === UploadStep.notStarted && ( + )} + {!doShowDowngradeWarningModal && ( + <> +
    + {t(`${i18nPrefix}.description`)} +
    +
    + + + {`${originalPackageInfo.payload.version} -> ${targetPackageInfo.version}`} + + + } + /> +
    +
    + {uploadStep === UploadStep.notStarted && ( + + )} - )} - -
    +
    + + )} +
    ) } diff --git a/web/app/components/plugins/update-plugin/plugin-version-picker.tsx b/web/app/components/plugins/update-plugin/plugin-version-picker.tsx index 424f76d790..36a4faace1 100644 --- a/web/app/components/plugins/update-plugin/plugin-version-picker.tsx +++ b/web/app/components/plugins/update-plugin/plugin-version-picker.tsx @@ -15,6 +15,7 @@ import type { import { useVersionListOfPlugin } from '@/service/use-plugins' import useTimestamp from '@/hooks/use-timestamp' import cn from '@/utils/classnames' +import { lt } from 'semver' type Props = { disabled?: boolean @@ -28,9 +29,11 @@ type Props = { onSelect: ({ version, unique_identifier, + isDowngrade, }: { version: string unique_identifier: string + isDowngrade: boolean }) => void } @@ -59,13 +62,14 @@ const PluginVersionPicker: FC = ({ const { data: res } = useVersionListOfPlugin(pluginID) - const handleSelect = useCallback(({ version, unique_identifier }: { + const handleSelect = useCallback(({ version, unique_identifier, isDowngrade }: { version: string unique_identifier: string + isDowngrade: boolean }) => { if (currentVersion === version) return - onSelect({ version, unique_identifier }) + onSelect({ version, unique_identifier, isDowngrade }) onShowChange(false) }, [currentVersion, onSelect, onShowChange]) @@ -99,6 +103,7 @@ const PluginVersionPicker: FC = ({ onClick={() => handleSelect({ version: version.version, unique_identifier: version.unique_identifier, + isDowngrade: lt(version.version, currentVersion), })} >
    diff --git a/web/app/components/tools/marketplace/index.tsx b/web/app/components/tools/marketplace/index.tsx index c8510420ce..b5183f242a 100644 --- a/web/app/components/tools/marketplace/index.tsx +++ b/web/app/components/tools/marketplace/index.tsx @@ -1,14 +1,10 @@ -import { - useEffect, - useRef, -} from 'react' import { useTheme } from 'next-themes' import { RiArrowRightUpLine, RiArrowUpDoubleLine, } from '@remixicon/react' import { useTranslation } from 'react-i18next' -import { useMarketplace } from './hooks' +import type { useMarketplace } from './hooks' import List from '@/app/components/plugins/marketplace/list' import Loading from '@/app/components/base/loading' import { getLocaleOnClient } from '@/i18n' @@ -17,12 +13,16 @@ import { getMarketplaceUrl } from '@/utils/var' type MarketplaceProps = { searchPluginText: string filterPluginTags: string[] - onMarketplaceScroll: () => void + isMarketplaceArrowVisible: boolean + showMarketplacePanel: () => void + marketplaceContext: ReturnType } const Marketplace = ({ searchPluginText, filterPluginTags, - onMarketplaceScroll, + isMarketplaceArrowVisible, + showMarketplacePanel, + marketplaceContext, }: MarketplaceProps) => { const locale = getLocaleOnClient() const { t } = useTranslation() @@ -32,90 +32,80 @@ const Marketplace = ({ marketplaceCollections, marketplaceCollectionPluginsMap, plugins, - handleScroll, page, - } = useMarketplace(searchPluginText, filterPluginTags) - const containerRef = useRef(null) - - useEffect(() => { - const container = containerRef.current - if (container) - container.addEventListener('scroll', handleScroll) - - return () => { - if (container) - container.removeEventListener('scroll', handleScroll) - } - }, [handleScroll]) + } = marketplaceContext return ( -
    - onMarketplaceScroll()} - /> -
    -
    - {t('plugin.marketplace.moreFrom')} -
    -
    - {t('plugin.marketplace.discover')} - - {t('plugin.category.models')} - - , - - {t('plugin.category.tools')} - + <> +
    + {isMarketplaceArrowVisible && ( + + )} +
    +
    + {t('plugin.marketplace.moreFrom')} +
    +
    + {t('plugin.marketplace.discover')} + + {t('plugin.category.models')} + + , + + {t('plugin.category.tools')} + , {t('plugin.category.datasources')} - , - - {t('plugin.category.agents')} - - , - - {t('plugin.category.extensions')} - - {t('plugin.marketplace.and')} - - {t('plugin.category.bundles')} - - {t('common.operation.in')} - - {t('plugin.marketplace.difyMarketplace')} - - + , + + {t('plugin.category.agents')} + + , + + {t('plugin.category.extensions')} + + {t('plugin.marketplace.and')} + + {t('plugin.category.bundles')} + + {t('common.operation.in')} + + {t('plugin.marketplace.difyMarketplace')} + + +
    - { - isLoading && page === 1 && ( -
    - -
    - ) - } - { - (!isLoading || page > 1) && ( - - ) - } -
    +
    + { + isLoading && page === 1 && ( +
    + +
    + ) + } + { + (!isLoading || page > 1) && ( + + ) + } +
    + ) } diff --git a/web/app/components/tools/mcp/hooks.ts b/web/app/components/tools/mcp/hooks.ts deleted file mode 100644 index 4a348569a0..0000000000 --- a/web/app/components/tools/mcp/hooks.ts +++ /dev/null @@ -1,14 +0,0 @@ -import dayjs from 'dayjs' -import relativeTime from 'dayjs/plugin/relativeTime' -import { useCallback } from 'react' -import { useI18N } from '@/context/i18n' -dayjs.extend(relativeTime) - -export const useFormatTimeFromNow = () => { - const { locale } = useI18N() - const formatTimeFromNow = useCallback((time: number) => { - return dayjs(time).locale(locale === 'zh-Hans' ? 'zh-cn' : locale).fromNow() - }, [locale]) - - return { formatTimeFromNow } -} diff --git a/web/app/components/tools/mcp/provider-card.tsx b/web/app/components/tools/mcp/provider-card.tsx index 677e25c533..7c4f3718d4 100644 --- a/web/app/components/tools/mcp/provider-card.tsx +++ b/web/app/components/tools/mcp/provider-card.tsx @@ -6,7 +6,7 @@ import { useAppContext } from '@/context/app-context' import { RiHammerFill } from '@remixicon/react' import Indicator from '@/app/components/header/indicator' import Icon from '@/app/components/plugins/card/base/card-icon' -import { useFormatTimeFromNow } from './hooks' +import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now' import type { ToolWithProvider } from '../../workflow/types' import Confirm from '@/app/components/base/confirm' import MCPModal from './modal' diff --git a/web/app/components/tools/provider-list.tsx b/web/app/components/tools/provider-list.tsx index ecfa5f6ea2..d267b49c79 100644 --- a/web/app/components/tools/provider-list.tsx +++ b/web/app/components/tools/provider-list.tsx @@ -1,5 +1,5 @@ 'use client' -import { useMemo, useRef, useState } from 'react' +import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { useTranslation } from 'react-i18next' import type { Collection } from './types' import Marketplace from './marketplace' @@ -20,6 +20,7 @@ import { 
useAllToolProviders } from '@/service/use-tools' import { useInstalledPluginList, useInvalidateInstalledPluginList } from '@/service/use-plugins' import { useGlobalPublicStore } from '@/context/global-public-context' import { ToolTypeEnum } from '../workflow/block-selector/types' +import { useMarketplace } from './marketplace/hooks' const getToolType = (type: string) => { switch (type) { @@ -37,7 +38,7 @@ const getToolType = (type: string) => { } const ProviderList = () => { // const searchParams = useSearchParams() - // searchParams.get('category') === 'workflow' + // searchParams.get('category') === 'workflow' const { t } = useTranslation() const { enable_marketplace } = useGlobalPublicStore(s => s.systemFeatures) const containerRef = useRef(null) @@ -83,6 +84,41 @@ const ProviderList = () => { return detail }, [currentProvider?.plugin_id, pluginList?.plugins]) + const toolListTailRef = useRef(null) + const showMarketplacePanel = useCallback(() => { + containerRef.current?.scrollTo({ + top: toolListTailRef.current + ? toolListTailRef.current?.offsetTop - 80 + : 0, + behavior: 'smooth', + }) + }, [toolListTailRef]) + + const marketplaceContext = useMarketplace(keywords, tagFilterValue) + const { + handleScroll, + } = marketplaceContext + + const [isMarketplaceArrowVisible, setIsMarketplaceArrowVisible] = useState(true) + const onContainerScroll = useMemo(() => { + return (e: Event) => { + handleScroll(e) + if (containerRef.current && toolListTailRef.current) + setIsMarketplaceArrowVisible(containerRef.current.scrollTop < (toolListTailRef.current?.offsetTop - 80)) + } + }, [handleScroll, containerRef, toolListTailRef, setIsMarketplaceArrowVisible]) + + useEffect(() => { + const container = containerRef.current + if (container) + container.addEventListener('scroll', onContainerScroll) + + return () => { + if (container) + container.removeEventListener('scroll', onContainerScroll) + } + }, [onContainerScroll]) + return ( <>
    @@ -152,15 +188,16 @@ const ProviderList = () => {
    )} {!filteredCollectionList.length && activeTab === 'builtin' && ( - + )} +
    {enable_marketplace && activeTab === 'builtin' && ( { - containerRef.current?.scrollTo({ top: containerRef.current.scrollHeight, behavior: 'smooth' }) - }} searchPluginText={keywords} filterPluginTags={tagFilterValue} + isMarketplaceArrowVisible={isMarketplaceArrowVisible} + showMarketplacePanel={showMarketplacePanel} + marketplaceContext={marketplaceContext} /> )} {activeTab === 'mcp' && ( diff --git a/web/app/components/workflow/header/editing-title.tsx b/web/app/components/workflow/header/editing-title.tsx index 2444cf8c29..32cfd36b4f 100644 --- a/web/app/components/workflow/header/editing-title.tsx +++ b/web/app/components/workflow/header/editing-title.tsx @@ -1,6 +1,6 @@ import { memo } from 'react' import { useTranslation } from 'react-i18next' -import { useFormatTimeFromNow } from '../hooks' +import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now' import { useStore } from '@/app/components/workflow/store' import useTimestamp from '@/hooks/use-timestamp' diff --git a/web/app/components/workflow/header/restoring-title.tsx b/web/app/components/workflow/header/restoring-title.tsx index 26cdd79d13..e6631d3684 100644 --- a/web/app/components/workflow/header/restoring-title.tsx +++ b/web/app/components/workflow/header/restoring-title.tsx @@ -1,6 +1,6 @@ import { memo, useMemo } from 'react' import { useTranslation } from 'react-i18next' -import { useFormatTimeFromNow } from '../hooks' +import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now' import { useStore } from '../store' import { WorkflowVersion } from '../types' import useTimestamp from '@/hooks/use-timestamp' diff --git a/web/app/components/workflow/header/view-history.tsx b/web/app/components/workflow/header/view-history.tsx index bab68ed753..f5a1f02305 100644 --- a/web/app/components/workflow/header/view-history.tsx +++ b/web/app/components/workflow/header/view-history.tsx @@ -12,12 +12,12 @@ import { RiErrorWarningLine, } from '@remixicon/react' import { - 
useFormatTimeFromNow, useIsChatMode, useNodesInteractions, useWorkflowInteractions, useWorkflowRun, } from '../hooks' +import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now' import { ControlMode, WorkflowRunningStatus } from '../types' import { formatWorkflowRunIdentifier } from '../utils' import cn from '@/utils/classnames' diff --git a/web/app/components/workflow/hooks/index.ts b/web/app/components/workflow/hooks/index.ts index 725adf2a2a..80293069bc 100644 --- a/web/app/components/workflow/hooks/index.ts +++ b/web/app/components/workflow/hooks/index.ts @@ -14,7 +14,6 @@ export * from './use-workflow-variables' export * from './use-shortcuts' export * from './use-workflow-interactions' export * from './use-workflow-mode' -export * from './use-format-time-from-now' export * from './use-nodes-meta-data' export * from './use-available-blocks' export * from './use-workflow-refresh-draft' diff --git a/web/app/components/workflow/nodes/_base/components/form-input-item.tsx b/web/app/components/workflow/nodes/_base/components/form-input-item.tsx index 316a5c9819..1efa8aab02 100644 --- a/web/app/components/workflow/nodes/_base/components/form-input-item.tsx +++ b/web/app/components/workflow/nodes/_base/components/form-input-item.tsx @@ -164,7 +164,7 @@ const FormInputItem: FC = ({ ...value, [variable]: { ...varInput, - ...newValue, + value: newValue, }, }) } @@ -242,7 +242,7 @@ const FormInputItem: FC = ({ )} @@ -251,7 +251,7 @@ const FormInputItem: FC = ({ popupClassName='!w-[387px]' isAdvancedMode isInWorkflow - value={varInput} + value={varInput?.value} setModel={handleAppOrModelSelect} readonly={readOnly} scope={scope} diff --git a/web/app/dev-only/i18n-checker/page.tsx b/web/app/dev-only/i18n-checker/page.tsx index 5ed0c86b82..d821979bb9 100644 --- a/web/app/dev-only/i18n-checker/page.tsx +++ b/web/app/dev-only/i18n-checker/page.tsx @@ -1,13 +1,19 @@ 'use client' -import { resources } from '@/i18n/i18next-config' -import { useEffect, useState } from 
'react' +import { loadLangResources } from '@/i18n/i18next-config' +import { useCallback, useEffect, useState } from 'react' import cn from '@/utils/classnames' +import { LanguagesSupported } from '@/i18n/language' export default function I18nTest() { const [langs, setLangs] = useState([]) + const getLangs = useCallback(async () => { + const langs = await genLangs() + setLangs(langs) + }, []) + useEffect(() => { - setLangs(genLangs()) + getLangs() }, []) return ( @@ -107,10 +113,15 @@ export default function I18nTest() { ) } -function genLangs() { +async function genLangs() { const langs_: Lang[] = [] let en!: Lang + const resources: Record = {} + // Initialize empty resource object + for (const lang of LanguagesSupported) + resources[lang] = await loadLangResources(lang) + for (const [key, value] of Object.entries(resources)) { const keys = getNestedKeys(value.translation) const lang: Lang = { diff --git a/web/app/signin/invite-settings/page.tsx b/web/app/signin/invite-settings/page.tsx index 1ff1c7d671..ea35900968 100644 --- a/web/app/signin/invite-settings/page.tsx +++ b/web/app/signin/invite-settings/page.tsx @@ -57,7 +57,7 @@ export default function InviteSettingsPage() { if (res.result === 'success') { localStorage.setItem('console_token', res.data.access_token) localStorage.setItem('refresh_token', res.data.refresh_token) - setLocaleOnClient(language, false) + await setLocaleOnClient(language, false) router.replace('/apps') } } diff --git a/web/context/i18n.ts b/web/context/i18n.ts index ef53a4b481..932beb9936 100644 --- a/web/context/i18n.ts +++ b/web/context/i18n.ts @@ -9,13 +9,15 @@ import { noop } from 'lodash-es' type II18NContext = { locale: Locale i18n: Record - setLocaleOnClient: (_lang: Locale, _reloadPage?: boolean) => void + setLocaleOnClient: (_lang: Locale, _reloadPage?: boolean) => Promise } const I18NContext = createContext({ locale: 'en-US', i18n: {}, - setLocaleOnClient: noop, + setLocaleOnClient: async (_lang: Locale, _reloadPage?: boolean) 
=> { + noop() + }, }) export const useI18N = () => useContext(I18NContext) diff --git a/web/context/web-app-context.tsx b/web/context/web-app-context.tsx index 55f95e4811..db1c5158dd 100644 --- a/web/context/web-app-context.tsx +++ b/web/context/web-app-context.tsx @@ -2,6 +2,7 @@ import type { ChatConfig } from '@/app/components/base/chat/types' import Loading from '@/app/components/base/loading' +import { checkOrSetAccessToken } from '@/app/components/share/utils' import { AccessMode } from '@/models/access-control' import type { AppData, AppMeta } from '@/models/share' import { useGetWebAppAccessModeByCode } from '@/service/use-share' @@ -60,6 +61,8 @@ const WebAppStoreProvider: FC = ({ children }) => { const pathname = usePathname() const searchParams = useSearchParams() const redirectUrlParam = searchParams.get('redirect_url') + const session = searchParams.get('session') + const sysUserId = searchParams.get('sys.user_id') const [shareCode, setShareCode] = useState(null) useEffect(() => { const shareCodeFromRedirect = getShareCodeFromRedirectUrl(redirectUrlParam) @@ -69,11 +72,22 @@ const WebAppStoreProvider: FC = ({ children }) => { updateShareCode(newShareCode) }, [pathname, redirectUrlParam, updateShareCode]) const { isFetching, data: accessModeResult } = useGetWebAppAccessModeByCode(shareCode) + const [isFetchingAccessToken, setIsFetchingAccessToken] = useState(true) useEffect(() => { - if (accessModeResult?.accessMode) + if (accessModeResult?.accessMode) { updateWebAppAccessMode(accessModeResult.accessMode) - }, [accessModeResult, updateWebAppAccessMode]) - if (isFetching) { + if (accessModeResult?.accessMode === AccessMode.PUBLIC && session && sysUserId) { + setIsFetchingAccessToken(true) + checkOrSetAccessToken(shareCode).finally(() => { + setIsFetchingAccessToken(false) + }) + } + else { + setIsFetchingAccessToken(false) + } + } + }, [accessModeResult, updateWebAppAccessMode, setIsFetchingAccessToken, shareCode, session, sysUserId]) + if (isFetching || 
isFetchingAccessToken) { return
    diff --git a/web/app/components/workflow/hooks/use-format-time-from-now.ts b/web/hooks/use-format-time-from-now.ts similarity index 93% rename from web/app/components/workflow/hooks/use-format-time-from-now.ts rename to web/hooks/use-format-time-from-now.ts index 4a348569a0..82704252ac 100644 --- a/web/app/components/workflow/hooks/use-format-time-from-now.ts +++ b/web/hooks/use-format-time-from-now.ts @@ -2,6 +2,8 @@ import dayjs from 'dayjs' import relativeTime from 'dayjs/plugin/relativeTime' import { useCallback } from 'react' import { useI18N } from '@/context/i18n' +import 'dayjs/locale/zh-cn' + dayjs.extend(relativeTime) export const useFormatTimeFromNow = () => { diff --git a/web/i18n/README.md b/web/i18n/README.md index b81ffbf4c3..5e7058d829 100644 --- a/web/i18n/README.md +++ b/web/i18n/README.md @@ -28,7 +28,7 @@ This directory contains the internationalization (i18n) files for this project. │   ├── [ 52] layout.ts │   ├── [2.3K] login.ts │   ├── [ 52] register.ts -│   ├── [2.5K] share-app.ts +│   ├── [2.5K] share.ts │   └── [2.8K] tools.ts ├── [1.6K] i18next-config.ts ├── [ 634] index.ts diff --git a/web/i18n/de-DE/plugin.ts b/web/i18n/de-DE/plugin.ts index 2a79241d00..6fa6999ae5 100644 --- a/web/i18n/de-DE/plugin.ts +++ b/web/i18n/de-DE/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpointDeleteTip: 'Endpunkt entfernen', serviceOk: 'Service in Ordnung', switchVersion: 'Version wechseln', + deprecation: { + reason: { + noMaintainer: 'kein Wartender', + ownershipTransferred: 'Eigentum übertragen', + businessAdjustments: 'Geschäftsanpassungen', + }, + onlyReason: 'Dieses Plugin wurde aufgrund von {{deprecatedReason}} abgelehnt und wird nicht länger aktualisiert.', + fullMessage: 'Dieses Plugin wurde aufgrund von {{deprecatedReason}} eingestellt und wird nicht mehr aktualisiert. 
Bitte verwenden Sie stattdessen {{-alternativePluginId}}.', + noReason: 'Dieses Plugin wurde eingestellt und wird nicht mehr aktualisiert.', + }, }, debugInfo: { title: 'Debuggen', @@ -237,6 +247,56 @@ const translation = { useApiAuthDesc: 'Nachdem die Anmeldeinformationen konfiguriert wurden, können alle Mitglieder des Arbeitsbereichs dieses Tool beim Orchestrieren von Anwendungen verwenden.', authRemoved: 'Die Authentifizierung wurde entfernt.', }, + deprecated: 'Abgelehnt', + autoUpdate: { + strategy: { + disabled: { + description: 'Plugins werden nicht automatisch aktualisiert', + name: 'Behindert', + }, + fixOnly: { + name: 'Nur fixieren', + selectedDescription: 'Auto-Update nur für Patch-Versionen', + }, + latest: { + description: 'Immer auf die neueste Version aktualisieren', + selectedDescription: 'Immer auf die neueste Version aktualisieren', + name: 'Neueste', + }, + }, + upgradeMode: { + exclude: 'Ausgewählte ausschließen', + partial: 'Nur ausgewählt', + all: 'Alle aktualisieren', + }, + upgradeModePlaceholder: { + exclude: 'Ausgewählte Plugins werden nicht automatisch aktualisiert', + partial: 'Nur ausgewählte Plugins werden automatisch aktualisiert. Derzeit sind keine Plugins ausgewählt, daher werden keine Plugins automatisch aktualisiert.', + }, + operation: { + clearAll: 'Alles löschen', + select: 'Plugins auswählen', + }, + pluginDowngradeWarning: { + downgrade: 'Trotzdem downgraden', + title: 'Plugin Downgrade', + exclude: 'Von der automatischen Aktualisierung ausschließen', + description: 'Die automatische Aktualisierung ist derzeit für dieses Plugin aktiviert. 
Ein Downgrade der Version kann dazu führen, dass Ihre Änderungen während des nächsten automatischen Updates überschrieben werden.', + }, + noPluginPlaceholder: { + noInstalled: 'Keine Plugins installiert', + noFound: 'Keine Plugins gefunden.', + }, + automaticUpdates: 'Automatische Updates', + updateTimeTitle: 'Aktualisierungszeit', + updateTime: 'Aktualisierungszeit', + excludeUpdate: 'Die folgenden {{num}} Plugins werden nicht automatisch aktualisiert.', + changeTimezone: 'Um die Zeitzone zu ändern, gehen Sie zu Einstellungen ', + nextUpdateTime: 'Nächstes automatisches Update: {{time}}', + partialUPdate: 'Nur die folgenden {{num}} Plugins werden automatisch aktualisiert', + specifyPluginsToUpdate: 'Geben Sie die zu aktualisierenden Plugins an', + updateSettings: 'Einstellungen aktualisieren', + }, } export default translation diff --git a/web/i18n/de-DE/share-app.ts b/web/i18n/de-DE/share.ts similarity index 100% rename from web/i18n/de-DE/share-app.ts rename to web/i18n/de-DE/share.ts diff --git a/web/i18n/en-US/plugin.ts b/web/i18n/en-US/plugin.ts index 5d05b335c6..952fc37a64 100644 --- a/web/i18n/en-US/plugin.ts +++ b/web/i18n/en-US/plugin.ts @@ -127,6 +127,56 @@ const translation = { admins: 'Admins', noone: 'No one', }, + autoUpdate: { + automaticUpdates: 'Automatic updates', + updateTime: 'Update time', + specifyPluginsToUpdate: 'Specify plugins to update', + strategy: { + disabled: { + name: 'Disabled', + description: 'Plugins will not auto-update', + }, + fixOnly: { + name: 'Fix Only', + description: 'Auto-update for patch versions only (e.g., 1.0.1 → 1.0.2). 
Minor version changes won\'t trigger updates.', + selectedDescription: 'Auto-update for patch versions only', + }, + latest: { + name: 'Latest', + description: 'Always update to latest version', + selectedDescription: 'Always update to latest version', + }, + }, + updateTimeTitle: 'Update time', + upgradeMode: { + all: 'Update all', + exclude: 'Exclude selected', + partial: 'Only selected', + }, + upgradeModePlaceholder: { + exclude: 'Selected plugins will not auto-update', + partial: 'Only selected plugins will auto-update. No plugins are currently selected, so no plugins will auto-update.', + }, + excludeUpdate: 'The following {{num}} plugins will not auto-update', + partialUPdate: 'Only the following {{num}} plugins will auto-update', + operation: { + clearAll: 'Clear all', + select: 'Select plugins', + }, + nextUpdateTime: 'Next auto-update: {{time}}', + pluginDowngradeWarning: { + title: 'Plugin Downgrade', + description: 'Auto-update is currently enabled for this plugin. Downgrading the version may cause your changes to be overwritten during the next automatic update.', + downgrade: 'Downgrade anyway', + exclude: 'Exclude from auto-update', + }, + noPluginPlaceholder: { + noFound: 'No plugins were found', + noInstalled: 'No plugins installed', + }, + updateSettings: 'Update Settings', + changeTimezone: 'To change time zone, go to Settings', + }, pluginInfoModal: { title: 'Plugin info', repository: 'Repository', diff --git a/web/i18n/en-US/share-app.ts b/web/i18n/en-US/share.ts similarity index 100% rename from web/i18n/en-US/share-app.ts rename to web/i18n/en-US/share.ts diff --git a/web/i18n/es-ES/common.ts b/web/i18n/es-ES/common.ts index 337232bea0..eba82dd384 100644 --- a/web/i18n/es-ES/common.ts +++ b/web/i18n/es-ES/common.ts @@ -690,6 +690,7 @@ const translation = { license: { expiring: 'Caduca en un día', expiring_plural: 'Caducando en {{count}} días', + unlimited: 'Ilimitado', }, pagination: { perPage: 'Elementos por página', diff --git 
a/web/i18n/es-ES/dataset.ts b/web/i18n/es-ES/dataset.ts index 16745b56d7..e27fe0cbf9 100644 --- a/web/i18n/es-ES/dataset.ts +++ b/web/i18n/es-ES/dataset.ts @@ -204,6 +204,7 @@ const translation = { name: 'Nombre', description: 'Puedes gestionar todos los metadatos en este conocimiento aquí. Las modificaciones se sincronizarán en todos los documentos.', disabled: 'desactivar', + builtIn: 'Integrado', }, documentMetadata: { technicalParameters: 'Parámetros técnicos', diff --git a/web/i18n/es-ES/plugin.ts b/web/i18n/es-ES/plugin.ts index 630ff3e405..6299684851 100644 --- a/web/i18n/es-ES/plugin.ts +++ b/web/i18n/es-ES/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpointsDocLink: 'Ver el documento', endpointsEmpty: 'Haga clic en el botón \'+\' para agregar un punto de conexión', configureApp: 'Configurar la aplicación', + deprecation: { + reason: { + ownershipTransferred: 'propiedad transferida', + noMaintainer: 'sin mantenedor', + businessAdjustments: 'ajustes comerciales', + }, + noReason: 'Este complemento ha sido descontinuado y ya no se actualizará.', + onlyReason: 'Este complemento ha sido desaprobado debido a {{deprecatedReason}} y ya no se actualizará.', + fullMessage: 'Este complemento ha sido descontinuado debido a {{deprecatedReason}}, y ya no será actualizado. Por favor, utilice {{-alternativePluginId}} en su lugar.', + }, }, debugInfo: { title: 'Depuración', @@ -235,6 +245,57 @@ const translation = { saveOnly: 'Guardar solo', clientInfo: 'Como no se encontraron secretos de cliente del sistema para este proveedor de herramientas, se requiere configurarlo manualmente. 
Para redirect_uri, por favor utiliza', + oauthClientSettings: 'Configuración del cliente OAuth', + default: 'Predeterminado', + }, + deprecated: 'Obsoleto', + autoUpdate: { + strategy: { + disabled: { + description: 'Los plugins no se actualizarán automáticamente', + name: 'Deshabilitado', + }, + fixOnly: { + name: 'Arreglar Solo', + selectedDescription: 'Actualización automática solo para versiones de parches', + }, + latest: { + selectedDescription: 'Siempre actualiza a la última versión', + description: 'Siempre actualiza a la última versión', + name: 'Último', + }, + }, + upgradeMode: { + partial: 'Solo seleccionado', + all: 'Actualizar todo', + exclude: 'Excluir seleccionado', + }, + upgradeModePlaceholder: { + exclude: 'Los plugins seleccionados no se actualizarán automáticamente', + partial: 'Solo los plugins seleccionados se actualizarán automáticamente. Actualmente no hay plugins seleccionados, por lo que no se actualizarán automáticamente.', + }, + operation: { + clearAll: 'Borrar todo', + select: 'Seleccionar complementos', + }, + pluginDowngradeWarning: { + title: 'Degradar plugin', + exclude: 'Excluir de la actualización automática', + downgrade: 'De todas formas, degradar', + description: 'La actualización automática está actualmente habilitada para este complemento. 
Downgradear la versión puede hacer que tus cambios se sobrescriban durante la próxima actualización automática.', + }, + noPluginPlaceholder: { + noFound: 'No se encontraron complementos', + noInstalled: 'No hay plugins instalados', + }, + updateTimeTitle: 'Hora de actualización', + nextUpdateTime: 'Próxima autoactualización: {{time}}', + specifyPluginsToUpdate: 'Especifique qué complementos actualizar', + updateTime: 'Actualizar tiempo', + updateSettings: 'Actualizar configuraciones', + excludeUpdate: 'Los siguientes {{num}} complementos no se actualizarán automáticamente', + partialUPdate: 'Solo los siguientes {{num}} complementos se actualizarán automáticamente', + changeTimezone: 'Para cambiar la zona horaria, ve a Configuración.', + automaticUpdates: 'Actualizaciones automáticas', }, } diff --git a/web/i18n/es-ES/share-app.ts b/web/i18n/es-ES/share.ts similarity index 100% rename from web/i18n/es-ES/share-app.ts rename to web/i18n/es-ES/share.ts diff --git a/web/i18n/es-ES/workflow.ts b/web/i18n/es-ES/workflow.ts index 535f92b0b1..3c509934df 100644 --- a/web/i18n/es-ES/workflow.ts +++ b/web/i18n/es-ES/workflow.ts @@ -475,10 +475,12 @@ const translation = { options: { disabled: { subTitle: 'No habilitar el filtrado de metadatos', + title: 'Deshabilitado', }, automatic: { subTitle: 'Generar automáticamente condiciones de filtrado de metadatos basadas en la consulta del usuario', desc: 'Generar automáticamente condiciones de filtrado de metadatos basadas en la variable de consulta', + title: 'Automático', }, manual: { title: 'Manual', @@ -898,6 +900,7 @@ const translation = { error_other: '{{count}} Errores', loopVariables: 'Variables de bucle', variableName: 'Nombre de Variable', + input: 'Entrada', }, }, tracing: { @@ -916,6 +919,7 @@ const translation = { onlyShowNamedVersions: 'Solo muestra versiones nombradas', empty: 'No se encontró un historial de versiones coincidente.', reset: 'Restablecer filtro', + all: 'Todo', }, editField: { titleLengthLimit: 'El 
título no puede exceder {{limit}} caracteres', @@ -939,6 +943,7 @@ const translation = { deletionTip: 'La eliminación es irreversible, por favor confirma.', currentDraft: 'Borrador Actual', editVersionInfo: 'Editar información de la versión', + latest: 'Último', }, debug: { noData: { @@ -951,6 +956,7 @@ const translation = { stop: 'Detén la carrera', normal: 'Inspeccionar Variable', cached: 'Ver variables en caché', + clear: 'Claro', }, envNode: 'Medio ambiente', chatNode: 'Conversación', @@ -963,6 +969,7 @@ const translation = { resetConversationVar: 'Restablecer la variable de conversación al valor predeterminado', clearNode: 'Limpiar variable en caché', emptyTip: 'Después de recorrer un nodo en el lienzo o ejecutar un nodo paso a paso, puedes ver el valor actual de la variable del nodo en Inspección de Variables.', + edited: 'Editado', }, lastRunTab: 'Última ejecución', settingsTab: 'Ajustes', diff --git a/web/i18n/fa-IR/plugin.ts b/web/i18n/fa-IR/plugin.ts index aebe39d508..5e1cbe02bf 100644 --- a/web/i18n/fa-IR/plugin.ts +++ b/web/i18n/fa-IR/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpointModalDesc: 'پس از پیکربندی، می توان از ویژگی های ارائه شده توسط افزونه از طریق نقاط پایانی API استفاده کرد.', switchVersion: 'نسخه سوئیچ', endpointDeleteContent: 'آیا می خواهید {{name}} را حذف کنید؟', + deprecation: { + reason: { + ownershipTransferred: 'مالکیت منتقل شد', + businessAdjustments: 'تنظیمات کسب و کار', + noMaintainer: 'بدون نگهدارنده', + }, + noReason: 'این افزونه منسوخ شده است و دیگر به روز رسانی نخواهد شد.', + onlyReason: 'این افزونه به دلیل {{deprecatedReason}} منسوخ شده و دیگر به‌روزرسانی نخواهد شد.', + fullMessage: 'این افزونه به دلیل {{deprecatedReason}} منسوخ شده است و دیگر به‌روزرسانی نخواهد شد. 
لطفا به‌جای آن از {{-alternativePluginId}} استفاده کنید.', + }, }, debugInfo: { title: 'اشکال زدایی', @@ -237,6 +247,56 @@ const translation = { clientInfo: 'از آنجایی که هیچ راز مشتری سیستم برای این ارائه‌دهنده ابزار پیدا نشد، تنظیم دستی آن ضروری است، لطفاً برای redirect_uri از', useApiAuthDesc: 'پس از پیکربندی اعتبارنامه‌ها، تمامی اعضای درون فضای کاری می‌توانند از این ابزار هنگام نظم‌دهی به برنامه‌ها استفاده کنند.', }, + deprecated: 'منسوخ شده', + autoUpdate: { + strategy: { + disabled: { + name: 'ناتوان', + description: 'پلاگین‌ها به‌طور خودکار به‌روزرسانی نخواهند شد', + }, + fixOnly: { + name: 'فقط تعمیر کنید', + selectedDescription: 'به‌روزرسانی خودکار تنها برای نسخه‌های وصله', + }, + latest: { + name: 'جدیدترین', + selectedDescription: 'همیشه به آخرین نسخه بروزرسانی کنید', + description: 'همیشه به آخرین نسخه بروزرسانی کنید', + }, + }, + upgradeMode: { + all: 'همه را بروزرسانی کن', + partial: 'فقط انتخاب شده', + exclude: 'انتخاب شده را استثنا کن', + }, + upgradeModePlaceholder: { + exclude: 'افزونه‌های انتخاب شده به‌صورت خودکار به‌روزرسانی نخواهند شد', + partial: 'فقط پلاگین‌های انتخاب شده به‌روزرسانی خودکار خواهند داشت. در حال حاضر هیچ پلاگینی انتخاب نشده است، بنابراین هیچ پلاگینی به‌روزرسانی خودکار نخواهد شد.', + }, + operation: { + select: 'افزونه‌ها را انتخاب کنید', + clearAll: 'همه را پاک کن', + }, + pluginDowngradeWarning: { + title: 'کاهش نسخه افزونه', + downgrade: 'به هر حال تنزل دهید', + exclude: 'از بروزرسانی خودکار مستثنی شود', + description: 'به‌روزرسانی خودکار برای این افزونه در حال حاضر فعال است. 
کاهش نسخه ممکن است باعث شود تغییرات شما در حین به‌روزرسانی خودکار بعدی نادیده گرفته شود.', + }, + noPluginPlaceholder: { + noFound: 'هیچ افزونه‌ای یافت نشد', + noInstalled: 'هیچ افزونه‌ای نصب نشده است', + }, + updateTimeTitle: 'زمان به‌روزرسانی', + specifyPluginsToUpdate: 'ماژول‌هایی را برای به‌روزرسانی مشخص کنید', + updateTime: 'زمان به‌روزرسانی', + automaticUpdates: 'بروز رسانی خودکار', + updateSettings: 'تنظیمات را به‌روزرسانی کنید', + changeTimezone: 'برای تغییر منطقه زمانی، به تنظیمات بروید', + excludeUpdate: 'پلاگین‌های زیر {{num}} به‌طور خودکار به‌روزرسانی نخواهند شد', + nextUpdateTime: 'به‌روزرسانی خودکار بعدی: {{time}}', + partialUPdate: 'تنها {{num}} پلاگین زیر به‌طور خودکار به‌روزرسانی خواهد شد.', + }, } export default translation diff --git a/web/i18n/fa-IR/share-app.ts b/web/i18n/fa-IR/share.ts similarity index 100% rename from web/i18n/fa-IR/share-app.ts rename to web/i18n/fa-IR/share.ts diff --git a/web/i18n/fr-FR/plugin.ts b/web/i18n/fr-FR/plugin.ts index dcab0a1ead..255171058a 100644 --- a/web/i18n/fr-FR/plugin.ts +++ b/web/i18n/fr-FR/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpointModalTitle: 'Configurer le point de terminaison', serviceOk: 'Service OK', endpointModalDesc: 'Une fois configuré, les fonctionnalités fournies par le plugin via les points de terminaison de l’API peuvent être utilisées.', + deprecation: { + reason: { + ownershipTransferred: 'propriété transférée', + businessAdjustments: 'ajustements commerciaux', + noMaintainer: 'aucun mainteneur', + }, + noReason: 'Ce plugin a été abandonné et ne sera plus mis à jour.', + onlyReason: 'Ce plugin a été déprécié en raison de {{deprecatedReason}} et ne sera plus mis à jour.', + fullMessage: 'Ce plugin a été déprécié en raison de {{deprecatedReason}}, et ne sera plus mis à jour. 
Veuillez utiliser {{-alternativePluginId}} à la place.', + }, }, debugInfo: { title: 'Débogage', @@ -237,6 +247,56 @@ const translation = { authorization: 'Autorisation', useApi: 'Utilisez la clé API', }, + deprecated: 'Obsolète', + autoUpdate: { + strategy: { + disabled: { + description: 'Les plugins ne se mettront pas à jour automatiquement', + name: 'désactivé', + }, + fixOnly: { + selectedDescription: 'Mise à jour automatique uniquement pour les versions de correctif', + name: 'Réparer seulement', + }, + latest: { + name: 'Dernier', + selectedDescription: 'Mettez toujours à jour vers la dernière version', + description: 'Mettez toujours à jour vers la dernière version', + }, + }, + upgradeMode: { + exclude: 'Exclure sélectionné', + all: 'Mettre à jour tout', + partial: 'Seulement sélectionné', + }, + upgradeModePlaceholder: { + partial: 'Seuls les plugins sélectionnés se mettront à jour automatiquement. Aucun plugin n\'est actuellement sélectionné, donc aucun plugin ne se mettra à jour automatiquement.', + exclude: 'Les plugins sélectionnés ne se mettront pas à jour automatiquement.', + }, + operation: { + clearAll: 'Tout effacer', + select: 'Sélectionner des plugins', + }, + pluginDowngradeWarning: { + title: 'Baisse de version du plugin', + exclude: 'Exclure de la mise à jour automatique', + downgrade: 'Dégradez de toute façon', + description: 'La mise à jour automatique est actuellement activée pour ce plugin. 
Le fait de rétrograder la version peut entraîner la perte de vos modifications lors de la prochaine mise à jour automatique.', + }, + noPluginPlaceholder: { + noInstalled: 'Aucun plugin installé', + noFound: 'Aucun plugin n\'a été trouvé', + }, + updateTime: 'Temps de mise à jour', + specifyPluginsToUpdate: 'Spécifiez les plugins à mettre à jour', + updateTimeTitle: 'Temps de mise à jour', + changeTimezone: 'Pour changer de fuseau horaire, allez dans Paramètres', + automaticUpdates: 'Mises à jour automatiques', + updateSettings: 'Mettre à jour les paramètres', + excludeUpdate: 'Les {{num}} plugins suivants ne se mettront pas à jour automatiquement', + partialUPdate: 'Seuls les {{num}} plugins suivants se mettront à jour automatiquement', + nextUpdateTime: 'Prochaine mise à jour automatique : {{time}}', + }, } export default translation diff --git a/web/i18n/fr-FR/share-app.ts b/web/i18n/fr-FR/share.ts similarity index 100% rename from web/i18n/fr-FR/share-app.ts rename to web/i18n/fr-FR/share.ts diff --git a/web/i18n/hi-IN/plugin.ts b/web/i18n/hi-IN/plugin.ts index cd5540d5a9..ae4547421c 100644 --- a/web/i18n/hi-IN/plugin.ts +++ b/web/i18n/hi-IN/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpointModalTitle: 'एंडपॉइंट सेटअप करें', strategyNum: '{{num}} {{रणनीति}} शामिल', endpointsTip: 'यह प्लगइन एंडपॉइंट्स के माध्यम से विशिष्ट कार्यक्षमताएँ प्रदान करता है, और आप वर्तमान कार्यक्षेत्र के लिए कई एंडपॉइंट सेट कॉन्फ़िगर कर सकते हैं।', + deprecation: { + reason: { + noMaintainer: 'कोई देखभाल करने वाला नहीं', + ownershipTransferred: 'स्वामित्व स्थानांतरित किया गया', + businessAdjustments: 'व्यवसाय समायोजन', + }, + noReason: 'यह प्लगइन अप्रचलित हो गया है और इसे अब अपडेट नहीं किया जाएगा।', + onlyReason: 'इस प्लगइन को {{deprecatedReason}} के कारण अमान्य कर दिया गया है और इसे अब अपडेट नहीं किया जाएगा।', + fullMessage: 'इस प्लगइन को {{deprecatedReason}} के कारण अमान्य कर दिया गया है, और इसे अब अपडेट नहीं किया जाएगा। कृपया इसके बजाय {{-alternativePluginId}} का उपयोग 
करें।', + }, }, debugInfo: { viewDocs: 'दस्तावेज़ देखें', @@ -237,6 +247,56 @@ const translation = { useApiAuthDesc: 'क्रेडेंशियल्स कॉन्फ़िगर करने के बाद, कार्यक्षेत्र के सभी सदस्यों को एप्लिकेशन को व्यवस्थित करते समय इस उपकरण का उपयोग करने की अनुमति होती है।', clientInfo: 'चूंकि इस टूल प्रदाता के लिए कोई सिस्टम क्लाइंट रहस्य नहीं पाए गए हैं, इसलिए इसे मैन्युअल रूप से सेटअप करना आवश्यक है, कृपया redirect_uri का उपयोग करें', }, + deprecated: 'अनुशंसित नहीं', + autoUpdate: { + strategy: { + disabled: { + name: 'अक्षम', + description: 'प्लगइन्स स्वचालित रूप से अपडेट नहीं होंगे', + }, + fixOnly: { + name: 'केवल ठीक करें', + selectedDescription: 'केवल पैच संस्करणों के लिए स्वचालित अपडेट', + }, + latest: { + name: 'नवीनतम', + selectedDescription: 'हमेशा नवीनतम संस्करण पर अद्यतन करें', + description: 'हमेशा नवीनतम संस्करण पर अद्यतन करें', + }, + }, + upgradeMode: { + all: 'सभी अपडेट करें', + partial: 'केवल चयनित', + exclude: 'चुने हुए को बाहर करें', + }, + upgradeModePlaceholder: { + partial: 'केवल चयनित प्लगइन्स स्वतः अपडेट होंगे। वर्तमान में कोई प्लगइन चयनित नहीं है, इसलिए कोई प्लगइन स्वतः अपडेट नहीं होगा।', + exclude: 'चुने हुए प्लगइन्स अपने आप अपडेट नहीं होंगे', + }, + operation: { + clearAll: 'सभी हटाएं', + select: 'प्लगइन्स चुनें', + }, + pluginDowngradeWarning: { + downgrade: 'फिर भी डाउनग्रेड करें', + title: 'प्लगइन डाउनग्रेड', + exclude: 'स्वतः अपडेट से बाहर करें', + description: 'इस प्लगइन के लिए ऑटो-अपडेट वर्तमान में सक्षम है। संस्करण को डाउनग्रेड करने से आपके परिवर्तनों को अगली स्वचालित अद्यतन के दौरान ओवरराइट किया जा सकता है।', + }, + noPluginPlaceholder: { + noFound: 'कोई प्लगइन्स नहीं मिले', + noInstalled: 'कोई प्लगइन स्थापित नहीं है', + }, + updateTimeTitle: 'अद्यतन समय', + updateSettings: 'सेटिंग्स अपडेट करें', + automaticUpdates: 'स्वचालित अपडेट', + partialUPdate: 'केवल निम्नलिखित {{num}} प्लगइन्स स्वचालित रूप से अपडेट होंगे', + nextUpdateTime: 'अगली ऑटो-अपडेट: {{time}}', + updateTime: 'अद्यतन समय', + specifyPluginsToUpdate: 'अपडेट करने के लिए प्लगइन्स 
निर्दिष्ट करें', + changeTimezone: 'समय क्षेत्र बदलने के लिए, सेटिंग्स पर जाएं', + excludeUpdate: 'निम्नलिखित {{num}} प्लगइन्स स्वचालित रूप से अपडेट नहीं होंगे', + }, } export default translation diff --git a/web/i18n/hi-IN/share-app.ts b/web/i18n/hi-IN/share.ts similarity index 100% rename from web/i18n/hi-IN/share-app.ts rename to web/i18n/hi-IN/share.ts diff --git a/web/i18n/i18next-config.ts b/web/i18n/i18next-config.ts index 13dfeda9c9..e7dd625409 100644 --- a/web/i18n/i18next-config.ts +++ b/web/i18n/i18next-config.ts @@ -1,67 +1,87 @@ 'use client' import i18n from 'i18next' +import { camelCase } from 'lodash-es' import { initReactI18next } from 'react-i18next' -import { LanguagesSupported } from '@/i18n/language' - -const requireSilent = (lang: string) => { +const requireSilent = async (lang: string, namespace: string) => { let res try { - res = require(`./${lang}/education`).default + res = (await import(`./${lang}/${namespace}`)).default } catch { - res = require('./en-US/education').default + res = (await import(`./en-US/${namespace}`)).default } return res } -const loadLangResources = (lang: string) => ({ - translation: { - common: require(`./${lang}/common`).default, - layout: require(`./${lang}/layout`).default, - login: require(`./${lang}/login`).default, - register: require(`./${lang}/register`).default, - app: require(`./${lang}/app`).default, - appOverview: require(`./${lang}/app-overview`).default, - appDebug: require(`./${lang}/app-debug`).default, - appApi: require(`./${lang}/app-api`).default, - appLog: require(`./${lang}/app-log`).default, - appAnnotation: require(`./${lang}/app-annotation`).default, - share: require(`./${lang}/share-app`).default, - dataset: require(`./${lang}/dataset`).default, - datasetDocuments: require(`./${lang}/dataset-documents`).default, - datasetHitTesting: require(`./${lang}/dataset-hit-testing`).default, - datasetSettings: require(`./${lang}/dataset-settings`).default, - datasetCreation: 
require(`./${lang}/dataset-creation`).default, - datasetPipeline: require(`./${lang}/dataset-pipeline`).default, - explore: require(`./${lang}/explore`).default, - billing: require(`./${lang}/billing`).default, - custom: require(`./${lang}/custom`).default, - tools: require(`./${lang}/tools`).default, - workflow: require(`./${lang}/workflow`).default, - runLog: require(`./${lang}/run-log`).default, - plugin: require(`./${lang}/plugin`).default, - pluginTags: require(`./${lang}/plugin-tags`).default, - time: require(`./${lang}/time`).default, - pipeline: require(`./${lang}/pipeline`).default, - education: requireSilent(lang), - }, -}) +const NAMESPACES = [ + 'app-annotation', + 'app-api', + 'app-debug', + 'app-log', + 'app-overview', + 'app', + 'billing', + 'common', + 'custom', + 'dataset-creation', + 'dataset-documents', + 'dataset-hit-testing', + 'dataset-pipeline', + 'dataset-settings', + 'dataset', + 'education', + 'explore', + 'layout', + 'login', + 'pipeline', + 'plugin-tags', + 'plugin', + 'register', + 'run-log', + 'share', + 'time', + 'tools', + 'workflow', +] -type Resource = Record> -// Automatically generate the resources object -export const resources = LanguagesSupported.reduce((acc, lang) => { - acc[lang] = loadLangResources(lang) - return acc -}, {}) +export const loadLangResources = async (lang: string) => { + const modules = await Promise.all(NAMESPACES.map(ns => requireSilent(lang, ns))) + const resources = modules.reduce((acc, mod, index) => { + acc[camelCase(NAMESPACES[index])] = mod + return acc + }, {} as Record) + return resources +} -i18n.use(initReactI18next) - .init({ - lng: undefined, - fallbackLng: 'en-US', - resources, - }) +const getFallbackTranslation = () => { + const resources = NAMESPACES.reduce((acc, ns, index) => { + acc[camelCase(NAMESPACES[index])] = require(`./en-US/${ns}`).default + return acc + }, {} as Record) + return { + translation: resources, + } +} + +if (!i18n.isInitialized) { + i18n.use(initReactI18next) + .init({ + 
lng: undefined, + fallbackLng: 'en-US', + resources: { + 'en-US': getFallbackTranslation(), + }, + }) +} + +export const changeLanguage = async (lng?: string) => { + const resolvedLng = lng ?? 'en-US' + const resource = await loadLangResources(resolvedLng) + if (!i18n.hasResourceBundle(resolvedLng, 'translation')) + i18n.addResourceBundle(resolvedLng, 'translation', resource, true, true) + await i18n.changeLanguage(resolvedLng) +} -export const changeLanguage = i18n.changeLanguage export default i18n diff --git a/web/i18n/index.ts b/web/i18n/index.ts index eb49759097..27ed3022ad 100644 --- a/web/i18n/index.ts +++ b/web/i18n/index.ts @@ -11,9 +11,9 @@ export const i18n = { export type Locale = typeof i18n['locales'][number] -export const setLocaleOnClient = (locale: Locale, reloadPage = true) => { +export const setLocaleOnClient = async (locale: Locale, reloadPage = true) => { Cookies.set(LOCALE_COOKIE_NAME, locale, { expires: 365 }) - changeLanguage(locale) + await changeLanguage(locale) reloadPage && location.reload() } diff --git a/web/i18n/it-IT/app.ts b/web/i18n/it-IT/app.ts index a874d2b71f..63a25dccc6 100644 --- a/web/i18n/it-IT/app.ts +++ b/web/i18n/it-IT/app.ts @@ -232,6 +232,7 @@ const translation = { structuredTip: 'Le Uscite Strutturate sono una funzione che garantisce che il modello generi sempre risposte che aderiscano al tuo Schema JSON fornito.', notConfiguredTip: 'L\'output strutturato non è stato ancora configurato.', modelNotSupportedTip: 'Il modello attuale non supporta questa funzione e viene automaticamente downgradato a iniezione di prompt.', + required: 'Necessario', }, accessItemsDescription: { anyone: 'Chiunque può accedere all\'app web', diff --git a/web/i18n/it-IT/dataset.ts b/web/i18n/it-IT/dataset.ts index 9f66ee8e43..c8d5482a4d 100644 --- a/web/i18n/it-IT/dataset.ts +++ b/web/i18n/it-IT/dataset.ts @@ -211,6 +211,7 @@ const translation = { deleteContent: 'Sei sicuro di voler eliminare i metadati "{{name}}"?', builtInDescription: 'I 
metadati incorporati vengono estratti e generati automaticamente. Devono essere abilitati prima dell\'uso e non possono essere modificati.', description: 'Puoi gestire tutti i metadati in questa conoscenza qui. Le modifiche saranno sincronizzate con ogni documento.', + builtIn: 'Integrato', }, documentMetadata: { documentInformation: 'Informazioni sul documento', diff --git a/web/i18n/it-IT/plugin.ts b/web/i18n/it-IT/plugin.ts index f2aa0977f0..e7b6b147fa 100644 --- a/web/i18n/it-IT/plugin.ts +++ b/web/i18n/it-IT/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpointDeleteTip: 'Rimuovi punto finale', endpointsEmpty: 'Fare clic sul pulsante \'+\' per aggiungere un punto finale', actionNum: '{{num}} {{azione}} INCLUSO', + deprecation: { + reason: { + noMaintainer: 'nessun manutentore', + ownershipTransferred: 'proprietà trasferita', + businessAdjustments: 'adeguamenti aziendali', + }, + onlyReason: 'Questo plugin è stato deprecato a causa di {{deprecatedReason}} e non verrà più aggiornato.', + fullMessage: 'Questo plugin è stato deprecato a causa di {{deprecatedReason}} e non verrà più aggiornato. Si prega di utilizzare {{-alternativePluginId}} invece.', + noReason: 'Questo plugin è stato deprecato e non sarà più aggiornato.', + }, }, debugInfo: { title: 'Debug', @@ -237,6 +247,56 @@ const translation = { useApiAuth: 'Configurazione dell\'autorizzazione della chiave API', clientInfo: 'Poiché non sono stati trovati segreti client di sistema per questo fornitore di strumenti, è necessario configurarlo manualmente. 
Per redirect_uri, si prega di utilizzare', + }, + deprecated: 'Deprecato', + autoUpdate: { + strategy: { + disabled: { + name: 'Disabilitato', + description: 'I plugin non si aggiorneranno automaticamente', + }, + fixOnly: { + name: 'Ripara solo', + selectedDescription: 'Aggiornamento automatico solo per versioni patch', + }, + latest: { + selectedDescription: 'Aggiorna sempre all\'ultima versione', + description: 'Aggiorna sempre all\'ultima versione', + name: 'Ultimo', + }, + }, + upgradeMode: { + exclude: 'Escludi selezionato', + all: 'Aggiorna tutto', + partial: 'Solo selezionati', + }, + upgradeModePlaceholder: { + exclude: 'I plugin selezionati non verranno aggiornati automaticamente', + partial: 'Solo i plugin selezionati si aggiorneranno automaticamente. Attualmente non ci sono plugin selezionati, quindi nessun plugin si aggiornerà automaticamente.', + }, + operation: { + clearAll: 'Cancella tutto', + select: 'Seleziona i plugin', + }, + pluginDowngradeWarning: { + title: 'Downgrade del plugin', + downgrade: 'Comunque esegui il downgrade', + exclude: 'Escludi dall\'aggiornamento automatico', + description: 'L\'aggiornamento automatico è attualmente abilitato per questo plugin. 
Il downgrade della versione potrebbe causare la sovrascrittura delle tue modifiche durante il prossimo aggiornamento automatico.', + }, + noPluginPlaceholder: { + noFound: 'Nessun plugin trovato', + noInstalled: 'Nessun plugin installato', + }, + specifyPluginsToUpdate: 'Specifica i plugin da aggiornare', + updateTime: 'Tempo di aggiornamento', + automaticUpdates: 'Aggiornamenti automatici', + updateSettings: 'Aggiorna impostazioni', + nextUpdateTime: 'Prossimo aggiornamento automatico: {{time}}', + partialUPdate: 'Solo i seguenti {{num}} plugin si aggiorneranno automaticamente', + changeTimezone: 'Per cambiare il fuso orario, vai su Impostazioni', + excludeUpdate: 'I seguenti {{num}} plugin non si aggiorneranno automaticamente', + updateTimeTitle: 'Tempo di aggiornamento', + }, } export default translation diff --git a/web/i18n/it-IT/share-app.ts b/web/i18n/it-IT/share.ts similarity index 100% rename from web/i18n/it-IT/share-app.ts rename to web/i18n/it-IT/share.ts diff --git a/web/i18n/it-IT/workflow.ts b/web/i18n/it-IT/workflow.ts index 024ee8b90c..97e4bc14f2 100644 --- a/web/i18n/it-IT/workflow.ts +++ b/web/i18n/it-IT/workflow.ts @@ -497,6 +497,7 @@ const translation = { automatic: { subTitle: 'Genera automaticamente condizioni di filtraggio dei metadati in base alla query dell\'utente', desc: 'Genera automaticamente condizioni di filtraggio dei metadati basate sulla variabile di query', + title: 'Automatico', }, manual: { title: 'Manuale', @@ -611,6 +612,7 @@ const translation = { 'exists': 'Esiste', 'not exists': 'non esiste', 'after': 'dopo', + 'before': 'prima', }, enterValue: 'Inserisci valore', addCondition: 'Aggiungi Condizione', diff --git a/web/i18n/ja-JP/plugin.ts b/web/i18n/ja-JP/plugin.ts index a80cde7e38..38b73a847e 100644 --- a/web/i18n/ja-JP/plugin.ts +++ b/web/i18n/ja-JP/plugin.ts @@ -248,6 +248,55 @@ const translation = { useApiAuthDesc: '認証情報を設定した後、ワークスペース内のすべてのメンバーは、アプリケーションをオーケストレーションする際にこのツールを使用できます。', clientInfo: 
'このツールプロバイダーにシステムクライアントシークレットが見つからないため、手動で設定する必要があります。redirect_uriには、次を使用してください。', }, + autoUpdate: { + strategy: { + disabled: { + name: '無効', + description: 'プラグインは自動更新されません', + }, + fixOnly: { + name: '修正のみ', + selectedDescription: 'パッチバージョンのみの自動更新', + }, + latest: { + name: '最新', + selectedDescription: '常に最新バージョンに更新してください', + description: '常に最新バージョンに更新してください', + }, + }, + upgradeMode: { + partial: '選択されたもののみ', + exclude: '選択したものを除外する', + all: 'すべてを更新する', + }, + upgradeModePlaceholder: { + exclude: '選択されたプラグインは自動更新されません', + partial: '選択されたプラグインのみが自動更新されます。現在選択されているプラグインはないため、プラグインは自動更新されません。', + }, + operation: { + clearAll: 'すべてクリア', + select: 'プラグインを選択する', + }, + pluginDowngradeWarning: { + title: 'プラグインのダウングレード', + downgrade: 'とにかくダウングレードする', + exclude: '自動更新から除外する', + description: 'このプラグインは現在、自動更新が有効です。バージョンをダウングレードすると、次回の自動更新中に変更が上書きされる可能性があります。', + }, + noPluginPlaceholder: { + noInstalled: 'プラグインがインストールされていません', + noFound: 'プラグインが見つかりませんでした', + }, + updateTimeTitle: '更新時刻', + automaticUpdates: '自動更新', + updateTime: '更新時刻', + updateSettings: '設定を更新する', + nextUpdateTime: '次の自動更新: {{time}}', + excludeUpdate: '以下の{{num}}プラグインは自動更新されません', + changeTimezone: 'タイムゾーンを変更するには、設定に移動してください。', + specifyPluginsToUpdate: '更新するプラグインを指定してください', + partialUPdate: '以下の{{num}}プラグインのみが自動更新されます', + }, } export default translation diff --git a/web/i18n/ja-JP/share-app.ts b/web/i18n/ja-JP/share.ts similarity index 100% rename from web/i18n/ja-JP/share-app.ts rename to web/i18n/ja-JP/share.ts diff --git a/web/i18n/ko-KR/plugin.ts b/web/i18n/ko-KR/plugin.ts index 6050448fbf..1f60f1365b 100644 --- a/web/i18n/ko-KR/plugin.ts +++ b/web/i18n/ko-KR/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpoints: '끝점', serviceOk: '서비스 정상', endpointDisableTip: '엔드포인트 비활성화', + deprecation: { + reason: { + ownershipTransferred: '소유권 이전', + businessAdjustments: '사업 조정', + noMaintainer: '유지보수자 없음', + }, + noReason: '이 플러그인은 더 이상 지원되지 않으며 업데이트되지 않을 것입니다.', + onlyReason: '이 플러그인은 
{{deprecatedReason}}로 인해 사용 중단되었으며 더 이상 업데이트되지 않습니다.', + fullMessage: '이 플러그인은 {{deprecatedReason}}로 인해 사용 중단되었으며 더 이상 업데이트되지 않습니다. 대신 {{-alternativePluginId}}를 사용하십시오.', + }, }, debugInfo: { title: '디버깅', @@ -237,6 +247,56 @@ const translation = { useApiAuthDesc: '자격증명을 구성한 후에는 작업 공간 내의 모든 구성원이 애플리케이션을 조정할 때 이 도구를 사용할 수 있습니다.', clientInfo: '이 도구 공급자에 대한 시스템 클라이언트 비밀이 발견되지 않았으므로 수동으로 설정해야 하며, redirect_uri는 다음을 사용하십시오.', }, + deprecated: '사용 중단됨', + autoUpdate: { + strategy: { + disabled: { + name: '비활성화됨', + description: '플러그인이 자동으로 업데이트되지 않습니다.', + }, + fixOnly: { + name: '수정만 하기', + selectedDescription: '패치 버전만 자동 업데이트', + }, + latest: { + name: '최신', + description: '항상 최신 버전으로 업데이트하세요.', + selectedDescription: '항상 최신 버전으로 업데이트하세요.', + }, + }, + upgradeMode: { + partial: '선택된 것만', + all: '모두 업데이트하기', + exclude: '선택한 항목 제외', + }, + upgradeModePlaceholder: { + partial: '선택된 플러그인만 자동 업데이트됩니다. 현재 선택된 플러그인이 없으므로 자동 업데이트되는 플러그인은 없습니다.', + exclude: '선택한 플러그인은 자동으로 업데이트되지 않습니다.', + }, + operation: { + clearAll: '모두 지우기', + select: '플러그인을 선택하세요', + }, + pluginDowngradeWarning: { + exclude: '자동 업데이트에서 제외', + title: '플러그인 다운그레이드', + downgrade: '어쨌든 다운그레이드', + description: '이 플러그인은 현재 자동 업데이트가 활성화되어 있습니다. 
버전을 다운그레이드하면 다음 자동 업데이트 중에 변경 사항이 덮어써질 수 있습니다.', + }, + noPluginPlaceholder: { + noFound: '플러그인이 없습니다.', + noInstalled: '설치된 플러그인이 없습니다.', + }, + updateTimeTitle: '업데이트 시간', + automaticUpdates: '자동 업데이트', + updateTime: '업데이트 시간', + nextUpdateTime: '다음 자동 업데이트: {{time}}', + updateSettings: '설정 업데이트', + partialUPdate: '다음 {{num}} 플러그인만 자동 업데이트됩니다.', + changeTimezone: '시간대를 변경하려면 설정으로 이동하세요.', + specifyPluginsToUpdate: '업데이트할 플러그인을 지정하십시오.', + excludeUpdate: '다음 {{num}} 플러그인은 자동 업데이트되지 않습니다.', + }, } export default translation diff --git a/web/i18n/ko-KR/share-app.ts b/web/i18n/ko-KR/share.ts similarity index 100% rename from web/i18n/ko-KR/share-app.ts rename to web/i18n/ko-KR/share.ts diff --git a/web/i18n/pl-PL/plugin.ts b/web/i18n/pl-PL/plugin.ts index a4b194b757..10944a339b 100644 --- a/web/i18n/pl-PL/plugin.ts +++ b/web/i18n/pl-PL/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpointDeleteContent: 'Czy chcesz usunąć {{name}}?', endpointsTip: 'Ta wtyczka zapewnia określone funkcje za pośrednictwem punktów końcowych i można skonfigurować wiele zestawów punktów końcowych dla bieżącego obszaru roboczego.', modelNum: '{{liczba}} MODELE W ZESTAWIE', + deprecation: { + reason: { + businessAdjustments: 'dostosowania biznesowe', + ownershipTransferred: 'własność przekazana', + noMaintainer: 'brak opiekuna', + }, + onlyReason: 'Ten plugin został wycofany z użycia z powodu {{deprecatedReason}} i nie będzie już aktualizowany.', + noReason: 'Ten wtyczka została przestarzała i nie będzie dłużej aktualizowana.', + fullMessage: 'Ten plugin został wycofany z użycia z powodu {{deprecatedReason}} i nie będzie już aktualizowany. 
Proszę użyć zamiast tego {{-alternativePluginId}}.', + }, }, debugInfo: { viewDocs: 'Wyświetlanie dokumentów', @@ -237,6 +247,56 @@ const translation = { useApiAuthDesc: 'Po skonfigurowaniu poświadczeń wszyscy członkowie w przestrzeni roboczej mogą korzystać z tego narzędzia podczas orkiestracji aplikacji.', clientInfo: 'Ponieważ nie znaleziono tajemnic klientów systemu dla tego dostawcy narzędzi, wymagane jest ręczne skonfigurowanie, dla redirect_uri proszę użyć', }, + deprecated: 'Nieaktualny', + autoUpdate: { + strategy: { + disabled: { + description: 'Wtyczki nie będą się automatycznie aktualizować', + name: 'Niepełnosprawny', + }, + fixOnly: { + selectedDescription: 'Automatyczna aktualizacja tylko dla wersji poprawek', + name: 'Napraw tylko', + }, + latest: { + name: 'Najświeższy', + description: 'Zawsze aktualizuj do najnowszej wersji', + selectedDescription: 'Zawsze aktualizuj do najnowszej wersji', + }, + }, + upgradeMode: { + all: 'Zaktualizuj wszystko', + partial: 'Tylko wybrane', + exclude: 'Wyłącz wybrane', + }, + upgradeModePlaceholder: { + exclude: 'Wybrane wtyczki nie będą aktualizować się automatycznie.', + partial: 'Tylko wybrane wtyczki będą się aktualizować automatycznie. Obecnie nie wybrano żadnych wtyczek, więc żadna wtyczka nie będzie się automatycznie aktualizować.', + }, + operation: { + clearAll: 'Wyczyść wszystko', + select: 'Wybierz wtyczki', + }, + pluginDowngradeWarning: { + exclude: 'Wyłącz z automatycznej aktualizacji', + downgrade: 'Zrób downgrade tak czy inaczej', + title: 'Obniżenie wersji wtyczki', + description: 'Automatyczna aktualizacja jest obecnie włączona dla tej wtyczki. 
Obniżenie wersji może spowodować, że twoje zmiany zostaną nadpisane podczas następnej automatycznej aktualizacji.', + }, + noPluginPlaceholder: { + noInstalled: 'Brak zainstalowanych wtyczek', + noFound: 'Nie znaleziono wtyczek', + }, + updateTime: 'Czas aktualizacji', + updateSettings: 'Zaktualizuj ustawienia', + updateTimeTitle: 'Czas aktualizacji', + specifyPluginsToUpdate: 'Określ wtyczki do zaktualizowania', + nextUpdateTime: 'Następna automatyczna aktualizacja: {{time}}', + automaticUpdates: 'Automatyczne aktualizacje', + excludeUpdate: 'Następujące {{num}} wtyczki nie będą aktualizować się automatycznie', + changeTimezone: 'Aby zmienić strefę czasową, przejdź do Ustawienia', + partialUPdate: 'Tylko następujące {{num}} wtyczki będą się automatycznie aktualizować', + }, } export default translation diff --git a/web/i18n/pl-PL/share-app.ts b/web/i18n/pl-PL/share.ts similarity index 100% rename from web/i18n/pl-PL/share-app.ts rename to web/i18n/pl-PL/share.ts diff --git a/web/i18n/pt-BR/plugin.ts b/web/i18n/pt-BR/plugin.ts index c03acac2ec..47490d218c 100644 --- a/web/i18n/pt-BR/plugin.ts +++ b/web/i18n/pt-BR/plugin.ts @@ -84,6 +84,16 @@ const translation = { configureTool: 'Ferramenta de configuração', endpointsDocLink: 'Veja o documento', endpointModalTitle: 'Ponto de extremidade de configuração', + deprecation: { + reason: { + businessAdjustments: 'ajustes de negócios', + ownershipTransferred: 'propriedade transferida', + noMaintainer: 'sem mantenedor', + }, + onlyReason: 'Este plugin foi descontinuado devido a {{deprecatedReason}} e não será mais atualizado.', + noReason: 'Este plugin foi descontinuado e não será mais atualizado.', + fullMessage: 'Este plugin foi descontinuado devido a {{deprecatedReason}}, e não receberá mais atualizações. 
Por favor, use {{-alternativePluginId}} em vez disso.', + }, }, debugInfo: { title: 'Depuração', @@ -237,6 +247,56 @@ const translation = { useApiAuthDesc: 'Após configurar as credenciais, todos os membros dentro do espaço de trabalho podem usar esta ferramenta ao orquestrar aplicações.', clientInfo: 'Como não foram encontrados segredos de cliente do sistema para este provedor de ferramentas, é necessário configurá-lo manualmente. Para redirect_uri, use', }, + deprecated: 'Obsoleto', + autoUpdate: { + strategy: { + disabled: { + name: 'Desativado', + description: 'Os plugins não atualizarão automaticamente', + }, + fixOnly: { + selectedDescription: 'Atualização automática apenas para versões de patch', + name: 'Reparar Apenas', + }, + latest: { + description: 'Sempre atualize para a versão mais recente', + selectedDescription: 'Sempre atualize para a versão mais recente', + name: 'Último', + }, + }, + upgradeMode: { + all: 'Atualizar tudo', + exclude: 'Excluir selecionados', + partial: 'Somente selecionado', + }, + upgradeModePlaceholder: { + exclude: 'Plugins selecionados não serão atualizados automaticamente', + partial: 'Apenas plugins selecionados serão atualizados automaticamente. Nenhum plugin está atualmente selecionado, então nenhum plugin será atualizado automaticamente.', + }, + operation: { + select: 'Selecionar plugins', + clearAll: 'Limpar tudo', + }, + pluginDowngradeWarning: { + downgrade: 'Descer de nível de qualquer forma', + exclude: 'Excluir da atualização automática', + title: 'Rebaixamento do Plugin', + description: 'A atualização automática está atualmente habilitada para este plugin. 
Reverter a versão pode causar a sobrescrição de suas alterações durante a próxima atualização automática.', + }, + noPluginPlaceholder: { + noFound: 'Nenhum plugin foi encontrado.', + noInstalled: 'Nenhum plugin instalado', + }, + updateTime: 'Atualizar hora', + automaticUpdates: 'Atualizações automáticas', + excludeUpdate: 'Os seguintes {{num}} plugins não serão atualizados automaticamente', + updateTimeTitle: 'Atualizar hora', + specifyPluginsToUpdate: 'Especifique os plugins a serem atualizados', + changeTimezone: 'Para mudar o fuso horário, vá para Configurações', + nextUpdateTime: 'Próxima atualização automática: {{time}}', + partialUPdate: 'Apenas os seguintes {{num}} plugins serão atualizados automaticamente', + updateSettings: 'Atualizar Configurações', + }, } export default translation diff --git a/web/i18n/pt-BR/share-app.ts b/web/i18n/pt-BR/share.ts similarity index 100% rename from web/i18n/pt-BR/share-app.ts rename to web/i18n/pt-BR/share.ts diff --git a/web/i18n/ro-RO/plugin.ts b/web/i18n/ro-RO/plugin.ts index b866f1de01..8c3ba06bbc 100644 --- a/web/i18n/ro-RO/plugin.ts +++ b/web/i18n/ro-RO/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpointModalDesc: 'Odată configurate, pot fi utilizate funcțiile furnizate de plugin prin intermediul punctelor finale API.', modelNum: '{{num}} MODELE INCLUSE', configureModel: 'Configurarea modelului', + deprecation: { + reason: { + businessAdjustments: 'ajustări de afaceri', + noMaintainer: 'fără întreținător', + ownershipTransferred: 'proprietatea transferată', + }, + noReason: 'Acest plugin a fost declarat învechit și nu va mai fi actualizat.', + onlyReason: 'Acest plugin a fost depreciat din cauza {{deprecatedReason}} și nu va mai fi actualizat.', + fullMessage: 'Acest plugin a fost declarat învechit din cauza {{deprecatedReason}}, și nu va mai fi actualizat. 
Vă rugăm să folosiți în schimb {{-alternativePluginId}}.', + }, }, debugInfo: { viewDocs: 'Vizualizați documentele', @@ -237,6 +247,56 @@ const translation = { useApiAuthDesc: 'După configurarea acreditivelor, toți membrii din spațiul de lucru pot folosi acest instrument atunci când orchestran aplicații.', clientInfo: 'Deoarece nu s-au găsit secretele clientului sistemului pentru acest furnizor de instrumente, este necesară configurarea manuală; pentru redirect_uri, vă rugăm să folosiți', }, + deprecated: 'Încetat de a mai fi utilizat', + autoUpdate: { + strategy: { + disabled: { + description: 'Pluginurile nu se vor actualiza automat', + name: 'Dezactivat', + }, + fixOnly: { + selectedDescription: 'Actualizare automată doar pentru versiuni patch', + name: 'Fix doar', + }, + latest: { + name: 'Ultimul', + selectedDescription: 'Actualizați întotdeauna la cea mai recentă versiune', + description: 'Actualizați întotdeauna la cea mai recentă versiune', + }, + }, + upgradeMode: { + exclude: 'Excluzi selecția', + all: 'Actualizează tot', + partial: 'Numai selectat', + }, + upgradeModePlaceholder: { + exclude: 'Pluginurile selectate nu se vor actualiza automat.', + partial: 'Numai pluginurile selectate se vor actualiza automat. Nu există pluginuri selectate în prezent, așa că niciun plugin nu se va actualiza automat.', + }, + operation: { + select: 'Selectați plugin-uri', + clearAll: 'Șterge tot', + }, + pluginDowngradeWarning: { + title: 'Scădere a pluginului', + exclude: 'Exclude de la actualizarea automată', + downgrade: 'Oricum, downgradează', + description: 'Actualizarea automată este în prezent activată pentru acest plugin. 
Revenirea la o versiune anterioară poate provoca suprascrierea modificărilor tale în timpul următoarei actualizări automate.', + }, + noPluginPlaceholder: { + noFound: 'Nu au fost găsite plugin-uri', + noInstalled: 'Niciun plugin instalat', + }, + excludeUpdate: 'Următoarele {{num}} pluginuri nu se vor actualiza automat', + updateTimeTitle: 'Timp de actualizare', + updateSettings: 'Actualizează setările', + changeTimezone: 'Pentru a schimba fusul orar, mergi la Setări', + automaticUpdates: 'Actualizări automate', + specifyPluginsToUpdate: 'Specificați plugin-urile de actualizat', + partialUPdate: 'Numai următoarele {{num}} pluginuri se vor actualiza automat', + updateTime: 'Timp de actualizare', + nextUpdateTime: 'Următoarea actualizare automată: {{time}}', + }, } export default translation diff --git a/web/i18n/ro-RO/share-app.ts b/web/i18n/ro-RO/share.ts similarity index 100% rename from web/i18n/ro-RO/share-app.ts rename to web/i18n/ro-RO/share.ts diff --git a/web/i18n/ru-RU/plugin.ts b/web/i18n/ru-RU/plugin.ts index 5c96520ed9..f39139aa05 100644 --- a/web/i18n/ru-RU/plugin.ts +++ b/web/i18n/ru-RU/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpointsEmpty: 'Нажмите кнопку «+», чтобы добавить конечную точку', switchVersion: 'Версия для переключателя', endpointsDocLink: 'Посмотреть документ', + deprecation: { + reason: { + businessAdjustments: 'бизнес-правки', + ownershipTransferred: 'передача права собственности', + noMaintainer: 'нет сопровождающего', + }, + noReason: 'Этот плагин был устаревшим и больше не будет обновляться.', + onlyReason: 'Этот плагин был устаревшим из-за {{deprecatedReason}} и больше не будет обновляться.', + fullMessage: 'Этот плагин больше не поддерживается по причине {{deprecatedReason}} и больше не будет обновляться. 
Пожалуйста, используйте {{-alternativePluginId}} вместо этого.', + }, }, debugInfo: { title: 'Отладка', @@ -237,6 +247,56 @@ const translation = { useApiAuthDesc: 'После настройки учетных данных все члены рабочей области могут использовать этот инструмент при оркестрации приложений.', clientInfo: 'Поскольку не найдены секреты клиентской системы для этого поставщика инструментов, необходимо настроить его вручную, для redirect_uri, пожалуйста, используйте', }, + deprecated: 'Устаревший', + autoUpdate: { + strategy: { + disabled: { + name: 'Отключен', + description: 'Плагины не будут автоматически обновляться', + }, + fixOnly: { + name: 'Только исправить', + selectedDescription: 'Автообновление только для версий патчей', + }, + latest: { + name: 'Новости', + selectedDescription: 'Всегда обновляйте до последней версии', + description: 'Всегда обновляйте до последней версии', + }, + }, + upgradeMode: { + partial: 'Только выбрано', + all: 'Обновить все', + exclude: 'Исключить выбранное', + }, + upgradeModePlaceholder: { + partial: 'Только выбранные плагины будут автоматически обновляться. В данный момент плагины не выбраны, поэтому никакие плагины не будут автоматически обновляться.', + exclude: 'Выбранные плагины не будут обновляться автоматически', + }, + operation: { + select: 'Выберите плагины', + clearAll: 'Очистить все', + }, + pluginDowngradeWarning: { + exclude: 'Исключить из автообновления', + title: 'Понижение версии плагина', + downgrade: 'Все равно понизьте версию', + description: 'Автообновление в данный момент включено для этого плагина. 
Понижение версии может привести к тому, что ваши изменения будут перезаписаны во время следующего автоматического обновления.', + }, + noPluginPlaceholder: { + noFound: 'Плагины не найдены', + noInstalled: 'Нет установленных плагинов', + }, + updateTimeTitle: 'Время обновления', + updateTime: 'Время обновления', + automaticUpdates: 'Автоматические обновления', + updateSettings: 'Обновить настройки', + nextUpdateTime: 'Следующее автообновление: {{time}}', + specifyPluginsToUpdate: 'Укажите плагины для обновления', + excludeUpdate: 'Следующие {{num}} плагины не будут обновляться автоматически', + partialUPdate: 'Только следующие {{num}} плагины будут обновляться автоматически', + changeTimezone: 'Чтобы изменить часовой пояс, перейдите в Настройки', + }, } export default translation diff --git a/web/i18n/ru-RU/share-app.ts b/web/i18n/ru-RU/share.ts similarity index 100% rename from web/i18n/ru-RU/share-app.ts rename to web/i18n/ru-RU/share.ts diff --git a/web/i18n/sl-SI/plugin.ts b/web/i18n/sl-SI/plugin.ts index cecb8e9a62..049a80f859 100644 --- a/web/i18n/sl-SI/plugin.ts +++ b/web/i18n/sl-SI/plugin.ts @@ -87,6 +87,16 @@ const translation = { endpointsTip: 'Ta vtičnik zagotavlja specifične funkcionalnosti preko končnih točk, prav tako pa lahko konfigurirate več nizov končnih točk za trenutno delovno okolje.', endpointModalDesc: 'Ko je konfiguriran, se lahko uporabljajo funkcije, ki jih vtičnik zagotavlja prek API končnih točk.', endpointsEmpty: 'Kliknite gumb \' \' za dodajanje končne točke', + deprecation: { + reason: { + businessAdjustments: 'poslovne prilagoditve', + noMaintainer: 'brez vzdrževalca', + ownershipTransferred: 'lastništvo preneseno', + }, + onlyReason: 'Ta vtičnik je bil opuščen zaradi {{deprecatedReason}} in ne bo več posodobljen.', + noReason: 'Ta vtičnik je bil ukinjen in ne bo več posodabljan.', + fullMessage: 'Ta vtičnik je bil ukinjen zaradi {{deprecatedReason}}, in ne bo več posodobljen. 
Namesto tega uporabite {{-alternativePluginId}}.', + }, }, debugInfo: { viewDocs: 'Oglejte si dokumente', @@ -237,6 +247,56 @@ const translation = { clientInfo: 'Ker za tega ponudnika orodij niso bili najdeni klientski skrivnosti sistema, je potrebna ročna nastavitev, za redirect_uri prosimo uporabite', useApiAuthDesc: 'Po konfiguraciji poverilnic lahko vsi člani v delovnem prostoru uporabljajo to orodje pri orkestraciji aplikacij.', }, + deprecated: 'Zastaran', + autoUpdate: { + strategy: { + disabled: { + name: 'Onemogočeno', + description: 'Vtičniki se ne bodo samodejno posodobili', + }, + fixOnly: { + name: 'Popravi samo', + selectedDescription: 'Samodejno posodabljanje samo za različice popravkov', + }, + latest: { + selectedDescription: 'Vedno posodobite na najnovejšo različico', + name: 'Najnovejši', + description: 'Vedno posodobite na najnovejšo različico', + }, + }, + upgradeMode: { + partial: 'Samo izbrano', + exclude: 'Izključi izbrano', + all: 'Posodobi vse', + }, + upgradeModePlaceholder: { + exclude: 'Izbrani vtičniki se ne bodo samodejno posodabljali.', + partial: 'Samo izbrani vtičniki se bodo samodejno posodabljali. Trenutno ni izbranih nobenih vtičnikov, zato se nobeni vtičniki ne bodo samodejno posodobili.', + }, + operation: { + select: 'Izberi vtičnike', + clearAll: 'Počisti vse', + }, + pluginDowngradeWarning: { + downgrade: 'Kljub temu narediti nižjo različico', + exclude: 'Izključi iz samodejnega posodabljanja', + title: 'Zmanjšanje različice vtičnika', + description: 'Samodejno posodabljanje je trenutno omogočeno za ta vtičnik. 
Zmanjšanje različice lahko povzroči, da bodo vaše spremembe prepisane med naslednjim samodejnim posodabljanjem.', + }, + noPluginPlaceholder: { + noFound: 'Nobeni vtičniki niso bili najdeni', + noInstalled: 'Nobenih vtičnikov ni nameščenih', + }, + updateTimeTitle: 'Čas posodobitve', + specifyPluginsToUpdate: 'Določite vtičnike za posodobitev', + updateTime: 'Čas posodobitve', + nextUpdateTime: 'Naslednje samodejno posodabljanje: {{time}}', + automaticUpdates: 'Samodejna posodobitev', + excludeUpdate: 'Naslednjih {{num}} razširitev ne bo samodejno posodobljenih', + changeTimezone: 'Za spremembo časovnega pasu pojdite v Nastavitve', + partialUPdate: 'Samo naslednjih {{num}} vtičnikov se bo samodejno posodabljalo.', + updateSettings: 'Posodobi nastavitve', + }, } export default translation diff --git a/web/i18n/sl-SI/share-app.ts b/web/i18n/sl-SI/share.ts similarity index 100% rename from web/i18n/sl-SI/share-app.ts rename to web/i18n/sl-SI/share.ts diff --git a/web/i18n/th-TH/plugin.ts b/web/i18n/th-TH/plugin.ts index de1201396d..6a53350cad 100644 --- a/web/i18n/th-TH/plugin.ts +++ b/web/i18n/th-TH/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpointsTip: 'ปลั๊กอินนี้มีฟังก์ชันเฉพาะผ่านปลายทาง และคุณสามารถกําหนดค่าชุดปลายทางหลายชุดสําหรับพื้นที่ทํางานปัจจุบันได้', endpointsEmpty: 'คลิกปุ่ม \'+\' เพื่อเพิ่มปลายทาง', serviceOk: 'บริการตกลง', + deprecation: { + reason: { + ownershipTransferred: 'การโอนความเป็นเจ้าของ', + businessAdjustments: 'การปรับเปลี่ยนธุรกิจ', + noMaintainer: 'ไม่มีผู้ดูแล', + }, + onlyReason: 'ปลั๊กอินนี้ถูกเลิกใช้เนื่องจาก {{deprecatedReason}} และจะไม่มีการอัปเดตอีกต่อไป.', + noReason: 'ปลั๊กอินนี้ได้ถูกยกเลิกใช้งานและจะไม่มีการอัปเดตอีกต่อไป.', + fullMessage: 'ปลั๊กอินนี้ถูกยกเลิกการใช้งานเนื่องจาก {{deprecatedReason}} และจะไม่มีการอัปเดตอีกต่อไป กรุณาใช้ {{-alternativePluginId}} แทน.', + }, }, debugInfo: { viewDocs: 'ดูเอกสาร', @@ -237,6 +247,56 @@ const translation = { useApiAuthDesc: 'หลังจากตั้งค่าข้อมูลประจำตัวแล้ว
สมาชิกทุกคนภายในพื้นที่ทำงานสามารถใช้เครื่องมือนี้เมื่อจัดการแอปพลิเคชันได้', clientInfo: 'เนื่องจากไม่พบความลับของลูกค้าสำหรับผู้ให้บริการเครื่องมือนี้ จำเป็นต้องตั้งค่าแบบแมนนวล สำหรับ redirect_uri กรุณาใช้', }, + deprecated: 'เลิกใช้', + autoUpdate: { + strategy: { + disabled: { + name: 'ปิดใช้งาน', + description: 'ปลั๊กอินจะไม่อัปเดตอัตโนมัติ', + }, + fixOnly: { + name: 'ซ่อมเฉพาะ', + selectedDescription: 'อัปเดตอัตโนมัติเฉพาะเวอร์ชันแพตช์เท่านั้น', + }, + latest: { + name: 'ล่าสุด', + selectedDescription: 'อัปเดตเป็นเวอร์ชันล่าสุดเสมอ', + description: 'อัปเดตเป็นเวอร์ชันล่าสุดเสมอ', + }, + }, + upgradeMode: { + partial: 'เฉพาะที่เลือกไว้', + exclude: 'ยกเว้นที่เลือกไว้', + all: 'อัปเดตทั้งหมด', + }, + upgradeModePlaceholder: { + exclude: 'ปลั๊กอินที่เลือกจะไม่อัปเดตอัตโนมัติ', + partial: 'เฉพาะปลั๊กอินที่เลือกจะอัปเดตโดยอัตโนมัติ ขณะนี้ไม่มีปลั๊กอินใดที่ถูกเลือก ดังนั้นจะไม่มีปลั๊กอินใดที่อัปเดตโดยอัตโนมัติ', + }, + operation: { + clearAll: 'ล้างทั้งหมด', + select: 'เลือกปลั๊กอิน', + }, + pluginDowngradeWarning: { + title: 'การลดเวอร์ชันปลั๊กอิน', + downgrade: 'ลดระดับอยู่ดี', + exclude: 'ไม่รวมในการอัปเดตอัตโนมัติ', + description: 'ฟีเจอร์การอัปเดตอัตโนมัติเปิดใช้งานอยู่สำหรับปลั๊กอินนี้ การลดระดับเวอร์ชันอาจทำให้การเปลี่ยนแปลงของคุณหายไปในระหว่างการอัปเดตอัตโนมัติต่อไป', + }, + noPluginPlaceholder: { + noInstalled: 'ไม่มีปลั๊กอินติดตั้ง', + noFound: 'ไม่พบปลั๊กอิน', + }, + specifyPluginsToUpdate: 'ระบุปลั๊กอินที่จะอัปเดต', + updateTime: 'เวลาที่อัปเดต', + updateTimeTitle: 'เวลาที่อัปเดต', + updateSettings: 'อัปเดตการตั้งค่า', + nextUpdateTime: 'การอัปเดตอัตโนมัติครั้งถัดไป: {{time}}', + automaticUpdates: 'การอัปเดตอัตโนมัติ', + excludeUpdate: 'ปลั๊กอิน {{num}} ต่อไปนี้จะไม่อัพเดตอัตโนมัติ', + partialUPdate: 'ปลั๊กอิน {{num}} ตัวต่อไปนี้จะอัปเดตให้อัตโนมัติเท่านั้น', + changeTimezone: 'ในการเปลี่ยนเขตเวลา ให้ไปที่ การตั้งค่า', + }, } export default translation diff --git a/web/i18n/th-TH/share-app.ts b/web/i18n/th-TH/share.ts similarity index 100% rename
from web/i18n/th-TH/share-app.ts rename to web/i18n/th-TH/share.ts diff --git a/web/i18n/tr-TR/plugin.ts b/web/i18n/tr-TR/plugin.ts index 80237131e7..4c2b5510d2 100644 --- a/web/i18n/tr-TR/plugin.ts +++ b/web/i18n/tr-TR/plugin.ts @@ -84,6 +84,16 @@ const translation = { modelNum: '{{sayı}} DAHİL OLAN MODELLER', endpointDisableTip: 'Uç Noktayı Devre Dışı Bırak', serviceOk: 'Servis Tamam', + deprecation: { + reason: { + noMaintainer: 'bakımcı yok', + ownershipTransferred: 'mülkiyet devredildi', + businessAdjustments: 'iş ayarlamaları', + }, + noReason: 'Bu eklenti kullanımdan kaldırıldı ve artık güncellenmeyecek.', + onlyReason: 'Bu eklenti {{deprecatedReason}} nedeniyle kullanımdan kaldırılmıştır ve artık güncellenmeyecektir.', + fullMessage: 'Bu eklenti {{deprecatedReason}} nedeniyle kullanım dışı bırakılmıştır ve artık güncellenmeyecek. Lütfen bunun yerine {{-alternativePluginId}}\'i kullanın.', + }, }, debugInfo: { title: 'Hata ayıklama', @@ -237,6 +247,56 @@ const translation = { saveAndAuth: 'Kaydet ve Yetkilendir', clientInfo: 'Bu araç sağlayıcı için sistem istemci gizlilikleri bulunmadığından, manuel olarak ayar yapılması gerekmektedir. redirect_uri için lütfen şu adresi kullanın', }, + deprecated: 'Kaldırılmış', + autoUpdate: { + strategy: { + disabled: { + name: 'Engelli', + description: 'Eklentiler otomatik olarak güncellenmeyecek', + }, + fixOnly: { + selectedDescription: 'Sadece yamanın versiyonları için otomatik güncelleme', + name: 'Sadece Düzelt', + }, + latest: { + name: 'Son', + selectedDescription: 'Her zaman en son sürüme güncelle', + description: 'Her zaman en son sürüme güncelle', + }, + }, + upgradeMode: { + partial: 'Sadece seçilen', + all: 'Hepsini güncelle', + exclude: 'Seçilenleri hariç tut', + }, + upgradeModePlaceholder: { + exclude: 'Seçilen eklentiler otomatik olarak güncellenmeyecek.', + partial: 'Sadece seçilen eklentiler otomatik olarak güncellenecek. 
Şu anda hiçbir eklenti seçilmedi, bu yüzden hiçbir eklenti otomatik olarak güncellenmeyecek.', + }, + operation: { + select: 'Eklentileri seçin', + clearAll: 'Hepsini temizle', + }, + pluginDowngradeWarning: { + downgrade: 'Her durumda düşürme', + title: 'Eklenti Düşürme', + exclude: 'Otomatik güncellemeden hariç tut', + description: 'Bu eklenti için otomatik güncelleme şu anda etkin. Sürümün düşürülmesi, bir sonraki otomatik güncelleme sırasında değişikliklerinizin üzerine yazılmasına neden olabilir.', + }, + noPluginPlaceholder: { + noInstalled: 'Hiçbir eklenti yüklenmemiş', + noFound: 'Hiçbir eklenti bulunamadı', + }, + automaticUpdates: 'Otomatik güncellemeler', + updateTime: 'Güncelleme zamanı', + updateTimeTitle: 'Güncelleme zamanı', + updateSettings: 'Ayarları Güncelle', + nextUpdateTime: 'Sonraki otomatik güncelleme: {{time}}', + specifyPluginsToUpdate: 'Güncellemek için eklentileri belirtin', + excludeUpdate: 'Aşağıdaki {{num}} eklenti otomatik olarak güncellenmeyecek', + changeTimezone: 'Zaman dilimini değiştirmek için Ayarlar sekmesine gidin', + partialUPdate: 'Sadece aşağıdaki {{num}} eklenti otomatik olarak güncellenecek', + }, } export default translation diff --git a/web/i18n/tr-TR/share-app.ts b/web/i18n/tr-TR/share.ts similarity index 100% rename from web/i18n/tr-TR/share-app.ts rename to web/i18n/tr-TR/share.ts diff --git a/web/i18n/uk-UA/plugin.ts b/web/i18n/uk-UA/plugin.ts index 7273f1174e..877d7843ff 100644 --- a/web/i18n/uk-UA/plugin.ts +++ b/web/i18n/uk-UA/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpointModalDesc: 'Після налаштування можна використовувати функції, що надаються плагіном через кінцеві точки API.', configureTool: 'Інструмент налаштування', serviceOk: 'Сервіс працює', + deprecation: { + reason: { + ownershipTransferred: 'право власності передано', + businessAdjustments: 'бізнесові корективи', + noMaintainer: 'немає супроводжувача', + }, + noReason: 'Цей плагін було застаріло, і він більше не буде оновлюватися.', + 
onlyReason: 'Цей плагін було знято з підтримки через {{deprecatedReason}} і більше не буде оновлюватися.', + fullMessage: 'Цей плагін був застарілий через {{deprecatedReason}}, і більше не буде оновлюватися. Будь ласка, використовуйте {{-alternativePluginId}} замість цього.', + }, }, debugInfo: { title: 'Налагодження', @@ -237,6 +247,56 @@ const translation = { clientInfo: 'Оскільки не знайдено жодних секретів клієнта системи для цього постачальника інструментів, потрібно налаштувати його вручну; для redirect_uri, будь ласка, використовуйте', useApiAuthDesc: 'Після налаштування облікових даних усі учасники робочого простору можуть використовувати цей інструмент під час оркестрації додатків.', }, + deprecated: 'Застарілий', + autoUpdate: { + strategy: { + disabled: { + name: 'Вимкнено', + description: 'Плагіни не будуть автоматично оновлюватися', + }, + fixOnly: { + name: 'Виправити тільки', + selectedDescription: 'Автоматичне оновлення лише для версій патчів', + }, + latest: { + name: 'Останні', + selectedDescription: 'Завжди оновлюйте до останньої версії', + description: 'Завжди оновлюйте до останньої версії', + }, + }, + upgradeMode: { + all: 'Оновити все', + partial: 'Тільки вибрані', + exclude: 'Виключити вибране', + }, + upgradeModePlaceholder: { + exclude: 'Вибрані плагіни не будуть оновлюватися автоматично', + partial: 'Тільки вибрані плагіни будуть автоматично оновлюватись. Наразі жоден з плагінів не вибрано, тому жоден плагін не буде автоматично оновлений.', + }, + operation: { + clearAll: 'Очистити все', + select: 'Виберіть плагіни', + }, + pluginDowngradeWarning: { + downgrade: 'Все одно знизити версію', + title: 'Пониження плагіна', + exclude: 'Виключити з автоматичного оновлення', + description: 'Автоматичне оновлення наразі увімкнене для цього плагіна. 
Пониження версії може призвести до того, що ваші зміни будуть перезаписані під час наступного автоматичного оновлення.', + }, + noPluginPlaceholder: { + noFound: 'Плагіни не були знайдені', + noInstalled: 'Жодних плагінів не встановлено', + }, + updateTime: 'Час оновлення', + automaticUpdates: 'Автоматичні оновлення', + updateTimeTitle: 'Час оновлення', + nextUpdateTime: 'Наступне автоматичне оновлення: {{time}}', + specifyPluginsToUpdate: 'Вкажіть плагіни для оновлення', + excludeUpdate: 'Наступні {{num}} плагіни не будуть автоматично оновлюватися', + updateSettings: 'Оновити налаштування', + changeTimezone: 'Щоб змінити часовий пояс, перейдіть до Налаштування', + partialUPdate: 'Тільки наступні {{num}} плагіни будуть автоматично оновлюватися', + }, } export default translation diff --git a/web/i18n/uk-UA/share-app.ts b/web/i18n/uk-UA/share.ts similarity index 100% rename from web/i18n/uk-UA/share-app.ts rename to web/i18n/uk-UA/share.ts diff --git a/web/i18n/vi-VN/plugin.ts b/web/i18n/vi-VN/plugin.ts index 143cd71a66..677d90e6a7 100644 --- a/web/i18n/vi-VN/plugin.ts +++ b/web/i18n/vi-VN/plugin.ts @@ -84,6 +84,16 @@ const translation = { endpointDeleteContent: 'Bạn có muốn xóa {{name}} không?', endpointModalTitle: 'Điểm cuối thiết lập', disabled: 'Tàn tật', + deprecation: { + reason: { + noMaintainer: 'không có người bảo trì', + ownershipTransferred: 'quyền sở hữu được chuyển nhượng', + businessAdjustments: 'điều chỉnh kinh doanh', + }, + noReason: 'Plugin này đã bị loại bỏ và sẽ không còn được cập nhật.', + onlyReason: 'Plugin này đã bị ngừng hỗ trợ do {{deprecatedReason}} và sẽ không còn được cập nhật nữa.', + fullMessage: 'Plugin này đã bị ngừng sử dụng do {{deprecatedReason}}, và sẽ không còn được cập nhật nữa. 
Vui lòng sử dụng {{-alternativePluginId}} thay thế.', + }, }, debugInfo: { title: 'Gỡ lỗi', @@ -237,6 +247,56 @@ const translation = { useApiAuthDesc: 'Sau khi cấu hình thông tin xác thực, tất cả các thành viên trong không gian làm việc có thể sử dụng công cụ này khi điều phối các ứng dụng.', clientInfo: 'Vì không tìm thấy bí mật khách hàng hệ thống cho nhà cung cấp công cụ này, cần thiết lập thủ công, đối với redirect_uri, vui lòng sử dụng', }, + deprecated: 'Đã bị ngưng sử dụng', + autoUpdate: { + strategy: { + disabled: { + name: 'Khuyết tật', + description: 'Các plugin sẽ không tự động cập nhật', + }, + fixOnly: { + name: 'Chỉ sửa chữa', + selectedDescription: 'Tự động cập nhật chỉ cho các phiên bản bản vá', + }, + latest: { + name: 'Mới nhất', + description: 'Luôn cập nhật lên phiên bản mới nhất', + selectedDescription: 'Luôn cập nhật lên phiên bản mới nhất', + }, + }, + upgradeMode: { + partial: 'Chỉ được chọn', + exclude: 'Loại trừ đã chọn', + all: 'Cập nhật tất cả', + }, + upgradeModePlaceholder: { + exclude: 'Các plugin được chọn sẽ không tự động cập nhật', + partial: 'Chỉ những plugin được chọn mới tự động cập nhật. Hiện tại không có plugin nào được chọn, vì vậy sẽ không có plugin nào tự động cập nhật.', + }, + operation: { + clearAll: 'Xóa tất cả', + select: 'Chọn plugin', + }, + pluginDowngradeWarning: { + exclude: 'Loại trừ khỏi cập nhật tự động', + downgrade: 'Giảm cấp vẫn vậy', + description: 'Chức năng tự động cập nhật hiện đang được bật cho plugin này. 
Việc hạ cấp phiên bản có thể khiến các thay đổi của bạn bị ghi đè trong lần cập nhật tự động tiếp theo.', + title: 'Hạ cấp Plugin', + }, + noPluginPlaceholder: { + noInstalled: 'Không có plugin nào được cài đặt', + noFound: 'Không tìm thấy plugin nào', + }, + updateTimeTitle: 'Thời gian cập nhật', + updateTime: 'Thời gian cập nhật', + automaticUpdates: 'Cập nhật tự động', + nextUpdateTime: 'Cập nhật tự động tiếp theo: {{time}}', + specifyPluginsToUpdate: 'Chỉ định các plugin để cập nhật', + excludeUpdate: 'Các plugin {{num}} sau đây sẽ không tự động cập nhật', + updateSettings: 'Cập nhật cài đặt', + partialUPdate: 'Chỉ có {{num}} plugin sau đây sẽ tự động cập nhật', + changeTimezone: 'Để thay đổi múi giờ, hãy vào Cài đặt', + }, } export default translation diff --git a/web/i18n/vi-VN/share-app.ts b/web/i18n/vi-VN/share.ts similarity index 100% rename from web/i18n/vi-VN/share-app.ts rename to web/i18n/vi-VN/share.ts diff --git a/web/i18n/zh-Hans/plugin.ts b/web/i18n/zh-Hans/plugin.ts index 5d2f2b1f59..122fc100dc 100644 --- a/web/i18n/zh-Hans/plugin.ts +++ b/web/i18n/zh-Hans/plugin.ts @@ -127,6 +127,56 @@ const translation = { admins: '管理员', noone: '无人', }, + autoUpdate: { + automaticUpdates: '自动更新', + updateTime: '更新时间', + specifyPluginsToUpdate: '指定要更新的插件', + strategy: { + disabled: { + name: '禁用', + description: '插件将不会自动更新', + }, + fixOnly: { + name: '仅修复', + description: '仅自动更新补丁版本(例如,1.0.1 → 1.0.2)。次要版本更改不会触发更新。', + selectedDescription: '仅自动更新补丁版本', + }, + latest: { + name: '最新', + description: '始终更新到最新版本', + selectedDescription: '始终更新到最新版本', + }, + }, + updateTimeTitle: '更新时间', + upgradeMode: { + all: '更新全部', + exclude: '排除选定', + partial: '仅选定', + }, + upgradeModePlaceholder: { + exclude: '选定的插件将不会自动更新', + partial: '仅选定的插件将自动更新。目前未选择任何插件,因此不会自动更新任何插件。', + }, + excludeUpdate: '以下 {{num}} 个插件将不会自动更新', + partialUPdate: '仅以下 {{num}} 个插件将自动更新', + operation: { + clearAll: '清除所有', + select: '选择插件', + }, + nextUpdateTime: '下次自动更新时间: {{time}}', + 
pluginDowngradeWarning: { + title: '插件降级', + description: '此插件目前已启用自动更新。降级版本可能会导致您的更改在下次自动更新时被覆盖。', + downgrade: '仍然降级', + exclude: '从自动更新中排除', + }, + noPluginPlaceholder: { + noFound: '未找到插件', + noInstalled: '未安装插件', + }, + updateSettings: '更新设置', + changeTimezone: '要更改时区,请前往设置', + }, pluginInfoModal: { title: '插件信息', repository: '仓库', diff --git a/web/i18n/zh-Hans/share-app.ts b/web/i18n/zh-Hans/share.ts similarity index 100% rename from web/i18n/zh-Hans/share-app.ts rename to web/i18n/zh-Hans/share.ts diff --git a/web/i18n/zh-Hant/plugin.ts b/web/i18n/zh-Hant/plugin.ts index 938c754394..0d0e1f8782 100644 --- a/web/i18n/zh-Hant/plugin.ts +++ b/web/i18n/zh-Hant/plugin.ts @@ -84,6 +84,16 @@ const translation = { configureModel: '配置模型', endpointModalTitle: '設置終端節點', endpointsDocLink: '查看文件', + deprecation: { + reason: { + businessAdjustments: '業務調整', + ownershipTransferred: '所有權轉移', + noMaintainer: '沒有維護者', + }, + noReason: '此插件已被廢棄,將不再進行更新。', + onlyReason: '此插件因為 {{deprecatedReason}} 而被棄用,將不再更新。', + fullMessage: '由於 {{deprecatedReason}},此插件已被棄用,將不再更新。請改用 {{-alternativePluginId}}。', + }, }, debugInfo: { viewDocs: '查看文件', @@ -237,6 +247,56 @@ const translation = { clientInfo: '由於未找到此工具提供者的系統客戶端秘密,因此需要手動設置,對於 redirect_uri,請使用', useApiAuthDesc: '配置完憑證後,工作區內的所有成員在協調應用程式時都可以使用此工具。', }, + deprecated: '不推薦使用的', + autoUpdate: { + strategy: { + disabled: { + description: '插件將不會自動更新', + name: '殘疾的', + }, + fixOnly: { + name: '僅修理', + selectedDescription: '僅限於修補版本的自動更新', + }, + latest: { + description: '始終更新至最新版本', + name: '最新', + selectedDescription: '始終更新至最新版本', + }, + }, + upgradeMode: { + all: '更新所有', + exclude: '排除選定的', + partial: '僅選擇', + }, + upgradeModePlaceholder: { + partial: '只有選定的插件會自動更新。目前未選定任何插件,因此不會自動更新任何插件。', + exclude: '選定的插件將不會自動更新', + }, + operation: { + select: '選擇插件', + clearAll: '清除所有', + }, + pluginDowngradeWarning: { + downgrade: '無論如何降級', + title: '插件降級', + exclude: '排除自動更新', + description: '這個插件目前已啟用自動更新。降級版本可能會導致您的更改在下一次自動更新時被覆蓋。', + }, + 
noPluginPlaceholder: { + noInstalled: '沒有安裝插件', + noFound: '未找到任何外掛', + }, + automaticUpdates: '自動更新', + updateTime: '更新時間', + updateTimeTitle: '更新時間', + updateSettings: '更新設定', + partialUPdate: '只有以下 {{num}} 個插件將自動更新', + excludeUpdate: '以下 {{num}} 個插件將不會自動更新', + nextUpdateTime: '下次自動更新:{{time}}', + specifyPluginsToUpdate: '指定要更新的插件', + changeTimezone: '要更改時區,請前往設定', + }, } export default translation diff --git a/web/i18n/zh-Hant/share-app.ts b/web/i18n/zh-Hant/share.ts similarity index 100% rename from web/i18n/zh-Hant/share-app.ts rename to web/i18n/zh-Hant/share.ts diff --git a/web/package.json b/web/package.json index 4bd21e6d86..2470a70dec 100644 --- a/web/package.json +++ b/web/package.json @@ -1,6 +1,6 @@ { "name": "dify-web", - "version": "1.6.0", + "version": "1.7.0", "private": true, "engines": { "node": ">=v22.11.0" @@ -46,7 +46,7 @@ "@eslint/compat": "^1.2.4", "@floating-ui/react": "^0.26.25", "@formatjs/intl-localematcher": "^0.5.6", - "@headlessui/react": "^2.2.0", + "@headlessui/react": "2.2.1", "@heroicons/react": "^2.0.16", "@hookform/resolvers": "^3.9.0", "@lexical/code": "^0.30.0", diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index 40825aec01..eaff8c8504 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -66,8 +66,8 @@ importers: specifier: ^0.5.6 version: 0.5.10 '@headlessui/react': - specifier: ^2.2.0 - version: 2.2.4(react-dom@19.1.0(react@19.1.0))(react@19.1.0) + specifier: 2.2.1 + version: 2.2.1(react-dom@19.1.0(react@19.1.0))(react@19.1.0) '@heroicons/react': specifier: ^2.0.16 version: 2.2.0(react@19.1.0) @@ -1634,8 +1634,8 @@ packages: resolution: {integrity: sha512-HXuHKvpHLo9/GQ/yKMmKFyS1AYL2t9pL67+GfpYZfOAb29qD80EMozi50zRZk82KmNRBcA2A0/ErjpOwUxJrNg==} engines: {node: '>=20.0.0'} - '@headlessui/react@2.2.4': - resolution: {integrity: sha512-lz+OGcAH1dK93rgSMzXmm1qKOJkBUqZf1L4M8TWLNplftQD3IkoEDdUFNfAn4ylsN6WOTVtWaLmvmaHOUk1dTA==} + '@headlessui/react@2.2.1': + resolution: {integrity: 
sha512-daiUqVLae8CKVjEVT19P/izW0aGK0GNhMSAeMlrDebKmoVZHcRRwbxzgtnEadUVDXyBsWo9/UH4KHeniO+0tMg==} engines: {node: '>=10'} peerDependencies: react: ^18 || ^19 || ^19.0.0-rc @@ -9496,7 +9496,7 @@ snapshots: jest-mock: 29.7.0 jest-util: 29.7.0 - '@headlessui/react@2.2.4(react-dom@19.1.0(react@19.1.0))(react@19.1.0)': + '@headlessui/react@2.2.1(react-dom@19.1.0(react@19.1.0))(react@19.1.0)': dependencies: '@floating-ui/react': 0.26.28(react-dom@19.1.0(react@19.1.0))(react@19.1.0) '@react-aria/focus': 3.20.5(react-dom@19.1.0(react@19.1.0))(react@19.1.0) @@ -9504,7 +9504,6 @@ snapshots: '@tanstack/react-virtual': 3.13.12(react-dom@19.1.0(react@19.1.0))(react@19.1.0) react: 19.1.0 react-dom: 19.1.0(react@19.1.0) - use-sync-external-store: 1.5.0(react@19.1.0) '@heroicons/react@2.2.0(react@19.1.0)': dependencies: diff --git a/web/service/use-plugins.ts b/web/service/use-plugins.ts index ff092bb037..2877ef15f2 100644 --- a/web/service/use-plugins.ts +++ b/web/service/use-plugins.ts @@ -13,7 +13,6 @@ import type { InstalledLatestVersionResponse, InstalledPluginListWithTotalResponse, PackageDependency, - Permissions, Plugin, PluginDeclaration, PluginDetail, @@ -22,6 +21,7 @@ import type { PluginType, PluginsFromMarketplaceByInfoResponse, PluginsFromMarketplaceResponse, + ReferenceSetting, VersionInfo, VersionListResponse, uploadGitHubResponse, @@ -40,7 +40,7 @@ import { useQueryClient, } from '@tanstack/react-query' import { useInvalidateAllBuiltInTools } from './use-tools' -import usePermission from '@/app/components/plugins/plugin-page/use-permission' +import useReferenceSetting from '@/app/components/plugins/plugin-page/use-reference-setting' import { uninstallPlugin } from '@/service/plugins' import useRefreshPluginList from '@/app/components/plugins/install-plugin/hooks/use-refresh-plugin-list' import { cloneDeep } from 'lodash-es' @@ -350,37 +350,45 @@ export const useDebugKey = () => { }) } -const usePermissionsKey = [NAME_SPACE, 'permissions'] -export const 
usePermissions = () => { +const useReferenceSettingKey = [NAME_SPACE, 'referenceSettings'] +export const useReferenceSettings = () => { return useQuery({ - queryKey: usePermissionsKey, - queryFn: () => get('/workspaces/current/plugin/permission/fetch'), + queryKey: useReferenceSettingKey, + queryFn: () => get('/workspaces/current/plugin/preferences/fetch'), }) } -export const useInvalidatePermissions = () => { +export const useInvalidateReferenceSettings = () => { const queryClient = useQueryClient() return () => { queryClient.invalidateQueries( { - queryKey: usePermissionsKey, + queryKey: useReferenceSettingKey, }) } } -export const useMutationPermissions = ({ +export const useMutationReferenceSettings = ({ onSuccess, }: { onSuccess?: () => void }) => { return useMutation({ - mutationFn: (payload: Permissions) => { - return post('/workspaces/current/plugin/permission/change', { body: payload }) + mutationFn: (payload: ReferenceSetting) => { + return post('/workspaces/current/plugin/preferences/change', { body: payload }) }, onSuccess, }) } +export const useRemoveAutoUpgrade = () => { + return useMutation({ + mutationFn: (payload: { plugin_id: string }) => { + return post('/workspaces/current/plugin/preferences/autoupgrade/exclude', { body: payload }) + }, + }) +} + export const useMutationPluginsFromMarketplace = () => { return useMutation({ mutationFn: (pluginsSearchParams: PluginsSearchParams) => { @@ -427,6 +435,39 @@ export const useFetchPluginsInMarketPlaceByIds = (unique_identifiers: string[], }) } +export const useFetchPluginListOrBundleList = (pluginsSearchParams: PluginsSearchParams) => { + return useQuery({ + queryKey: [NAME_SPACE, 'fetchPluginListOrBundleList', pluginsSearchParams], + queryFn: () => { + const { + query, + sortBy, + sortOrder, + category, + tags, + exclude, + type, + page = 1, + pageSize = 40, + } = pluginsSearchParams + const pluginOrBundle = type === 'bundle' ? 
'bundles' : 'plugins' + return postMarketplace<{ data: PluginsFromMarketplaceResponse }>(`/${pluginOrBundle}/search/advanced`, { + body: { + page, + page_size: pageSize, + query, + sort_by: sortBy, + sort_order: sortOrder, + category: category !== 'all' ? category : '', + tags, + exclude, + type, + }, + }) + }, + }) +} + export const useFetchPluginsInMarketPlaceByInfo = (infos: Record[]) => { return useQuery({ queryKey: [NAME_SPACE, 'fetchPluginsInMarketPlaceByInfo', infos], @@ -448,7 +489,7 @@ const usePluginTaskListKey = [NAME_SPACE, 'pluginTaskList'] export const usePluginTaskList = (category?: PluginType) => { const { canManagement, - } = usePermission() + } = useReferenceSetting() const { refreshPluginList } = useRefreshPluginList() const { data, @@ -478,7 +519,6 @@ export const usePluginTaskList = (category?: PluginType) => { refreshPluginList(category ? { category } as any : undefined, !category) } } - // eslint-disable-next-line react-hooks/exhaustive-deps }, [isRefetching]) const handleRefetch = useCallback(() => { diff --git a/web/utils/format.ts b/web/utils/format.ts index 96ce8ea959..70238456c5 100644 --- a/web/utils/format.ts +++ b/web/utils/format.ts @@ -90,7 +90,3 @@ export const formatNumberAbbreviated = (num: number) => { } } } - -export const snakeCase2CamelCase = (input: string): string => { - return input.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase()) -}