diff --git a/api/.ruff.toml b/api/.ruff.toml index f30275a943..89a2da35d6 100644 --- a/api/.ruff.toml +++ b/api/.ruff.toml @@ -85,11 +85,11 @@ ignore = [ ] "tests/*" = [ "F811", # redefined-while-unused - "F401", # unused-import ] [lint.pyflakes] -extend-generics = [ +allowed-unused-imports = [ "_pytest.monkeypatch", "tests.integration_tests", + "tests.unit_tests", ] diff --git a/api/Dockerfile b/api/Dockerfile index 65a68ea634..34211d9e85 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -55,7 +55,7 @@ RUN apt-get update \ && echo "deb http://deb.debian.org/debian testing main" > /etc/apt/sources.list \ && apt-get update \ # For Security - && apt-get install -y --no-install-recommends expat=2.6.4-1 libldap-2.5-0=2.5.18+dfsg-3+b1 perl=5.40.0-8 libsqlite3-0=3.46.1-1 zlib1g=1:1.3.dfsg+really1.3.1-1+b1 \ + && apt-get install -y --no-install-recommends expat=2.6.4-1 libldap-2.5-0=2.5.19+dfsg-1 perl=5.40.0-8 libsqlite3-0=3.46.1-1 zlib1g=1:1.3.dfsg+really1.3.1-1+b1 \ # install a chinese font to support the use of tools like matplotlib && apt-get install -y fonts-noto-cjk \ && apt-get autoremove -y \ diff --git a/api/app.py b/api/app.py index c6a0829080..740ad413da 100644 --- a/api/app.py +++ b/api/app.py @@ -1,12 +1,8 @@ -from libs import version_utils - -# preparation before creating app -version_utils.check_supported_python_version() +import os +import sys def is_db_command(): - import sys - if len(sys.argv) > 1 and sys.argv[0].endswith("flask") and sys.argv[1] == "db": return True return False @@ -18,10 +14,22 @@ if is_db_command(): app = create_migrations_app() else: - from app_factory import create_app - from libs import threadings_utils + if os.environ.get("FLASK_DEBUG", "False") != "True": + from gevent import monkey # type: ignore - threadings_utils.apply_gevent_threading_patch() + # gevent + monkey.patch_all() + + from grpc.experimental import gevent as grpc_gevent # type: ignore + + # grpc gevent + grpc_gevent.init_gevent() + + import psycogreen.gevent # type: ignore + + psycogreen.gevent.patch_psycopg() + + from app_factory import create_app app = create_app() celery = app.extensions["celery"] diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 5865ddcc8b..dcfebd0a4e 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -823,6 +823,13 @@ class LoginConfig(BaseSettings): ) +class AccountConfig(BaseSettings): + ACCOUNT_DELETION_TOKEN_EXPIRY_MINUTES: PositiveInt = Field( + description="Duration in minutes for which an account deletion token remains valid", + default=5, + ) + + class FeatureConfig( # place the configs in alphabet order AppExecutionConfig, @@ -852,6 +859,7 @@ class FeatureConfig( WorkflowNodeExecutionConfig, WorkspaceConfig, LoginConfig, + AccountConfig, # hosted services config HostedServiceConfig, CeleryBeatConfig, diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index 291a6e5dd6..0cc5f31ddd 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -2,7 +2,7 @@ import json import logging from flask import abort, request -from flask_restful import Resource, marshal_with, reqparse # type: ignore +from flask_restful import Resource, inputs, marshal_with, reqparse # type: ignore from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services @@ -14,7 +14,7 @@ from controllers.console.wraps import account_initialization_required, setup_req from core.app.apps.base_app_queue_manager import
AppQueueManager from core.app.entities.app_invoke_entities import InvokeFrom from factories import variable_factory -from fields.workflow_fields import workflow_fields +from fields.workflow_fields import workflow_fields, workflow_pagination_fields from fields.workflow_run_fields import workflow_run_node_execution_fields from libs import helper from libs.helper import TimestampField, uuid_value @@ -474,6 +474,31 @@ class WorkflowConfigApi(Resource): } +class PublishedAllWorkflowApi(Resource): + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @marshal_with(workflow_pagination_fields) + def get(self, app_model: App): + """ + Get published workflows + """ + if not current_user.is_editor: + raise Forbidden() + + parser = reqparse.RequestParser() + parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") + parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") + args = parser.parse_args() + page = args.get("page") + limit = args.get("limit") + workflow_service = WorkflowService() + workflows, has_more = workflow_service.get_all_published_workflow(app_model=app_model, page=page, limit=limit) + + return {"items": workflows, "page": page, "limit": limit, "has_more": has_more} + + api.add_resource(DraftWorkflowApi, "/apps/<uuid:app_id>/workflows/draft") api.add_resource(WorkflowConfigApi, "/apps/<uuid:app_id>/workflows/draft/config") api.add_resource(AdvancedChatDraftWorkflowRunApi, "/apps/<uuid:app_id>/advanced-chat/workflows/draft/run") @@ -488,6 +513,7 @@ api.add_resource( WorkflowDraftRunIterationNodeApi, "/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run" ) api.add_resource(PublishedWorkflowApi, "/apps/<uuid:app_id>/workflows/publish") +api.add_resource(PublishedAllWorkflowApi, "/apps/<uuid:app_id>/workflows") api.add_resource(DefaultBlockConfigsApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs") api.add_resource( DefaultBlockConfigApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>" ) diff --git a/api/controllers/console/auth/error.py b/api/controllers/console/auth/error.py index e6e30c3c0b..8ef10c7bbb 100644 --- a/api/controllers/console/auth/error.py +++ b/api/controllers/console/auth/error.py @@ -53,3 +53,9 @@ class EmailCodeLoginRateLimitExceededError(BaseHTTPException): error_code = "email_code_login_rate_limit_exceeded" description = "Too many login emails have been sent. Please try again in 5 minutes." code = 429 + + +class EmailCodeAccountDeletionRateLimitExceededError(BaseHTTPException): + error_code = "email_code_account_deletion_rate_limit_exceeded" + description = "Too many account deletion emails have been sent. Please try again in 5 minutes."
+ code = 429 diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py index b9ce5d644d..dda50e1a5a 100644 --- a/api/controllers/console/auth/forgot_password.py +++ b/api/controllers/console/auth/forgot_password.py @@ -8,13 +8,8 @@ from sqlalchemy.orm import Session from constants.languages import languages from controllers.console import api -from controllers.console.auth.error import ( - EmailCodeError, - InvalidEmailError, - InvalidTokenError, - PasswordMismatchError, -) -from controllers.console.error import AccountNotFound, EmailSendIpLimitError +from controllers.console.auth.error import EmailCodeError, InvalidEmailError, InvalidTokenError, PasswordMismatchError +from controllers.console.error import AccountInFreezeError, AccountNotFound, EmailSendIpLimitError from controllers.console.wraps import setup_required from events.tenant_event import tenant_was_created from extensions.ext_database import db @@ -22,6 +17,7 @@ from libs.helper import email, extract_remote_ip from libs.password import hash_password, valid_password from models.account import Account from services.account_service import AccountService, TenantService +from services.errors.account import AccountRegisterError from services.errors.workspace import WorkSpaceNotAllowedCreateError from services.feature_service import FeatureService @@ -133,6 +129,8 @@ class ForgotPasswordResetApi(Resource): ) except WorkSpaceNotAllowedCreateError: pass + except AccountRegisterError as are: + raise AccountInFreezeError() return {"result": "success"} diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index 78a80fc8d7..41362e9fa2 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -5,6 +5,7 @@ from flask import request from flask_restful import Resource, reqparse # type: ignore import services +from configs import dify_config from constants.languages import languages from controllers.console import api from controllers.console.auth.error import ( @@ -16,6 +17,7 @@ from controllers.console.auth.error import ( ) from controllers.console.error import ( AccountBannedError, + AccountInFreezeError, AccountNotFound, EmailSendIpLimitError, NotAllowedCreateWorkspace, @@ -26,6 +28,8 @@ from libs.helper import email, extract_remote_ip from libs.password import valid_password from models.account import Account from services.account_service import AccountService, RegisterService, TenantService +from services.billing_service import BillingService +from services.errors.account import AccountRegisterError from services.errors.workspace import WorkSpaceNotAllowedCreateError from services.feature_service import FeatureService @@ -44,6 +48,9 @@ class LoginApi(Resource): parser.add_argument("language", type=str, required=False, default="en-US", location="json") args = parser.parse_args() + if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(args["email"]): + raise AccountInFreezeError() + is_login_error_rate_limit = AccountService.is_login_error_rate_limit(args["email"]) if is_login_error_rate_limit: raise EmailPasswordLoginLimitError() @@ -113,8 +120,10 @@ class ResetPasswordSendEmailApi(Resource): language = "zh-Hans" else: language = "en-US" - - account = AccountService.get_user_through_email(args["email"]) + try: + account = AccountService.get_user_through_email(args["email"]) + except AccountRegisterError as are: + raise AccountInFreezeError() if account is None: if 
FeatureService.get_system_features().is_allow_register: token = AccountService.send_reset_password_email(email=args["email"], language=language) @@ -142,8 +151,11 @@ class EmailCodeLoginSendEmailApi(Resource): language = "zh-Hans" else: language = "en-US" + try: + account = AccountService.get_user_through_email(args["email"]) + except AccountRegisterError as are: + raise AccountInFreezeError() - account = AccountService.get_user_through_email(args["email"]) if account is None: if FeatureService.get_system_features().is_allow_register: token = AccountService.send_email_code_login_email(email=args["email"], language=language) @@ -177,7 +189,10 @@ class EmailCodeLoginApi(Resource): raise EmailCodeError() AccountService.revoke_email_code_login_token(args["token"]) - account = AccountService.get_user_through_email(user_email) + try: + account = AccountService.get_user_through_email(user_email) + except AccountRegisterError as are: + raise AccountInFreezeError() if account: tenant = TenantService.get_join_tenants(account) if not tenant: @@ -196,6 +211,8 @@ class EmailCodeLoginApi(Resource): ) except WorkSpaceNotAllowedCreateError: return NotAllowedCreateWorkspace() + except AccountRegisterError as are: + raise AccountInFreezeError() token_pair = AccountService.login(account, ip_address=extract_remote_ip(request)) AccountService.reset_login_error_rate_limit(args["email"]) return {"result": "success", "data": token_pair.model_dump()} diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index 8e54da4ef6..f5284cc43b 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -18,7 +18,7 @@ from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo from models import Account from models.account import AccountStatus from services.account_service import AccountService, RegisterService, TenantService -from services.errors.account import AccountNotFoundError +from services.errors.account import AccountNotFoundError, AccountRegisterError from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkSpaceNotFoundError from services.feature_service import FeatureService @@ -101,6 +101,8 @@ class OAuthCallback(Resource): f"{dify_config.CONSOLE_WEB_URL}/signin" "?message=Workspace not found, please contact system admin to invite you to join in a workspace." ) + except AccountRegisterError as e: + return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message={e.description}") # Check account status if account.status == AccountStatus.BANNED.value: diff --git a/api/controllers/console/error.py b/api/controllers/console/error.py index 1b4e6deae6..ee87138a44 100644 --- a/api/controllers/console/error.py +++ b/api/controllers/console/error.py @@ -92,3 +92,12 @@ class UnauthorizedAndForceLogout(BaseHTTPException): error_code = "unauthorized_and_force_logout" description = "Unauthorized and force logout." code = 401 + + +class AccountInFreezeError(BaseHTTPException): + error_code = "account_in_freeze" + code = 400 + description = ( + "This email account has been deleted within the past 30 days" + " and is temporarily unavailable for new account registration."
+ ) diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index 96ed4b7a57..f1ec0f3d29 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -11,6 +11,7 @@ from controllers.console import api from controllers.console.workspace.error import ( AccountAlreadyInitedError, CurrentPasswordIncorrectError, + InvalidAccountDeletionCodeError, InvalidInvitationCodeError, RepeatPasswordNotMatchError, ) @@ -21,6 +22,7 @@ from libs.helper import TimestampField, timezone from libs.login import login_required from models import AccountIntegrate, InvitationCode from services.account_service import AccountService +from services.billing_service import BillingService from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError @@ -242,6 +244,54 @@ class AccountIntegrateApi(Resource): return {"data": integrate_data} +class AccountDeleteVerifyApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self): + account = current_user + + token, code = AccountService.generate_account_deletion_verification_code(account) + AccountService.send_account_deletion_verification_email(account, code) + + return {"result": "success", "data": token} + + +class AccountDeleteApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self): + account = current_user + + parser = reqparse.RequestParser() + parser.add_argument("token", type=str, required=True, location="json") + parser.add_argument("code", type=str, required=True, location="json") + args = parser.parse_args() + + if not AccountService.verify_account_deletion_code(args["token"], args["code"]): + raise InvalidAccountDeletionCodeError() + + AccountService.delete_account(account) + + return {"result": "success"} + + +class AccountDeleteUpdateFeedbackApi(Resource): + @setup_required + def post(self): + account = current_user + + parser = reqparse.RequestParser() + parser.add_argument("email", type=str, required=True, location="json") + parser.add_argument("feedback", type=str, required=True, location="json") + args = parser.parse_args() + + BillingService.update_account_deletion_feedback(args["email"], args["feedback"]) + + return {"result": "success"} + + # Register API resources api.add_resource(AccountInitApi, "/account/init") api.add_resource(AccountProfileApi, "/account/profile") @@ -252,5 +302,8 @@ api.add_resource(AccountInterfaceThemeApi, "/account/interface-theme") api.add_resource(AccountTimezoneApi, "/account/timezone") api.add_resource(AccountPasswordApi, "/account/password") api.add_resource(AccountIntegrateApi, "/account/integrates") +api.add_resource(AccountDeleteVerifyApi, "/account/delete/verify") +api.add_resource(AccountDeleteApi, "/account/delete") +api.add_resource(AccountDeleteUpdateFeedbackApi, "/account/delete/feedback") # api.add_resource(AccountEmailApi, '/account/email') # api.add_resource(AccountEmailVerifyApi, '/account/email-verify') diff --git a/api/controllers/console/workspace/error.py b/api/controllers/console/workspace/error.py index 9e13c7b924..8b70ca62b9 100644 --- a/api/controllers/console/workspace/error.py +++ b/api/controllers/console/workspace/error.py @@ -35,3 +35,9 @@ class AccountNotInitializedError(BaseHTTPException): error_code = "account_not_initialized" description = "The account has not been initialized yet. Please proceed with the initialization process first." 
code = 400 + + +class InvalidAccountDeletionCodeError(BaseHTTPException): + error_code = "invalid_account_deletion_code" + description = "Invalid account deletion code." + code = 400 diff --git a/api/controllers/console/workspace/members.py b/api/controllers/console/workspace/members.py index 1afb41ea87..a2b41c1d38 100644 --- a/api/controllers/console/workspace/members.py +++ b/api/controllers/console/workspace/members.py @@ -122,7 +122,7 @@ class MemberUpdateRoleApi(Resource): return {"code": "invalid-role", "message": "Invalid role"}, 400 member = db.session.get(Account, str(member_id)) - if member: + if not member: abort(404) try: diff --git a/api/core/agent/base_agent_runner.py b/api/core/agent/base_agent_runner.py index 548631c19e..69ec80e247 100644 --- a/api/core/agent/base_agent_runner.py +++ b/api/core/agent/base_agent_runner.py @@ -330,13 +330,13 @@ class BaseAgentRunner(AppRunner): if not updated_agent_thought: raise ValueError("agent thought not found") - if thought is not None: - updated_agent_thought.thought = thought + if thought: + updated_agent_thought.thought = thought - if tool_name is not None: - updated_agent_thought.tool = tool_name + if tool_name: + updated_agent_thought.tool = tool_name - if tool_input is not None: + if tool_input: if isinstance(tool_input, dict): try: tool_input = json.dumps(tool_input, ensure_ascii=False) @@ -345,7 +345,7 @@ class BaseAgentRunner(AppRunner): updated_agent_thought.tool_input = tool_input - if observation is not None: + if observation: if isinstance(observation, dict): try: observation = json.dumps(observation, ensure_ascii=False) @@ -354,8 +354,8 @@ class BaseAgentRunner(AppRunner): updated_agent_thought.observation = observation - if answer is not None: - updated_agent_thought.answer = answer + if answer: + updated_agent_thought.answer = answer if messages_ids is not None and len(messages_ids) > 0: updated_agent_thought.message_files = json.dumps(messages_ids) diff --git a/api/core/app/task_pipeline/workflow_cycle_manage.py b/api/core/app/task_pipeline/workflow_cycle_manage.py index 1a2e67f7e7..e218735eec 100644 --- a/api/core/app/task_pipeline/workflow_cycle_manage.py +++ b/api/core/app/task_pipeline/workflow_cycle_manage.py @@ -276,7 +276,7 @@ class WorkflowCycleManage: self, *, session: Session, workflow_run: WorkflowRun, event: QueueNodeStartedEvent ) -> WorkflowNodeExecution: workflow_node_execution = WorkflowNodeExecution() - workflow_node_execution.id = event.node_execution_id + workflow_node_execution.id = str(uuid4()) workflow_node_execution.tenant_id = workflow_run.tenant_id workflow_node_execution.app_id = workflow_run.app_id workflow_node_execution.workflow_id = workflow_run.workflow_id @@ -392,7 +392,7 @@ class WorkflowCycleManage: execution_metadata = json.dumps(merged_metadata) workflow_node_execution = WorkflowNodeExecution() - workflow_node_execution.id = event.node_execution_id + workflow_node_execution.id = str(uuid4()) workflow_node_execution.tenant_id = workflow_run.tenant_id workflow_node_execution.app_id = workflow_run.app_id workflow_node_execution.workflow_id = workflow_run.workflow_id @@ -825,7 +825,7 @@ class WorkflowCycleManage: return workflow_run def _get_workflow_node_execution(self, session: Session, node_execution_id: str) -> WorkflowNodeExecution: - stmt = select(WorkflowNodeExecution).where(WorkflowNodeExecution.id == node_execution_id) + stmt = select(WorkflowNodeExecution).where(WorkflowNodeExecution.node_execution_id == node_execution_id) workflow_node_execution = session.scalar(stmt) if not workflow_node_execution:
raise WorkflowNodeExecutionNotFoundError(node_execution_id) diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-lite-pro-128k.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-lite-pro-128k.yaml new file mode 100644 index 0000000000..4f5832c859 --- /dev/null +++ b/api/core/model_runtime/model_providers/wenxin/llm/ernie-lite-pro-128k.yaml @@ -0,0 +1,42 @@ +model: ernie-lite-pro-128k +label: + en_US: Ernie-Lite-Pro-128K +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 128000 +parameter_rules: + - name: temperature + use_template: temperature + min: 0.1 + max: 1.0 + default: 0.8 + - name: top_p + use_template: top_p + - name: min_output_tokens + label: + en_US: "Min Output Tokens" + zh_Hans: "最小输出Token数" + use_template: max_tokens + min: 2 + max: 2048 + help: + zh_Hans: 指定模型最小输出token数 + en_US: Specifies the lower limit on the length of generated results. + - name: max_output_tokens + label: + en_US: "Max Output Tokens" + zh_Hans: "最大输出Token数" + use_template: max_tokens + min: 2 + max: 2048 + default: 2048 + help: + zh_Hans: 指定模型最大输出token数 + en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. + - name: presence_penalty + use_template: presence_penalty + - name: frequency_penalty + use_template: frequency_penalty diff --git a/api/core/rag/extractor/notion_extractor.py b/api/core/rag/extractor/notion_extractor.py index fdc2e46d14..41355d3fac 100644 --- a/api/core/rag/extractor/notion_extractor.py +++ b/api/core/rag/extractor/notion_extractor.py @@ -138,17 +138,24 @@ class NotionExtractor(BaseExtractor): block_url = BLOCK_CHILD_URL_TMPL.format(block_id=page_id) while True: query_dict: dict[str, Any] = {} if not start_cursor else {"start_cursor": start_cursor} - res = requests.request( - "GET", - block_url, - headers={ - "Authorization": "Bearer " + self._notion_access_token, - "Content-Type": "application/json", - "Notion-Version": "2022-06-28", - }, - params=query_dict, - ) - data = res.json() + try: + res = requests.request( + "GET", + block_url, + headers={ + "Authorization": "Bearer " + self._notion_access_token, + "Content-Type": "application/json", + "Notion-Version": "2022-06-28", + }, + params=query_dict, + ) + if res.status_code != 200: + raise ValueError(f"Error fetching Notion block data: {res.text}") + data = res.json() + except requests.RequestException as e: + raise ValueError("Error fetching Notion block data") from e + if "results" not in data or not isinstance(data["results"], list): + raise ValueError("Error fetching Notion block data") for result in data["results"]: result_type = result["type"] result_obj = result[result_type] diff --git a/api/core/tools/errors.py b/api/core/tools/errors.py index 6febf137b0..c5f9ca4774 100644 --- a/api/core/tools/errors.py +++ b/api/core/tools/errors.py @@ -31,3 +31,7 @@ class ToolApiSchemaError(ValueError): class ToolEngineInvokeError(Exception): meta: ToolInvokeMeta + + def __init__(self, meta, **kwargs): + self.meta = meta + super().__init__(**kwargs) diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py index b9238d11a0..38ab6e4bba 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -2,12 +2,14 @@ import json import logging from collections.abc import Generator from typing import Any, Optional, Union +from typing import cast from core.file import FILE_MODEL_IDENTITY, File, 
FileTransferMethod from core.tools.__base.tool import Tool from core.tools.__base.tool_runtime import ToolRuntime from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolParameter, ToolProviderType from extensions.ext_database import db +from factories.file_factory import build_from_mapping from models.account import Account from models.model import App, EndUser from models.workflow import Workflow @@ -222,10 +224,18 @@ class WorkflowTool(Tool): if isinstance(value, list): for item in value: if isinstance(item, dict) and item.get("dify_model_identity") == FILE_MODEL_IDENTITY: - file = File.model_validate(item) + item["tool_file_id"] = item.get("related_id") + file = build_from_mapping( + mapping=item, + tenant_id=str(cast(ToolRuntime, self.runtime).tenant_id), + ) files.append(file) elif isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY: - file = File.model_validate(value) + value["tool_file_id"] = value.get("related_id") + file = build_from_mapping( + mapping=value, + tenant_id=str(cast(ToolRuntime, self.runtime).tenant_id), + ) files.append(file) result[key] = value diff --git a/api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py b/api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py index 408ed31096..14396e9920 100644 --- a/api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py +++ b/api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py @@ -15,11 +15,11 @@ def handle(sender, **kwargs): app_dataset_joins = db.session.query(AppDatasetJoin).filter(AppDatasetJoin.app_id == app.id).all() - removed_dataset_ids: set[int] = set() + removed_dataset_ids: set[str] = set() if not app_dataset_joins: added_dataset_ids = dataset_ids else: - old_dataset_ids: set[int] = set() + old_dataset_ids: set[str] = set() old_dataset_ids.update(app_dataset_join.dataset_id for app_dataset_join in app_dataset_joins) added_dataset_ids = dataset_ids - old_dataset_ids @@ -39,8 +39,8 @@ def handle(sender, **kwargs): db.session.commit() -def get_dataset_ids_from_model_config(app_model_config: AppModelConfig) -> set[int]: - dataset_ids: set[int] = set() +def get_dataset_ids_from_model_config(app_model_config: AppModelConfig) -> set[str]: + dataset_ids: set[str] = set() if not app_model_config: return dataset_ids diff --git a/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py b/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py index 7a31c82f6a..dd2efed94b 100644 --- a/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py +++ b/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py @@ -17,11 +17,11 @@ def handle(sender, **kwargs): dataset_ids = get_dataset_ids_from_workflow(published_workflow) app_dataset_joins = db.session.query(AppDatasetJoin).filter(AppDatasetJoin.app_id == app.id).all() - removed_dataset_ids: set[int] = set() + removed_dataset_ids: set[str] = set() if not app_dataset_joins: added_dataset_ids = dataset_ids else: - old_dataset_ids: set[int] = set() + old_dataset_ids: set[str] = set() old_dataset_ids.update(app_dataset_join.dataset_id for app_dataset_join in app_dataset_joins) added_dataset_ids = dataset_ids - old_dataset_ids @@ -41,8 +41,8 @@ def handle(sender, **kwargs): db.session.commit() -def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[int]: - dataset_ids: 
set[int] = set() +def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[str]: + dataset_ids: set[str] = set() graph = published_workflow.graph_dict if not graph: return dataset_ids @@ -60,7 +60,7 @@ def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[int]: for node in knowledge_retrieval_nodes: try: node_data = KnowledgeRetrievalNodeData(**node.get("data", {})) - dataset_ids.update(int(dataset_id) for dataset_id in node_data.dataset_ids) + dataset_ids.update(dataset_id for dataset_id in node_data.dataset_ids) except Exception as e: continue diff --git a/api/fields/workflow_fields.py b/api/fields/workflow_fields.py index bd093d4063..32f979a5f2 100644 --- a/api/fields/workflow_fields.py +++ b/api/fields/workflow_fields.py @@ -45,6 +45,7 @@ workflow_fields = { "graph": fields.Raw(attribute="graph_dict"), "features": fields.Raw(attribute="features_dict"), "hash": fields.String(attribute="unique_hash"), + "version": fields.String(attribute="version"), "created_by": fields.Nested(simple_account_fields, attribute="created_by_account"), "created_at": TimestampField, "updated_by": fields.Nested(simple_account_fields, attribute="updated_by_account", allow_null=True), @@ -61,3 +62,10 @@ workflow_partial_fields = { "updated_by": fields.String, "updated_at": TimestampField, } + +workflow_pagination_fields = { + "items": fields.List(fields.Nested(workflow_fields), attribute="items"), + "page": fields.Integer, + "limit": fields.Integer(attribute="limit"), + "has_more": fields.Boolean(attribute="has_more"), +} diff --git a/api/libs/threadings_utils.py b/api/libs/threadings_utils.py deleted file mode 100644 index e4d63fd314..0000000000 --- a/api/libs/threadings_utils.py +++ /dev/null @@ -1,19 +0,0 @@ -from configs import dify_config - - -def apply_gevent_threading_patch(): - """ - Run threading patch by gevent - to make standard library threading compatible. - Patching should be done as early as possible in the lifecycle of the program. - :return: - """ - if not dify_config.DEBUG: - from gevent import monkey # type: ignore - from grpc.experimental import gevent as grpc_gevent # type: ignore - - # gevent - monkey.patch_all() - - # grpc gevent - grpc_gevent.init_gevent() diff --git a/api/libs/version_utils.py b/api/libs/version_utils.py deleted file mode 100644 index 10edf8a058..0000000000 --- a/api/libs/version_utils.py +++ /dev/null @@ -1,12 +0,0 @@ -import sys - - -def check_supported_python_version(): - python_version = sys.version_info - if not ((3, 11) <= python_version < (3, 13)): - print( - "Aborted to launch the service " - f" with unsupported Python version {python_version.major}.{python_version.minor}." - " Please ensure Python 3.11 or 3.12." - ) - raise SystemExit(1) diff --git a/api/migrations/README b/api/migrations/README index 220678df7a..0e04844159 100644 --- a/api/migrations/README +++ b/api/migrations/README @@ -1,2 +1 @@ Single-database configuration for Flask. - diff --git a/api/poetry.lock b/api/poetry.lock index b42eb22dd4..435a8f38a2 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. 
[[package]] name = "aiofiles" @@ -955,10 +955,6 @@ files = [ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"}, {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, @@ -971,14 +967,8 @@ files = [ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"}, {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, @@ -989,24 
+979,8 @@ files = [ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"}, {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, - {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"}, - {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"}, - {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"}, - {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"}, {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, @@ -1016,10 +990,6 @@ files = [ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"}, {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, @@ -1031,10 +1001,6 @@ files = [ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"}, {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, @@ -1047,10 +1013,6 @@ files = [ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"}, {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, @@ -1063,10 +1025,6 @@ files = [ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"}, {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, @@ -7000,6 +6958,16 @@ files = [ dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] test = ["pytest", "pytest-xdist", "setuptools"] +[[package]] +name = "psycogreen" +version = "1.0.2" +description = "psycopg2 integration with coroutine libraries" +optional = false +python-versions = "*" +files = [ + {file = "psycogreen-1.0.2.tar.gz", hash = "sha256:c429845a8a49cf2f76b71265008760bcd7c7c77d80b806db4dc81116dbcd130d"}, +] + [[package]] name = "psycopg2-binary" version = "2.9.10" @@ -11173,4 +11141,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.13" -content-hash = "f4accd01805cbf080c4c5295f97a06c8e4faec7365d2c43d0435e56b46461732" +content-hash = "8b2b1bbc4d9c1d47f126775ea587ee116956df29f500534cb87f512402856e05" diff --git a/api/pyproject.toml b/api/pyproject.toml index 28e0305406..72f0510949 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -61,6 +61,7 @@ openai = "~1.52.0" openpyxl = "~3.1.5" pandas = { version = "~2.2.2", extras = ["performance", "excel"] } pandas-stubs = "~2.2.3.241009" +psycogreen = "~1.0.2" 
psycopg2-binary = "~2.9.6" pycryptodome = "3.19.1" pydantic = "~2.9.2" diff --git a/api/services/account_service.py b/api/services/account_service.py index 214fb88995..9d2a9dfb77 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -33,6 +33,7 @@ from models.account import ( TenantStatus, ) from models.model import DifySetup +from services.billing_service import BillingService from services.errors.account import ( AccountAlreadyInTenantError, AccountLoginError, @@ -51,6 +52,8 @@ from services.errors.account import ( ) from services.errors.workspace import WorkSpaceNotAllowedCreateError from services.feature_service import FeatureService +from tasks.delete_account_task import delete_account_task +from tasks.mail_account_deletion_task import send_account_deletion_verification_code from tasks.mail_email_code_login import send_email_code_login_mail_task from tasks.mail_invite_member_task import send_invite_member_mail_task from tasks.mail_reset_password_task import send_reset_password_mail_task @@ -71,6 +74,9 @@ class AccountService: email_code_login_rate_limiter = RateLimiter( prefix="email_code_login_rate_limit", max_attempts=1, time_window=60 * 1 ) + email_code_account_deletion_rate_limiter = RateLimiter( + prefix="email_code_account_deletion_rate_limit", max_attempts=1, time_window=60 * 1 + ) LOGIN_MAX_ERROR_LIMITS = 5 @staticmethod @@ -202,6 +208,15 @@ class AccountService: from controllers.console.error import AccountNotFound raise AccountNotFound() + + if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(email): + raise AccountRegisterError( + description=( + "This email account has been deleted within the past " + "30 days and is temporarily unavailable for new account registration" + ) + ) + account = Account() account.email = email account.name = name @@ -241,6 +256,42 @@ class AccountService: return account + @staticmethod + def generate_account_deletion_verification_code(account: Account) -> tuple[str, str]: + code = "".join([str(random.randint(0, 9)) for _ in range(6)]) + token = TokenManager.generate_token( + account=account, token_type="account_deletion", additional_data={"code": code} + ) + return token, code + + @classmethod + def send_account_deletion_verification_email(cls, account: Account, code: str): + email = account.email + if cls.email_code_account_deletion_rate_limiter.is_rate_limited(email): + from controllers.console.auth.error import EmailCodeAccountDeletionRateLimitExceededError + + raise EmailCodeAccountDeletionRateLimitExceededError() + + send_account_deletion_verification_code.delay(to=email, code=code) + + cls.email_code_account_deletion_rate_limiter.increment_rate_limit(email) + + @staticmethod + def verify_account_deletion_code(token: str, code: str) -> bool: + token_data = TokenManager.get_token_data(token, "account_deletion") + if token_data is None: + return False + + if token_data["code"] != code: + return False + + return True + + @staticmethod + def delete_account(account: Account) -> None: + """Delete account. 
This method only adds a task to the queue for deletion.""" + delete_account_task.delay(account.id) + @staticmethod def link_account_integrate(provider: str, open_id: str, account: Account) -> None: """Link account integrate""" @@ -380,6 +431,7 @@ class AccountService: def send_email_code_login_email( cls, account: Optional[Account] = None, email: Optional[str] = None, language: Optional[str] = "en-US" ): + email = account.email if account else email if email is None: raise ValueError("Email must be provided.") if cls.email_code_login_rate_limiter.is_rate_limited(email): @@ -409,6 +461,14 @@ class AccountService: @classmethod def get_user_through_email(cls, email: str): + if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(email): + raise AccountRegisterError( + description=( + "This email account has been deleted within the past " + "30 days and is temporarily unavailable for new account registration" + ) + ) + account = db.session.query(Account).filter(Account.email == email).first() if not account: return None @@ -825,6 +885,10 @@ class RegisterService: db.session.commit() except WorkSpaceNotAllowedCreateError: db.session.rollback() + except AccountRegisterError as are: + db.session.rollback() + logging.exception("Register failed") + raise are except Exception as e: db.session.rollback() logging.exception("Register failed") diff --git a/api/services/audio_service.py b/api/services/audio_service.py index ef52301c0a..f4178a69a4 100644 --- a/api/services/audio_service.py +++ b/api/services/audio_service.py @@ -139,7 +139,7 @@ class AudioService: return Response(stream_with_context(response), content_type="audio/mpeg") return response else: - if not text: + if text is None: raise ValueError("Text is required") response = invoke_tts(text, app_model, voice) if isinstance(response, Generator): diff --git a/api/services/billing_service.py b/api/services/billing_service.py index ed611a8be4..3a13c10102 100644 --- a/api/services/billing_service.py +++ b/api/services/billing_service.py @@ -70,3 +70,24 @@ class BillingService: if not TenantAccountRole.is_privileged_role(join.role): raise ValueError("Only team owner or team admin can perform this action") + + @classmethod + def delete_account(cls, account_id: str): + """Delete account.""" + params = {"account_id": account_id} + return cls._send_request("DELETE", "/account/", params=params) + + @classmethod + def is_email_in_freeze(cls, email: str) -> bool: + params = {"email": email} + try: + response = cls._send_request("GET", "/account/in-freeze", params=params) + return bool(response.get("data", False)) + except Exception: + return False + + @classmethod + def update_account_deletion_feedback(cls, email: str, feedback: str): + """Update account deletion feedback.""" + json = {"email": email, "feedback": feedback} + return cls._send_request("POST", "/account/delete-feedback", json=json) diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index b7ddd14025..7f32925470 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -86,25 +86,30 @@ class DatasetService: else: return [], 0 else: - # show all datasets that the user has permission to access - if permitted_dataset_ids: - query = query.filter( - db.or_( - Dataset.permission == DatasetPermissionEnum.ALL_TEAM, - db.and_(Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id), - db.and_( - Dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM, - Dataset.id.in_(permitted_dataset_ids), - ), + if 
user.current_role not in (TenantAccountRole.OWNER, TenantAccountRole.ADMIN): + # show all datasets that the user has permission to access + if permitted_dataset_ids: + query = query.filter( + db.or_( + Dataset.permission == DatasetPermissionEnum.ALL_TEAM, + db.and_( + Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id + ), + db.and_( + Dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM, + Dataset.id.in_(permitted_dataset_ids), + ), + ) ) - ) - else: - query = query.filter( - db.or_( - Dataset.permission == DatasetPermissionEnum.ALL_TEAM, - db.and_(Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id), + else: + query = query.filter( + db.or_( + Dataset.permission == DatasetPermissionEnum.ALL_TEAM, + db.and_( + Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id + ), + ) ) - ) else: # if no user, only show datasets that are shared with all team members query = query.filter(Dataset.permission == DatasetPermissionEnum.ALL_TEAM) @@ -377,14 +382,19 @@ class DatasetService: if dataset.tenant_id != user.current_tenant_id: logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}") raise NoPermissionError("You do not have permission to access this dataset.") - if dataset.permission == DatasetPermissionEnum.ONLY_ME and dataset.created_by != user.id: - logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}") - raise NoPermissionError("You do not have permission to access this dataset.") - if dataset.permission == "partial_members": - user_permission = DatasetPermission.query.filter_by(dataset_id=dataset.id, account_id=user.id).first() - if not user_permission and dataset.tenant_id != user.current_tenant_id and dataset.created_by != user.id: + if user.current_role not in (TenantAccountRole.OWNER, TenantAccountRole.ADMIN): + if dataset.permission == DatasetPermissionEnum.ONLY_ME and dataset.created_by != user.id: logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}") raise NoPermissionError("You do not have permission to access this dataset.") + if dataset.permission == "partial_members": + user_permission = DatasetPermission.query.filter_by(dataset_id=dataset.id, account_id=user.id).first() + if ( + not user_permission + and dataset.tenant_id != user.current_tenant_id + and dataset.created_by != user.id + ): + logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}") + raise NoPermissionError("You do not have permission to access this dataset.") @staticmethod def check_dataset_operator_permission(user: Optional[Account] = None, dataset: Optional[Dataset] = None): @@ -394,15 +404,16 @@ class DatasetService: if not user: raise ValueError("User not found") - if dataset.permission == DatasetPermissionEnum.ONLY_ME: - if dataset.created_by != user.id: - raise NoPermissionError("You do not have permission to access this dataset.") + if user.current_role not in (TenantAccountRole.OWNER, TenantAccountRole.ADMIN): + if dataset.permission == DatasetPermissionEnum.ONLY_ME: + if dataset.created_by != user.id: + raise NoPermissionError("You do not have permission to access this dataset.") - elif dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM: - if not any( - dp.dataset_id == dataset.id for dp in DatasetPermission.query.filter_by(account_id=user.id).all() - ): - raise NoPermissionError("You do not have permission to access this dataset.") + elif dataset.permission == 
DatasetPermissionEnum.PARTIAL_TEAM: + if not any( + dp.dataset_id == dataset.id for dp in DatasetPermission.query.filter_by(account_id=user.id).all() + ): + raise NoPermissionError("You do not have permission to access this dataset.") @staticmethod def get_dataset_queries(dataset_id: str, page: int, per_page: int): @@ -441,7 +452,7 @@ class DatasetService: class DocumentService: - DEFAULT_RULES = { + DEFAULT_RULES: dict[str, Any] = { "mode": "custom", "rules": { "pre_processing_rules": [ @@ -455,7 +466,7 @@ class DocumentService: }, } - DOCUMENT_METADATA_SCHEMA = { + DOCUMENT_METADATA_SCHEMA: dict[str, Any] = { "book": { "title": str, "language": str, diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index 6f848d49c4..f1156feafb 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -439,7 +439,7 @@ class ApiToolManageService: tenant_id=tenant_id, ) ) - result = tool.validate_credentials(credentials, parameters) + result = runtime_tool.validate_credentials(credentials, parameters) except Exception as e: return {"error": str(e)} diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index 2de3d0ac55..20580e3815 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -5,6 +5,8 @@ from datetime import UTC, datetime from typing import Any, Optional from uuid import uuid4 +from sqlalchemy import desc + from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager from core.model_runtime.utils.encoders import jsonable_encoder @@ -77,6 +79,28 @@ class WorkflowService: return workflow + def get_all_published_workflow(self, app_model: App, page: int, limit: int) -> tuple[list[Workflow], bool]: + """ + Get published workflow with pagination + """ + if not app_model.workflow_id: + return [], False + + workflows = ( + db.session.query(Workflow) + .filter(Workflow.app_id == app_model.id) + .order_by(desc(Workflow.version)) + .offset((page - 1) * limit) + .limit(limit + 1) + .all() + ) + + has_more = len(workflows) > limit + if has_more: + workflows = workflows[:-1] + + return workflows, has_more + def sync_draft_workflow( self, *, diff --git a/api/tasks/add_document_to_index_task.py b/api/tasks/add_document_to_index_task.py index 9a172b2d9d..bd7fcdadea 100644 --- a/api/tasks/add_document_to_index_task.py +++ b/api/tasks/add_document_to_index_task.py @@ -38,7 +38,11 @@ def add_document_to_index_task(dataset_document_id: str): try: segments = ( db.session.query(DocumentSegment) - .filter(DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == True) + .filter( + DocumentSegment.document_id == dataset_document.id, + DocumentSegment.enabled == False, + DocumentSegment.status == "completed", + ) .order_by(DocumentSegment.position.asc()) .all() ) @@ -85,6 +89,16 @@ def add_document_to_index_task(dataset_document_id: str): db.session.query(DatasetAutoDisableLog).filter( DatasetAutoDisableLog.document_id == dataset_document.id ).delete() + + # update segment to enable + db.session.query(DocumentSegment).filter(DocumentSegment.document_id == dataset_document.id).update( + { + DocumentSegment.enabled: True, + DocumentSegment.disabled_at: None, + DocumentSegment.disabled_by: None, + DocumentSegment.updated_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + } + ) db.session.commit() end_at = 
diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py
index 6f848d49c4..f1156feafb 100644
--- a/api/services/tools/api_tools_manage_service.py
+++ b/api/services/tools/api_tools_manage_service.py
@@ -439,7 +439,7 @@ class ApiToolManageService:
                     tenant_id=tenant_id,
                 )
             )
-            result = tool.validate_credentials(credentials, parameters)
+            result = runtime_tool.validate_credentials(credentials, parameters)
         except Exception as e:
             return {"error": str(e)}

diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py
index 2de3d0ac55..20580e3815 100644
--- a/api/services/workflow_service.py
+++ b/api/services/workflow_service.py
@@ -5,6 +5,8 @@ from datetime import UTC, datetime
 from typing import Any, Optional
 from uuid import uuid4

+from sqlalchemy import desc
+
 from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
 from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
 from core.model_runtime.utils.encoders import jsonable_encoder
@@ -77,6 +79,28 @@ class WorkflowService:

         return workflow

+    def get_all_published_workflow(self, app_model: App, page: int, limit: int) -> tuple[list[Workflow], bool]:
+        """
+        Get published workflows with pagination
+        """
+        if not app_model.workflow_id:
+            return [], False
+
+        workflows = (
+            db.session.query(Workflow)
+            .filter(Workflow.app_id == app_model.id)
+            .order_by(desc(Workflow.version))
+            .offset((page - 1) * limit)
+            .limit(limit + 1)
+            .all()
+        )
+
+        has_more = len(workflows) > limit
+        if has_more:
+            workflows = workflows[:-1]
+
+        return workflows, has_more
+
     def sync_draft_workflow(
         self,
         *,
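The `limit + 1` query in `get_all_published_workflow` is the standard has-more pagination trick: fetch one row past the page size and use the overflow row purely as a signal that another page exists. A self-contained illustration of the same logic on a plain list:

```python
def paginate(items: list, page: int, limit: int) -> tuple[list, bool]:
    """Fetch limit + 1 rows; the extra row signals that another page exists."""
    offset = (page - 1) * limit
    # query analogue: .offset(offset).limit(limit + 1)
    window = items[offset : offset + limit + 1]
    has_more = len(window) > limit
    return (window[:limit], has_more)


assert paginate(list(range(45)), page=3, limit=20) == ([40, 41, 42, 43, 44], False)
assert paginate(list(range(45)), page=1, limit=20)[1] is True
```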
diff --git a/api/tasks/add_document_to_index_task.py b/api/tasks/add_document_to_index_task.py
index 9a172b2d9d..bd7fcdadea 100644
--- a/api/tasks/add_document_to_index_task.py
+++ b/api/tasks/add_document_to_index_task.py
@@ -38,7 +38,11 @@ def add_document_to_index_task(dataset_document_id: str):
     try:
         segments = (
             db.session.query(DocumentSegment)
-            .filter(DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == True)
+            .filter(
+                DocumentSegment.document_id == dataset_document.id,
+                DocumentSegment.enabled == False,
+                DocumentSegment.status == "completed",
+            )
             .order_by(DocumentSegment.position.asc())
             .all()
         )
@@ -85,6 +89,16 @@ def add_document_to_index_task(dataset_document_id: str):
         db.session.query(DatasetAutoDisableLog).filter(
             DatasetAutoDisableLog.document_id == dataset_document.id
         ).delete()
+
+        # update segment to enable
+        db.session.query(DocumentSegment).filter(DocumentSegment.document_id == dataset_document.id).update(
+            {
+                DocumentSegment.enabled: True,
+                DocumentSegment.disabled_at: None,
+                DocumentSegment.disabled_by: None,
+                DocumentSegment.updated_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+            }
+        )
         db.session.commit()

         end_at = time.perf_counter()
diff --git a/api/tasks/delete_account_task.py b/api/tasks/delete_account_task.py
new file mode 100644
index 0000000000..52c884ca29
--- /dev/null
+++ b/api/tasks/delete_account_task.py
@@ -0,0 +1,26 @@
+import logging
+
+from celery import shared_task  # type: ignore
+
+from extensions.ext_database import db
+from models.account import Account
+from services.billing_service import BillingService
+from tasks.mail_account_deletion_task import send_deletion_success_task
+
+logger = logging.getLogger(__name__)
+
+
+@shared_task(queue="dataset")
+def delete_account_task(account_id):
+    # bail out before touching billing if the account does not exist
+    account = db.session.query(Account).filter(Account.id == account_id).first()
+    if not account:
+        logger.error(f"Account {account_id} not found.")
+        return
+
+    try:
+        BillingService.delete_account(account_id)
+    except Exception:
+        logger.exception(f"Failed to delete account {account_id} from billing service.")
+        raise
+
+    # send success email
+    send_deletion_success_task.delay(account.email)
diff --git a/api/tasks/mail_account_deletion_task.py b/api/tasks/mail_account_deletion_task.py
new file mode 100644
index 0000000000..49a3a6d280
--- /dev/null
+++ b/api/tasks/mail_account_deletion_task.py
@@ -0,0 +1,70 @@
+import logging
+import time
+
+import click
+from celery import shared_task  # type: ignore
+from flask import render_template
+
+from extensions.ext_mail import mail
+
+
+@shared_task(queue="mail")
+def send_deletion_success_task(to):
+    """Send email to user confirming successful account deletion.
+
+    Args:
+        to (str): Recipient email address
+    """
+    if not mail.is_inited():
+        return
+
+    logging.info(click.style(f"Start sending account deletion success email to {to}", fg="green"))
+    start_at = time.perf_counter()
+
+    try:
+        html_content = render_template(
+            "delete_account_success_template_en-US.html",
+            to=to,
+            email=to,
+        )
+        mail.send(to=to, subject="Your Dify.AI Account Has Been Successfully Deleted", html=html_content)
+
+        end_at = time.perf_counter()
+        logging.info(
+            click.style(
+                "Send account deletion success email to {} succeeded: latency: {}".format(to, end_at - start_at),
+                fg="green",
+            )
+        )
+    except Exception:
+        logging.exception("Send account deletion success email to {} failed".format(to))
+
+
+@shared_task(queue="mail")
+def send_account_deletion_verification_code(to, code):
+    """Send account deletion verification code email to user.
+
+    Args:
+        to (str): Recipient email address
+        code (str): Verification code
+    """
+    if not mail.is_inited():
+        return
+
+    logging.info(click.style(f"Start sending account deletion verification code email to {to}", fg="green"))
+    start_at = time.perf_counter()
+
+    try:
+        html_content = render_template("delete_account_code_email_template_en-US.html", to=to, code=code)
+        mail.send(to=to, subject="Dify.AI Account Deletion and Verification", html=html_content)
+
+        end_at = time.perf_counter()
+        logging.info(
+            click.style(
+                "Send account deletion verification code email to {} succeeded: latency: {}".format(
+                    to, end_at - start_at
+                ),
+                fg="green",
+            )
+        )
+    except Exception:
+        logging.exception("Send account deletion verification code email to {} failed".format(to))
diff --git a/api/tasks/remove_document_from_index_task.py b/api/tasks/remove_document_from_index_task.py
index 1d580b3802..d0c4382f58 100644
--- a/api/tasks/remove_document_from_index_task.py
+++ b/api/tasks/remove_document_from_index_task.py
@@ -1,3 +1,4 @@
+import datetime
 import logging
 import time

@@ -46,6 +47,16 @@ def remove_document_from_index_task(document_id: str):
             index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False)
         except Exception:
             logging.exception(f"clean dataset {dataset.id} from index failed")
+        # update segment to disable
+        db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).update(
+            {
+                DocumentSegment.enabled: False,
+                DocumentSegment.disabled_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+                DocumentSegment.disabled_by: document.disabled_by,
+                DocumentSegment.updated_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+            }
+        )
+        db.session.commit()

         end_at = time.perf_counter()
         logging.info(
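The two index tasks above now mirror each other: a single bulk UPDATE flips `enabled` for every segment of the document instead of loading rows one by one, writing naive-UTC timestamps via `datetime.now(UTC).replace(tzinfo=None)`. A sketch of the shared pattern factored into one helper; `set_segments_enabled` is hypothetical, not part of the diff, and the imports assume the same module layout the tasks themselves use:

```python
import datetime

from extensions.ext_database import db
from models.dataset import DocumentSegment


def set_segments_enabled(document_id: str, enabled: bool, disabled_by: str | None = None) -> None:
    """Bulk-update all segments of a document in one UPDATE statement (illustrative helper, not in the diff)."""
    # naive UTC, matching datetime.now(UTC).replace(tzinfo=None) in the tasks above
    now = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
    db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).update(
        {
            DocumentSegment.enabled: enabled,
            DocumentSegment.disabled_at: None if enabled else now,
            DocumentSegment.disabled_by: None if enabled else disabled_by,
            DocumentSegment.updated_at: now,
        }
    )
    db.session.commit()
```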
diff --git a/api/templates/delete_account_code_email_template_en-US.html b/api/templates/delete_account_code_email_template_en-US.html
new file mode 100644
index 0000000000..7707385334
--- /dev/null
+++ b/api/templates/delete_account_code_email_template_en-US.html
@@ -0,0 +1,125 @@
+<!-- HTML boilerplate and inline styles not recoverable from this extraction; recovered text content follows -->
+Dify Logo
+
+Dify.AI Account Deletion and Verification
+
+We received a request to delete your Dify account. To ensure the security of your account and
+confirm this action, please use the verification code below:
+
+{{code}}
+
+To complete the account deletion process:
+1. Return to the account deletion page on our website
+2. Enter the verification code above
+3. Click "Confirm Deletion"
+
+Please note:
+• This code is valid for 5 minutes
+• If you are the Owner of any workspaces, those workspaces will be scheduled for permanent deletion.
+• All your user data will be queued for permanent deletion.
\ No newline at end of file
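For reviewers who want to poke at the template above: `{{code}}` is a plain Jinja placeholder, and the mail task earlier in this diff fills it via Flask's `render_template`. A minimal standalone rendering sketch; the `template_folder` path is an assumption about where the script is run from, not something the diff specifies:

```python
from flask import Flask, render_template

# point template_folder at the directory holding the template (path illustrative)
app = Flask(__name__, template_folder="api/templates")

with app.app_context():
    html = render_template(
        "delete_account_code_email_template_en-US.html",
        to="user@example.com",
        code="123456",
    )
    assert "123456" in html  # the Jinja placeholder was substituted
```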
diff --git a/api/templates/delete_account_success_template_en-US.html b/api/templates/delete_account_success_template_en-US.html
new file mode 100644
index 0000000000..c5df75cabc
--- /dev/null
+++ b/api/templates/delete_account_success_template_en-US.html
@@ -0,0 +1,105 @@
+<!-- HTML boilerplate and inline styles not recoverable from this extraction; recovered text content follows -->
+Dify Logo
+
+Your Dify.AI Account Has Been Successfully Deleted
+
+We're writing to confirm that your Dify.AI account has been successfully deleted at your request. Your
+account is no longer accessible, and you can't log in using your previous credentials. If you decide to use
+Dify.AI services in the future, you'll need to create a new account after 30 days. We appreciate the time you
+spent with Dify.AI and are sorry to see you go. If you have any questions or concerns about the deletion process,
+please don't hesitate to reach out to our support team.
+
+Thank you for being a part of the Dify.AI community.
+
+Best regards,
+
+Dify.AI Team
\ No newline at end of file
diff --git a/api/tests/integration_tests/vdb/baidu/test_baidu.py b/api/tests/integration_tests/vdb/baidu/test_baidu.py
index 5dc2ce4f82..25989958d9 100644
--- a/api/tests/integration_tests/vdb/baidu/test_baidu.py
+++ b/api/tests/integration_tests/vdb/baidu/test_baidu.py
@@ -1,5 +1,3 @@
-from unittest.mock import MagicMock
-
 from core.rag.datasource.vdb.baidu.baidu_vector import BaiduConfig, BaiduVector
 from tests.integration_tests.vdb.__mock.baiduvectordb import setup_baiduvectordb_mock
 from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis
diff --git a/api/tests/integration_tests/vdb/tidb_vector/test_tidb_vector.py b/api/tests/integration_tests/vdb/tidb_vector/test_tidb_vector.py
index 4c83c66bff..df0bb3f81a 100644
--- a/api/tests/integration_tests/vdb/tidb_vector/test_tidb_vector.py
+++ b/api/tests/integration_tests/vdb/tidb_vector/test_tidb_vector.py
@@ -1,5 +1,3 @@
-from unittest.mock import MagicMock, patch
-
 import pytest

 from core.rag.datasource.vdb.tidb_vector.tidb_vector import TiDBVector, TiDBVectorConfig
diff --git a/api/tests/unit_tests/core/prompt/test_advanced_prompt_transform.py b/api/tests/unit_tests/core/prompt/test_advanced_prompt_transform.py
index ee0f7672f8..f6d22690d1 100644
--- a/api/tests/unit_tests/core/prompt/test_advanced_prompt_transform.py
+++ b/api/tests/unit_tests/core/prompt/test_advanced_prompt_transform.py
@@ -4,7 +4,7 @@ import pytest

 from configs import dify_config
 from core.app.app_config.entities import ModelConfigEntity
-from core.file import File, FileTransferMethod, FileType, FileUploadConfig, ImageConfig
+from core.file import File, FileTransferMethod, FileType
 from core.memory.token_buffer_memory import TokenBufferMemory
 from core.model_runtime.entities.message_entities import (
     AssistantPromptMessage,
diff --git a/api/tests/unit_tests/core/workflow/nodes/answer/test_answer_stream_processor.py b/api/tests/unit_tests/core/workflow/nodes/answer/test_answer_stream_processor.py
index f6555cfdde..c3a3818655 100644
--- a/api/tests/unit_tests/core/workflow/nodes/answer/test_answer_stream_processor.py
+++ b/api/tests/unit_tests/core/workflow/nodes/answer/test_answer_stream_processor.py
@@ -1,6 +1,6 @@
 import uuid
 from collections.abc import Generator
-from datetime import UTC, datetime, timezone
+from datetime import UTC, datetime

 from core.workflow.entities.variable_pool import VariablePool
 from core.workflow.enums import SystemVariableKey
diff --git a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py
index 76db42ef10..7e979bcaa8 100644
--- a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py
+++ b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py
@@ -21,8 +21,7 @@ from core.model_runtime.entities.message_entities import (
 from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelFeature, ModelType
 from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory
 from core.prompt.entities.advanced_prompt_entities import MemoryConfig
-from core.variables import ArrayAnySegment, ArrayFileSegment, NoneSegment, StringSegment
-from core.workflow.entities.variable_entities import VariableSelector
+from core.variables import ArrayAnySegment, ArrayFileSegment, NoneSegment
 from core.workflow.entities.variable_pool import VariablePool
 from core.workflow.graph_engine import Graph, GraphInitParams, GraphRuntimeState
 from core.workflow.nodes.answer import AnswerStreamGenerateRoute
diff --git a/api/tests/unit_tests/core/workflow/nodes/test_retry.py b/api/tests/unit_tests/core/workflow/nodes/test_retry.py
index c232875ce5..4ac79d7acd 100644
--- a/api/tests/unit_tests/core/workflow/nodes/test_retry.py
+++ b/api/tests/unit_tests/core/workflow/nodes/test_retry.py
@@ -1,7 +1,6 @@
 from core.workflow.graph_engine.entities.event import (
     GraphRunFailedEvent,
     GraphRunPartialSucceededEvent,
-    GraphRunSucceededEvent,
     NodeRunRetryEvent,
 )
 from tests.unit_tests.core.workflow.nodes.test_continue_on_error import ContinueOnErrorTestHelper
diff --git a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_helpers.py b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_helpers.py
index 16c1370018..1501722b82 100644
--- a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_helpers.py
+++ b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_helpers.py
@@ -1,5 +1,3 @@
-import pytest
-
 from core.variables import SegmentType
 from core.workflow.nodes.variable_assigner.v2.enums import Operation
 from core.workflow.nodes.variable_assigner.v2.helpers import is_input_value_valid
diff --git a/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/test_aliyun_oss.py b/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/test_aliyun_oss.py
index 380134bc46..f87a385690 100644
--- a/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/test_aliyun_oss.py
+++ b/api/tests/unit_tests/oss/aliyun_oss/aliyun_oss/test_aliyun_oss.py
@@ -1,4 +1,4 @@
-from unittest.mock import MagicMock, patch
+from unittest.mock import patch

 import pytest
 from oss2 import Auth  # type: ignore
diff --git a/api/tests/unit_tests/utils/test_text_processing.py b/api/tests/unit_tests/utils/test_text_processing.py
index f9d00d0b39..8bfc97ae63 100644
--- a/api/tests/unit_tests/utils/test_text_processing.py
+++ b/api/tests/unit_tests/utils/test_text_processing.py
@@ -1,5 +1,3 @@
-from textwrap import dedent
-
 import pytest

 from core.tools.utils.text_processing_utils import remove_leading_symbols
diff --git a/docker/.env.example b/docker/.env.example
index 43e67a8db4..50ba856bd3 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -315,7 +315,7 @@ AZURE_BLOB_ACCOUNT_URL=https://<your-account-name>.blob.core.windows.net
 # Google Storage Configuration
 #
 GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
-GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=your-google-service-account-json-base64-string
+GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=

 # The Alibaba Cloud OSS configurations,
 #
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index e65ca45858..b82659d959 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -90,7 +90,7 @@ x-shared-env: &shared-api-worker-env
   AZURE_BLOB_CONTAINER_NAME: ${AZURE_BLOB_CONTAINER_NAME:-difyai-container}
   AZURE_BLOB_ACCOUNT_URL: ${AZURE_BLOB_ACCOUNT_URL:-https://<your-account-name>.blob.core.windows.net}
   GOOGLE_STORAGE_BUCKET_NAME: ${GOOGLE_STORAGE_BUCKET_NAME:-your-bucket-name}
-  GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: ${GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64:-your-google-service-account-json-base64-string}
+  GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: ${GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64:-}
   ALIYUN_OSS_BUCKET_NAME: ${ALIYUN_OSS_BUCKET_NAME:-your-bucket-name}
   ALIYUN_OSS_ACCESS_KEY: ${ALIYUN_OSS_ACCESS_KEY:-your-access-key}
   ALIYUN_OSS_SECRET_KEY: ${ALIYUN_OSS_SECRET_KEY:-your-secret-key}
@@ -374,7 +374,6 @@ x-shared-env: &shared-api-worker-env
   SSRF_COREDUMP_DIR: ${SSRF_COREDUMP_DIR:-/var/spool/squid}
   SSRF_REVERSE_PROXY_PORT: ${SSRF_REVERSE_PROXY_PORT:-8194}
   SSRF_SANDBOX_HOST: ${SSRF_SANDBOX_HOST:-sandbox}
-  COMPOSE_PROFILES: ${COMPOSE_PROFILES:-${VECTOR_STORE:-weaviate}}
   EXPOSE_NGINX_PORT: ${EXPOSE_NGINX_PORT:-80}
   EXPOSE_NGINX_SSL_PORT: ${EXPOSE_NGINX_SSL_PORT:-443}
   POSITION_TOOL_PINS: ${POSITION_TOOL_PINS:-}
diff --git a/docker/generate_docker_compose b/docker/generate_docker_compose
index dc4460f96c..b5c0acefb1 100755
--- a/docker/generate_docker_compose
+++ b/docker/generate_docker_compose
@@ -37,6 +37,8 @@ def generate_shared_env_block(env_vars, anchor_name="shared-api-worker-env"):
     """
     lines = [f"x-shared-env: &{anchor_name}"]
     for key, default in env_vars.items():
+        if key == "COMPOSE_PROFILES":
+            continue
         # If default value is empty, use ${KEY:-}
         if default == "":
             lines.append(f"  {key}: ${{{key}:-}}")
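Dropping COMPOSE_PROFILES from the generated shared-env anchor means docker compose now resolves profiles from the caller's environment or .env file rather than from the x-shared-env block. A runnable distillation of `generate_shared_env_block` after this change, using a trimmed input dict for illustration:

```python
def generate_shared_env_block(env_vars: dict[str, str], anchor_name: str = "shared-api-worker-env") -> str:
    """Mirror of the generator logic in the diff: skip COMPOSE_PROFILES, use ${KEY:-} for empty defaults."""
    lines = [f"x-shared-env: &{anchor_name}"]
    for key, default in env_vars.items():
        if key == "COMPOSE_PROFILES":
            continue  # let docker compose resolve profiles from the caller's environment
        if default == "":
            lines.append(f"  {key}: ${{{key}:-}}")
        else:
            lines.append(f"  {key}: ${{{key}:-{default}}}")
    return "\n".join(lines)


print(generate_shared_env_block({"COMPOSE_PROFILES": "weaviate", "GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64": ""}))
# x-shared-env: &shared-api-worker-env
#   GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: ${GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64:-}
```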
diff --git a/web/app/(commonLayout)/layout.tsx b/web/app/(commonLayout)/layout.tsx
index f0f7e0321d..af36d4d961 100644
--- a/web/app/(commonLayout)/layout.tsx
+++ b/web/app/(commonLayout)/layout.tsx
@@ -8,27 +8,24 @@ import Header from '@/app/components/header'
 import { EventEmitterContextProvider } from '@/context/event-emitter'
 import { ProviderContextProvider } from '@/context/provider-context'
 import { ModalContextProvider } from '@/context/modal-context'
-import { TanstackQueryIniter } from '@/context/query-client'

 const Layout = ({ children }: { children: ReactNode }) => {
   return (
     <>
[JSX hunk not recoverable: the component tags were stripped during extraction. Consistent with the import removal above, the TanstackQueryIniter wrapper is dropped from the provider tree around {children} and the remaining providers are re-indented.]
   )
diff --git a/web/app/account/account-page/index.tsx b/web/app/account/account-page/index.tsx
index c7af05793f..4435019561 100644
--- a/web/app/account/account-page/index.tsx
+++ b/web/app/account/account-page/index.tsx
@@ -3,11 +3,11 @@ import { useState } from 'react'

 import { useTranslation } from 'react-i18next'
 import { useContext } from 'use-context-selector'
+import DeleteAccount from '../delete-account'
 import s from './index.module.css'
 import Collapse from '@/app/components/header/account-setting/collapse'
 import type { IItem } from '@/app/components/header/account-setting/collapse'
 import Modal from '@/app/components/base/modal'
-import Confirm from '@/app/components/base/confirm'
 import Button from '@/app/components/base/button'
 import { updateUserProfile } from '@/service/common'
 import { useAppContext } from '@/context/app-context'
@@ -296,37 +296,9 @@ export default function AccountPage() {
       }
       {
         showDeleteAccountModal && (
[JSX hunk partially unrecoverable: component tags were stripped during extraction. The removed lines were a Confirm dialog with onCancel={() => setShowDeleteAccountModal(false)}, onConfirm={() => setShowDeleteAccountModal(false)}, showCancel={false}, type='warning', title={t('common.account.delete')}, content rendering {t('common.account.deleteTip')} and {`${t('common.account.delete')}: ${userProfile.email}`}, and confirmText={t('common.operation.ok') as string}; per the import change above, it is most likely replaced by the DeleteAccount component with the same handlers.]
         )
       }
diff --git a/web/app/account/delete-account/components/check-email.tsx b/web/app/account/delete-account/components/check-email.tsx
new file mode 100644
index 0000000000..84ea8a4c24
--- /dev/null
+++ b/web/app/account/delete-account/components/check-email.tsx
@@ -0,0 +1,48 @@
+'use client'
+import { useTranslation } from 'react-i18next'
+import { useCallback, useState } from 'react'
+import Link from 'next/link'
+import { useSendDeleteAccountEmail } from '../state'
+import { useAppContext } from '@/context/app-context'
+import Input from '@/app/components/base/input'
+import Button from '@/app/components/base/button'
+
+type DeleteAccountProps = {
+  onCancel: () => void
+  onConfirm: () => void
+}
+
+export default function CheckEmail(props: DeleteAccountProps) {
+  const { t } = useTranslation()
+  const { userProfile } = useAppContext()
+  const [userInputEmail, setUserInputEmail] = useState('')
+
+  const { isPending: isSendingEmail, mutateAsync: getDeleteEmailVerifyCode } = useSendDeleteAccountEmail()
+
+  const handleConfirm = useCallback(async () => {
+    try {
+      const ret = await getDeleteEmailVerifyCode()
+      if (ret.result === 'success')
+        props.onConfirm()
+    }
+    catch (error) { console.error(error) }
+  }, [getDeleteEmailVerifyCode, props])
+
+  return <>
[JSX markup unrecoverable: tags were stripped during extraction. The recovered text nodes render {t('common.account.deleteTip')}, then {t('common.account.deletePrivacyLinkTip')} with a Link labelled {t('common.account.deletePrivacyLink')}, an Input bound to userInputEmail via onChange={(e) => { setUserInputEmail(e.target.value) }}, and cancel/confirm Buttons wired to props.onCancel and handleConfirm while isSendingEmail is pending.]
+  </>
+}
diff --git a/web/app/account/delete-account/components/feed-back.tsx b/web/app/account/delete-account/components/feed-back.tsx
new file mode 100644
index 0000000000..1d01c69d94
--- /dev/null
+++ b/web/app/account/delete-account/components/feed-back.tsx
@@ -0,0 +1,68 @@
+'use client'
+import { useTranslation } from 'react-i18next'
+import { useCallback, useState } from 'react'
+import { useRouter } from 'next/navigation'
+import { useDeleteAccountFeedback } from '../state'
+import { useAppContext } from '@/context/app-context'
+import Button from '@/app/components/base/button'
+import CustomDialog from '@/app/components/base/dialog'
+import Textarea from '@/app/components/base/textarea'
+import Toast from '@/app/components/base/toast'
+import { logout } from '@/service/common'
+
+type DeleteAccountProps = {
+  onCancel: () => void
+  onConfirm: () => void
+}
+
+export default function FeedBack(props: DeleteAccountProps) {
+  const { t } = useTranslation()
+  const { userProfile } = useAppContext()
+  const router = useRouter()
+  const [userFeedback, setUserFeedback] = useState('')
+  const { isPending, mutateAsync: sendFeedback } = useDeleteAccountFeedback()
+
+  const handleSuccess = useCallback(async () => {
+    try {
+      await logout({
+        url: '/logout',
+        params: {},
+      })
+      localStorage.removeItem('refresh_token')
+      localStorage.removeItem('console_token')
+      router.push('/signin')
+      Toast.notify({ type: 'info', message: t('common.account.deleteSuccessTip') })
+    }
+    catch (error) { console.error(error) }
+  }, [router, t])
+
+  const handleSubmit = useCallback(async () => {
+    try {
+      await sendFeedback({ feedback: userFeedback, email: userProfile.email })
+      props.onConfirm()
+      await handleSuccess()
+    }
+    catch (error) { console.error(error) }
+  }, [handleSuccess, userFeedback, sendFeedback, userProfile, props])
+
+  const handleSkip = useCallback(() => {
+    props.onCancel()
+    handleSuccess()
+  }, [handleSuccess, props])
+
+  return