diff --git a/.devcontainer/post_create_command.sh b/.devcontainer/post_create_command.sh index 2fef313f72..546917c9a5 100755 --- a/.devcontainer/post_create_command.sh +++ b/.devcontainer/post_create_command.sh @@ -1,5 +1,6 @@ #!/bin/bash +npm add -g pnpm@10.15.0 corepack enable cd web && pnpm install pipx install uv diff --git a/.github/workflows/autofix.yml b/.github/workflows/autofix.yml index 068ba686fa..4b24da0e51 100644 --- a/.github/workflows/autofix.yml +++ b/.github/workflows/autofix.yml @@ -2,6 +2,8 @@ name: autofix.ci on: pull_request: branches: ["main"] + push: + branches: ["main"] permissions: contents: read diff --git a/.gitignore b/.gitignore index cbb7b4dac0..e386044aea 100644 --- a/.gitignore +++ b/.gitignore @@ -230,4 +230,6 @@ api/.env.backup # Benchmark scripts/stress-test/setup/config/ -scripts/stress-test/reports/ \ No newline at end of file +scripts/stress-test/reports/ +# mcp +.serena diff --git a/AGENTS.md b/AGENTS.md index 44f7b30360..cbd3a40878 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -85,3 +85,4 @@ pnpm test # Run Jest tests - All async tasks use Celery with Redis as broker - **Internationalization**: Frontend supports multiple languages with English (`web/i18n/en-US/`) as the source. All user-facing text must use i18n keys, no hardcoded strings. Edit corresponding module files in `en-US/` directory for translations. +- **Logging**: Never use `str(e)` in `logger.exception()` calls. Use `logger.exception("message", exc_info=e)` instead diff --git a/CLAUDE.md b/CLAUDE.md deleted file mode 120000 index 47dc3e3d86..0000000000 --- a/CLAUDE.md +++ /dev/null @@ -1 +0,0 @@ -AGENTS.md \ No newline at end of file diff --git a/api/.env.example b/api/.env.example index b89111a8e3..bf0e18fd74 100644 --- a/api/.env.example +++ b/api/.env.example @@ -436,6 +436,9 @@ HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760 HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576 HTTP_REQUEST_NODE_SSL_VERIFY=True +# Webhook request configuration +WEBHOOK_REQUEST_BODY_MAX_SIZE=10485760 + # Respect X-* headers to redirect clients RESPECT_XFORWARD_HEADERS_ENABLED=false @@ -514,6 +517,12 @@ ENABLE_CLEAN_MESSAGES=false ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false ENABLE_DATASETS_QUEUE_MONITOR=false ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true +ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK=true +# Interval time in minutes for polling scheduled workflows(default: 1 min) +WORKFLOW_SCHEDULE_POLLER_INTERVAL=1 +WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE=100 +# Maximum number of scheduled workflows to dispatch per tick (0 for unlimited) +WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK=0 # Position configuration POSITION_TOOL_PINS= diff --git a/api/.vscode/launch.json.example b/api/.vscode/launch.json.example index b9e32e2511..a52eca63d9 100644 --- a/api/.vscode/launch.json.example +++ b/api/.vscode/launch.json.example @@ -54,7 +54,7 @@ "--loglevel", "DEBUG", "-Q", - "dataset,generation,mail,ops_trace,app_deletion" + "dataset,generation,mail,ops_trace,app_deletion,workflow" ] } ] diff --git a/api/commands.py b/api/commands.py index 14e7222eb3..56d2432087 100644 --- a/api/commands.py +++ b/api/commands.py @@ -14,12 +14,12 @@ from sqlalchemy.exc import SQLAlchemyError from configs import dify_config from constants.languages import languages from core.helper import encrypter +from core.plugin.entities.plugin_daemon import CredentialType from core.plugin.impl.plugin import PluginInstaller from core.rag.datasource.vdb.vector_factory import Vector from core.rag.datasource.vdb.vector_type import VectorType from 
core.rag.index_processor.constant.built_in_field import BuiltInField from core.rag.models.document import Document -from core.tools.entities.tool_entities import CredentialType from core.tools.utils.system_oauth_encryption import encrypt_system_oauth_params from events.app_event import app_was_created from extensions.ext_database import db @@ -1227,6 +1227,55 @@ def setup_system_tool_oauth_client(provider, client_params): click.echo(click.style(f"OAuth client params setup successfully. id: {oauth_client.id}", fg="green")) +@click.command("setup-system-trigger-oauth-client", help="Setup system trigger oauth client.") +@click.option("--provider", prompt=True, help="Provider name") +@click.option("--client-params", prompt=True, help="Client Params") +def setup_system_trigger_oauth_client(provider, client_params): + """ + Setup system trigger oauth client + """ + from models.provider_ids import TriggerProviderID + from models.trigger import TriggerOAuthSystemClient + + provider_id = TriggerProviderID(provider) + provider_name = provider_id.provider_name + plugin_id = provider_id.plugin_id + + try: + # json validate + click.echo(click.style(f"Validating client params: {client_params}", fg="yellow")) + client_params_dict = TypeAdapter(dict[str, Any]).validate_json(client_params) + click.echo(click.style("Client params validated successfully.", fg="green")) + + click.echo(click.style(f"Encrypting client params: {client_params}", fg="yellow")) + click.echo(click.style(f"Using SECRET_KEY: `{dify_config.SECRET_KEY}`", fg="yellow")) + oauth_client_params = encrypt_system_oauth_params(client_params_dict) + click.echo(click.style("Client params encrypted successfully.", fg="green")) + except Exception as e: + click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red")) + return + + deleted_count = ( + db.session.query(TriggerOAuthSystemClient) + .filter_by( + provider=provider_name, + plugin_id=plugin_id, + ) + .delete() + ) + if deleted_count > 0: + click.echo(click.style(f"Deleted {deleted_count} existing oauth client params.", fg="yellow")) + + oauth_client = TriggerOAuthSystemClient( + provider=provider_name, + plugin_id=plugin_id, + encrypted_oauth_params=oauth_client_params, + ) + db.session.add(oauth_client) + db.session.commit() + click.echo(click.style(f"OAuth client params setup successfully. id: {oauth_client.id}", fg="green")) + + def _find_orphaned_draft_variables(batch_size: int = 1000) -> list[str]: """ Find draft variables that reference non-existent apps. 
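Note on the logging rule added to AGENTS.md above — a minimal before/after sketch of the convention (the module and function names here are illustrative, not part of this diff):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    def process(payload: dict) -> None:
        try:
            _ = payload["required_key"]  # raises KeyError when the key is missing
        except Exception as e:
            # Avoid: logger.exception(f"processing failed: {str(e)}") — the traceback already carries the details.
            # Preferred per AGENTS.md: keep the message static and pass the exception via exc_info.
            logger.exception("processing failed", exc_info=e)

    process({})  # logs "processing failed" with the full traceback attached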
diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index b17f30210c..ce2f469713 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -154,6 +154,17 @@ class CodeExecutionSandboxConfig(BaseSettings): ) +class TriggerConfig(BaseSettings): + """ + Configuration for trigger + """ + + WEBHOOK_REQUEST_BODY_MAX_SIZE: PositiveInt = Field( + description="Maximum allowed size for webhook request bodies in bytes", + default=10485760, + ) + + class PluginConfig(BaseSettings): """ Plugin configs @@ -950,6 +961,22 @@ class CeleryScheduleTasksConfig(BaseSettings): description="Enable check upgradable plugin task", default=True, ) + ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK: bool = Field( + description="Enable workflow schedule poller task", + default=True, + ) + WORKFLOW_SCHEDULE_POLLER_INTERVAL: int = Field( + description="Workflow schedule poller interval in minutes", + default=1, + ) + WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE: int = Field( + description="Maximum number of schedules to process in each poll batch", + default=100, + ) + WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK: int = Field( + description="Maximum schedules to dispatch per tick (0=unlimited, circuit breaker)", + default=0, + ) class PositionConfig(BaseSettings): @@ -1073,6 +1100,7 @@ class FeatureConfig( AuthConfig, # Changed from OAuthConfig to AuthConfig BillingConfig, CodeExecutionSandboxConfig, + TriggerConfig, PluginConfig, MarketplaceConfig, DataSetConfig, diff --git a/api/contexts/__init__.py b/api/contexts/__init__.py index 2126a06f75..07121a8b27 100644 --- a/api/contexts/__init__.py +++ b/api/contexts/__init__.py @@ -9,6 +9,8 @@ if TYPE_CHECKING: from core.model_runtime.entities.model_entities import AIModelEntity from core.plugin.entities.plugin_daemon import PluginModelProviderEntity from core.tools.plugin_tool.provider import PluginToolProviderController + from core.trigger.provider import PluginTriggerProviderController + from core.workflow.entities.variable_pool import VariablePool """ @@ -41,3 +43,11 @@ datasource_plugin_providers: RecyclableContextVar[dict[str, "DatasourcePluginPro datasource_plugin_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar( ContextVar("datasource_plugin_providers_lock") ) + +plugin_trigger_providers: RecyclableContextVar[dict[str, "PluginTriggerProviderController"]] = RecyclableContextVar( + ContextVar("plugin_trigger_providers") +) + +plugin_trigger_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar( + ContextVar("plugin_trigger_providers_lock") +) diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py index ee02ff3937..c5b981bae1 100644 --- a/api/controllers/console/__init__.py +++ b/api/controllers/console/__init__.py @@ -87,6 +87,7 @@ from .app import ( workflow_draft_variable, workflow_run, workflow_statistic, + workflow_trigger, ) # Import auth controllers @@ -223,6 +224,21 @@ api.add_resource( api.add_namespace(console_ns) +# Import workspace controllers +from .workspace import ( + account, + agent_providers, + endpoint, + load_balancing_config, + members, + model_providers, + models, + plugin, + tool_providers, + trigger_providers, + workspace, +) + __all__ = [ "account", "activate", @@ -288,6 +304,7 @@ __all__ = [ "statistic", "tags", "tool_providers", + "trigger_providers", "version", "website", "workflow", diff --git a/api/controllers/console/app/generator.py b/api/controllers/console/app/generator.py index 230ccdca15..26fd86050d 100644 --- 
a/api/controllers/console/app/generator.py +++ b/api/controllers/console/app/generator.py @@ -12,6 +12,7 @@ from controllers.console.app.error import ( ) from controllers.console.wraps import account_initialization_required, setup_required from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError +from core.helper.code_executor.code_node_provider import CodeNodeProvider from core.helper.code_executor.javascript.javascript_code_provider import JavascriptCodeProvider from core.helper.code_executor.python3.python3_code_provider import Python3CodeProvider from core.llm_generator.llm_generator import LLMGenerator @@ -198,13 +199,11 @@ class InstructionGenerateApi(Resource): parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") parser.add_argument("ideal_output", type=str, required=False, default="", location="json") args = parser.parse_args() - code_template = ( - Python3CodeProvider.get_default_code() - if args["language"] == "python" - else (JavascriptCodeProvider.get_default_code()) - if args["language"] == "javascript" - else "" + providers: list[type[CodeNodeProvider]] = [Python3CodeProvider, JavascriptCodeProvider] + code_provider: type[CodeNodeProvider] | None = next( + (p for p in providers if p.is_accept_language(args["language"])), None ) + code_template = code_provider.get_default_code() if code_provider else "" try: # Generate from nothing for a workflow node if (args["current"] == code_template or args["current"] == "") and args["node_id"] != "": diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index e70765546c..34408ba3c9 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -20,6 +20,7 @@ from core.app.apps.base_app_queue_manager import AppQueueManager from core.app.entities.app_invoke_entities import InvokeFrom from core.file.models import File from core.helper.trace_id_helper import get_external_trace_id +from core.model_runtime.utils.encoders import jsonable_encoder from core.workflow.graph_engine.manager import GraphEngineManager from extensions.ext_database import db from factories import file_factory, variable_factory @@ -35,6 +36,7 @@ from models.workflow import Workflow from services.app_generate_service import AppGenerateService from services.errors.app import WorkflowHashNotEqualError from services.errors.llm import InvokeRateLimitError +from services.trigger_debug_service import TriggerDebugService from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError, WorkflowService logger = logging.getLogger(__name__) @@ -1004,3 +1006,165 @@ class DraftWorkflowNodeLastRunApi(Resource): if node_exec is None: raise NotFound("last run not found") return node_exec + + +class DraftWorkflowTriggerNodeApi(Resource): + """ + Single node debug - Polling API for trigger events + Path: /apps//workflows/draft/nodes//trigger + """ + + @api.doc("poll_draft_workflow_trigger_node") + @api.doc(description="Poll for trigger events and execute single node when event arrives") + @api.doc(params={ + "app_id": "Application ID", + "node_id": "Node ID" + }) + @api.expect( + api.model( + "DraftWorkflowTriggerNodeRequest", + { + "trigger_name": fields.String(required=True, description="Trigger name"), + "subscription_id": fields.String(required=True, description="Subscription ID"), + } + ) + ) + @api.response(200, "Trigger event received and node executed successfully") + @api.response(403, 
"Permission denied") + @api.response(500, "Internal server error") + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=[AppMode.WORKFLOW]) + def post(self, app_model: App, node_id: str): + """ + Poll for trigger events and execute single node when event arrives + """ + if not isinstance(current_user, Account) or not current_user.is_editor: + raise Forbidden() + + parser = reqparse.RequestParser() + parser.add_argument("trigger_name", type=str, required=True, location="json", nullable=False) + parser.add_argument("subscription_id", type=str, required=True, location="json", nullable=False) + args = parser.parse_args() + trigger_name = args["trigger_name"] + subscription_id = args["subscription_id"] + + event = TriggerDebugService.poll_event( + tenant_id=app_model.tenant_id, + user_id=current_user.id, + app_id=app_model.id, + subscription_id=subscription_id, + node_id=node_id, + trigger_name=trigger_name, + ) + if not event: + return jsonable_encoder({"status": "waiting"}) + + try: + workflow_service = WorkflowService() + draft_workflow = workflow_service.get_draft_workflow(app_model) + if not draft_workflow: + raise ValueError("Workflow not found") + + user_inputs = event.model_dump() + node_execution = workflow_service.run_draft_workflow_node( + app_model=app_model, + draft_workflow=draft_workflow, + node_id=node_id, + user_inputs=user_inputs, + account=current_user, + query="", + files=[], + ) + return jsonable_encoder(node_execution) + except Exception: + logger.exception("Error running draft workflow trigger node") + return jsonable_encoder( + { + "status": "error", + } + ), 500 + + +class DraftWorkflowTriggerRunApi(Resource): + """ + Full workflow debug - Polling API for trigger events + Path: /apps//workflows/draft/trigger/run + """ + + @api.doc("poll_draft_workflow_trigger_run") + @api.doc(description="Poll for trigger events and execute full workflow when event arrives") + @api.doc(params={"app_id": "Application ID"}) + @api.expect( + api.model( + "DraftWorkflowTriggerRunRequest", + { + "node_id": fields.String(required=True, description="Node ID"), + "trigger_name": fields.String(required=True, description="Trigger name"), + "subscription_id": fields.String(required=True, description="Subscription ID"), + } + ) + ) + @api.response(200, "Trigger event received and workflow executed successfully") + @api.response(403, "Permission denied") + @api.response(500, "Internal server error") + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=[AppMode.WORKFLOW]) + def post(self, app_model: App): + """ + Poll for trigger events and execute full workflow when event arrives + """ + if not isinstance(current_user, Account) or not current_user.is_editor: + raise Forbidden() + + parser = reqparse.RequestParser() + parser.add_argument("node_id", type=str, required=True, location="json", nullable=False) + parser.add_argument("trigger_name", type=str, required=True, location="json", nullable=False) + parser.add_argument("subscription_id", type=str, required=True, location="json", nullable=False) + args = parser.parse_args() + node_id = args["node_id"] + trigger_name = args["trigger_name"] + subscription_id = args["subscription_id"] + + event = TriggerDebugService.poll_event( + tenant_id=app_model.tenant_id, + user_id=current_user.id, + app_id=app_model.id, + subscription_id=subscription_id, + node_id=node_id, + trigger_name=trigger_name, + ) + if not event: + return jsonable_encoder({"status": "waiting"}) + + 
workflow_args = { + "inputs": event.model_dump(), + "query": "", + "files": [], + } + external_trace_id = get_external_trace_id(request) + if external_trace_id: + workflow_args["external_trace_id"] = external_trace_id + + try: + response = AppGenerateService.generate( + app_model=app_model, + user=current_user, + args=workflow_args, + invoke_from=InvokeFrom.DEBUGGER, + streaming=True, + ) + return helper.compact_generate_response(response) + except InvokeRateLimitError as ex: + raise InvokeRateLimitHttpError(ex.description) + except Exception: + logger.exception("Error running draft workflow trigger run") + return jsonable_encoder( + { + "status": "error", + } + ), 500 + diff --git a/api/controllers/console/app/workflow_trigger.py b/api/controllers/console/app/workflow_trigger.py new file mode 100644 index 0000000000..abfe882b67 --- /dev/null +++ b/api/controllers/console/app/workflow_trigger.py @@ -0,0 +1,249 @@ +import logging + +from flask_restx import Resource, marshal_with, reqparse +from sqlalchemy import select +from sqlalchemy.orm import Session +from werkzeug.exceptions import Forbidden, NotFound + +from configs import dify_config +from controllers.console import api +from controllers.console.app.wraps import get_app_model +from controllers.console.wraps import account_initialization_required, setup_required +from core.model_runtime.utils.encoders import jsonable_encoder +from extensions.ext_database import db +from fields.workflow_trigger_fields import trigger_fields, triggers_list_fields, webhook_trigger_fields +from libs.login import current_user, login_required +from models.model import Account, AppMode +from models.workflow import AppTrigger, AppTriggerStatus, WorkflowWebhookTrigger + +logger = logging.getLogger(__name__) + +from services.workflow_plugin_trigger_service import WorkflowPluginTriggerService + + +class PluginTriggerApi(Resource): + """Workflow Plugin Trigger API""" + + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=AppMode.WORKFLOW) + def post(self, app_model): + """Create plugin trigger""" + parser = reqparse.RequestParser() + parser.add_argument("node_id", type=str, required=False, location="json") + parser.add_argument("provider_id", type=str, required=False, location="json") + parser.add_argument("trigger_name", type=str, required=False, location="json") + parser.add_argument("subscription_id", type=str, required=False, location="json") + args = parser.parse_args() + + assert isinstance(current_user, Account) + assert current_user.current_tenant_id is not None + if not current_user.is_editor: + raise Forbidden() + + plugin_trigger = WorkflowPluginTriggerService.create_plugin_trigger( + app_id=app_model.id, + tenant_id=current_user.current_tenant_id, + node_id=args["node_id"], + provider_id=args["provider_id"], + trigger_name=args["trigger_name"], + subscription_id=args["subscription_id"], + ) + + return jsonable_encoder(plugin_trigger) + + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=AppMode.WORKFLOW) + def get(self, app_model): + """Get plugin trigger""" + parser = reqparse.RequestParser() + parser.add_argument("node_id", type=str, required=True, help="Node ID is required") + args = parser.parse_args() + + plugin_trigger = WorkflowPluginTriggerService.get_plugin_trigger( + app_id=app_model.id, + node_id=args["node_id"], + ) + + return jsonable_encoder(plugin_trigger) + + @setup_required + @login_required + @account_initialization_required + 
@get_app_model(mode=AppMode.WORKFLOW) + def put(self, app_model): + """Update plugin trigger""" + parser = reqparse.RequestParser() + parser.add_argument("node_id", type=str, required=True, help="Node ID is required") + parser.add_argument("subscription_id", type=str, required=True, location="json", help="Subscription ID") + args = parser.parse_args() + + assert isinstance(current_user, Account) + assert current_user.current_tenant_id is not None + if not current_user.is_editor: + raise Forbidden() + + plugin_trigger = WorkflowPluginTriggerService.update_plugin_trigger( + app_id=app_model.id, + node_id=args["node_id"], + subscription_id=args["subscription_id"], + ) + + return jsonable_encoder(plugin_trigger) + + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=AppMode.WORKFLOW) + def delete(self, app_model): + """Delete plugin trigger""" + parser = reqparse.RequestParser() + parser.add_argument("node_id", type=str, required=True, help="Node ID is required") + args = parser.parse_args() + + assert isinstance(current_user, Account) + assert current_user.current_tenant_id is not None + if not current_user.is_editor: + raise Forbidden() + + WorkflowPluginTriggerService.delete_plugin_trigger( + app_id=app_model.id, + node_id=args["node_id"], + ) + + return {"result": "success"}, 204 + + +class WebhookTriggerApi(Resource): + """Webhook Trigger API""" + + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=AppMode.WORKFLOW) + @marshal_with(webhook_trigger_fields) + def get(self, app_model): + """Get webhook trigger for a node""" + parser = reqparse.RequestParser() + parser.add_argument("node_id", type=str, required=True, help="Node ID is required") + args = parser.parse_args() + + node_id = args["node_id"] + + with Session(db.engine) as session: + # Get webhook trigger for this app and node + webhook_trigger = ( + session.query(WorkflowWebhookTrigger) + .filter( + WorkflowWebhookTrigger.app_id == app_model.id, + WorkflowWebhookTrigger.node_id == node_id, + ) + .first() + ) + + if not webhook_trigger: + raise NotFound("Webhook trigger not found for this node") + + # Add computed fields for marshal_with + base_url = dify_config.SERVICE_API_URL + webhook_trigger.webhook_url = f"{base_url}/triggers/webhook/{webhook_trigger.webhook_id}" # type: ignore + webhook_trigger.webhook_debug_url = f"{base_url}/triggers/webhook-debug/{webhook_trigger.webhook_id}" # type: ignore + + return webhook_trigger + + +class AppTriggersApi(Resource): + """App Triggers list API""" + + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=AppMode.WORKFLOW) + @marshal_with(triggers_list_fields) + def get(self, app_model): + """Get app triggers list""" + assert isinstance(current_user, Account) + assert current_user.current_tenant_id is not None + + with Session(db.engine) as session: + # Get all triggers for this app using select API + triggers = ( + session.execute( + select(AppTrigger) + .where( + AppTrigger.tenant_id == current_user.current_tenant_id, + AppTrigger.app_id == app_model.id, + ) + .order_by(AppTrigger.created_at.desc(), AppTrigger.id.desc()) + ) + .scalars() + .all() + ) + + # Add computed icon field for each trigger + url_prefix = dify_config.CONSOLE_API_URL + "/console/api/workspaces/current/tool-provider/builtin/" + for trigger in triggers: + if trigger.trigger_type == "trigger-plugin": + trigger.icon = url_prefix + trigger.provider_name + "/icon" # type: ignore + else: + trigger.icon = "" 
# type: ignore + + return {"data": triggers} + + +class AppTriggerEnableApi(Resource): + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=AppMode.WORKFLOW) + @marshal_with(trigger_fields) + def post(self, app_model): + """Update app trigger (enable/disable)""" + parser = reqparse.RequestParser() + parser.add_argument("trigger_id", type=str, required=True, nullable=False, location="json") + parser.add_argument("enable_trigger", type=bool, required=True, nullable=False, location="json") + args = parser.parse_args() + + assert isinstance(current_user, Account) + assert current_user.current_tenant_id is not None + if not current_user.is_editor: + raise Forbidden() + + trigger_id = args["trigger_id"] + + with Session(db.engine) as session: + # Find the trigger using select + trigger = session.execute( + select(AppTrigger).where( + AppTrigger.id == trigger_id, + AppTrigger.tenant_id == current_user.current_tenant_id, + AppTrigger.app_id == app_model.id, + ) + ).scalar_one_or_none() + + if not trigger: + raise NotFound("Trigger not found") + + # Update status based on enable_trigger boolean + trigger.status = AppTriggerStatus.ENABLED if args["enable_trigger"] else AppTriggerStatus.DISABLED + + session.commit() + session.refresh(trigger) + + # Add computed icon field + url_prefix = dify_config.CONSOLE_API_URL + "/console/api/workspaces/current/tool-provider/builtin/" + if trigger.trigger_type == "trigger-plugin": + trigger.icon = url_prefix + trigger.provider_name + "/icon" # type: ignore + else: + trigger.icon = "" # type: ignore + + return trigger + + +api.add_resource(WebhookTriggerApi, "/apps//workflows/triggers/webhook") +api.add_resource(PluginTriggerApi, "/apps//workflows/triggers/plugin") +api.add_resource(AppTriggersApi, "/apps//triggers") +api.add_resource(AppTriggerEnableApi, "/apps//trigger-enable") diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py index fd5421fa64..b51af69af9 100644 --- a/api/controllers/console/workspace/plugin.py +++ b/api/controllers/console/workspace/plugin.py @@ -516,18 +516,20 @@ class PluginFetchDynamicSelectOptionsApi(Resource): parser.add_argument("provider", type=str, required=True, location="args") parser.add_argument("action", type=str, required=True, location="args") parser.add_argument("parameter", type=str, required=True, location="args") + parser.add_argument("credential_id", type=str, required=False, location="args") parser.add_argument("provider_type", type=str, required=True, location="args") args = parser.parse_args() try: options = PluginParameterService.get_dynamic_select_options( - tenant_id, - user_id, - args["plugin_id"], - args["provider"], - args["action"], - args["parameter"], - args["provider_type"], + tenant_id=tenant_id, + user_id=user_id, + plugin_id=args["plugin_id"], + provider=args["provider"], + action=args["action"], + parameter=args["parameter"], + credential_id=args["credential_id"], + provider_type=args["provider_type"], ) except PluginDaemonClientSideError as e: raise ValueError(e) diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index 8693d99e23..6654867871 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -21,8 +21,8 @@ from core.mcp.auth.auth_provider import OAuthClientProvider from core.mcp.error import MCPAuthError, MCPError from core.mcp.mcp_client import MCPClient from 
core.model_runtime.utils.encoders import jsonable_encoder +from core.plugin.entities.plugin_daemon import CredentialType from core.plugin.impl.oauth import OAuthHandler -from core.tools.entities.tool_entities import CredentialType from libs.helper import StrLen, alphanumeric, uuid_value from libs.login import login_required from models.provider_ids import ToolProviderID diff --git a/api/controllers/console/workspace/trigger_providers.py b/api/controllers/console/workspace/trigger_providers.py new file mode 100644 index 0000000000..0566adccb8 --- /dev/null +++ b/api/controllers/console/workspace/trigger_providers.py @@ -0,0 +1,589 @@ +import logging + +from flask import make_response, redirect, request +from flask_restx import Resource, reqparse +from sqlalchemy.orm import Session +from werkzeug.exceptions import BadRequest, Forbidden + +from configs import dify_config +from controllers.console import api +from controllers.console.wraps import account_initialization_required, setup_required +from core.model_runtime.utils.encoders import jsonable_encoder +from core.plugin.entities.plugin_daemon import CredentialType +from core.plugin.impl.oauth import OAuthHandler +from core.trigger.entities.entities import SubscriptionBuilderUpdater +from core.trigger.trigger_manager import TriggerManager +from extensions.ext_database import db +from libs.login import current_user, login_required +from models.account import Account +from models.provider_ids import TriggerProviderID +from services.plugin.oauth_service import OAuthProxyService +from services.trigger.trigger_provider_service import TriggerProviderService +from services.trigger.trigger_subscription_builder_service import TriggerSubscriptionBuilderService +from services.workflow_plugin_trigger_service import WorkflowPluginTriggerService + +logger = logging.getLogger(__name__) + + +class TriggerProviderListApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self): + """List all trigger providers for the current tenant""" + user = current_user + assert isinstance(user, Account) + assert user.current_tenant_id is not None + return jsonable_encoder(TriggerProviderService.list_trigger_providers(user.current_tenant_id)) + + +class TriggerProviderInfoApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self, provider): + """Get info for a trigger provider""" + user = current_user + assert isinstance(user, Account) + assert user.current_tenant_id is not None + return jsonable_encoder( + TriggerProviderService.get_trigger_provider(user.current_tenant_id, TriggerProviderID(provider)) + ) + + +class TriggerSubscriptionListApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self, provider): + """List all trigger subscriptions for the current tenant's provider""" + user = current_user + assert isinstance(user, Account) + assert user.current_tenant_id is not None + if not user.is_admin_or_owner: + raise Forbidden() + + try: + return jsonable_encoder( + TriggerProviderService.list_trigger_provider_subscriptions( + tenant_id=user.current_tenant_id, provider_id=TriggerProviderID(provider) + ) + ) + except Exception as e: + logger.exception("Error listing trigger providers", exc_info=e) + raise + + +class TriggerSubscriptionBuilderCreateApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self, provider): + """Add a new subscription instance for a trigger provider""" + user = current_user + 
assert isinstance(user, Account) + assert user.current_tenant_id is not None + if not user.is_admin_or_owner: + raise Forbidden() + + parser = reqparse.RequestParser() + parser.add_argument("credential_type", type=str, required=False, nullable=True, location="json") + args = parser.parse_args() + + try: + credential_type = CredentialType.of(args.get("credential_type") or CredentialType.UNAUTHORIZED.value) + subscription_builder = TriggerSubscriptionBuilderService.create_trigger_subscription_builder( + tenant_id=user.current_tenant_id, + user_id=user.id, + provider_id=TriggerProviderID(provider), + credential_type=credential_type, + ) + return jsonable_encoder({"subscription_builder": subscription_builder}) + except ValueError as e: + raise BadRequest(str(e)) + except Exception as e: + logger.exception("Error adding provider credential", exc_info=e) + raise + + +class TriggerSubscriptionBuilderGetApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self, provider, subscription_builder_id): + """Get a subscription instance for a trigger provider""" + return jsonable_encoder( + TriggerSubscriptionBuilderService.get_subscription_builder_by_id(subscription_builder_id) + ) + + +class TriggerSubscriptionBuilderVerifyApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self, provider, subscription_builder_id): + """Verify a subscription instance for a trigger provider""" + user = current_user + assert isinstance(user, Account) + assert user.current_tenant_id is not None + if not user.is_admin_or_owner: + raise Forbidden() + + parser = reqparse.RequestParser() + # The credentials of the subscription builder + parser.add_argument("credentials", type=dict, required=False, nullable=True, location="json") + args = parser.parse_args() + + try: + TriggerSubscriptionBuilderService.update_trigger_subscription_builder( + tenant_id=user.current_tenant_id, + provider_id=TriggerProviderID(provider), + subscription_builder_id=subscription_builder_id, + subscription_builder_updater=SubscriptionBuilderUpdater( + credentials=args.get("credentials", None), + ), + ) + return TriggerSubscriptionBuilderService.verify_trigger_subscription_builder( + tenant_id=user.current_tenant_id, + user_id=user.id, + provider_id=TriggerProviderID(provider), + subscription_builder_id=subscription_builder_id, + ) + except Exception as e: + logger.exception("Error verifying provider credential", exc_info=e) + raise + + +class TriggerSubscriptionBuilderUpdateApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self, provider, subscription_builder_id): + """Update a subscription instance for a trigger provider""" + user = current_user + assert isinstance(user, Account) + assert user.current_tenant_id is not None + + parser = reqparse.RequestParser() + # The name of the subscription builder + parser.add_argument("name", type=str, required=False, nullable=True, location="json") + # The parameters of the subscription builder + parser.add_argument("parameters", type=dict, required=False, nullable=True, location="json") + # The properties of the subscription builder + parser.add_argument("properties", type=dict, required=False, nullable=True, location="json") + # The credentials of the subscription builder + parser.add_argument("credentials", type=dict, required=False, nullable=True, location="json") + args = parser.parse_args() + try: + return jsonable_encoder( + 
TriggerSubscriptionBuilderService.update_trigger_subscription_builder( + tenant_id=user.current_tenant_id, + provider_id=TriggerProviderID(provider), + subscription_builder_id=subscription_builder_id, + subscription_builder_updater=SubscriptionBuilderUpdater( + name=args.get("name", None), + parameters=args.get("parameters", None), + properties=args.get("properties", None), + credentials=args.get("credentials", None), + ), + ) + ) + except Exception as e: + logger.exception("Error updating provider credential", exc_info=e) + raise + + +class TriggerSubscriptionBuilderLogsApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self, provider, subscription_builder_id): + """Get the request logs for a subscription instance for a trigger provider""" + user = current_user + assert isinstance(user, Account) + assert user.current_tenant_id is not None + + try: + logs = TriggerSubscriptionBuilderService.list_logs(subscription_builder_id) + return jsonable_encoder({"logs": [log.model_dump(mode="json") for log in logs]}) + except Exception as e: + logger.exception("Error getting request logs for subscription builder", exc_info=e) + raise + + +class TriggerSubscriptionBuilderBuildApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self, provider, subscription_builder_id): + """Build a subscription instance for a trigger provider""" + user = current_user + assert isinstance(user, Account) + assert user.current_tenant_id is not None + if not user.is_admin_or_owner: + raise Forbidden() + + parser = reqparse.RequestParser() + # The name of the subscription builder + parser.add_argument("name", type=str, required=False, nullable=True, location="json") + # The parameters of the subscription builder + parser.add_argument("parameters", type=dict, required=False, nullable=True, location="json") + # The properties of the subscription builder + parser.add_argument("properties", type=dict, required=False, nullable=True, location="json") + # The credentials of the subscription builder + parser.add_argument("credentials", type=dict, required=False, nullable=True, location="json") + args = parser.parse_args() + try: + TriggerSubscriptionBuilderService.update_trigger_subscription_builder( + tenant_id=user.current_tenant_id, + provider_id=TriggerProviderID(provider), + subscription_builder_id=subscription_builder_id, + subscription_builder_updater=SubscriptionBuilderUpdater( + name=args.get("name", None), + parameters=args.get("parameters", None), + properties=args.get("properties", None), + ), + ) + TriggerSubscriptionBuilderService.build_trigger_subscription_builder( + tenant_id=user.current_tenant_id, + user_id=user.id, + provider_id=TriggerProviderID(provider), + subscription_builder_id=subscription_builder_id, + ) + return 200 + except ValueError as e: + raise BadRequest(str(e)) + except Exception as e: + logger.exception("Error building provider credential", exc_info=e) + raise + + +class TriggerSubscriptionDeleteApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self, subscription_id): + """Delete a subscription instance""" + user = current_user + assert isinstance(user, Account) + assert user.current_tenant_id is not None + if not user.is_admin_or_owner: + raise Forbidden() + + try: + with Session(db.engine) as session: + # Delete trigger provider subscription + TriggerProviderService.delete_trigger_provider( + session=session, + tenant_id=user.current_tenant_id, + 
subscription_id=subscription_id, + ) + # Delete plugin triggers + WorkflowPluginTriggerService.delete_plugin_trigger_by_subscription( + session=session, + tenant_id=user.current_tenant_id, + subscription_id=subscription_id, + ) + session.commit() + return {"result": "success"} + except ValueError as e: + raise BadRequest(str(e)) + except Exception as e: + logger.exception("Error deleting provider credential", exc_info=e) + raise + + +class TriggerOAuthAuthorizeApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self, provider): + """Initiate OAuth authorization flow for a trigger provider""" + user = current_user + assert isinstance(user, Account) + assert user.current_tenant_id is not None + + try: + provider_id = TriggerProviderID(provider) + plugin_id = provider_id.plugin_id + provider_name = provider_id.provider_name + tenant_id = user.current_tenant_id + + # Get OAuth client configuration + oauth_client_params = TriggerProviderService.get_oauth_client( + tenant_id=tenant_id, + provider_id=provider_id, + ) + + if oauth_client_params is None: + raise Forbidden("No OAuth client configuration found for this trigger provider") + + # Create subscription builder + subscription_builder = TriggerSubscriptionBuilderService.create_trigger_subscription_builder( + tenant_id=tenant_id, + user_id=user.id, + provider_id=provider_id, + credential_type=CredentialType.OAUTH2, + ) + + # Create OAuth handler and proxy context + oauth_handler = OAuthHandler() + context_id = OAuthProxyService.create_proxy_context( + user_id=user.id, + tenant_id=tenant_id, + plugin_id=plugin_id, + provider=provider_name, + extra_data={ + "subscription_builder_id": subscription_builder.id, + }, + ) + + # Build redirect URI for callback + redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider}/trigger/callback" + + # Get authorization URL + authorization_url_response = oauth_handler.get_authorization_url( + tenant_id=tenant_id, + user_id=user.id, + plugin_id=plugin_id, + provider=provider_name, + redirect_uri=redirect_uri, + system_credentials=oauth_client_params, + ) + + # Create response with cookie + response = make_response( + jsonable_encoder( + { + "authorization_url": authorization_url_response.authorization_url, + "subscription_builder_id": subscription_builder.id, + "subscription_builder": subscription_builder, + } + ) + ) + response.set_cookie( + "context_id", + context_id, + httponly=True, + samesite="Lax", + max_age=OAuthProxyService.__MAX_AGE__, + ) + + return response + + except Exception as e: + logger.exception("Error initiating OAuth flow", exc_info=e) + raise + + +class TriggerOAuthCallbackApi(Resource): + @setup_required + def get(self, provider): + """Handle OAuth callback for trigger provider""" + context_id = request.cookies.get("context_id") + if not context_id: + raise Forbidden("context_id not found") + + # Use and validate proxy context + context = OAuthProxyService.use_proxy_context(context_id) + if context is None: + raise Forbidden("Invalid context_id") + + # Parse provider ID + provider_id = TriggerProviderID(provider) + plugin_id = provider_id.plugin_id + provider_name = provider_id.provider_name + user_id = context.get("user_id") + tenant_id = context.get("tenant_id") + subscription_builder_id = context.get("subscription_builder_id") + + # Get OAuth client configuration + oauth_client_params = TriggerProviderService.get_oauth_client( + tenant_id=tenant_id, + provider_id=provider_id, + ) + + if oauth_client_params is None: + 
raise Forbidden("No OAuth client configuration found for this trigger provider") + + # Get OAuth credentials from callback + oauth_handler = OAuthHandler() + redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider}/trigger/callback" + + credentials_response = oauth_handler.get_credentials( + tenant_id=tenant_id, + user_id=user_id, + plugin_id=plugin_id, + provider=provider_name, + redirect_uri=redirect_uri, + system_credentials=oauth_client_params, + request=request, + ) + + credentials = credentials_response.credentials + expires_at = credentials_response.expires_at + + if not credentials: + raise Exception("Failed to get OAuth credentials") + + # Update subscription builder + TriggerSubscriptionBuilderService.update_trigger_subscription_builder( + tenant_id=tenant_id, + provider_id=provider_id, + subscription_builder_id=subscription_builder_id, + subscription_builder_updater=SubscriptionBuilderUpdater( + credentials=credentials, + credential_expires_at=expires_at, + ), + ) + # Redirect to OAuth callback page + return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback") + + +class TriggerOAuthClientManageApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self, provider): + """Get OAuth client configuration for a provider""" + user = current_user + assert isinstance(user, Account) + assert user.current_tenant_id is not None + if not user.is_admin_or_owner: + raise Forbidden() + + try: + provider_id = TriggerProviderID(provider) + + # Get custom OAuth client params if exists + custom_params = TriggerProviderService.get_custom_oauth_client_params( + tenant_id=user.current_tenant_id, + provider_id=provider_id, + ) + + # Check if custom client is enabled + is_custom_enabled = TriggerProviderService.is_oauth_custom_client_enabled( + tenant_id=user.current_tenant_id, + provider_id=provider_id, + ) + + # Check if there's a system OAuth client + system_client = TriggerProviderService.get_oauth_client( + tenant_id=user.current_tenant_id, + provider_id=provider_id, + ) + provider_controller = TriggerManager.get_trigger_provider(user.current_tenant_id, provider_id) + redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider}/trigger/callback" + return jsonable_encoder( + { + "configured": bool(custom_params or system_client), + "oauth_client_schema": provider_controller.get_oauth_client_schema(), + "custom_configured": bool(custom_params), + "custom_enabled": is_custom_enabled, + "redirect_uri": redirect_uri, + "params": custom_params or {}, + } + ) + + except Exception as e: + logger.exception("Error getting OAuth client", exc_info=e) + raise + + @setup_required + @login_required + @account_initialization_required + def post(self, provider): + """Configure custom OAuth client for a provider""" + user = current_user + assert isinstance(user, Account) + assert user.current_tenant_id is not None + if not user.is_admin_or_owner: + raise Forbidden() + + parser = reqparse.RequestParser() + parser.add_argument("client_params", type=dict, required=False, nullable=True, location="json") + parser.add_argument("enabled", type=bool, required=False, nullable=True, location="json") + args = parser.parse_args() + + try: + provider_id = TriggerProviderID(provider) + return TriggerProviderService.save_custom_oauth_client_params( + tenant_id=user.current_tenant_id, + provider_id=provider_id, + client_params=args.get("client_params"), + enabled=args.get("enabled"), + ) + + except ValueError as e: + raise 
BadRequest(str(e)) + except Exception as e: + logger.exception("Error configuring OAuth client", exc_info=e) + raise + + @setup_required + @login_required + @account_initialization_required + def delete(self, provider): + """Remove custom OAuth client configuration""" + user = current_user + assert isinstance(user, Account) + assert user.current_tenant_id is not None + if not user.is_admin_or_owner: + raise Forbidden() + + try: + provider_id = TriggerProviderID(provider) + + return TriggerProviderService.delete_custom_oauth_client_params( + tenant_id=user.current_tenant_id, + provider_id=provider_id, + ) + except ValueError as e: + raise BadRequest(str(e)) + except Exception as e: + logger.exception("Error removing OAuth client", exc_info=e) + raise + + +# Trigger Subscription +api.add_resource(TriggerProviderListApi, "/workspaces/current/triggers") +api.add_resource(TriggerProviderInfoApi, "/workspaces/current/trigger-provider//info") +api.add_resource(TriggerSubscriptionListApi, "/workspaces/current/trigger-provider//subscriptions/list") +api.add_resource( + TriggerSubscriptionDeleteApi, + "/workspaces/current/trigger-provider//subscriptions/delete", +) + +# Trigger Subscription Builder +api.add_resource( + TriggerSubscriptionBuilderCreateApi, + "/workspaces/current/trigger-provider//subscriptions/builder/create", +) +api.add_resource( + TriggerSubscriptionBuilderGetApi, + "/workspaces/current/trigger-provider//subscriptions/builder/", +) +api.add_resource( + TriggerSubscriptionBuilderUpdateApi, + "/workspaces/current/trigger-provider//subscriptions/builder/update/", +) +api.add_resource( + TriggerSubscriptionBuilderVerifyApi, + "/workspaces/current/trigger-provider//subscriptions/builder/verify/", +) +api.add_resource( + TriggerSubscriptionBuilderBuildApi, + "/workspaces/current/trigger-provider//subscriptions/builder/build/", +) +api.add_resource( + TriggerSubscriptionBuilderLogsApi, + "/workspaces/current/trigger-provider//subscriptions/builder/logs/", +) + + +# OAuth +api.add_resource( + TriggerOAuthAuthorizeApi, "/workspaces/current/trigger-provider//subscriptions/oauth/authorize" +) +api.add_resource(TriggerOAuthCallbackApi, "/oauth/plugin//trigger/callback") +api.add_resource(TriggerOAuthClientManageApi, "/workspaces/current/trigger-provider//oauth/client") diff --git a/api/controllers/mcp/mcp.py b/api/controllers/mcp/mcp.py index a8629dca20..4f099f0057 100644 --- a/api/controllers/mcp/mcp.py +++ b/api/controllers/mcp/mcp.py @@ -9,10 +9,9 @@ from controllers.console.app.mcp_server import AppMCPServerStatus from controllers.mcp import mcp_ns from core.app.app_config.entities import VariableEntity from core.mcp import types as mcp_types -from core.mcp.server.streamable_http import handle_mcp_request from extensions.ext_database import db from libs import helper -from models.model import App, AppMCPServer, AppMode, EndUser +from models.model import App, AppMCPServer, AppMode class MCPRequestError(Exception): @@ -195,50 +194,6 @@ class MCPAppApi(Resource): except ValidationError as e: raise MCPRequestError(mcp_types.INVALID_PARAMS, f"Invalid MCP request: {str(e)}") - def _retrieve_end_user(self, tenant_id: str, mcp_server_id: str, session: Session) -> EndUser | None: - """Get end user from existing session - optimized query""" - return ( - session.query(EndUser) - .where(EndUser.tenant_id == tenant_id) - .where(EndUser.session_id == mcp_server_id) - .where(EndUser.type == "mcp") - .first() - ) - - def _create_end_user( - self, client_name: str, tenant_id: str, app_id: str, 
mcp_server_id: str, session: Session - ) -> EndUser: - """Create end user in existing session""" - end_user = EndUser( - tenant_id=tenant_id, - app_id=app_id, - type="mcp", - name=client_name, - session_id=mcp_server_id, - ) - session.add(end_user) - session.flush() # Use flush instead of commit to keep transaction open - session.refresh(end_user) - return end_user - - def _handle_mcp_request( - self, - app: App, - mcp_server: AppMCPServer, - mcp_request: mcp_types.ClientRequest, - user_input_form: list[VariableEntity], - session: Session, - request_id: Union[int, str], - ) -> mcp_types.JSONRPCResponse | mcp_types.JSONRPCError | None: - """Handle MCP request and return response""" - end_user = self._retrieve_end_user(mcp_server.tenant_id, mcp_server.id, session) - - if not end_user and isinstance(mcp_request.root, mcp_types.InitializeRequest): - client_info = mcp_request.root.params.clientInfo - client_name = f"{client_info.name}@{client_info.version}" - # Commit the session before creating end user to avoid transaction conflicts - session.commit() - with Session(db.engine, expire_on_commit=False) as create_session, create_session.begin(): - end_user = self._create_end_user(client_name, app.tenant_id, app.id, mcp_server.id, create_session) - - return handle_mcp_request(app, mcp_request, user_input_form, mcp_server, end_user, request_id) + mcp_server_handler = MCPServerStreamableHTTPRequestHandler(app, request, converted_user_input_form) + response = mcp_server_handler.handle() + return helper.compact_generate_response(response) diff --git a/api/controllers/trigger/__init__.py b/api/controllers/trigger/__init__.py new file mode 100644 index 0000000000..972f28649c --- /dev/null +++ b/api/controllers/trigger/__init__.py @@ -0,0 +1,7 @@ +from flask import Blueprint + +# Create trigger blueprint +bp = Blueprint("trigger", __name__, url_prefix="/triggers") + +# Import routes after blueprint creation to avoid circular imports +from . import trigger, webhook diff --git a/api/controllers/trigger/trigger.py b/api/controllers/trigger/trigger.py new file mode 100644 index 0000000000..b7bcfffcf6 --- /dev/null +++ b/api/controllers/trigger/trigger.py @@ -0,0 +1,41 @@ +import logging +import re + +from flask import jsonify, request +from werkzeug.exceptions import NotFound + +from controllers.trigger import bp +from services.trigger.trigger_subscription_builder_service import TriggerSubscriptionBuilderService +from services.trigger_service import TriggerService + +logger = logging.getLogger(__name__) + +UUID_PATTERN = r"^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$" +UUID_MATCHER = re.compile(UUID_PATTERN) + + +@bp.route("/plugin/<endpoint_id>", methods=["GET", "POST", "PUT", "PATCH", "DELETE", "HEAD", "OPTIONS"]) +def trigger_endpoint(endpoint_id: str): + """ + Handle endpoint trigger calls.
+ """ + # endpoint_id must be UUID + if not UUID_MATCHER.match(endpoint_id): + raise NotFound("Invalid endpoint ID") + handling_chain = [ + TriggerService.process_endpoint, + TriggerSubscriptionBuilderService.process_builder_validation_endpoint, + ] + try: + for handler in handling_chain: + response = handler(endpoint_id, request) + if response: + break + if not response: + raise NotFound("Endpoint not found") + return response + except ValueError as e: + raise NotFound(str(e)) + except Exception as e: + logger.exception("Webhook processing failed for {endpoint_id}") + return jsonify({"error": "Internal server error", "message": str(e)}), 500 diff --git a/api/controllers/trigger/webhook.py b/api/controllers/trigger/webhook.py new file mode 100644 index 0000000000..04f2d6483d --- /dev/null +++ b/api/controllers/trigger/webhook.py @@ -0,0 +1,46 @@ +import logging + +from flask import jsonify +from werkzeug.exceptions import NotFound, RequestEntityTooLarge + +from controllers.trigger import bp +from services.webhook_service import WebhookService + +logger = logging.getLogger(__name__) + + +@bp.route("/webhook/", methods=["GET", "POST", "PUT", "PATCH", "DELETE", "HEAD", "OPTIONS"]) +@bp.route("/webhook-debug/", methods=["GET", "POST", "PUT", "PATCH", "DELETE", "HEAD", "OPTIONS"]) +def handle_webhook(webhook_id: str): + """ + Handle webhook trigger calls. + + This endpoint receives webhook calls and processes them according to the + configured webhook trigger settings. + """ + try: + # Get webhook trigger, workflow, and node configuration + webhook_trigger, workflow, node_config = WebhookService.get_webhook_trigger_and_workflow(webhook_id) + + # Extract request data + webhook_data = WebhookService.extract_webhook_data(webhook_trigger) + + # Validate request against node configuration + validation_result = WebhookService.validate_webhook_request(webhook_data, node_config) + if not validation_result["valid"]: + return jsonify({"error": "Bad Request", "message": validation_result["error"]}), 400 + + # Process webhook call (send to Celery) + WebhookService.trigger_workflow_execution(webhook_trigger, webhook_data, workflow) + + # Return configured response + response_data, status_code = WebhookService.generate_webhook_response(node_config) + return jsonify(response_data), status_code + + except ValueError as e: + raise NotFound(str(e)) + except RequestEntityTooLarge: + raise + except Exception as e: + logger.exception("Webhook processing failed for %s", webhook_id) + return jsonify({"error": "Internal server error", "message": str(e)}), 500 diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index 45d047434b..bcce945a57 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -3,7 +3,7 @@ import logging import threading import uuid from collections.abc import Generator, Mapping, Sequence -from typing import Any, Literal, Union, overload +from typing import Any, Literal, Optional, Union, overload from flask import Flask, current_app from pydantic import ValidationError @@ -53,6 +53,8 @@ class WorkflowAppGenerator(BaseAppGenerator): invoke_from: InvokeFrom, streaming: Literal[True], call_depth: int, + triggered_from: Optional[WorkflowRunTriggeredFrom] = None, + root_node_id: Optional[str] = None, ) -> Generator[Mapping | str, None, None]: ... 
@overload @@ -66,6 +68,8 @@ class WorkflowAppGenerator(BaseAppGenerator): invoke_from: InvokeFrom, streaming: Literal[False], call_depth: int, + triggered_from: Optional[WorkflowRunTriggeredFrom] = None, + root_node_id: Optional[str] = None, ) -> Mapping[str, Any]: ... @overload @@ -79,6 +83,8 @@ class WorkflowAppGenerator(BaseAppGenerator): invoke_from: InvokeFrom, streaming: bool, call_depth: int, + triggered_from: Optional[WorkflowRunTriggeredFrom] = None, + root_node_id: Optional[str] = None, ) -> Union[Mapping[str, Any], Generator[Mapping | str, None, None]]: ... def generate( @@ -91,6 +97,8 @@ class WorkflowAppGenerator(BaseAppGenerator): invoke_from: InvokeFrom, streaming: bool = True, call_depth: int = 0, + triggered_from: Optional[WorkflowRunTriggeredFrom] = None, + root_node_id: Optional[str] = None, ) -> Union[Mapping[str, Any], Generator[Mapping | str, None, None]]: files: Sequence[Mapping[str, Any]] = args.get("files") or [] @@ -119,24 +127,26 @@ class WorkflowAppGenerator(BaseAppGenerator): app_id=app_model.id, user_id=user.id if isinstance(user, Account) else user.session_id, ) - inputs: Mapping[str, Any] = args["inputs"] extras = { **extract_external_trace_id_from_args(args), } workflow_run_id = str(uuid.uuid4()) + if triggered_from in (WorkflowRunTriggeredFrom.DEBUGGING, WorkflowRunTriggeredFrom.APP_RUN): + # start node get inputs + inputs = self._prepare_user_inputs( + user_inputs=inputs, + variables=app_config.variables, + tenant_id=app_model.tenant_id, + strict_type_validation=True if invoke_from == InvokeFrom.SERVICE_API else False, + ) # init application generate entity application_generate_entity = WorkflowAppGenerateEntity( task_id=str(uuid.uuid4()), app_config=app_config, file_upload_config=file_extra_config, - inputs=self._prepare_user_inputs( - user_inputs=inputs, - variables=app_config.variables, - tenant_id=app_model.tenant_id, - strict_type_validation=True if invoke_from == InvokeFrom.SERVICE_API else False, - ), + inputs=inputs, files=list(system_files), user_id=user.id, stream=streaming, @@ -155,7 +165,10 @@ class WorkflowAppGenerator(BaseAppGenerator): # Create session factory session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) # Create workflow execution(aka workflow run) repository - if invoke_from == InvokeFrom.DEBUGGER: + if triggered_from is not None: + # Use explicitly provided triggered_from (for async triggers) + workflow_triggered_from = triggered_from + elif invoke_from == InvokeFrom.DEBUGGER: workflow_triggered_from = WorkflowRunTriggeredFrom.DEBUGGING else: workflow_triggered_from = WorkflowRunTriggeredFrom.APP_RUN @@ -182,6 +195,7 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, streaming=streaming, + root_node_id=root_node_id, ) def _generate( @@ -196,6 +210,7 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow_node_execution_repository: WorkflowNodeExecutionRepository, streaming: bool = True, variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER, + root_node_id: Optional[str] = None, ) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]: """ Generate App response. 
@@ -231,6 +246,7 @@ class WorkflowAppGenerator(BaseAppGenerator): "queue_manager": queue_manager, "context": context, "variable_loader": variable_loader, + "root_node_id": root_node_id, }, ) @@ -424,15 +440,16 @@ class WorkflowAppGenerator(BaseAppGenerator): queue_manager: AppQueueManager, context: contextvars.Context, variable_loader: VariableLoader, + root_node_id: Optional[str] = None, ) -> None: """ Generate worker in a new thread. :param flask_app: Flask app :param application_generate_entity: application generate entity :param queue_manager: queue manager - :param workflow_thread_pool_id: workflow thread pool id :return: """ + with preserve_flask_contexts(flask_app, context_vars=context): with Session(db.engine, expire_on_commit=False) as session: workflow = session.scalar( @@ -465,6 +482,7 @@ class WorkflowAppGenerator(BaseAppGenerator): variable_loader=variable_loader, workflow=workflow, system_user_id=system_user_id, + root_node_id=root_node_id, ) try: diff --git a/api/core/app/apps/workflow/app_runner.py b/api/core/app/apps/workflow/app_runner.py index b009dc7715..df68a8e249 100644 --- a/api/core/app/apps/workflow/app_runner.py +++ b/api/core/app/apps/workflow/app_runner.py @@ -34,6 +34,7 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): variable_loader: VariableLoader, workflow: Workflow, system_user_id: str, + root_node_id: str | None = None, ): super().__init__( queue_manager=queue_manager, @@ -43,6 +44,7 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): self.application_generate_entity = application_generate_entity self._workflow = workflow self._sys_user_id = system_user_id + self._root_node_id = root_node_id def run(self): """ @@ -105,6 +107,7 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): graph_runtime_state=graph_runtime_state, workflow_id=self._workflow.id, tenant_id=self._workflow.tenant_id, + root_node_id=self._root_node_id, user_id=self.application_generate_entity.user_id, ) diff --git a/api/core/app/apps/workflow_app_runner.py b/api/core/app/apps/workflow_app_runner.py index 056e03fa14..1aede6daa5 100644 --- a/api/core/app/apps/workflow_app_runner.py +++ b/api/core/app/apps/workflow_app_runner.py @@ -1,5 +1,5 @@ from collections.abc import Mapping -from typing import Any, cast +from typing import Any, Optional, cast from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom from core.app.entities.app_invoke_entities import InvokeFrom @@ -79,6 +79,7 @@ class WorkflowBasedAppRunner: workflow_id: str = "", tenant_id: str = "", user_id: str = "", + root_node_id: Optional[str] = None, ) -> Graph: """ Init graph @@ -112,7 +113,7 @@ class WorkflowBasedAppRunner: ) # init graph - graph = Graph.init(graph_config=graph_config, node_factory=node_factory) + graph = Graph.init(graph_config=graph_config, node_factory=node_factory, root_node_id=root_node_id) if not graph: raise ValueError("graph not found in workflow") @@ -161,7 +162,7 @@ class WorkflowBasedAppRunner: edge for edge in graph_config.get("edges", []) if (edge.get("source") is None or edge.get("source") in node_ids) - and (edge.get("target") is None or edge.get("target") in node_ids) + and (edge.get("target") is None or edge.get("target") in node_ids) ] graph_config["edges"] = edge_configs @@ -276,7 +277,7 @@ class WorkflowBasedAppRunner: edge for edge in graph_config.get("edges", []) if (edge.get("source") is None or edge.get("source") in node_ids) - and (edge.get("target") is None or edge.get("target") in node_ids) + and (edge.get("target") is None or edge.get("target") in node_ids) ] 
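# (The filter above keeps an edge only when both its source and its target are either unset or still
# present in node_ids, so edges pointing outside the extracted sub-graph are dropped.)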
graph_config["edges"] = edge_configs diff --git a/api/core/entities/provider_entities.py b/api/core/entities/provider_entities.py index 0496959ce2..ebe5ad9110 100644 --- a/api/core/entities/provider_entities.py +++ b/api/core/entities/provider_entities.py @@ -207,6 +207,7 @@ class ProviderConfig(BasicProviderConfig): required: bool = False default: Union[int, str, float, bool] | None = None options: list[Option] | None = None + multiple: bool | None = False label: I18nObject | None = None help: I18nObject | None = None url: str | None = None diff --git a/api/core/helper/name_generator.py b/api/core/helper/name_generator.py index 4e19e3946f..b5f9299d9f 100644 --- a/api/core/helper/name_generator.py +++ b/api/core/helper/name_generator.py @@ -3,7 +3,7 @@ import re from collections.abc import Sequence from typing import Any -from core.tools.entities.tool_entities import CredentialType +from core.plugin.entities.plugin_daemon import CredentialType logger = logging.getLogger(__name__) diff --git a/api/core/helper/provider_encryption.py b/api/core/helper/provider_encryption.py new file mode 100644 index 0000000000..98130fed58 --- /dev/null +++ b/api/core/helper/provider_encryption.py @@ -0,0 +1,128 @@ +import contextlib +from copy import deepcopy +from typing import Any, Optional, Protocol + +from core.entities.provider_entities import BasicProviderConfig +from core.helper import encrypter + + +class ProviderConfigCache(Protocol): + """ + Interface for provider configuration cache operations + """ + + def get(self) -> Optional[dict]: + """Get cached provider configuration""" + ... + + def set(self, config: dict[str, Any]) -> None: + """Cache provider configuration""" + ... + + def delete(self) -> None: + """Delete cached provider configuration""" + ... + + +class ProviderConfigEncrypter: + tenant_id: str + config: list[BasicProviderConfig] + provider_config_cache: ProviderConfigCache + + def __init__( + self, + tenant_id: str, + config: list[BasicProviderConfig], + provider_config_cache: ProviderConfigCache, + ): + self.tenant_id = tenant_id + self.config = config + self.provider_config_cache = provider_config_cache + + def _deep_copy(self, data: dict[str, str]) -> dict[str, str]: + """ + deep copy data + """ + return deepcopy(data) + + def encrypt(self, data: dict[str, str]) -> dict[str, str]: + """ + encrypt tool credentials with tenant id + + return a deep copy of credentials with encrypted values + """ + data = self._deep_copy(data) + + # get fields need to be decrypted + fields = dict[str, BasicProviderConfig]() + for credential in self.config: + fields[credential.name] = credential + + for field_name, field in fields.items(): + if field.type == BasicProviderConfig.Type.SECRET_INPUT: + if field_name in data: + encrypted = encrypter.encrypt_token(self.tenant_id, data[field_name] or "") + data[field_name] = encrypted + + return data + + def mask_credentials(self, data: dict[str, Any]) -> dict[str, Any]: + """ + mask credentials + + return a deep copy of credentials with masked values + """ + data = self._deep_copy(data) + + # get fields need to be decrypted + fields = dict[str, BasicProviderConfig]() + for credential in self.config: + fields[credential.name] = credential + + for field_name, field in fields.items(): + if field.type == BasicProviderConfig.Type.SECRET_INPUT: + if field_name in data: + if len(data[field_name]) > 6: + data[field_name] = ( + data[field_name][:2] + "*" * (len(data[field_name]) - 4) + data[field_name][-2:] + ) + else: + data[field_name] = "*" * len(data[field_name]) 
+ + return data + + def mask_tool_credentials(self, data: dict[str, Any]) -> dict[str, Any]: + return self.mask_credentials(data) + + def decrypt(self, data: dict[str, str]) -> dict[str, Any]: + """ + decrypt tool credentials with tenant id + + return a deep copy of credentials with decrypted values + """ + cached_credentials = self.provider_config_cache.get() + if cached_credentials: + return cached_credentials + + data = self._deep_copy(data) + # get fields need to be decrypted + fields = dict[str, BasicProviderConfig]() + for credential in self.config: + fields[credential.name] = credential + + for field_name, field in fields.items(): + if field.type == BasicProviderConfig.Type.SECRET_INPUT: + if field_name in data: + with contextlib.suppress(Exception): + # if the value is None or empty string, skip decrypt + if not data[field_name]: + continue + + data[field_name] = encrypter.decrypt_token(self.tenant_id, data[field_name]) + + self.provider_config_cache.set(data) + return data + + +def create_provider_encrypter(tenant_id: str, config: list[BasicProviderConfig], cache: ProviderConfigCache): + return ProviderConfigEncrypter(tenant_id=tenant_id, config=config, provider_config_cache=cache), cache diff --git a/api/core/plugin/entities/plugin.py b/api/core/plugin/entities/plugin.py index f32b356937..3eb853dbaa 100644 --- a/api/core/plugin/entities/plugin.py +++ b/api/core/plugin/entities/plugin.py @@ -1,7 +1,7 @@ import datetime from collections.abc import Mapping from enum import StrEnum, auto -from typing import Any +from typing import Any, Optional from packaging.version import InvalidVersion, Version from pydantic import BaseModel, Field, field_validator, model_validator @@ -13,6 +13,7 @@ from core.plugin.entities.base import BasePluginEntity from core.plugin.entities.endpoint import EndpointProviderDeclaration from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_entities import ToolProviderEntity +from core.trigger.entities.entities import TriggerProviderEntity class PluginInstallationSource(StrEnum): @@ -27,54 +28,56 @@ class PluginResourceRequirements(BaseModel): class Permission(BaseModel): class Tool(BaseModel): - enabled: bool | None = Field(default=False) + enabled: Optional[bool] = Field(default=False) class Model(BaseModel): - enabled: bool | None = Field(default=False) - llm: bool | None = Field(default=False) - text_embedding: bool | None = Field(default=False) - rerank: bool | None = Field(default=False) - tts: bool | None = Field(default=False) - speech2text: bool | None = Field(default=False) - moderation: bool | None = Field(default=False) + enabled: Optional[bool] = Field(default=False) + llm: Optional[bool] = Field(default=False) + text_embedding: Optional[bool] = Field(default=False) + rerank: Optional[bool] = Field(default=False) + tts: Optional[bool] = Field(default=False) + speech2text: Optional[bool] = Field(default=False) + moderation: Optional[bool] = Field(default=False) class Node(BaseModel): - enabled: bool | None = Field(default=False) + enabled: Optional[bool] = Field(default=False) class Endpoint(BaseModel): - enabled: bool | None = Field(default=False) + enabled: Optional[bool] = Field(default=False) class Storage(BaseModel): - enabled: bool | None = Field(default=False) + enabled: Optional[bool] = Field(default=False) size: int = Field(ge=1024, le=1073741824, default=1048576) - tool: Tool | None = Field(default=None) - model: Model | None = Field(default=None) - node: Node | None = Field(default=None) - endpoint: 
Endpoint | None = Field(default=None) - storage: Storage | None = Field(default=None) + tool: Optional[Tool] = Field(default=None) + model: Optional[Model] = Field(default=None) + node: Optional[Node] = Field(default=None) + endpoint: Optional[Endpoint] = Field(default=None) + storage: Optional[Storage] = Field(default=None) - permission: Permission | None = Field(default=None) + permission: Optional[Permission] = Field(default=None) class PluginCategory(StrEnum): Tool = auto() Model = auto() Extension = auto() - AgentStrategy = "agent-strategy" - Datasource = "datasource" + AgentStrategy = auto() + Datasource = auto() + Trigger = auto() class PluginDeclaration(BaseModel): class Plugins(BaseModel): - tools: list[str] | None = Field(default_factory=list[str]) - models: list[str] | None = Field(default_factory=list[str]) - endpoints: list[str] | None = Field(default_factory=list[str]) + tools: Optional[list[str]] = Field(default_factory=list[str]) + models: Optional[list[str]] = Field(default_factory=list[str]) + endpoints: Optional[list[str]] = Field(default_factory=list[str]) + triggers: Optional[list[str]] = Field(default_factory=list[str]) datasources: list[str] | None = Field(default_factory=list[str]) class Meta(BaseModel): - minimum_dify_version: str | None = Field(default=None) - version: str | None = Field(default=None) + minimum_dify_version: Optional[str] = Field(default=None, pattern=r"^\d{1,4}(\.\d{1,4}){1,3}(-\w{1,16})?$") + version: Optional[str] = Field(default=None) @field_validator("minimum_dify_version") @classmethod @@ -87,25 +90,26 @@ class PluginDeclaration(BaseModel): except InvalidVersion as e: raise ValueError(f"Invalid version format: {v}") from e - version: str = Field(...) - author: str | None = Field(..., pattern=r"^[a-zA-Z0-9_-]{1,64}$") + version: str = Field(..., pattern=r"^\d{1,4}(\.\d{1,4}){1,3}(-\w{1,16})?$") + author: Optional[str] = Field(..., pattern=r"^[a-zA-Z0-9_-]{1,64}$") name: str = Field(..., pattern=r"^[a-z0-9_-]{1,128}$") description: I18nObject icon: str - icon_dark: str | None = Field(default=None) + icon_dark: Optional[str] = Field(default=None) label: I18nObject category: PluginCategory created_at: datetime.datetime resource: PluginResourceRequirements plugins: Plugins tags: list[str] = Field(default_factory=list) - repo: str | None = Field(default=None) + repo: Optional[str] = Field(default=None) verified: bool = Field(default=False) - tool: ToolProviderEntity | None = None - model: ProviderEntity | None = None - endpoint: EndpointProviderDeclaration | None = None - agent_strategy: AgentStrategyProviderEntity | None = None + tool: Optional[ToolProviderEntity] = None + model: Optional[ProviderEntity] = None + endpoint: Optional[EndpointProviderDeclaration] = None + agent_strategy: Optional[AgentStrategyProviderEntity] = None datasource: DatasourceProviderEntity | None = None + trigger: Optional[TriggerProviderEntity] = None meta: Meta @field_validator("version") @@ -119,7 +123,7 @@ class PluginDeclaration(BaseModel): @model_validator(mode="before") @classmethod - def validate_category(cls, values: dict): + def validate_category(cls, values: dict) -> dict: # auto detect category if values.get("tool"): values["category"] = PluginCategory.Tool @@ -129,6 +133,8 @@ class PluginDeclaration(BaseModel): values["category"] = PluginCategory.Datasource elif values.get("agent_strategy"): values["category"] = PluginCategory.AgentStrategy + elif values.get("trigger"): + values["category"] = PluginCategory.Trigger else: values["category"] = 
PluginCategory.Extension return values @@ -190,9 +196,9 @@ class PluginDependency(BaseModel): type: Type value: Github | Marketplace | Package - current_identifier: str | None = None + current_identifier: Optional[str] = None class MissingPluginDependency(BaseModel): plugin_unique_identifier: str - current_identifier: str | None = None + current_identifier: Optional[str] = None diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py index f15acc16f9..f78d00607b 100644 --- a/api/core/plugin/entities/plugin_daemon.py +++ b/api/core/plugin/entities/plugin_daemon.py @@ -1,6 +1,6 @@ from collections.abc import Mapping, Sequence from datetime import datetime -from enum import StrEnum +from enum import StrEnum, auto from typing import Any, Generic, TypeVar from pydantic import BaseModel, ConfigDict, Field @@ -14,6 +14,7 @@ from core.plugin.entities.parameters import PluginParameterOption from core.plugin.entities.plugin import PluginDeclaration, PluginEntity from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_entities import ToolProviderEntityWithPlugin +from core.trigger.entities.entities import TriggerProviderEntity T = TypeVar("T", bound=(BaseModel | dict | list | bool | str)) @@ -205,3 +206,49 @@ class PluginListResponse(BaseModel): class PluginDynamicSelectOptionsResponse(BaseModel): options: Sequence[PluginParameterOption] = Field(description="The options of the dynamic select.") + + +class PluginTriggerProviderEntity(BaseModel): + provider: str + plugin_unique_identifier: str + plugin_id: str + declaration: TriggerProviderEntity + + +class CredentialType(StrEnum): + API_KEY = "api-key" + OAUTH2 = auto() + UNAUTHORIZED = auto() + + def get_name(self): + if self == CredentialType.API_KEY: + return "API KEY" + elif self == CredentialType.OAUTH2: + return "AUTH" + elif self == CredentialType.UNAUTHORIZED: + return "UNAUTHORIZED" + else: + return self.value.replace("-", " ").upper() + + def is_editable(self): + return self == CredentialType.API_KEY + + def is_validate_allowed(self): + return self == CredentialType.API_KEY + + @classmethod + def values(cls): + return [item.value for item in cls] + + @classmethod + def of(cls, credential_type: str) -> "CredentialType": + type_name = credential_type.lower() + if type_name in {"api-key", "api_key"}: + return cls.API_KEY + elif type_name in {"oauth2", "oauth"}: + return cls.OAUTH2 + elif type_name == "unauthorized": + return cls.UNAUTHORIZED + else: + raise ValueError(f"Invalid credential type: {credential_type}") + diff --git a/api/core/plugin/entities/request.py b/api/core/plugin/entities/request.py index 10f37f75f8..47ea483739 100644 --- a/api/core/plugin/entities/request.py +++ b/api/core/plugin/entities/request.py @@ -1,5 +1,7 @@ +from collections.abc import Mapping from typing import Any, Literal +from flask import Response from pydantic import BaseModel, ConfigDict, Field, field_validator from core.entities.provider_entities import BasicProviderConfig @@ -237,3 +239,33 @@ class RequestFetchAppInfo(BaseModel): """ app_id: str + + +class Event(BaseModel): + variables: Mapping[str, Any] + + +class TriggerInvokeResponse(BaseModel): + event: Event + + +class PluginTriggerDispatchResponse(BaseModel): + triggers: list[str] + raw_http_response: str + + +class TriggerSubscriptionResponse(BaseModel): + subscription: dict[str, Any] + + +class TriggerValidateProviderCredentialsResponse(BaseModel): + result: bool + + +class TriggerDispatchResponse: + triggers: list[str] + 
response: Response + + def __init__(self, triggers: list[str], response: Response): + self.triggers = triggers + self.response = response diff --git a/api/core/plugin/impl/dynamic_select.py b/api/core/plugin/impl/dynamic_select.py index 24839849b9..0a580a2978 100644 --- a/api/core/plugin/impl/dynamic_select.py +++ b/api/core/plugin/impl/dynamic_select.py @@ -15,6 +15,7 @@ class DynamicSelectClient(BasePluginClient): provider: str, action: str, credentials: Mapping[str, Any], + credential_type: str, parameter: str, ) -> PluginDynamicSelectOptionsResponse: """ @@ -29,6 +30,7 @@ class DynamicSelectClient(BasePluginClient): "data": { "provider": GenericProviderID(provider).provider_name, "credentials": credentials, + "credential_type": credential_type, "provider_action": action, "parameter": parameter, }, diff --git a/api/core/plugin/impl/tool.py b/api/core/plugin/impl/tool.py index bc4de38099..d7973fdb5b 100644 --- a/api/core/plugin/impl/tool.py +++ b/api/core/plugin/impl/tool.py @@ -4,13 +4,14 @@ from typing import Any from pydantic import BaseModel from core.plugin.entities.plugin_daemon import ( + CredentialType, PluginBasicBooleanResponse, PluginToolProviderEntity, ) from core.plugin.impl.base import BasePluginClient from core.plugin.utils.chunk_merger import merge_blob_chunks from core.schemas.resolver import resolve_dify_schema_refs -from core.tools.entities.tool_entities import CredentialType, ToolInvokeMessage, ToolParameter +from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter from models.provider_ids import GenericProviderID, ToolProviderID diff --git a/api/core/plugin/impl/trigger.py b/api/core/plugin/impl/trigger.py new file mode 100644 index 0000000000..e02065e724 --- /dev/null +++ b/api/core/plugin/impl/trigger.py @@ -0,0 +1,301 @@ +import binascii +from collections.abc import Mapping +from typing import Any + +from flask import Request + +from core.plugin.entities.plugin_daemon import CredentialType, PluginTriggerProviderEntity +from core.plugin.entities.request import ( + PluginTriggerDispatchResponse, + TriggerDispatchResponse, + TriggerInvokeResponse, + TriggerSubscriptionResponse, + TriggerValidateProviderCredentialsResponse, +) +from core.plugin.impl.base import BasePluginClient +from core.plugin.utils.http_parser import deserialize_response, serialize_request +from core.trigger.entities.entities import Subscription +from models.provider_ids import GenericProviderID, TriggerProviderID + + +class PluginTriggerManager(BasePluginClient): + def fetch_trigger_providers(self, tenant_id: str) -> list[PluginTriggerProviderEntity]: + """ + Fetch trigger providers for the given tenant. 
+ """ + + def transformer(json_response: dict[str, Any]) -> dict: + for provider in json_response.get("data", []): + declaration = provider.get("declaration", {}) or {} + provider_id = provider.get("plugin_id") + "/" + provider.get("provider") + for trigger in declaration.get("triggers", []): + trigger["identity"]["provider"] = provider_id + + return json_response + + response = self._request_with_plugin_daemon_response( + "GET", + f"plugin/{tenant_id}/management/triggers", + list[PluginTriggerProviderEntity], + params={"page": 1, "page_size": 256}, + transformer=transformer, + ) + + for provider in response: + provider.declaration.identity.name = f"{provider.plugin_id}/{provider.declaration.identity.name}" + + # override the provider name for each trigger to plugin_id/provider_name + for trigger in provider.declaration.triggers: + trigger.identity.provider = provider.declaration.identity.name + + return response + + def fetch_trigger_provider(self, tenant_id: str, provider_id: TriggerProviderID) -> PluginTriggerProviderEntity: + """ + Fetch trigger provider for the given tenant and plugin. + """ + + def transformer(json_response: dict[str, Any]) -> dict: + data = json_response.get("data") + if data: + for trigger in data.get("declaration", {}).get("triggers", []): + trigger["identity"]["provider"] = str(provider_id) + + return json_response + + response = self._request_with_plugin_daemon_response( + "GET", + f"plugin/{tenant_id}/management/trigger", + PluginTriggerProviderEntity, + params={"provider": provider_id.provider_name, "plugin_id": provider_id.plugin_id}, + transformer=transformer, + ) + + response.declaration.identity.name = str(provider_id) + + # override the provider name for each trigger to plugin_id/provider_name + for trigger in response.declaration.triggers: + trigger.identity.provider = str(provider_id) + + return response + + def invoke_trigger( + self, + tenant_id: str, + user_id: str, + provider: str, + trigger: str, + credentials: Mapping[str, str], + credential_type: CredentialType, + request: Request, + parameters: Mapping[str, Any], + ) -> TriggerInvokeResponse: + """ + Invoke a trigger with the given parameters. + """ + trigger_provider_id = GenericProviderID(provider) + + response = self._request_with_plugin_daemon_response_stream( + "POST", + f"plugin/{tenant_id}/dispatch/trigger/invoke", + TriggerInvokeResponse, + data={ + "user_id": user_id, + "data": { + "provider": trigger_provider_id.provider_name, + "trigger": trigger, + "credentials": credentials, + "credential_type": credential_type, + "raw_http_request": binascii.hexlify(serialize_request(request)).decode(), + "parameters": parameters, + }, + }, + headers={ + "X-Plugin-ID": trigger_provider_id.plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return TriggerInvokeResponse(event=resp.event) + + raise ValueError("No response received from plugin daemon for invoke trigger") + + def validate_provider_credentials( + self, tenant_id: str, user_id: str, provider: str, credentials: Mapping[str, str] + ) -> bool: + """ + Validate the credentials of the trigger provider. 
+ """ + trigger_provider_id = GenericProviderID(provider) + + response = self._request_with_plugin_daemon_response_stream( + "POST", + f"plugin/{tenant_id}/dispatch/trigger/validate_credentials", + TriggerValidateProviderCredentialsResponse, + data={ + "user_id": user_id, + "data": { + "provider": trigger_provider_id.provider_name, + "credentials": credentials, + }, + }, + headers={ + "X-Plugin-ID": trigger_provider_id.plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp.result + + raise ValueError("No response received from plugin daemon for validate provider credentials") + + def dispatch_event( + self, + tenant_id: str, + user_id: str, + provider: str, + subscription: Mapping[str, Any], + request: Request, + ) -> TriggerDispatchResponse: + """ + Dispatch an event to triggers. + """ + trigger_provider_id = GenericProviderID(provider) + + response = self._request_with_plugin_daemon_response_stream( + "POST", + f"plugin/{tenant_id}/dispatch/trigger/dispatch_event", + PluginTriggerDispatchResponse, + data={ + "user_id": user_id, + "data": { + "provider": trigger_provider_id.provider_name, + "subscription": subscription, + "raw_http_request": binascii.hexlify(serialize_request(request)).decode(), + }, + }, + headers={ + "X-Plugin-ID": trigger_provider_id.plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return TriggerDispatchResponse( + triggers=resp.triggers, + response=deserialize_response(binascii.unhexlify(resp.raw_http_response.encode())), + ) + + raise ValueError("No response received from plugin daemon for dispatch event") + + def subscribe( + self, + tenant_id: str, + user_id: str, + provider: str, + credentials: Mapping[str, str], + endpoint: str, + parameters: Mapping[str, Any], + ) -> TriggerSubscriptionResponse: + """ + Subscribe to a trigger. + """ + trigger_provider_id = GenericProviderID(provider) + + response = self._request_with_plugin_daemon_response_stream( + "POST", + f"plugin/{tenant_id}/dispatch/trigger/subscribe", + TriggerSubscriptionResponse, + data={ + "user_id": user_id, + "data": { + "provider": trigger_provider_id.provider_name, + "credentials": credentials, + "endpoint": endpoint, + "parameters": parameters, + }, + }, + headers={ + "X-Plugin-ID": trigger_provider_id.plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp + + raise ValueError("No response received from plugin daemon for subscribe") + + def unsubscribe( + self, + tenant_id: str, + user_id: str, + provider: str, + subscription: Subscription, + credentials: Mapping[str, str], + ) -> TriggerSubscriptionResponse: + """ + Unsubscribe from a trigger. + """ + trigger_provider_id = GenericProviderID(provider) + + response = self._request_with_plugin_daemon_response_stream( + "POST", + f"plugin/{tenant_id}/dispatch/trigger/unsubscribe", + TriggerSubscriptionResponse, + data={ + "user_id": user_id, + "data": { + "provider": trigger_provider_id.provider_name, + "subscription": subscription.model_dump(), + "credentials": credentials, + }, + }, + headers={ + "X-Plugin-ID": trigger_provider_id.plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp + + raise ValueError("No response received from plugin daemon for unsubscribe") + + def refresh( + self, + tenant_id: str, + user_id: str, + provider: str, + subscription: Subscription, + credentials: Mapping[str, str], + ) -> TriggerSubscriptionResponse: + """ + Refresh a trigger subscription. 
+ """ + trigger_provider_id = GenericProviderID(provider) + + response = self._request_with_plugin_daemon_response_stream( + "POST", + f"plugin/{tenant_id}/dispatch/trigger/refresh", + TriggerSubscriptionResponse, + data={ + "user_id": user_id, + "data": { + "provider": trigger_provider_id.provider_name, + "subscription": subscription.model_dump(), + "credentials": credentials, + }, + }, + headers={ + "X-Plugin-ID": trigger_provider_id.plugin_id, + "Content-Type": "application/json", + }, + ) + + for resp in response: + return resp + + raise ValueError("No response received from plugin daemon for refresh") diff --git a/api/core/plugin/utils/http_parser.py b/api/core/plugin/utils/http_parser.py new file mode 100644 index 0000000000..47cdcadcb3 --- /dev/null +++ b/api/core/plugin/utils/http_parser.py @@ -0,0 +1,159 @@ +from io import BytesIO + +from flask import Request, Response +from werkzeug.datastructures import Headers + + +def serialize_request(request: Request) -> bytes: + method = request.method + path = request.full_path.rstrip("?") + raw = f"{method} {path} HTTP/1.1\r\n".encode() + + for name, value in request.headers.items(): + raw += f"{name}: {value}\r\n".encode() + + raw += b"\r\n" + + body = request.get_data(as_text=False) + if body: + raw += body + + return raw + + +def deserialize_request(raw_data: bytes) -> Request: + header_end = raw_data.find(b"\r\n\r\n") + if header_end == -1: + header_end = raw_data.find(b"\n\n") + if header_end == -1: + header_data = raw_data + body = b"" + else: + header_data = raw_data[:header_end] + body = raw_data[header_end + 2 :] + else: + header_data = raw_data[:header_end] + body = raw_data[header_end + 4 :] + + lines = header_data.split(b"\r\n") + if len(lines) == 1 and b"\n" in lines[0]: + lines = header_data.split(b"\n") + + if not lines or not lines[0]: + raise ValueError("Empty HTTP request") + + request_line = lines[0].decode("utf-8", errors="ignore") + parts = request_line.split(" ", 2) + if len(parts) < 2: + raise ValueError(f"Invalid request line: {request_line}") + + method = parts[0] + full_path = parts[1] + protocol = parts[2] if len(parts) > 2 else "HTTP/1.1" + + if "?" 
in full_path: + path, query_string = full_path.split("?", 1) + else: + path = full_path + query_string = "" + + headers = Headers() + for line in lines[1:]: + if not line: + continue + line_str = line.decode("utf-8", errors="ignore") + if ":" not in line_str: + continue + name, value = line_str.split(":", 1) + headers.add(name, value.strip()) + + host = headers.get("Host", "localhost") + if ":" in host: + server_name, server_port = host.rsplit(":", 1) + else: + server_name = host + server_port = "80" + + environ = { + "REQUEST_METHOD": method, + "PATH_INFO": path, + "QUERY_STRING": query_string, + "SERVER_NAME": server_name, + "SERVER_PORT": server_port, + "SERVER_PROTOCOL": protocol, + "wsgi.input": BytesIO(body), + "wsgi.url_scheme": "http", + } + + if "Content-Type" in headers: + environ["CONTENT_TYPE"] = headers.get("Content-Type") + + if "Content-Length" in headers: + environ["CONTENT_LENGTH"] = headers.get("Content-Length") + elif body: + environ["CONTENT_LENGTH"] = str(len(body)) + + for name, value in headers.items(): + if name.upper() in ("CONTENT-TYPE", "CONTENT-LENGTH"): + continue + env_name = f"HTTP_{name.upper().replace('-', '_')}" + environ[env_name] = value + + return Request(environ) + + +def serialize_response(response: Response) -> bytes: + raw = f"HTTP/1.1 {response.status}\r\n".encode() + + for name, value in response.headers.items(): + raw += f"{name}: {value}\r\n".encode() + + raw += b"\r\n" + + body = response.get_data(as_text=False) + if body: + raw += body + + return raw + + +def deserialize_response(raw_data: bytes) -> Response: + header_end = raw_data.find(b"\r\n\r\n") + if header_end == -1: + header_end = raw_data.find(b"\n\n") + if header_end == -1: + header_data = raw_data + body = b"" + else: + header_data = raw_data[:header_end] + body = raw_data[header_end + 2 :] + else: + header_data = raw_data[:header_end] + body = raw_data[header_end + 4 :] + + lines = header_data.split(b"\r\n") + if len(lines) == 1 and b"\n" in lines[0]: + lines = header_data.split(b"\n") + + if not lines or not lines[0]: + raise ValueError("Empty HTTP response") + + status_line = lines[0].decode("utf-8", errors="ignore") + parts = status_line.split(" ", 2) + if len(parts) < 2: + raise ValueError(f"Invalid status line: {status_line}") + + status_code = int(parts[1]) + + response = Response(response=body, status=status_code) + + for line in lines[1:]: + if not line: + continue + line_str = line.decode("utf-8", errors="ignore") + if ":" not in line_str: + continue + name, value = line_str.split(":", 1) + response.headers[name] = value.strip() + + return response diff --git a/api/core/tools/__base/tool_runtime.py b/api/core/tools/__base/tool_runtime.py index 3de0014c61..328272fd50 100644 --- a/api/core/tools/__base/tool_runtime.py +++ b/api/core/tools/__base/tool_runtime.py @@ -4,7 +4,8 @@ from openai import BaseModel from pydantic import Field from core.app.entities.app_invoke_entities import InvokeFrom -from core.tools.entities.tool_entities import CredentialType, ToolInvokeFrom +from core.plugin.entities.plugin_daemon import CredentialType +from core.tools.entities.tool_entities import ToolInvokeFrom class ToolRuntime(BaseModel): diff --git a/api/core/tools/builtin_tool/provider.py b/api/core/tools/builtin_tool/provider.py index 45fd16d684..ca64f7455f 100644 --- a/api/core/tools/builtin_tool/provider.py +++ b/api/core/tools/builtin_tool/provider.py @@ -4,11 +4,11 @@ from typing import Any from core.entities.provider_entities import ProviderConfig from core.helper.module_import_helper 
import load_single_subclass_from_source +from core.plugin.entities.plugin_daemon import CredentialType from core.tools.__base.tool_provider import ToolProviderController from core.tools.__base.tool_runtime import ToolRuntime from core.tools.builtin_tool.tool import BuiltinTool from core.tools.entities.tool_entities import ( - CredentialType, OAuthSchema, ToolEntity, ToolProviderEntity, diff --git a/api/core/tools/entities/api_entities.py b/api/core/tools/entities/api_entities.py index 00c4ab9dd7..5ba4a9dfda 100644 --- a/api/core/tools/entities/api_entities.py +++ b/api/core/tools/entities/api_entities.py @@ -5,9 +5,10 @@ from typing import Any, Literal from pydantic import BaseModel, Field, field_validator from core.model_runtime.utils.encoders import jsonable_encoder +from core.plugin.entities.plugin_daemon import CredentialType from core.tools.__base.tool import ToolParameter from core.tools.entities.common_entities import I18nObject -from core.tools.entities.tool_entities import CredentialType, ToolProviderType +from core.tools.entities.tool_entities import ToolProviderType class ToolApiEntity(BaseModel): diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index a59b54216f..7065113ced 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -477,37 +477,4 @@ class ToolSelector(BaseModel): tool_parameters: Mapping[str, Parameter] = Field(..., description="Parameters, type llm") def to_plugin_parameter(self) -> dict[str, Any]: - return self.model_dump() - - -class CredentialType(StrEnum): - API_KEY = "api-key" - OAUTH2 = auto() - - def get_name(self): - if self == CredentialType.API_KEY: - return "API KEY" - elif self == CredentialType.OAUTH2: - return "AUTH" - else: - return self.value.replace("-", " ").upper() - - def is_editable(self): - return self == CredentialType.API_KEY - - def is_validate_allowed(self): - return self == CredentialType.API_KEY - - @classmethod - def values(cls): - return [item.value for item in cls] - - @classmethod - def of(cls, credential_type: str) -> "CredentialType": - type_name = credential_type.lower() - if type_name in {"api-key", "api_key"}: - return cls.API_KEY - elif type_name in {"oauth2", "oauth"}: - return cls.OAUTH2 - else: - raise ValueError(f"Invalid credential type: {credential_type}") + return self.model_dump() \ No newline at end of file diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 9e5f5a7c23..d42fbde772 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -21,6 +21,7 @@ from core.helper.module_import_helper import load_single_subclass_from_source from core.helper.position_helper import is_filtered from core.helper.provider_cache import ToolProviderCredentialsCache from core.model_runtime.utils.encoders import jsonable_encoder +from core.plugin.entities.plugin_daemon import CredentialType from core.plugin.impl.tool import PluginToolManager from core.tools.__base.tool import Tool from core.tools.__base.tool_provider import ToolProviderController @@ -34,7 +35,6 @@ from core.tools.entities.api_entities import ToolProviderApiEntity, ToolProvider from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_entities import ( ApiProviderAuthType, - CredentialType, ToolInvokeFrom, ToolParameter, ToolProviderType, diff --git a/api/core/tools/utils/encryption.py b/api/core/tools/utils/encryption.py index 6ea033b2b6..3b6af302db 100644 --- 
a/api/core/tools/utils/encryption.py +++ b/api/core/tools/utils/encryption.py @@ -1,137 +1,24 @@ -import contextlib -from copy import deepcopy -from typing import Any, Protocol +# Import generic components from provider_encryption module +from core.helper.provider_encryption import ( + ProviderConfigCache, + ProviderConfigEncrypter, + create_provider_encrypter, +) -from core.entities.provider_entities import BasicProviderConfig -from core.helper import encrypter +# Re-export for backward compatibility +__all__ = [ + "ProviderConfigCache", + "ProviderConfigEncrypter", + "create_provider_encrypter", + "create_tool_provider_encrypter", +] + +# Tool-specific imports from core.helper.provider_cache import SingletonProviderCredentialsCache from core.tools.__base.tool_provider import ToolProviderController -class ProviderConfigCache(Protocol): - """ - Interface for provider configuration cache operations - """ - - def get(self) -> dict | None: - """Get cached provider configuration""" - ... - - def set(self, config: dict[str, Any]): - """Cache provider configuration""" - ... - - def delete(self): - """Delete cached provider configuration""" - ... - - -class ProviderConfigEncrypter: - tenant_id: str - config: list[BasicProviderConfig] - provider_config_cache: ProviderConfigCache - - def __init__( - self, - tenant_id: str, - config: list[BasicProviderConfig], - provider_config_cache: ProviderConfigCache, - ): - self.tenant_id = tenant_id - self.config = config - self.provider_config_cache = provider_config_cache - - def _deep_copy(self, data: dict[str, str]) -> dict[str, str]: - """ - deep copy data - """ - return deepcopy(data) - - def encrypt(self, data: dict[str, str]) -> dict[str, str]: - """ - encrypt tool credentials with tenant id - - return a deep copy of credentials with encrypted values - """ - data = self._deep_copy(data) - - # get fields need to be decrypted - fields = dict[str, BasicProviderConfig]() - for credential in self.config: - fields[credential.name] = credential - - for field_name, field in fields.items(): - if field.type == BasicProviderConfig.Type.SECRET_INPUT: - if field_name in data: - encrypted = encrypter.encrypt_token(self.tenant_id, data[field_name] or "") - data[field_name] = encrypted - - return data - - def mask_tool_credentials(self, data: dict[str, Any]) -> dict[str, Any]: - """ - mask tool credentials - - return a deep copy of credentials with masked values - """ - data = self._deep_copy(data) - - # get fields need to be decrypted - fields = dict[str, BasicProviderConfig]() - for credential in self.config: - fields[credential.name] = credential - - for field_name, field in fields.items(): - if field.type == BasicProviderConfig.Type.SECRET_INPUT: - if field_name in data: - if len(data[field_name]) > 6: - data[field_name] = ( - data[field_name][:2] + "*" * (len(data[field_name]) - 4) + data[field_name][-2:] - ) - else: - data[field_name] = "*" * len(data[field_name]) - - return data - - def decrypt(self, data: dict[str, str]) -> dict[str, Any]: - """ - decrypt tool credentials with tenant id - - return a deep copy of credentials with decrypted values - """ - cached_credentials = self.provider_config_cache.get() - if cached_credentials: - return cached_credentials - - data = self._deep_copy(data) - # get fields need to be decrypted - fields = dict[str, BasicProviderConfig]() - for credential in self.config: - fields[credential.name] = credential - - for field_name, field in fields.items(): - if field.type == BasicProviderConfig.Type.SECRET_INPUT: - if field_name in 
data: - with contextlib.suppress(Exception): - # if the value is None or empty string, skip decrypt - if not data[field_name]: - continue - - data[field_name] = encrypter.decrypt_token(self.tenant_id, data[field_name]) - - self.provider_config_cache.set(data) - return data - - -def create_provider_encrypter( - tenant_id: str, config: list[BasicProviderConfig], cache: ProviderConfigCache -) -> tuple[ProviderConfigEncrypter, ProviderConfigCache]: - return ProviderConfigEncrypter(tenant_id=tenant_id, config=config, provider_config_cache=cache), cache - - -def create_tool_provider_encrypter( - tenant_id: str, controller: ToolProviderController -) -> tuple[ProviderConfigEncrypter, ProviderConfigCache]: +def create_tool_provider_encrypter(tenant_id: str, controller: ToolProviderController): cache = SingletonProviderCredentialsCache( tenant_id=tenant_id, provider_type=controller.provider_type.value, diff --git a/api/core/trigger/__init__.py b/api/core/trigger/__init__.py new file mode 100644 index 0000000000..1e5b8bb445 --- /dev/null +++ b/api/core/trigger/__init__.py @@ -0,0 +1 @@ +# Core trigger module initialization diff --git a/api/core/trigger/entities/api_entities.py b/api/core/trigger/entities/api_entities.py new file mode 100644 index 0000000000..799f548612 --- /dev/null +++ b/api/core/trigger/entities/api_entities.py @@ -0,0 +1,76 @@ +from collections.abc import Mapping +from typing import Any, Optional + +from pydantic import BaseModel, Field + +from core.entities.provider_entities import ProviderConfig +from core.plugin.entities.plugin_daemon import CredentialType +from core.tools.entities.common_entities import I18nObject +from core.trigger.entities.entities import ( + SubscriptionSchema, + TriggerCreationMethod, + TriggerDescription, + TriggerIdentity, + TriggerParameter, +) + + +class TriggerProviderSubscriptionApiEntity(BaseModel): + id: str = Field(description="The unique id of the subscription") + name: str = Field(description="The name of the subscription") + provider: str = Field(description="The provider id of the subscription") + credential_type: CredentialType = Field(description="The type of the credential") + credentials: dict = Field(description="The credentials of the subscription") + endpoint: str = Field(description="The endpoint of the subscription") + parameters: dict = Field(description="The parameters of the subscription") + properties: dict = Field(description="The properties of the subscription") + workflows_in_use: int = Field(description="The number of workflows using this subscription") + + +class TriggerApiEntity(BaseModel): + name: str = Field(description="The name of the trigger") + identity: TriggerIdentity = Field(description="The identity of the trigger") + description: TriggerDescription = Field(description="The description of the trigger") + parameters: list[TriggerParameter] = Field(description="The parameters of the trigger") + output_schema: Optional[Mapping[str, Any]] = Field(description="The output schema of the trigger") + + +class TriggerProviderApiEntity(BaseModel): + author: str = Field(..., description="The author of the trigger provider") + name: str = Field(..., description="The name of the trigger provider") + label: I18nObject = Field(..., description="The label of the trigger provider") + description: I18nObject = Field(..., description="The description of the trigger provider") + icon: Optional[str] = Field(default=None, description="The icon of the trigger provider") + icon_dark: Optional[str] = Field(default=None, description="The 
dark icon of the trigger provider") + tags: list[str] = Field(default_factory=list, description="The tags of the trigger provider") + + plugin_id: Optional[str] = Field(default="", description="The plugin id of the tool") + plugin_unique_identifier: Optional[str] = Field(default="", description="The unique identifier of the tool") + + supported_creation_methods: list[TriggerCreationMethod] = Field( + default_factory=list, + description="Supported creation methods for the trigger provider. Possible values: 'OAUTH', 'APIKEY', 'MANUAL'." + ) + + credentials_schema: list[ProviderConfig] = Field(description="The credentials schema of the trigger provider") + oauth_client_schema: list[ProviderConfig] = Field( + default_factory=list, description="The schema of the OAuth client" + ) + subscription_schema: Optional[SubscriptionSchema] = Field( + description="The subscription schema of the trigger provider" + ) + triggers: list[TriggerApiEntity] = Field(description="The triggers of the trigger provider") + + +class SubscriptionBuilderApiEntity(BaseModel): + id: str = Field(description="The id of the subscription builder") + name: str = Field(description="The name of the subscription builder") + provider: str = Field(description="The provider id of the subscription builder") + endpoint: str = Field(description="The endpoint id of the subscription builder") + parameters: Mapping[str, Any] = Field(description="The parameters of the subscription builder") + properties: Mapping[str, Any] = Field(description="The properties of the subscription builder") + credentials: Mapping[str, str] = Field(description="The credentials of the subscription builder") + credential_type: CredentialType = Field(description="The credential type of the subscription builder") + + +__all__ = ["TriggerApiEntity", "TriggerProviderApiEntity", "TriggerProviderSubscriptionApiEntity"] diff --git a/api/core/trigger/entities/entities.py b/api/core/trigger/entities/entities.py new file mode 100644 index 0000000000..3ab65e2a49 --- /dev/null +++ b/api/core/trigger/entities/entities.py @@ -0,0 +1,309 @@ +from collections.abc import Mapping +from datetime import datetime +from enum import StrEnum +from typing import Any, Optional, Union + +from pydantic import BaseModel, ConfigDict, Field + +from core.entities.provider_entities import ProviderConfig +from core.plugin.entities.parameters import PluginParameterAutoGenerate, PluginParameterOption, PluginParameterTemplate +from core.tools.entities.common_entities import I18nObject + + +class TriggerParameterType(StrEnum): + """The type of the parameter""" + + STRING = "string" + NUMBER = "number" + BOOLEAN = "boolean" + SELECT = "select" + FILE = "file" + FILES = "files" + MODEL_SELECTOR = "model-selector" + APP_SELECTOR = "app-selector" + OBJECT = "object" + ARRAY = "array" + DYNAMIC_SELECT = "dynamic-select" + + +class TriggerParameter(BaseModel): + """ + The parameter of the trigger + """ + + name: str = Field(..., description="The name of the parameter") + label: I18nObject = Field(..., description="The label presented to the user") + type: TriggerParameterType = Field(..., description="The type of the parameter") + auto_generate: Optional[PluginParameterAutoGenerate] = Field( + default=None, description="The auto generate of the parameter" + ) + template: Optional[PluginParameterTemplate] = Field(default=None, description="The template of the parameter") + scope: Optional[str] = None + required: Optional[bool] = False + multiple: bool | None = Field( + default=False, + description="Whether 
the parameter is multiple select, only valid for select or dynamic-select type", + ) + default: Union[int, float, str, list, None] = None + min: Union[float, int, None] = None + max: Union[float, int, None] = None + precision: Optional[int] = None + options: Optional[list[PluginParameterOption]] = None + description: Optional[I18nObject] = None + + +class TriggerProviderIdentity(BaseModel): + """ + The identity of the trigger provider + """ + + author: str = Field(..., description="The author of the trigger provider") + name: str = Field(..., description="The name of the trigger provider") + label: I18nObject = Field(..., description="The label of the trigger provider") + description: I18nObject = Field(..., description="The description of the trigger provider") + icon: Optional[str] = Field(default=None, description="The icon of the trigger provider") + icon_dark: Optional[str] = Field(default=None, description="The dark icon of the trigger provider") + tags: list[str] = Field(default_factory=list, description="The tags of the trigger provider") + + +class TriggerIdentity(BaseModel): + """ + The identity of the trigger + """ + + author: str = Field(..., description="The author of the trigger") + name: str = Field(..., description="The name of the trigger") + label: I18nObject = Field(..., description="The label of the trigger") + provider: Optional[str] = Field(default=None, description="The provider of the trigger") + + +class TriggerDescription(BaseModel): + """ + The description of the trigger + """ + + human: I18nObject = Field(..., description="Human readable description") + llm: I18nObject = Field(..., description="LLM readable description") + + +class TriggerEntity(BaseModel): + """ + The configuration of a trigger + """ + + identity: TriggerIdentity = Field(..., description="The identity of the trigger") + parameters: list[TriggerParameter] = Field(default=[], description="The parameters of the trigger") + description: TriggerDescription = Field(..., description="The description of the trigger") + output_schema: Optional[Mapping[str, Any]] = Field( + default=None, description="The output schema that this trigger produces" + ) + + +class OAuthSchema(BaseModel): + client_schema: list[ProviderConfig] = Field(default_factory=list, description="The schema of the OAuth client") + credentials_schema: list[ProviderConfig] = Field( + default_factory=list, description="The schema of the OAuth credentials" + ) + + +class SubscriptionSchema(BaseModel): + """ + The subscription schema of the trigger provider + """ + + parameters_schema: list[TriggerParameter] | None = Field( + default_factory=list, + description="The parameters schema required to create a subscription", + ) + + properties_schema: list[ProviderConfig] | None = Field( + default_factory=list, + description="The configuration schema stored in the subscription entity", + ) + + def get_default_parameters(self) -> Mapping[str, Any]: + """Get the default parameters from the parameters schema""" + if not self.parameters_schema: + return {} + return {param.name: param.default for param in self.parameters_schema if param.default} + + def get_default_properties(self) -> Mapping[str, Any]: + """Get the default properties from the properties schema""" + if not self.properties_schema: + return {} + return {prop.name: prop.default for prop in self.properties_schema if prop.default} + + +class TriggerProviderEntity(BaseModel): + """ + The configuration of a trigger provider + """ + + identity: TriggerProviderIdentity = Field(..., 
description="The identity of the trigger provider") + credentials_schema: list[ProviderConfig] = Field( + default_factory=list, + description="The credentials schema of the trigger provider", + ) + oauth_schema: Optional[OAuthSchema] = Field( + default=None, + description="The OAuth schema of the trigger provider if OAuth is supported", + ) + subscription_schema: SubscriptionSchema = Field( + description="The subscription schema for trigger(webhook, polling, etc.) subscription parameters", + ) + triggers: list[TriggerEntity] = Field(default=[], description="The triggers of the trigger provider") + + +class Subscription(BaseModel): + """ + Result of a successful trigger subscription operation. + + Contains all information needed to manage the subscription lifecycle. + """ + + expires_at: int = Field( + ..., description="The timestamp when the subscription will expire, this for refresh the subscription" + ) + + endpoint: str = Field(..., description="The webhook endpoint URL allocated by Dify for receiving events") + properties: Mapping[str, Any] = Field( + ..., description="Subscription data containing all properties and provider-specific information" + ) + + +class Unsubscription(BaseModel): + """ + Result of a trigger unsubscription operation. + + Provides detailed information about the unsubscription attempt, + including success status and error details if failed. + """ + + success: bool = Field(..., description="Whether the unsubscription was successful") + + message: Optional[str] = Field( + None, + description="Human-readable message about the operation result. " + "Success message for successful operations, " + "detailed error information for failures.", + ) + + +class RequestLog(BaseModel): + id: str = Field(..., description="The id of the request log") + endpoint: str = Field(..., description="The endpoint of the request log") + request: dict = Field(..., description="The request of the request log") + response: dict = Field(..., description="The response of the request log") + created_at: datetime = Field(..., description="The created at of the request log") + + +class SubscriptionBuilder(BaseModel): + id: str = Field(..., description="The id of the subscription builder") + name: str | None = Field(default=None, description="The name of the subscription builder") + tenant_id: str = Field(..., description="The tenant id of the subscription builder") + user_id: str = Field(..., description="The user id of the subscription builder") + provider_id: str = Field(..., description="The provider id of the subscription builder") + endpoint_id: str = Field(..., description="The endpoint id of the subscription builder") + parameters: Mapping[str, Any] = Field(..., description="The parameters of the subscription builder") + properties: Mapping[str, Any] = Field(..., description="The properties of the subscription builder") + credentials: Mapping[str, str] = Field(..., description="The credentials of the subscription builder") + credential_type: str | None = Field(default=None, description="The credential type of the subscription builder") + credential_expires_at: int | None = Field( + default=None, description="The credential expires at of the subscription builder" + ) + expires_at: int = Field(..., description="The expires at of the subscription builder") + + def to_subscription(self) -> Subscription: + return Subscription( + expires_at=self.expires_at, + endpoint=self.endpoint_id, + properties=self.properties, + ) + + +class SubscriptionBuilderUpdater(BaseModel): + name: str | None = 
Field(default=None, description="The name of the subscription builder") + parameters: Mapping[str, Any] | None = Field(default=None, description="The parameters of the subscription builder") + properties: Mapping[str, Any] | None = Field(default=None, description="The properties of the subscription builder") + credentials: Mapping[str, str] | None = Field( + default=None, description="The credentials of the subscription builder" + ) + credential_type: str | None = Field(default=None, description="The credential type of the subscription builder") + credential_expires_at: int | None = Field( + default=None, description="The credential expires at of the subscription builder" + ) + expires_at: int | None = Field(default=None, description="The expires at of the subscription builder") + + def update(self, subscription_builder: SubscriptionBuilder) -> None: + if self.name: + subscription_builder.name = self.name + if self.parameters: + subscription_builder.parameters = self.parameters + if self.properties: + subscription_builder.properties = self.properties + if self.credentials: + subscription_builder.credentials = self.credentials + if self.credential_type: + subscription_builder.credential_type = self.credential_type + if self.credential_expires_at: + subscription_builder.credential_expires_at = self.credential_expires_at + if self.expires_at: + subscription_builder.expires_at = self.expires_at + + +class TriggerEventData(BaseModel): + """Event data dispatched to trigger sessions.""" + + subscription_id: str + triggers: list[str] + request_id: str + timestamp: float + + model_config = ConfigDict(arbitrary_types_allowed=True) + + +class TriggerInputs(BaseModel): + """Standard inputs for trigger nodes.""" + + request_id: str + trigger_name: str + subscription_id: str + + @classmethod + def from_trigger_entity(cls, request_id: str, subscription_id: str, trigger: TriggerEntity) -> "TriggerInputs": + """Create from trigger entity (for production).""" + return cls(request_id=request_id, trigger_name=trigger.identity.name, subscription_id=subscription_id) + + def to_workflow_args(self) -> dict[str, Any]: + """Convert to workflow arguments format.""" + return {"inputs": self.model_dump(), "files": []} + + def to_dict(self) -> dict[str, Any]: + """Convert to dict (alias for model_dump).""" + return self.model_dump() + + +class TriggerCreationMethod(StrEnum): + OAUTH = "OAUTH" + APIKEY = "APIKEY" + MANUAL = "MANUAL" + + +# Export all entities +__all__ = [ + "OAuthSchema", + "RequestLog", + "Subscription", + "SubscriptionBuilder", + "TriggerCreationMethod", + "TriggerDescription", + "TriggerEntity", + "TriggerEventData", + "TriggerIdentity", + "TriggerInputs", + "TriggerParameter", + "TriggerParameterType", + "TriggerProviderEntity", + "TriggerProviderIdentity", + "Unsubscription", +] diff --git a/api/core/trigger/errors.py b/api/core/trigger/errors.py new file mode 100644 index 0000000000..bbc27e1eae --- /dev/null +++ b/api/core/trigger/errors.py @@ -0,0 +1,2 @@ +class TriggerProviderCredentialValidationError(ValueError): + pass diff --git a/api/core/trigger/provider.py b/api/core/trigger/provider.py new file mode 100644 index 0000000000..7564f87386 --- /dev/null +++ b/api/core/trigger/provider.py @@ -0,0 +1,358 @@ +""" +Trigger Provider Controller for managing trigger providers +""" + +import logging +from collections.abc import Mapping +from typing import Any, Optional + +from flask import Request + +from core.entities.provider_entities import BasicProviderConfig +from 
core.plugin.entities.plugin_daemon import CredentialType +from core.plugin.entities.request import ( + TriggerDispatchResponse, + TriggerInvokeResponse, +) +from core.plugin.impl.trigger import PluginTriggerManager +from core.trigger.entities.api_entities import TriggerApiEntity, TriggerProviderApiEntity +from core.trigger.entities.entities import ( + ProviderConfig, + Subscription, + SubscriptionSchema, + TriggerCreationMethod, + TriggerEntity, + TriggerProviderEntity, + TriggerProviderIdentity, + Unsubscription, +) +from core.trigger.errors import TriggerProviderCredentialValidationError +from models.provider_ids import TriggerProviderID +from services.plugin.plugin_service import PluginService + +logger = logging.getLogger(__name__) + + +class PluginTriggerProviderController: + """ + Controller for plugin trigger providers + """ + + def __init__( + self, + entity: TriggerProviderEntity, + plugin_id: str, + plugin_unique_identifier: str, + provider_id: TriggerProviderID, + tenant_id: str, + ): + """ + Initialize plugin trigger provider controller + + :param entity: Trigger provider entity + :param plugin_id: Plugin ID + :param plugin_unique_identifier: Plugin unique identifier + :param provider_id: Provider ID + :param tenant_id: Tenant ID + """ + self.entity = entity + self.tenant_id = tenant_id + self.plugin_id = plugin_id + self.provider_id = provider_id + self.plugin_unique_identifier = plugin_unique_identifier + + def get_provider_id(self) -> TriggerProviderID: + """ + Get provider ID + """ + return self.provider_id + + def to_api_entity(self) -> TriggerProviderApiEntity: + """ + Convert to API entity + """ + icon = ( + PluginService.get_plugin_icon_url(self.tenant_id, self.entity.identity.icon) + if self.entity.identity.icon + else None + ) + icon_dark = ( + PluginService.get_plugin_icon_url(self.tenant_id, self.entity.identity.icon_dark) + if self.entity.identity.icon_dark + else None + ) + supported_creation_methods = [] + if self.entity.oauth_schema: + supported_creation_methods.append(TriggerCreationMethod.OAUTH) + if self.entity.credentials_schema: + supported_creation_methods.append(TriggerCreationMethod.APIKEY) + if self.entity.subscription_schema: + supported_creation_methods.append(TriggerCreationMethod.MANUAL) + return TriggerProviderApiEntity( + author=self.entity.identity.author, + name=self.entity.identity.name, + label=self.entity.identity.label, + description=self.entity.identity.description, + icon=icon, + icon_dark=icon_dark, + tags=self.entity.identity.tags, + plugin_id=self.plugin_id, + plugin_unique_identifier=self.plugin_unique_identifier, + credentials_schema=self.entity.credentials_schema, + oauth_client_schema=self.entity.oauth_schema.client_schema if self.entity.oauth_schema else [], + subscription_schema=self.entity.subscription_schema, + supported_creation_methods=supported_creation_methods, + triggers=[ + TriggerApiEntity( + name=trigger.identity.name, + identity=trigger.identity, + description=trigger.description, + parameters=trigger.parameters, + output_schema=trigger.output_schema, + ) + for trigger in self.entity.triggers + ], + ) + + @property + def identity(self) -> TriggerProviderIdentity: + """Get provider identity""" + return self.entity.identity + + def get_triggers(self) -> list[TriggerEntity]: + """ + Get all triggers for this provider + + :return: List of trigger entities + """ + return self.entity.triggers + + def get_trigger(self, trigger_name: str) -> Optional[TriggerEntity]: + """ + Get a specific trigger by name + + :param 
trigger_name: Trigger name + :return: Trigger entity or None + """ + for trigger in self.entity.triggers: + if trigger.identity.name == trigger_name: + return trigger + return None + + def get_subscription_schema(self) -> SubscriptionSchema: + """ + Get subscription schema for this provider + + :return: List of subscription config schemas + """ + return self.entity.subscription_schema + + def validate_credentials(self, user_id: str, credentials: Mapping[str, str]) -> None: + """ + Validate credentials against schema + + :param credentials: Credentials to validate + :return: Validation response + """ + # First validate against schema + for config in self.entity.credentials_schema: + if config.required and config.name not in credentials: + raise TriggerProviderCredentialValidationError(f"Missing required credential field: {config.name}") + + # Then validate with the plugin daemon + manager = PluginTriggerManager() + provider_id = self.get_provider_id() + response = manager.validate_provider_credentials( + tenant_id=self.tenant_id, + user_id=user_id, + provider=str(provider_id), + credentials=credentials, + ) + if not response: + raise TriggerProviderCredentialValidationError( + "Invalid credentials", + ) + + def get_supported_credential_types(self) -> list[CredentialType]: + """ + Get supported credential types for this provider. + + :return: List of supported credential types + """ + types = [] + if self.entity.oauth_schema: + types.append(CredentialType.OAUTH2) + if self.entity.credentials_schema: + types.append(CredentialType.API_KEY) + return types + + def get_credentials_schema(self, credential_type: CredentialType | str) -> list[ProviderConfig]: + """ + Get credentials schema by credential type + + :param credential_type: The type of credential (oauth or api_key) + :return: List of provider config schemas + """ + credential_type = CredentialType.of(credential_type) if isinstance(credential_type, str) else credential_type + if credential_type == CredentialType.OAUTH2: + return self.entity.oauth_schema.credentials_schema.copy() if self.entity.oauth_schema else [] + if credential_type == CredentialType.API_KEY: + return self.entity.credentials_schema.copy() if self.entity.credentials_schema else [] + if credential_type == CredentialType.UNAUTHORIZED: + return [] + raise ValueError(f"Invalid credential type: {credential_type}") + + def get_credential_schema_config(self, credential_type: CredentialType | str) -> list[BasicProviderConfig]: + """ + Get credential schema config by credential type + """ + return [x.to_basic_provider_config() for x in self.get_credentials_schema(credential_type)] + + def get_oauth_client_schema(self) -> list[ProviderConfig]: + """ + Get OAuth client schema for this provider + + :return: List of OAuth client config schemas + """ + return self.entity.oauth_schema.client_schema.copy() if self.entity.oauth_schema else [] + + def get_properties_schema(self) -> list[BasicProviderConfig]: + """ + Get properties schema for this provider + + :return: List of properties config schemas + """ + return ( + [x.to_basic_provider_config() for x in self.entity.subscription_schema.properties_schema.copy()] + if self.entity.subscription_schema.properties_schema + else [] + ) + + def dispatch(self, user_id: str, request: Request, subscription: Subscription) -> TriggerDispatchResponse: + """ + Dispatch a trigger through plugin runtime + + :param user_id: User ID + :param request: Flask request object + :param subscription: Subscription + :return: Dispatch response with triggers and 
raw HTTP response + """ + manager = PluginTriggerManager() + provider_id = self.get_provider_id() + + response = manager.dispatch_event( + tenant_id=self.tenant_id, + user_id=user_id, + provider=str(provider_id), + subscription=subscription.model_dump(), + request=request, + ) + return response + + def invoke_trigger( + self, + user_id: str, + trigger_name: str, + parameters: Mapping[str, Any], + credentials: Mapping[str, str], + credential_type: CredentialType, + request: Request, + ) -> TriggerInvokeResponse: + """ + Execute a trigger through plugin runtime + + :param user_id: User ID + :param trigger_name: Trigger name + :param parameters: Trigger parameters + :param credentials: Provider credentials + :param credential_type: Credential type + :param request: Request + :return: Trigger execution result + """ + manager = PluginTriggerManager() + provider_id = self.get_provider_id() + + return manager.invoke_trigger( + tenant_id=self.tenant_id, + user_id=user_id, + provider=str(provider_id), + trigger=trigger_name, + credentials=credentials, + credential_type=credential_type, + request=request, + parameters=parameters, + ) + + def subscribe_trigger( + self, user_id: str, endpoint: str, parameters: Mapping[str, Any], credentials: Mapping[str, str] + ) -> Subscription: + """ + Subscribe to a trigger through plugin runtime + + :param user_id: User ID + :param endpoint: Subscription endpoint + :param subscription_params: Subscription parameters + :param credentials: Provider credentials + :return: Subscription result + """ + manager = PluginTriggerManager() + provider_id = self.get_provider_id() + + response = manager.subscribe( + tenant_id=self.tenant_id, + user_id=user_id, + provider=str(provider_id), + credentials=credentials, + endpoint=endpoint, + parameters=parameters, + ) + + return Subscription.model_validate(response.subscription) + + def unsubscribe_trigger( + self, user_id: str, subscription: Subscription, credentials: Mapping[str, str] + ) -> Unsubscription: + """ + Unsubscribe from a trigger through plugin runtime + + :param user_id: User ID + :param subscription: Subscription metadata + :param credentials: Provider credentials + :return: Unsubscription result + """ + manager = PluginTriggerManager() + provider_id = self.get_provider_id() + + response = manager.unsubscribe( + tenant_id=self.tenant_id, + user_id=user_id, + provider=str(provider_id), + subscription=subscription, + credentials=credentials, + ) + + return Unsubscription.model_validate(response.subscription) + + def refresh_trigger(self, subscription: Subscription, credentials: Mapping[str, str]) -> Subscription: + """ + Refresh a trigger subscription through plugin runtime + + :param subscription: Subscription metadata + :param credentials: Provider credentials + :return: Refreshed subscription result + """ + manager = PluginTriggerManager() + provider_id = self.get_provider_id() + + response = manager.refresh( + tenant_id=self.tenant_id, + user_id="system", # System refresh + provider=str(provider_id), + subscription=subscription, + credentials=credentials, + ) + + return Subscription.model_validate(response.subscription) + + +__all__ = ["PluginTriggerProviderController"] diff --git a/api/core/trigger/trigger_manager.py b/api/core/trigger/trigger_manager.py new file mode 100644 index 0000000000..863670c3d5 --- /dev/null +++ b/api/core/trigger/trigger_manager.py @@ -0,0 +1,254 @@ +""" +Trigger Manager for loading and managing trigger providers and triggers +""" + +import logging +from collections.abc import Mapping 
+from threading import Lock +from typing import Any, Optional + +from flask import Request + +import contexts +from core.plugin.entities.plugin_daemon import CredentialType +from core.plugin.entities.request import TriggerInvokeResponse +from core.plugin.impl.trigger import PluginTriggerManager +from core.trigger.entities.entities import ( + Subscription, + SubscriptionSchema, + TriggerEntity, + Unsubscription, +) +from core.trigger.provider import PluginTriggerProviderController +from models.provider_ids import TriggerProviderID + +logger = logging.getLogger(__name__) + + +class TriggerManager: + """ + Manager for trigger providers and triggers + """ + + @classmethod + def list_plugin_trigger_providers(cls, tenant_id: str) -> list[PluginTriggerProviderController]: + """ + List all plugin trigger providers for a tenant + + :param tenant_id: Tenant ID + :return: List of trigger provider controllers + """ + manager = PluginTriggerManager() + provider_entities = manager.fetch_trigger_providers(tenant_id) + + controllers = [] + for provider in provider_entities: + try: + controller = PluginTriggerProviderController( + entity=provider.declaration, + plugin_id=provider.plugin_id, + plugin_unique_identifier=provider.plugin_unique_identifier, + provider_id=TriggerProviderID(provider.provider), + tenant_id=tenant_id, + ) + controllers.append(controller) + except Exception: + logger.exception("Failed to load trigger provider %s", provider.plugin_id) + continue + + return controllers + + @classmethod + def get_trigger_provider(cls, tenant_id: str, provider_id: TriggerProviderID) -> PluginTriggerProviderController: + """ + Get a specific plugin trigger provider + + :param tenant_id: Tenant ID + :param provider_id: Provider ID + :return: Trigger provider controller or None + """ + # check if context is set + try: + contexts.plugin_trigger_providers.get() + except LookupError: + contexts.plugin_trigger_providers.set({}) + contexts.plugin_trigger_providers_lock.set(Lock()) + + plugin_trigger_providers = contexts.plugin_trigger_providers.get() + provider_id_str = str(provider_id) + if provider_id_str in plugin_trigger_providers: + return plugin_trigger_providers[provider_id_str] + + with contexts.plugin_trigger_providers_lock.get(): + # double check + plugin_trigger_providers = contexts.plugin_trigger_providers.get() + if provider_id_str in plugin_trigger_providers: + return plugin_trigger_providers[provider_id_str] + + manager = PluginTriggerManager() + provider = manager.fetch_trigger_provider(tenant_id, provider_id) + + if not provider: + raise ValueError(f"Trigger provider {provider_id} not found") + + try: + controller = PluginTriggerProviderController( + entity=provider.declaration, + plugin_id=provider.plugin_id, + plugin_unique_identifier=provider.plugin_unique_identifier, + provider_id=provider_id, + tenant_id=tenant_id, + ) + plugin_trigger_providers[provider_id_str] = controller + return controller + except Exception as e: + logger.exception("Failed to load trigger provider") + raise e + + @classmethod + def list_all_trigger_providers(cls, tenant_id: str) -> list[PluginTriggerProviderController]: + """ + List all trigger providers (plugin) + + :param tenant_id: Tenant ID + :return: List of all trigger provider controllers + """ + return cls.list_plugin_trigger_providers(tenant_id) + + @classmethod + def list_triggers_by_provider(cls, tenant_id: str, provider_id: TriggerProviderID) -> list[TriggerEntity]: + """ + List all triggers for a specific provider + + :param tenant_id: Tenant ID + :param 
provider_id: Provider ID + :return: List of trigger entities + """ + provider = cls.get_trigger_provider(tenant_id, provider_id) + return provider.get_triggers() + + @classmethod + def get_trigger(cls, tenant_id: str, provider_id: TriggerProviderID, trigger_name: str) -> Optional[TriggerEntity]: + """ + Get a specific trigger + + :param tenant_id: Tenant ID + :param provider_id: Provider ID + :param trigger_name: Trigger name + :return: Trigger entity or None + """ + return cls.get_trigger_provider(tenant_id, provider_id).get_trigger(trigger_name) + + @classmethod + def invoke_trigger( + cls, + tenant_id: str, + user_id: str, + provider_id: TriggerProviderID, + trigger_name: str, + parameters: Mapping[str, Any], + credentials: Mapping[str, str], + credential_type: CredentialType, + request: Request, + ) -> TriggerInvokeResponse: + """ + Execute a trigger + + :param tenant_id: Tenant ID + :param user_id: User ID + :param provider_id: Provider ID + :param trigger_name: Trigger name + :param parameters: Trigger parameters + :param credentials: Provider credentials + :param credential_type: Credential type + :param request: Request + :return: Trigger execution result + """ + provider = cls.get_trigger_provider(tenant_id, provider_id) + trigger = provider.get_trigger(trigger_name) + if not trigger: + raise ValueError(f"Trigger {trigger_name} not found in provider {provider_id}") + return provider.invoke_trigger(user_id, trigger_name, parameters, credentials, credential_type, request) + + @classmethod + def subscribe_trigger( + cls, + tenant_id: str, + user_id: str, + provider_id: TriggerProviderID, + endpoint: str, + parameters: Mapping[str, Any], + credentials: Mapping[str, str], + ) -> Subscription: + """ + Subscribe to a trigger (e.g., register webhook) + + :param tenant_id: Tenant ID + :param user_id: User ID + :param provider_id: Provider ID + :param endpoint: Subscription endpoint + :param parameters: Subscription parameters + :param credentials: Provider credentials + :return: Subscription result + """ + provider = cls.get_trigger_provider(tenant_id, provider_id) + return provider.subscribe_trigger( + user_id=user_id, endpoint=endpoint, parameters=parameters, credentials=credentials + ) + + @classmethod + def unsubscribe_trigger( + cls, + tenant_id: str, + user_id: str, + provider_id: TriggerProviderID, + subscription: Subscription, + credentials: Mapping[str, str], + ) -> Unsubscription: + """ + Unsubscribe from a trigger + + :param tenant_id: Tenant ID + :param user_id: User ID + :param provider_id: Provider ID + :param subscription: Subscription metadata from subscribe operation + :param credentials: Provider credentials + :return: Unsubscription result + """ + provider = cls.get_trigger_provider(tenant_id, provider_id) + return provider.unsubscribe_trigger(user_id=user_id, subscription=subscription, credentials=credentials) + + @classmethod + def get_provider_subscription_schema(cls, tenant_id: str, provider_id: TriggerProviderID) -> SubscriptionSchema: + """ + Get provider subscription schema + + :param tenant_id: Tenant ID + :param provider_id: Provider ID + :return: List of subscription config schemas + """ + return cls.get_trigger_provider(tenant_id, provider_id).get_subscription_schema() + + @classmethod + def refresh_trigger( + cls, + tenant_id: str, + provider_id: TriggerProviderID, + subscription: Subscription, + credentials: Mapping[str, str], + ) -> Subscription: + """ + Refresh a trigger subscription + + :param tenant_id: Tenant ID + :param provider_id: Provider ID + 
:param trigger_name: Trigger name + :param subscription: Subscription metadata from subscribe operation + :param credentials: Provider credentials + :return: Refreshed subscription result + """ + return cls.get_trigger_provider(tenant_id, provider_id).refresh_trigger(subscription, credentials) + + +# Export +__all__ = ["TriggerManager"] diff --git a/api/core/trigger/utils/encryption.py b/api/core/trigger/utils/encryption.py new file mode 100644 index 0000000000..026a65aa23 --- /dev/null +++ b/api/core/trigger/utils/encryption.py @@ -0,0 +1,145 @@ +from collections.abc import Mapping +from typing import Union + +from core.entities.provider_entities import BasicProviderConfig, ProviderConfig +from core.helper.provider_cache import ProviderCredentialsCache +from core.helper.provider_encryption import ProviderConfigCache, ProviderConfigEncrypter, create_provider_encrypter +from core.plugin.entities.plugin_daemon import CredentialType +from core.trigger.entities.api_entities import TriggerProviderSubscriptionApiEntity +from core.trigger.provider import PluginTriggerProviderController +from models.trigger import TriggerSubscription + + +class TriggerProviderCredentialsCache(ProviderCredentialsCache): + """Cache for trigger provider credentials""" + + def __init__(self, tenant_id: str, provider_id: str, credential_id: str): + super().__init__(tenant_id=tenant_id, provider_id=provider_id, credential_id=credential_id) + + def _generate_cache_key(self, **kwargs) -> str: + tenant_id = kwargs["tenant_id"] + provider_id = kwargs["provider_id"] + credential_id = kwargs["credential_id"] + return f"trigger_credentials:tenant_id:{tenant_id}:provider_id:{provider_id}:credential_id:{credential_id}" + + +class TriggerProviderOAuthClientParamsCache(ProviderCredentialsCache): + """Cache for trigger provider OAuth client""" + + def __init__(self, tenant_id: str, provider_id: str): + super().__init__(tenant_id=tenant_id, provider_id=provider_id) + + def _generate_cache_key(self, **kwargs) -> str: + tenant_id = kwargs["tenant_id"] + provider_id = kwargs["provider_id"] + return f"trigger_oauth_client:tenant_id:{tenant_id}:provider_id:{provider_id}" + + +class TriggerProviderPropertiesCache(ProviderCredentialsCache): + """Cache for trigger provider properties""" + + def __init__(self, tenant_id: str, provider_id: str, subscription_id: str): + super().__init__(tenant_id=tenant_id, provider_id=provider_id, subscription_id=subscription_id) + + def _generate_cache_key(self, **kwargs) -> str: + tenant_id = kwargs["tenant_id"] + provider_id = kwargs["provider_id"] + subscription_id = kwargs["subscription_id"] + return f"trigger_properties:tenant_id:{tenant_id}:provider_id:{provider_id}:subscription_id:{subscription_id}" + + +def create_trigger_provider_encrypter_for_subscription( + tenant_id: str, + controller: PluginTriggerProviderController, + subscription: Union[TriggerSubscription, TriggerProviderSubscriptionApiEntity], +) -> tuple[ProviderConfigEncrypter, ProviderConfigCache]: + cache = TriggerProviderCredentialsCache( + tenant_id=tenant_id, + provider_id=str(controller.get_provider_id()), + credential_id=subscription.id, + ) + encrypter, _ = create_provider_encrypter( + tenant_id=tenant_id, + config=controller.get_credential_schema_config(subscription.credential_type), + cache=cache, + ) + return encrypter, cache + + +def delete_cache_for_subscription(tenant_id: str, provider_id: str, subscription_id: str): + cache = TriggerProviderCredentialsCache( + tenant_id=tenant_id, + provider_id=provider_id, + 
credential_id=subscription_id, + ) + cache.delete() + + +def create_trigger_provider_encrypter_for_properties( + tenant_id: str, + controller: PluginTriggerProviderController, + subscription: Union[TriggerSubscription, TriggerProviderSubscriptionApiEntity], +) -> tuple[ProviderConfigEncrypter, ProviderConfigCache]: + cache = TriggerProviderPropertiesCache( + tenant_id=tenant_id, + provider_id=str(controller.get_provider_id()), + subscription_id=subscription.id, + ) + encrypter, _ = create_provider_encrypter( + tenant_id=tenant_id, + config=controller.get_properties_schema(), + cache=cache, + ) + return encrypter, cache + + +def create_trigger_provider_encrypter( + tenant_id: str, controller: PluginTriggerProviderController, credential_id: str, credential_type: CredentialType +) -> tuple[ProviderConfigEncrypter, ProviderConfigCache]: + cache = TriggerProviderCredentialsCache( + tenant_id=tenant_id, + provider_id=str(controller.get_provider_id()), + credential_id=credential_id, + ) + encrypter, _ = create_provider_encrypter( + tenant_id=tenant_id, + config=controller.get_credential_schema_config(credential_type), + cache=cache, + ) + return encrypter, cache + + +def create_trigger_provider_oauth_encrypter( + tenant_id: str, controller: PluginTriggerProviderController +) -> tuple[ProviderConfigEncrypter, ProviderConfigCache]: + cache = TriggerProviderOAuthClientParamsCache( + tenant_id=tenant_id, + provider_id=str(controller.get_provider_id()), + ) + encrypter, _ = create_provider_encrypter( + tenant_id=tenant_id, + config=[x.to_basic_provider_config() for x in controller.get_oauth_client_schema()], + cache=cache, + ) + return encrypter, cache + + +def masked_credentials( + schemas: list[ProviderConfig], + credentials: Mapping[str, str], +) -> Mapping[str, str]: + masked_credentials = {} + configs = {x.name: x.to_basic_provider_config() for x in schemas} + for key, value in credentials.items(): + config = configs.get(key) + if not config: + masked_credentials[key] = value + continue + if config.type == BasicProviderConfig.Type.SECRET_INPUT: + if len(value) <= 4: + masked_credentials[key] = "*" * len(value) + else: + masked_credentials[key] = value[:2] + "*" * (len(value) - 4) + value[-2:] + else: + masked_credentials[key] = value + return masked_credentials diff --git a/api/core/trigger/utils/endpoint.py b/api/core/trigger/utils/endpoint.py new file mode 100644 index 0000000000..c203cdd9f3 --- /dev/null +++ b/api/core/trigger/utils/endpoint.py @@ -0,0 +1,5 @@ +from configs import dify_config + + +def parse_endpoint_id(endpoint_id: str) -> str: + return f"{dify_config.CONSOLE_API_URL}/triggers/plugin/{endpoint_id}" diff --git a/api/core/workflow/enums.py b/api/core/workflow/enums.py index 00a125660a..5c2b43c953 100644 --- a/api/core/workflow/enums.py +++ b/api/core/workflow/enums.py @@ -58,6 +58,18 @@ class NodeType(StrEnum): DOCUMENT_EXTRACTOR = "document-extractor" LIST_OPERATOR = "list-operator" AGENT = "agent" + TRIGGER_WEBHOOK = "trigger-webhook" + TRIGGER_SCHEDULE = "trigger-schedule" + TRIGGER_PLUGIN = "trigger-plugin" + + @property + def is_start_node(self) -> bool: + return self in [ + NodeType.START, + NodeType.TRIGGER_WEBHOOK, + NodeType.TRIGGER_SCHEDULE, + NodeType.TRIGGER_PLUGIN, + ] class NodeExecutionType(StrEnum): @@ -122,6 +134,7 @@ class WorkflowNodeExecutionMetadataKey(StrEnum): ERROR_STRATEGY = "error_strategy" # node in continue on error mode return the field LOOP_VARIABLE_MAP = "loop_variable_map" # single loop variable output DATASOURCE_INFO = "datasource_info" + 
TRIGGER_INFO = "trigger_info" class WorkflowNodeExecutionStatus(StrEnum): diff --git a/api/core/workflow/nodes/node_mapping.py b/api/core/workflow/nodes/node_mapping.py index 3d3a1bec98..042836282c 100644 --- a/api/core/workflow/nodes/node_mapping.py +++ b/api/core/workflow/nodes/node_mapping.py @@ -21,6 +21,9 @@ from core.workflow.nodes.question_classifier import QuestionClassifierNode from core.workflow.nodes.start import StartNode from core.workflow.nodes.template_transform import TemplateTransformNode from core.workflow.nodes.tool import ToolNode +from core.workflow.nodes.trigger_plugin import TriggerPluginNode +from core.workflow.nodes.trigger_schedule import TriggerScheduleNode +from core.workflow.nodes.trigger_webhook import TriggerWebhookNode from core.workflow.nodes.variable_aggregator import VariableAggregatorNode from core.workflow.nodes.variable_assigner.v1 import VariableAssignerNode as VariableAssignerNodeV1 from core.workflow.nodes.variable_assigner.v2 import VariableAssignerNode as VariableAssignerNodeV2 @@ -142,4 +145,16 @@ NODE_TYPE_CLASSES_MAPPING: Mapping[NodeType, Mapping[str, type[Node]]] = { LATEST_VERSION: KnowledgeIndexNode, "1": KnowledgeIndexNode, }, + NodeType.TRIGGER_WEBHOOK: { + LATEST_VERSION: TriggerWebhookNode, + "1": TriggerWebhookNode, + }, + NodeType.TRIGGER_PLUGIN: { + LATEST_VERSION: TriggerPluginNode, + "1": TriggerPluginNode, + }, + NodeType.TRIGGER_SCHEDULE: { + LATEST_VERSION: TriggerScheduleNode, + "1": TriggerScheduleNode, + }, } diff --git a/api/core/workflow/nodes/trigger_plugin/__init__.py b/api/core/workflow/nodes/trigger_plugin/__init__.py new file mode 100644 index 0000000000..97ec2db3a3 --- /dev/null +++ b/api/core/workflow/nodes/trigger_plugin/__init__.py @@ -0,0 +1,3 @@ +from .trigger_plugin_node import TriggerPluginNode + +__all__ = ["TriggerPluginNode"] diff --git a/api/core/workflow/nodes/trigger_plugin/entities.py b/api/core/workflow/nodes/trigger_plugin/entities.py new file mode 100644 index 0000000000..ed5e07baa3 --- /dev/null +++ b/api/core/workflow/nodes/trigger_plugin/entities.py @@ -0,0 +1,28 @@ +from typing import Any, Optional + +from pydantic import Field + +from core.workflow.enums import ErrorStrategy +from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig + + +class PluginTriggerData(BaseNodeData): + """Plugin trigger node data""" + + title: str + desc: Optional[str] = None + plugin_id: str = Field(..., description="Plugin ID") + provider_id: str = Field(..., description="Provider ID") + trigger_name: str = Field(..., description="Trigger name") + subscription_id: str = Field(..., description="Subscription ID") + plugin_unique_identifier: str = Field(..., description="Plugin unique identifier") + parameters: dict[str, Any] = Field(default_factory=dict, description="Trigger parameters") + + # Error handling + error_strategy: Optional[ErrorStrategy] = Field( + default=ErrorStrategy.FAIL_BRANCH, description="Error handling strategy" + ) + retry_config: RetryConfig = Field(default_factory=lambda: RetryConfig(), description="Retry configuration") + default_value_dict: dict[str, Any] = Field( + default_factory=dict, description="Default values for outputs when error occurs" + ) diff --git a/api/core/workflow/nodes/trigger_plugin/trigger_plugin_node.py b/api/core/workflow/nodes/trigger_plugin/trigger_plugin_node.py new file mode 100644 index 0000000000..d819c8c04d --- /dev/null +++ b/api/core/workflow/nodes/trigger_plugin/trigger_plugin_node.py @@ -0,0 +1,153 @@ +from collections.abc import Mapping +from typing 
import Any, Optional + +from core.workflow.nodes.base.node import BaseNode + +from core.plugin.impl.exc import PluginDaemonClientSideError, PluginInvokeError +from core.plugin.utils.http_parser import deserialize_request +from core.trigger.entities.api_entities import TriggerProviderSubscriptionApiEntity +from core.trigger.trigger_manager import TriggerManager +from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus +from core.workflow.enums import ErrorStrategy, NodeType +from core.workflow.node_events.base import NodeRunResult +from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig +from extensions.ext_storage import storage +from models.provider_ids import TriggerProviderID +from services.trigger.trigger_provider_service import TriggerProviderService + +from .entities import PluginTriggerData + + +class TriggerPluginNode(BaseNode): + _node_type = NodeType.TRIGGER_PLUGIN + + _node_data: PluginTriggerData + + def init_node_data(self, data: Mapping[str, Any]) -> None: + self._node_data = PluginTriggerData.model_validate(data) + + def _get_error_strategy(self) -> Optional[ErrorStrategy]: + return self._node_data.error_strategy + + def _get_retry_config(self) -> RetryConfig: + return self._node_data.retry_config + + def _get_title(self) -> str: + return self._node_data.title + + def _get_description(self) -> Optional[str]: + return self._node_data.desc + + def _get_default_value_dict(self) -> dict[str, Any]: + return self._node_data.default_value_dict + + def get_base_node_data(self) -> BaseNodeData: + return self._node_data + + @classmethod + def get_default_config(cls, filters: Optional[dict[str, Any]] = None) -> dict: + return { + "type": "plugin", + "config": { + "plugin_id": "", + "provider_id": "", + "trigger_name": "", + "subscription_id": "", + "parameters": {}, + }, + } + + @classmethod + def version(cls) -> str: + return "1" + + def _run(self) -> NodeRunResult: + """ + Run the plugin trigger node. + + This node invokes the trigger to convert request data into events + and makes them available to downstream nodes.
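+
+ The variable pool's user inputs are expected to follow the TriggerInputs shape produced by
+ TriggerInputs.to_workflow_args(); a minimal sketch, with illustrative values only:
+
+ {"request_id": "req-123", "trigger_name": "issue_opened", "subscription_id": "sub-456"}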
+ """ + + # Get trigger data passed when workflow was triggered + trigger_inputs = dict(self.graph_runtime_state.variable_pool.user_inputs) + metadata = { + WorkflowNodeExecutionMetadataKey.TRIGGER_INFO: { + **trigger_inputs, + "provider_id": self._node_data.provider_id, + "trigger_name": self._node_data.trigger_name, + "plugin_unique_identifier": self._node_data.plugin_unique_identifier, + }, + } + + request_id = trigger_inputs.get("request_id") + trigger_name = trigger_inputs.get("trigger_name", "") + subscription_id = trigger_inputs.get("subscription_id", "") + + if not request_id or not subscription_id: + return NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs=trigger_inputs, + outputs={"error": "No request ID or subscription ID available"}, + ) + try: + subscription: TriggerProviderSubscriptionApiEntity | None = TriggerProviderService.get_subscription_by_id( + tenant_id=self.tenant_id, subscription_id=subscription_id + ) + if not subscription: + return NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs=trigger_inputs, + outputs={"error": f"Invalid subscription {subscription_id} not found"}, + ) + except Exception as e: + return NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs=trigger_inputs, + outputs={"error": f"Failed to get subscription: {str(e)}"}, + ) + + try: + request = deserialize_request(storage.load_once(f"triggers/{request_id}")) + parameters = self._node_data.parameters if hasattr(self, "_node_data") and self._node_data else {} + invoke_response = TriggerManager.invoke_trigger( + tenant_id=self.tenant_id, + user_id=self.user_id, + provider_id=TriggerProviderID(subscription.provider), + trigger_name=trigger_name, + parameters=parameters, + credentials=subscription.credentials, + credential_type=subscription.credential_type, + request=request, + ) + outputs = invoke_response.event.variables or {} + return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=trigger_inputs, outputs=outputs) + except PluginInvokeError as e: + return NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs=trigger_inputs, + metadata=metadata, + error="An error occurred in the plugin, " + f"please contact the author of {subscription.provider} for help, " + f"error type: {e.get_error_type()}, " + f"error details: {e.get_error_message()}", + error_type=type(e).__name__, + ) + except PluginDaemonClientSideError as e: + return NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs=trigger_inputs, + metadata=metadata, + error=f"Failed to invoke trigger, error: {e.description}", + error_type=type(e).__name__, + ) + + except Exception as e: + return NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs=trigger_inputs, + metadata=metadata, + error=f"Failed to invoke trigger: {str(e)}", + error_type=type(e).__name__, + ) diff --git a/api/core/workflow/nodes/trigger_schedule/__init__.py b/api/core/workflow/nodes/trigger_schedule/__init__.py new file mode 100644 index 0000000000..6773bae502 --- /dev/null +++ b/api/core/workflow/nodes/trigger_schedule/__init__.py @@ -0,0 +1,3 @@ +from core.workflow.nodes.trigger_schedule.trigger_schedule_node import TriggerScheduleNode + +__all__ = ["TriggerScheduleNode"] diff --git a/api/core/workflow/nodes/trigger_schedule/entities.py b/api/core/workflow/nodes/trigger_schedule/entities.py new file mode 100644 index 0000000000..7ff78c4054 --- /dev/null +++ b/api/core/workflow/nodes/trigger_schedule/entities.py @@ -0,0 +1,51 @@ +from typing import Literal, 
Optional, Union + +from pydantic import BaseModel, Field + +from core.workflow.nodes.base import BaseNodeData + + +class TriggerScheduleNodeData(BaseNodeData): + """ + Trigger Schedule Node Data + """ + + mode: str = Field(default="visual", description="Schedule mode: visual or cron") + frequency: Optional[str] = Field( + default=None, description="Frequency for visual mode: hourly, daily, weekly, monthly" + ) + cron_expression: Optional[str] = Field(default=None, description="Cron expression for cron mode") + visual_config: Optional[dict] = Field(default=None, description="Visual configuration details") + timezone: str = Field(default="UTC", description="Timezone for schedule execution") + + +class ScheduleConfig(BaseModel): + node_id: str + cron_expression: str + timezone: str = "UTC" + + +class SchedulePlanUpdate(BaseModel): + node_id: Optional[str] = None + cron_expression: Optional[str] = None + timezone: Optional[str] = None + + +class VisualConfig(BaseModel): + """Visual configuration for schedule trigger""" + + # For hourly frequency + on_minute: Optional[int] = Field(default=0, ge=0, le=59, description="Minute of the hour (0-59)") + + # For daily, weekly, monthly frequencies + time: Optional[str] = Field(default="12:00 AM", description="Time in 12-hour format (e.g., '2:30 PM')") + + # For weekly frequency + weekdays: Optional[list[Literal["sun", "mon", "tue", "wed", "thu", "fri", "sat"]]] = Field( + default=None, description="List of weekdays to run on" + ) + + # For monthly frequency + monthly_days: Optional[list[Union[int, Literal["last"]]]] = Field( + default=None, description="Days of month to run on (1-31 or 'last')" + ) diff --git a/api/core/workflow/nodes/trigger_schedule/exc.py b/api/core/workflow/nodes/trigger_schedule/exc.py new file mode 100644 index 0000000000..2f99880ff1 --- /dev/null +++ b/api/core/workflow/nodes/trigger_schedule/exc.py @@ -0,0 +1,31 @@ +from core.workflow.nodes.base.exc import BaseNodeError + + +class ScheduleNodeError(BaseNodeError): + """Base schedule node error.""" + + pass + + +class ScheduleNotFoundError(ScheduleNodeError): + """Schedule not found error.""" + + pass + + +class ScheduleConfigError(ScheduleNodeError): + """Schedule configuration error.""" + + pass + + +class ScheduleExecutionError(ScheduleNodeError): + """Schedule execution error.""" + + pass + + +class TenantOwnerNotFoundError(ScheduleExecutionError): + """Tenant owner not found error for schedule execution.""" + + pass diff --git a/api/core/workflow/nodes/trigger_schedule/trigger_schedule_node.py b/api/core/workflow/nodes/trigger_schedule/trigger_schedule_node.py new file mode 100644 index 0000000000..7908ca09cf --- /dev/null +++ b/api/core/workflow/nodes/trigger_schedule/trigger_schedule_node.py @@ -0,0 +1,63 @@ +from collections.abc import Mapping +from datetime import UTC, datetime +from typing import Any, Optional + +from core.workflow.nodes.base.node import BaseNode + +from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus +from core.workflow.enums import ErrorStrategy, NodeType +from core.workflow.node_events.base import NodeRunResult +from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig +from core.workflow.nodes.trigger_schedule.entities import TriggerScheduleNodeData + + +class TriggerScheduleNode(BaseNode): + _node_type = NodeType.TRIGGER_SCHEDULE + + _node_data: TriggerScheduleNodeData + + def init_node_data(self, data: Mapping[str, Any]) -> None: + self._node_data = TriggerScheduleNodeData(**data) + + def 
_get_error_strategy(self) -> Optional[ErrorStrategy]: + return self._node_data.error_strategy + + def _get_retry_config(self) -> RetryConfig: + return self._node_data.retry_config + + def _get_title(self) -> str: + return self._node_data.title + + def _get_description(self) -> Optional[str]: + return self._node_data.desc + + def _get_default_value_dict(self) -> dict[str, Any]: + return self._node_data.default_value_dict + + def get_base_node_data(self) -> BaseNodeData: + return self._node_data + + @classmethod + def version(cls) -> str: + return "1" + + @classmethod + def get_default_config(cls, filters: Optional[dict] = None) -> dict: + return { + "type": "trigger-schedule", + "config": { + "mode": "visual", + "frequency": "daily", + "visual_config": {"time": "12:00 AM", "on_minute": 0, "weekdays": ["sun"], "monthly_days": [1]}, + "timezone": "UTC", + }, + } + + def _run(self) -> NodeRunResult: + current_time = datetime.now(UTC) + node_outputs = {"current_time": current_time.isoformat()} + + return NodeRunResult( + status=WorkflowNodeExecutionStatus.SUCCEEDED, + outputs=node_outputs, + ) diff --git a/api/core/workflow/nodes/trigger_webhook/__init__.py b/api/core/workflow/nodes/trigger_webhook/__init__.py new file mode 100644 index 0000000000..e41d290f6d --- /dev/null +++ b/api/core/workflow/nodes/trigger_webhook/__init__.py @@ -0,0 +1,3 @@ +from .node import TriggerWebhookNode + +__all__ = ["TriggerWebhookNode"] diff --git a/api/core/workflow/nodes/trigger_webhook/entities.py b/api/core/workflow/nodes/trigger_webhook/entities.py new file mode 100644 index 0000000000..edb7338473 --- /dev/null +++ b/api/core/workflow/nodes/trigger_webhook/entities.py @@ -0,0 +1,79 @@ +from collections.abc import Sequence +from enum import StrEnum +from typing import Literal, Optional + +from pydantic import BaseModel, Field, field_validator + +from core.workflow.nodes.base import BaseNodeData + + +class Method(StrEnum): + GET = "get" + POST = "post" + HEAD = "head" + PATCH = "patch" + PUT = "put" + DELETE = "delete" + + +class ContentType(StrEnum): + JSON = "application/json" + FORM_DATA = "multipart/form-data" + FORM_URLENCODED = "application/x-www-form-urlencoded" + TEXT = "text/plain" + BINARY = "application/octet-stream" + + +class WebhookParameter(BaseModel): + """Parameter definition for headers, query params, or body.""" + + name: str + required: bool = False + + +class WebhookBodyParameter(BaseModel): + """Body parameter with type information.""" + + name: str + type: Literal[ + "string", + "number", + "boolean", + "object", + "array[string]", + "array[number]", + "array[boolean]", + "array[object]", + "file", + ] = "string" + required: bool = False + + +class WebhookData(BaseNodeData): + """ + Webhook Node Data. 
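+
+ Each entry declared in headers, params, and body becomes a node output keyed by its name
+ (see TriggerWebhookNode._extract_configured_outputs). For example, a body parameter named
+ "order_id" of type "string" would surface the matching request body field as the output
+ "order_id" (illustrative name).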
+ """ + + class SyncMode(StrEnum): + SYNC = "async" # only support + + method: Method = Method.GET + content_type: ContentType = Field(default=ContentType.JSON) + headers: Sequence[WebhookParameter] = Field(default_factory=list) + params: Sequence[WebhookParameter] = Field(default_factory=list) # query parameters + body: Sequence[WebhookBodyParameter] = Field(default_factory=list) + + @field_validator("method", mode="before") + @classmethod + def normalize_method(cls, v) -> str: + """Normalize HTTP method to lowercase to support both uppercase and lowercase input.""" + if isinstance(v, str): + return v.lower() + return v + + status_code: int = 200 # Expected status code for response + response_body: str = "" # Template for response body + + # Webhook specific fields (not from client data, set internally) + webhook_id: Optional[str] = None # Set when webhook trigger is created + timeout: int = 30 # Timeout in seconds to wait for webhook response diff --git a/api/core/workflow/nodes/trigger_webhook/exc.py b/api/core/workflow/nodes/trigger_webhook/exc.py new file mode 100644 index 0000000000..dc2239c287 --- /dev/null +++ b/api/core/workflow/nodes/trigger_webhook/exc.py @@ -0,0 +1,25 @@ +from core.workflow.nodes.base.exc import BaseNodeError + + +class WebhookNodeError(BaseNodeError): + """Base webhook node error.""" + + pass + + +class WebhookTimeoutError(WebhookNodeError): + """Webhook timeout error.""" + + pass + + +class WebhookNotFoundError(WebhookNodeError): + """Webhook not found error.""" + + pass + + +class WebhookConfigError(WebhookNodeError): + """Webhook configuration error.""" + + pass diff --git a/api/core/workflow/nodes/trigger_webhook/node.py b/api/core/workflow/nodes/trigger_webhook/node.py new file mode 100644 index 0000000000..cd10e0fa68 --- /dev/null +++ b/api/core/workflow/nodes/trigger_webhook/node.py @@ -0,0 +1,127 @@ +from collections.abc import Mapping +from typing import Any, Optional + +from elastic_transport import BaseNode + +from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus +from core.workflow.enums import ErrorStrategy, NodeType +from core.workflow.node_events.base import NodeRunResult +from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig + +from .entities import ContentType, WebhookData + + +class TriggerWebhookNode(BaseNode): + _node_type = NodeType.TRIGGER_WEBHOOK + + _node_data: WebhookData + + def init_node_data(self, data: Mapping[str, Any]) -> None: + self._node_data = WebhookData.model_validate(data) + + def _get_error_strategy(self) -> Optional[ErrorStrategy]: + return self._node_data.error_strategy + + def _get_retry_config(self) -> RetryConfig: + return self._node_data.retry_config + + def _get_title(self) -> str: + return self._node_data.title + + def _get_description(self) -> Optional[str]: + return self._node_data.desc + + def _get_default_value_dict(self) -> dict[str, Any]: + return self._node_data.default_value_dict + + def get_base_node_data(self) -> BaseNodeData: + return self._node_data + + @classmethod + def get_default_config(cls, filters: Optional[dict[str, Any]] = None) -> dict: + return { + "type": "webhook", + "config": { + "method": "get", + "content_type": "application/json", + "headers": [], + "params": [], + "body": [], + "async_mode": True, + "status_code": 200, + "response_body": "", + "timeout": 30, + }, + } + + @classmethod + def version(cls) -> str: + return "1" + + def _run(self) -> NodeRunResult: + """ + Run the webhook node. 
+ + Like the start node, this simply takes the webhook data from the variable pool + and makes it available to downstream nodes. The actual webhook handling + happens in the trigger controller. + """ + # Get webhook data from variable pool (injected by Celery task) + webhook_inputs = dict(self.graph_runtime_state.variable_pool.user_inputs) + + # Extract webhook-specific outputs based on node configuration + outputs = self._extract_configured_outputs(webhook_inputs) + + return NodeRunResult( + status=WorkflowNodeExecutionStatus.SUCCEEDED, + inputs=webhook_inputs, + outputs=outputs, + ) + + def _extract_configured_outputs(self, webhook_inputs: dict[str, Any]) -> dict[str, Any]: + """Extract outputs based on node configuration from webhook inputs.""" + outputs = {} + + # Get the raw webhook data (should be injected by Celery task) + webhook_data = webhook_inputs.get("webhook_data", {}) + + # Extract configured headers (case-insensitive) + webhook_headers = webhook_data.get("headers", {}) + webhook_headers_lower = {k.lower(): v for k, v in webhook_headers.items()} + + for header in self._node_data.headers: + header_name = header.name + # Try exact match first, then case-insensitive match + value = webhook_headers.get(header_name) or webhook_headers_lower.get(header_name.lower()) + outputs[header_name] = value + + # Extract configured query parameters + for param in self._node_data.params: + param_name = param.name + outputs[param_name] = webhook_data.get("query_params", {}).get(param_name) + + # Extract configured body parameters + for body_param in self._node_data.body: + param_name = body_param.name + param_type = body_param.type + + if self._node_data.content_type == ContentType.TEXT: + # For text/plain, the entire body is a single string parameter + outputs[param_name] = str(webhook_data.get("body", {}).get("raw", "")) + continue + elif self._node_data.content_type == ContentType.BINARY: + outputs[param_name] = webhook_data.get("body", {}).get("raw", b"") + continue + + if param_type == "file": + # Get File object (already processed by webhook controller) + file_obj = webhook_data.get("files", {}).get(param_name) + outputs[param_name] = file_obj + else: + # Get regular body parameter + outputs[param_name] = webhook_data.get("body", {}).get(param_name) + + # Include raw webhook data for debugging/advanced use + outputs["_webhook_raw"] = webhook_data + + return outputs diff --git a/api/docker/entrypoint.sh b/api/docker/entrypoint.sh index 08c0a1f35e..97f5df388f 100755 --- a/api/docker/entrypoint.sh +++ b/api/docker/entrypoint.sh @@ -30,9 +30,41 @@ if [[ "${MODE}" == "worker" ]]; then CONCURRENCY_OPTION="-c ${CELERY_WORKER_AMOUNT:-1}" fi - exec celery -A celery_entrypoint.celery worker -P ${CELERY_WORKER_CLASS:-gevent} $CONCURRENCY_OPTION \ + # Configure queues based on edition if not explicitly set + if [[ -z "${CELERY_QUEUES}" ]]; then + if [[ "${EDITION}" == "CLOUD" ]]; then + # Cloud edition: separate queues for dataset and trigger tasks + DEFAULT_QUEUES="dataset,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor" + else + # Community edition (SELF_HOSTED): dataset, pipeline and workflow have separate queues + DEFAULT_QUEUES="dataset,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor" + fi + else + DEFAULT_QUEUES="${CELERY_QUEUES}" + fi + + # Support for Kubernetes deployment with specific queue workers + # 
Environment variables that can be set: + # - CELERY_WORKER_QUEUES: Comma-separated list of queues (overrides CELERY_QUEUES) + # - CELERY_WORKER_CONCURRENCY: Number of worker processes (overrides CELERY_WORKER_AMOUNT) + # - CELERY_WORKER_POOL: Pool implementation (overrides CELERY_WORKER_CLASS) + + if [[ -n "${CELERY_WORKER_QUEUES}" ]]; then + DEFAULT_QUEUES="${CELERY_WORKER_QUEUES}" + echo "Using CELERY_WORKER_QUEUES: ${DEFAULT_QUEUES}" + fi + + if [[ -n "${CELERY_WORKER_CONCURRENCY}" ]]; then + CONCURRENCY_OPTION="-c ${CELERY_WORKER_CONCURRENCY}" + echo "Using CELERY_WORKER_CONCURRENCY: ${CELERY_WORKER_CONCURRENCY}" + fi + + WORKER_POOL="${CELERY_WORKER_POOL:-${CELERY_WORKER_CLASS:-gevent}}" + echo "Starting Celery worker with queues: ${DEFAULT_QUEUES}" + + exec celery -A app.celery worker -P ${WORKER_POOL} $CONCURRENCY_OPTION \ --max-tasks-per-child ${MAX_TASKS_PER_CHILD:-50} --loglevel ${LOG_LEVEL:-INFO} \ - -Q ${CELERY_QUEUES:-dataset,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation} + -Q ${DEFAULT_QUEUES} elif [[ "${MODE}" == "beat" ]]; then exec celery -A app.celery beat --loglevel ${LOG_LEVEL:-INFO} diff --git a/api/events/event_handlers/__init__.py b/api/events/event_handlers/__init__.py index d714747e59..c79764983b 100644 --- a/api/events/event_handlers/__init__.py +++ b/api/events/event_handlers/__init__.py @@ -6,12 +6,18 @@ from .create_site_record_when_app_created import handle as handle_create_site_re from .delete_tool_parameters_cache_when_sync_draft_workflow import ( handle as handle_delete_tool_parameters_cache_when_sync_draft_workflow, ) +from .sync_plugin_trigger_when_app_created import handle as handle_sync_plugin_trigger_when_app_created +from .sync_webhook_when_app_created import handle as handle_sync_webhook_when_app_created +from .sync_workflow_schedule_when_app_published import handle as handle_sync_workflow_schedule_when_app_published from .update_app_dataset_join_when_app_model_config_updated import ( handle as handle_update_app_dataset_join_when_app_model_config_updated, ) from .update_app_dataset_join_when_app_published_workflow_updated import ( handle as handle_update_app_dataset_join_when_app_published_workflow_updated, ) +from .update_app_triggers_when_app_published_workflow_updated import ( + handle as handle_update_app_triggers_when_app_published_workflow_updated, +) # Consolidated handler replaces both deduct_quota_when_message_created and # update_provider_last_used_at_when_message_created @@ -24,7 +30,11 @@ __all__ = [ "handle_create_installed_app_when_app_created", "handle_create_site_record_when_app_created", "handle_delete_tool_parameters_cache_when_sync_draft_workflow", + "handle_sync_plugin_trigger_when_app_created", + "handle_sync_webhook_when_app_created", + "handle_sync_workflow_schedule_when_app_published", "handle_update_app_dataset_join_when_app_model_config_updated", "handle_update_app_dataset_join_when_app_published_workflow_updated", + "handle_update_app_triggers_when_app_published_workflow_updated", "handle_update_provider_when_message_created", ] diff --git a/api/events/event_handlers/sync_plugin_trigger_when_app_created.py b/api/events/event_handlers/sync_plugin_trigger_when_app_created.py new file mode 100644 index 0000000000..c20cf22f32 --- /dev/null +++ b/api/events/event_handlers/sync_plugin_trigger_when_app_created.py @@ -0,0 +1,22 @@ +import logging + +from events.app_event import app_draft_workflow_was_synced +from models.model import App, AppMode +from models.workflow import Workflow +from 
services.workflow_plugin_trigger_service import WorkflowPluginTriggerService + +logger = logging.getLogger(__name__) + + +@app_draft_workflow_was_synced.connect +def handle(sender, synced_draft_workflow: Workflow, **kwargs): + """ + While creating a workflow or updating a workflow, we may need to sync + its plugin trigger relationships in DB. + """ + app: App = sender + if app.mode != AppMode.WORKFLOW.value: + # only handle workflow app, chatflow is not supported yet + return + + WorkflowPluginTriggerService.sync_plugin_trigger_relationships(app, synced_draft_workflow) diff --git a/api/events/event_handlers/sync_webhook_when_app_created.py b/api/events/event_handlers/sync_webhook_when_app_created.py new file mode 100644 index 0000000000..6a5ef2c654 --- /dev/null +++ b/api/events/event_handlers/sync_webhook_when_app_created.py @@ -0,0 +1,22 @@ +import logging + +from events.app_event import app_draft_workflow_was_synced +from models.model import App, AppMode +from models.workflow import Workflow +from services.webhook_service import WebhookService + +logger = logging.getLogger(__name__) + + +@app_draft_workflow_was_synced.connect +def handle(sender, synced_draft_workflow: Workflow, **kwargs): + """ + While creating a workflow or updating a workflow, we may need to sync + its webhook relationships in DB. + """ + app: App = sender + if app.mode != AppMode.WORKFLOW.value: + # only handle workflow app, chatflow is not supported yet + return + + WebhookService.sync_webhook_relationships(app, synced_draft_workflow) diff --git a/api/events/event_handlers/sync_workflow_schedule_when_app_published.py b/api/events/event_handlers/sync_workflow_schedule_when_app_published.py new file mode 100644 index 0000000000..928ce60bd2 --- /dev/null +++ b/api/events/event_handlers/sync_workflow_schedule_when_app_published.py @@ -0,0 +1,86 @@ +import logging +from typing import Optional, cast + +from sqlalchemy import select +from sqlalchemy.orm import Session + +from core.workflow.nodes.trigger_schedule.entities import SchedulePlanUpdate +from events.app_event import app_published_workflow_was_updated +from extensions.ext_database import db +from models import AppMode, Workflow, WorkflowSchedulePlan +from services.schedule_service import ScheduleService + +logger = logging.getLogger(__name__) + + +@app_published_workflow_was_updated.connect +def handle(sender, **kwargs): + """ + Handle app published workflow update event to sync workflow_schedule_plans table. + + When a workflow is published, this handler will: + 1. Extract schedule trigger nodes from the workflow graph + 2. Compare with existing workflow_schedule_plans records + 3. Create/update/delete schedule plans as needed + """ + app = sender + if app.mode != AppMode.WORKFLOW.value: + return + + published_workflow = kwargs.get("published_workflow") + published_workflow = cast(Workflow, published_workflow) + + sync_schedule_from_workflow(tenant_id=app.tenant_id, app_id=app.id, workflow=published_workflow) + + +def sync_schedule_from_workflow(tenant_id: str, app_id: str, workflow: Workflow) -> Optional[WorkflowSchedulePlan]: + """ + Sync schedule plan from workflow graph configuration. 
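+
+ For example (illustrative values), a schedule node that resolves to
+ ScheduleConfig(node_id="node-1", cron_expression="0 9 * * *", timezone="America/New_York")
+ creates or updates a plan that fires daily at 9:00 AM New York time, while removing the
+ schedule node from the graph deletes any existing plan.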
+ + Args: + tenant_id: Tenant ID + app_id: App ID + workflow: Published workflow instance + + Returns: + Updated or created WorkflowSchedulePlan, or None if no schedule node + """ + with Session(db.engine) as session: + schedule_config = ScheduleService.extract_schedule_config(workflow) + + existing_plan = session.scalar( + select(WorkflowSchedulePlan).where( + WorkflowSchedulePlan.tenant_id == tenant_id, + WorkflowSchedulePlan.app_id == app_id, + ) + ) + + if not schedule_config: + if existing_plan: + logger.info("No schedule node in workflow for app %s, removing schedule plan", app_id) + ScheduleService.delete_schedule(session=session, schedule_id=existing_plan.id) + session.commit() + return None + + if existing_plan: + updates = SchedulePlanUpdate( + node_id=schedule_config.node_id, + cron_expression=schedule_config.cron_expression, + timezone=schedule_config.timezone, + ) + updated_plan = ScheduleService.update_schedule( + session=session, + schedule_id=existing_plan.id, + updates=updates, + ) + session.commit() + return updated_plan + else: + new_plan = ScheduleService.create_schedule( + session=session, + tenant_id=tenant_id, + app_id=app_id, + config=schedule_config, + ) + session.commit() + return new_plan diff --git a/api/events/event_handlers/update_app_triggers_when_app_published_workflow_updated.py b/api/events/event_handlers/update_app_triggers_when_app_published_workflow_updated.py new file mode 100644 index 0000000000..646d6ecc87 --- /dev/null +++ b/api/events/event_handlers/update_app_triggers_when_app_published_workflow_updated.py @@ -0,0 +1,111 @@ +from typing import cast + +from sqlalchemy import select +from sqlalchemy.orm import Session + +from core.workflow.nodes import NodeType +from events.app_event import app_published_workflow_was_updated +from extensions.ext_database import db +from models import AppMode, AppTrigger, AppTriggerStatus, Workflow + + +@app_published_workflow_was_updated.connect +def handle(sender, **kwargs): + """ + Handle app published workflow update event to sync app_triggers table. + + When a workflow is published, this handler will: + 1. Extract trigger nodes from the workflow graph + 2. Compare with existing app_triggers records + 3. 
Add new triggers and remove obsolete ones + """ + app = sender + if app.mode != AppMode.WORKFLOW.value: + return + + published_workflow = kwargs.get("published_workflow") + published_workflow = cast(Workflow, published_workflow) + # Extract trigger info from workflow + trigger_infos = get_trigger_infos_from_workflow(published_workflow) + + with Session(db.engine) as session: + # Get existing app triggers + existing_triggers = ( + session.execute( + select(AppTrigger).where(AppTrigger.tenant_id == app.tenant_id, AppTrigger.app_id == app.id) + ) + .scalars() + .all() + ) + + # Convert existing triggers to dict for easy lookup + existing_triggers_map = {trigger.node_id: trigger for trigger in existing_triggers} + + # Get current and new node IDs + existing_node_ids = set(existing_triggers_map.keys()) + new_node_ids = {info["node_id"] for info in trigger_infos} + + # Calculate changes + added_node_ids = new_node_ids - existing_node_ids + removed_node_ids = existing_node_ids - new_node_ids + + # Remove obsolete triggers + for node_id in removed_node_ids: + session.delete(existing_triggers_map[node_id]) + + for trigger_info in trigger_infos: + node_id = trigger_info["node_id"] + + if node_id in added_node_ids: + # Create new trigger + app_trigger = AppTrigger( + tenant_id=app.tenant_id, + app_id=app.id, + trigger_type=trigger_info["node_type"], + title=trigger_info["node_title"], + node_id=node_id, + provider_name=trigger_info.get("node_provider_name", ""), + status=AppTriggerStatus.DISABLED, + ) + session.add(app_trigger) + elif node_id in existing_node_ids: + # Update existing trigger if needed + existing_trigger = existing_triggers_map[node_id] + new_title = trigger_info["node_title"] + if new_title and existing_trigger.title != new_title: + existing_trigger.title = new_title + session.add(existing_trigger) + + session.commit() + + +def get_trigger_infos_from_workflow(published_workflow: Workflow) -> list[dict]: + """ + Extract trigger node information from the workflow graph. 
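+
+ A returned entry looks roughly like (illustrative values):
+ {"node_type": "trigger-webhook", "node_id": "1712045", "node_title": "Webhook", "node_provider_name": None}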
+ + Returns: + List of trigger info dictionaries containing: + - node_type: The type of the trigger node ('trigger-webhook', 'trigger-schedule', 'trigger-plugin') + - node_id: The node ID in the workflow + - node_title: The title of the node + - node_provider_name: The name of the node's provider, only for plugin + """ + graph = published_workflow.graph_dict + if not graph: + return [] + + nodes = graph.get("nodes", []) + trigger_types = {NodeType.TRIGGER_WEBHOOK.value, NodeType.TRIGGER_SCHEDULE.value, NodeType.TRIGGER_PLUGIN.value} + + trigger_infos = [ + { + "node_type": node.get("data", {}).get("type"), + "node_id": node.get("id"), + "node_title": node.get("data", {}).get("title"), + "node_provider_name": node.get("data", {}).get("provider_name"), + } + for node in nodes + if node.get("data", {}).get("type") in trigger_types + ] + + return trigger_infos diff --git a/api/extensions/ext_blueprints.py b/api/extensions/ext_blueprints.py index 9c08a08c45..b50c4fcc23 100644 --- a/api/extensions/ext_blueprints.py +++ b/api/extensions/ext_blueprints.py @@ -12,6 +12,7 @@ def init_app(app: DifyApp): from controllers.inner_api import bp as inner_api_bp from controllers.mcp import bp as mcp_bp from controllers.service_api import bp as service_api_bp + from controllers.trigger import bp as trigger_bp from controllers.web import bp as web_bp CORS( @@ -50,3 +51,11 @@ def init_app(app: DifyApp): app.register_blueprint(inner_api_bp) app.register_blueprint(mcp_bp) + + # Register trigger blueprint with CORS for webhook calls + CORS( + trigger_bp, + allow_headers=["Content-Type", "Authorization", "X-App-Code"], + methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH", "HEAD"], + ) + app.register_blueprint(trigger_bp) diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index 585539e2ce..febf744369 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -96,7 +96,9 @@ def init_app(app: DifyApp) -> Celery: celery_app.set_default() app.extensions["celery"] = celery_app - imports = [] + imports = [ + "tasks.async_workflow_tasks", # trigger workers + ] day = dify_config.CELERY_BEAT_SCHEDULER_TIME # if you add a new task, please add the switch to CeleryScheduleTasksConfig @@ -156,6 +158,12 @@ def init_app(app: DifyApp) -> Celery: "task": "schedule.clean_workflow_runlogs_precise.clean_workflow_runlogs_precise", "schedule": crontab(minute="0", hour="2"), } + if dify_config.ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK: + imports.append("schedule.workflow_schedule_task") + beat_schedule["workflow_schedule_task"] = { + "task": "schedule.workflow_schedule_task.poll_workflow_schedules", + "schedule": timedelta(minutes=dify_config.WORKFLOW_SCHEDULE_POLLER_INTERVAL), + } celery_app.conf.update(beat_schedule=beat_schedule, imports=imports) return celery_app diff --git a/api/extensions/ext_commands.py b/api/extensions/ext_commands.py index 79dcdda6e3..71a63168a5 100644 --- a/api/extensions/ext_commands.py +++ b/api/extensions/ext_commands.py @@ -23,6 +23,7 @@ def init_app(app: DifyApp): reset_password, setup_datasource_oauth_client, setup_system_tool_oauth_client, + setup_system_trigger_oauth_client, transform_datasource_credentials, upgrade_db, vdb_migrate, @@ -47,6 +48,7 @@ def init_app(app: DifyApp): clear_orphaned_file_records, remove_orphaned_files_on_storage, setup_system_tool_oauth_client, + setup_system_trigger_oauth_client, cleanup_orphaned_draft_variables, migrate_oss, setup_datasource_oauth_client, diff --git a/api/fields/workflow_run_fields.py 
b/api/fields/workflow_run_fields.py index 649e881848..0082638a13 100644 --- a/api/fields/workflow_run_fields.py +++ b/api/fields/workflow_run_fields.py @@ -8,6 +8,7 @@ workflow_run_for_log_fields = { "id": fields.String, "version": fields.String, "status": fields.String, + "triggered_from": fields.String, "error": fields.String, "elapsed_time": fields.Float, "total_tokens": fields.Integer, diff --git a/api/fields/workflow_trigger_fields.py b/api/fields/workflow_trigger_fields.py new file mode 100644 index 0000000000..ce51d1833a --- /dev/null +++ b/api/fields/workflow_trigger_fields.py @@ -0,0 +1,25 @@ +from flask_restx import fields + +trigger_fields = { + "id": fields.String, + "trigger_type": fields.String, + "title": fields.String, + "node_id": fields.String, + "provider_name": fields.String, + "icon": fields.String, + "status": fields.String, + "created_at": fields.DateTime(dt_format="iso8601"), + "updated_at": fields.DateTime(dt_format="iso8601"), +} + +triggers_list_fields = {"data": fields.List(fields.Nested(trigger_fields))} + + +webhook_trigger_fields = { + "id": fields.String, + "webhook_id": fields.String, + "webhook_url": fields.String, + "webhook_debug_url": fields.String, + "node_id": fields.String, + "created_at": fields.DateTime(dt_format="iso8601"), +} diff --git a/api/libs/schedule_utils.py b/api/libs/schedule_utils.py new file mode 100644 index 0000000000..3f5c482be0 --- /dev/null +++ b/api/libs/schedule_utils.py @@ -0,0 +1,109 @@ +from datetime import UTC, datetime +from typing import Optional + +import pytz +from croniter import croniter + + +def calculate_next_run_at( + cron_expression: str, + timezone: str, + base_time: Optional[datetime] = None, +) -> datetime: + """ + Calculate the next run time for a cron expression in a specific timezone. + + Args: + cron_expression: Standard 5-field cron expression or predefined expression + timezone: Timezone string (e.g., 'UTC', 'America/New_York') + base_time: Base time to calculate from (defaults to current UTC time) + + Returns: + Next run time in UTC + + Note: + Supports enhanced cron syntax including: + - Month abbreviations: JAN, FEB, MAR-JUN, JAN,JUN,DEC + - Day abbreviations: MON, TUE, MON-FRI, SUN,WED,FRI + - Predefined expressions: @daily, @weekly, @monthly, @yearly, @hourly + - Special characters: ? wildcard, L (last day), Sunday as 7 + - Standard 5-field format only (minute hour day month dayOfWeek) + """ + # Validate cron expression format to match frontend behavior + parts = cron_expression.strip().split() + + # Support both 5-field format and predefined expressions (matching frontend) + if len(parts) != 5 and not cron_expression.startswith('@'): + raise ValueError( + f"Cron expression must have exactly 5 fields or be a predefined expression " + f"(@daily, @weekly, etc.). Got {len(parts)} fields: '{cron_expression}'" + ) + + tz = pytz.timezone(timezone) + + if base_time is None: + base_time = datetime.now(UTC) + + base_time_tz = base_time.astimezone(tz) + cron = croniter(cron_expression, base_time_tz) + next_run_tz = cron.get_next(datetime) + next_run_utc = next_run_tz.astimezone(UTC) + + return next_run_utc + + +def convert_12h_to_24h(time_str: str) -> tuple[int, int]: + """ + Parse 12-hour time format to 24-hour format for cron compatibility. 
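The two helpers in this file are meant to be composed: a 12-hour time from the UI becomes a 24-hour pair, which can be embedded in a 5-field cron expression and resolved to the next UTC run time. A rough usage sketch, assuming only what this diff defines in `libs/schedule_utils.py`:

```python
from datetime import UTC, datetime

from libs.schedule_utils import calculate_next_run_at, convert_12h_to_24h

# "9:30 PM" as entered in the UI becomes (21, 30) in 24-hour terms...
hour, minute = convert_12h_to_24h("9:30 PM")

# ...which can be baked into a standard 5-field cron expression.
cron = f"{minute} {hour} * * MON-FRI"  # 21:30 on weekdays

# The next occurrence is evaluated in the schedule's timezone and returned in
# UTC, matching what the poller stores in workflow_schedule_plans.next_run_at.
next_run = calculate_next_run_at(
    cron,
    timezone="Asia/Shanghai",
    base_time=datetime(2025, 9, 1, 0, 0, tzinfo=UTC),
)
print(next_run.isoformat())
```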
+ + Args: + time_str: Time string in format "HH:MM AM/PM" (e.g., "12:30 PM") + + Returns: + Tuple of (hour, minute) in 24-hour format + + Raises: + ValueError: If time string format is invalid or values are out of range + + Examples: + - "12:00 AM" -> (0, 0) # Midnight + - "12:00 PM" -> (12, 0) # Noon + - "1:30 PM" -> (13, 30) + - "11:59 PM" -> (23, 59) + """ + if not time_str or not time_str.strip(): + raise ValueError("Time string cannot be empty") + + parts = time_str.strip().split() + if len(parts) != 2: + raise ValueError(f"Invalid time format: '{time_str}'. Expected 'HH:MM AM/PM'") + + time_part, period = parts + period = period.upper() + + if period not in ["AM", "PM"]: + raise ValueError(f"Invalid period: '{period}'. Must be 'AM' or 'PM'") + + time_parts = time_part.split(":") + if len(time_parts) != 2: + raise ValueError(f"Invalid time format: '{time_part}'. Expected 'HH:MM'") + + try: + hour = int(time_parts[0]) + minute = int(time_parts[1]) + except ValueError as e: + raise ValueError(f"Invalid time values: {e}") + + if hour < 1 or hour > 12: + raise ValueError(f"Invalid hour: {hour}. Must be between 1 and 12") + + if minute < 0 or minute > 59: + raise ValueError(f"Invalid minute: {minute}. Must be between 0 and 59") + + # Handle 12-hour to 24-hour edge cases + if period == "PM" and hour != 12: + hour += 12 + elif period == "AM" and hour == 12: + hour = 0 + + return hour, minute diff --git a/api/migrations/versions/2025_08_23_2038-4558cfabe44e_add_workflow_trigger_logs.py b/api/migrations/versions/2025_08_23_2038-4558cfabe44e_add_workflow_trigger_logs.py new file mode 100644 index 0000000000..205d7aea82 --- /dev/null +++ b/api/migrations/versions/2025_08_23_2038-4558cfabe44e_add_workflow_trigger_logs.py @@ -0,0 +1,67 @@ +"""Add workflow trigger logs table + +Revision ID: 4558cfabe44e +Revises: 0e154742a5fa +Create Date: 2025-08-23 20:38:20.059323 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '4558cfabe44e' +down_revision = '8d289573e1da' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('workflow_trigger_logs', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('workflow_id', models.types.StringUUID(), nullable=False), + sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True), + sa.Column('root_node_id', sa.String(length=255), nullable=True), + sa.Column('trigger_type', sa.String(length=50), nullable=False), + sa.Column('trigger_data', sa.Text(), nullable=False), + sa.Column('inputs', sa.Text(), nullable=False), + sa.Column('outputs', sa.Text(), nullable=True), + sa.Column('status', sa.String(length=50), nullable=False), + sa.Column('error', sa.Text(), nullable=True), + sa.Column('queue_name', sa.String(length=100), nullable=False), + sa.Column('celery_task_id', sa.String(length=255), nullable=True), + sa.Column('retry_count', sa.Integer(), nullable=False), + sa.Column('elapsed_time', sa.Float(), nullable=True), + sa.Column('total_tokens', sa.Integer(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('created_by_role', sa.String(length=255), nullable=False), + sa.Column('created_by', sa.String(length=255), nullable=False), + sa.Column('triggered_at', sa.DateTime(), nullable=True), + sa.Column('finished_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id', name='workflow_trigger_log_pkey') + ) + with op.batch_alter_table('workflow_trigger_logs', schema=None) as batch_op: + batch_op.create_index('workflow_trigger_log_created_at_idx', ['created_at'], unique=False) + batch_op.create_index('workflow_trigger_log_status_idx', ['status'], unique=False) + batch_op.create_index('workflow_trigger_log_tenant_app_idx', ['tenant_id', 'app_id'], unique=False) + batch_op.create_index('workflow_trigger_log_workflow_id_idx', ['workflow_id'], unique=False) + batch_op.create_index('workflow_trigger_log_workflow_run_idx', ['workflow_run_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('workflow_trigger_logs', schema=None) as batch_op: + batch_op.drop_index('workflow_trigger_log_workflow_run_idx') + batch_op.drop_index('workflow_trigger_log_workflow_id_idx') + batch_op.drop_index('workflow_trigger_log_tenant_app_idx') + batch_op.drop_index('workflow_trigger_log_status_idx') + batch_op.drop_index('workflow_trigger_log_created_at_idx') + + op.drop_table('workflow_trigger_logs') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_08_23_2039-5871f634954d_add_workflow_webhook_table.py b/api/migrations/versions/2025_08_23_2039-5871f634954d_add_workflow_webhook_table.py new file mode 100644 index 0000000000..dba124a70b --- /dev/null +++ b/api/migrations/versions/2025_08_23_2039-5871f634954d_add_workflow_webhook_table.py @@ -0,0 +1,47 @@ +"""Add workflow webhook table + +Revision ID: 5871f634954d +Revises: fa8b0fa6f407 +Create Date: 2025-08-23 20:39:20.704501 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '5871f634954d' +down_revision = '4558cfabe44e' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('workflow_webhook_triggers', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_id', sa.String(length=64), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('webhook_id', sa.String(length=24), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='workflow_webhook_trigger_pkey'), + sa.UniqueConstraint('app_id', 'node_id', name='uniq_node'), + sa.UniqueConstraint('webhook_id', name='uniq_webhook_id') + ) + with op.batch_alter_table('workflow_webhook_triggers', schema=None) as batch_op: + batch_op.create_index('workflow_webhook_trigger_tenant_idx', ['tenant_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('workflow_webhook_triggers', schema=None) as batch_op: + batch_op.drop_index('workflow_webhook_trigger_tenant_idx') + + op.drop_table('workflow_webhook_triggers') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_08_27_1733-9ee7d347f4c1_add_app_triggers_table.py b/api/migrations/versions/2025_08_27_1733-9ee7d347f4c1_add_app_triggers_table.py new file mode 100644 index 0000000000..af1825073a --- /dev/null +++ b/api/migrations/versions/2025_08_27_1733-9ee7d347f4c1_add_app_triggers_table.py @@ -0,0 +1,47 @@ +"""Add app triggers table + +Revision ID: 9ee7d347f4c1 +Revises: 5871f634954d +Create Date: 2025-08-27 17:33:30.082812 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '9ee7d347f4c1' +down_revision = '5871f634954d' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('app_triggers', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_id', sa.String(length=64), nullable=False), + sa.Column('trigger_type', sa.String(length=50), nullable=False), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('provider_name', sa.String(length=255), server_default='', nullable=True), + sa.Column('status', sa.String(length=50), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id', name='app_trigger_pkey') + ) + with op.batch_alter_table('app_triggers', schema=None) as batch_op: + batch_op.create_index('app_trigger_tenant_app_idx', ['tenant_id', 'app_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('app_triggers', schema=None) as batch_op: + batch_op.drop_index('app_trigger_tenant_app_idx') + + op.drop_table('app_triggers') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_08_28_2052-c19938f630b6_add_workflow_schedule_plan.py b/api/migrations/versions/2025_08_28_2052-c19938f630b6_add_workflow_schedule_plan.py new file mode 100644 index 0000000000..f9f4ddacbb --- /dev/null +++ b/api/migrations/versions/2025_08_28_2052-c19938f630b6_add_workflow_schedule_plan.py @@ -0,0 +1,47 @@ +"""Add workflow schedule plan table + +Revision ID: c19938f630b6 +Revises: 9ee7d347f4c1 +Create Date: 2025-08-28 20:52:41.300028 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'c19938f630b6' +down_revision = '875c659da2f8' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('workflow_schedule_plans', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_id', sa.String(length=64), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('cron_expression', sa.String(length=255), nullable=False), + sa.Column('timezone', sa.String(length=64), nullable=False), + sa.Column('next_run_at', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='workflow_schedule_plan_pkey'), + sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node') + ) + with op.batch_alter_table('workflow_schedule_plans', schema=None) as batch_op: + batch_op.create_index('workflow_schedule_plan_next_idx', ['next_run_at'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('workflow_schedule_plans', schema=None) as batch_op: + batch_op.drop_index('workflow_schedule_plan_next_idx') + + op.drop_table('workflow_schedule_plans') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_09_03_1500-132392a2635f_plugin_trigger.py b/api/migrations/versions/2025_09_03_1500-132392a2635f_plugin_trigger.py new file mode 100644 index 0000000000..fad1a8f248 --- /dev/null +++ b/api/migrations/versions/2025_09_03_1500-132392a2635f_plugin_trigger.py @@ -0,0 +1,104 @@ +"""plugin_trigger + +Revision ID: 132392a2635f +Revises: 9ee7d347f4c1 +Create Date: 2025-09-03 15:00:57.326868 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '132392a2635f' +down_revision = '9ee7d347f4c1' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('trigger_oauth_system_clients', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('plugin_id', sa.String(length=512), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('encrypted_oauth_params', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='trigger_oauth_system_client_pkey'), + sa.UniqueConstraint('plugin_id', 'provider', name='trigger_oauth_system_client_plugin_id_provider_idx') + ) + op.create_table('trigger_oauth_tenant_clients', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('plugin_id', sa.String(length=512), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.Column('encrypted_oauth_params', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='trigger_oauth_tenant_client_pkey'), + sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_trigger_oauth_tenant_client') + ) + op.create_table('trigger_subscriptions', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False, comment='Subscription instance name'), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('user_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_id', sa.String(length=255), nullable=False, comment='Provider identifier (e.g., plugin_id/provider_name)'), + sa.Column('endpoint_id', sa.String(length=255), nullable=False, comment='Subscription endpoint'), + sa.Column('parameters', sa.JSON(), nullable=False, comment='Subscription parameters JSON'), + sa.Column('properties', sa.JSON(), nullable=False, comment='Subscription properties JSON'), + sa.Column('credentials', sa.JSON(), nullable=False, comment='Subscription credentials JSON'), + sa.Column('credential_type', sa.String(length=50), nullable=False, comment='oauth or api_key'), + sa.Column('credential_expires_at', sa.Integer(), nullable=False, comment='OAuth token expiration timestamp, -1 for never'), + sa.Column('expires_at', sa.Integer(), nullable=False, comment='Subscription instance expiration timestamp, -1 for never'), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='trigger_provider_pkey'), + sa.UniqueConstraint('tenant_id', 'provider_id', 'name', name='unique_trigger_provider') + ) + with op.batch_alter_table('trigger_subscriptions', schema=None) as batch_op: + batch_op.create_index('idx_trigger_providers_endpoint', ['endpoint_id'], unique=True) + batch_op.create_index('idx_trigger_providers_tenant_endpoint', ['tenant_id', 'endpoint_id'], unique=False) + 
batch_op.create_index('idx_trigger_providers_tenant_provider', ['tenant_id', 'provider_id'], unique=False) + + op.create_table('workflow_plugin_triggers', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_id', sa.String(length=64), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_id', sa.String(length=255), nullable=False), + sa.Column('trigger_id', sa.String(length=510), nullable=False), + sa.Column('triggered_by', sa.String(length=16), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='workflow_plugin_trigger_pkey'), + sa.UniqueConstraint('app_id', 'node_id', 'triggered_by', name='uniq_plugin_node'), + sa.UniqueConstraint('trigger_id', 'node_id', name='uniq_trigger_node') + ) + with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op: + batch_op.create_index('workflow_plugin_trigger_tenant_idx', ['tenant_id'], unique=False) + batch_op.create_index('workflow_plugin_trigger_trigger_idx', ['trigger_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op: + batch_op.drop_index('workflow_plugin_trigger_trigger_idx') + batch_op.drop_index('workflow_plugin_trigger_tenant_idx') + + op.drop_table('workflow_plugin_triggers') + with op.batch_alter_table('trigger_subscriptions', schema=None) as batch_op: + batch_op.drop_index('idx_trigger_providers_tenant_provider') + batch_op.drop_index('idx_trigger_providers_tenant_endpoint') + batch_op.drop_index('idx_trigger_providers_endpoint') + + op.drop_table('trigger_subscriptions') + op.drop_table('trigger_oauth_tenant_clients') + op.drop_table('trigger_oauth_system_clients') + + # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_09_04_1212-86f068bf56fb_plugin_trigger_workflow.py b/api/migrations/versions/2025_09_04_1212-86f068bf56fb_plugin_trigger_workflow.py new file mode 100644 index 0000000000..58f6ef07ed --- /dev/null +++ b/api/migrations/versions/2025_09_04_1212-86f068bf56fb_plugin_trigger_workflow.py @@ -0,0 +1,62 @@ +"""plugin_trigger_workflow + +Revision ID: 86f068bf56fb +Revises: 132392a2635f +Create Date: 2025-09-04 12:12:44.661875 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '86f068bf56fb' +down_revision = '132392a2635f' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op: + batch_op.add_column(sa.Column('subscription_id', sa.String(length=255), nullable=False)) + batch_op.alter_column('provider_id', + existing_type=sa.VARCHAR(length=255), + type_=sa.String(length=512), + existing_nullable=False) + batch_op.alter_column('trigger_id', + existing_type=sa.VARCHAR(length=510), + type_=sa.String(length=255), + existing_nullable=False) + batch_op.drop_constraint(batch_op.f('uniq_plugin_node'), type_='unique') + batch_op.drop_constraint(batch_op.f('uniq_trigger_node'), type_='unique') + batch_op.drop_index(batch_op.f('workflow_plugin_trigger_tenant_idx')) + batch_op.drop_index(batch_op.f('workflow_plugin_trigger_trigger_idx')) + batch_op.create_unique_constraint('uniq_app_node_subscription', ['app_id', 'node_id']) + batch_op.create_index('workflow_plugin_trigger_tenant_subscription_idx', ['tenant_id', 'subscription_id'], unique=False) + batch_op.drop_column('triggered_by') + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op: + batch_op.add_column(sa.Column('triggered_by', sa.VARCHAR(length=16), autoincrement=False, nullable=False)) + batch_op.drop_index('workflow_plugin_trigger_tenant_subscription_idx') + batch_op.drop_constraint('uniq_app_node_subscription', type_='unique') + batch_op.create_index(batch_op.f('workflow_plugin_trigger_trigger_idx'), ['trigger_id'], unique=False) + batch_op.create_index(batch_op.f('workflow_plugin_trigger_tenant_idx'), ['tenant_id'], unique=False) + batch_op.create_unique_constraint(batch_op.f('uniq_trigger_node'), ['trigger_id', 'node_id'], postgresql_nulls_not_distinct=False) + batch_op.create_unique_constraint(batch_op.f('uniq_plugin_node'), ['app_id', 'node_id', 'triggered_by'], postgresql_nulls_not_distinct=False) + batch_op.alter_column('trigger_id', + existing_type=sa.String(length=255), + type_=sa.VARCHAR(length=510), + existing_nullable=False) + batch_op.alter_column('provider_id', + existing_type=sa.String(length=512), + type_=sa.VARCHAR(length=255), + existing_nullable=False) + batch_op.drop_column('subscription_id') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_09_05_1551-875c659da2f8_plugin_trigger_idx.py b/api/migrations/versions/2025_09_05_1551-875c659da2f8_plugin_trigger_idx.py new file mode 100644 index 0000000000..ce7985097f --- /dev/null +++ b/api/migrations/versions/2025_09_05_1551-875c659da2f8_plugin_trigger_idx.py @@ -0,0 +1,37 @@ +"""plugin_trigger_idx + +Revision ID: 875c659da2f8 +Revises: 86f068bf56fb +Create Date: 2025-09-05 15:51:08.635283 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '875c659da2f8' +down_revision = '86f068bf56fb' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op: + batch_op.add_column(sa.Column('trigger_name', sa.String(length=255), nullable=False)) + batch_op.drop_index(batch_op.f('workflow_plugin_trigger_tenant_subscription_idx')) + batch_op.create_index('workflow_plugin_trigger_tenant_subscription_idx', ['tenant_id', 'subscription_id', 'trigger_name'], unique=False) + batch_op.drop_column('trigger_id') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op: + batch_op.add_column(sa.Column('trigger_id', sa.VARCHAR(length=255), autoincrement=False, nullable=False)) + batch_op.drop_index('workflow_plugin_trigger_tenant_subscription_idx') + batch_op.create_index(batch_op.f('workflow_plugin_trigger_tenant_subscription_idx'), ['tenant_id', 'subscription_id'], unique=False) + batch_op.drop_column('trigger_name') + # ### end Alembic commands ### diff --git a/api/models/__init__.py b/api/models/__init__.py index 779484283f..195e5afc93 100644 --- a/api/models/__init__.py +++ b/api/models/__init__.py @@ -79,8 +79,12 @@ from .tools import ( ToolModelInvoke, WorkflowToolProvider, ) +from .trigger import TriggerOAuthSystemClient, TriggerOAuthTenantClient, TriggerSubscription from .web import PinnedConversation, SavedMessage from .workflow import ( + AppTrigger, + AppTriggerStatus, + AppTriggerType, ConversationVariable, Workflow, WorkflowAppLog, @@ -89,6 +93,7 @@ from .workflow import ( WorkflowNodeExecutionOffload, WorkflowNodeExecutionTriggeredFrom, WorkflowRun, + WorkflowSchedulePlan, WorkflowType, ) @@ -105,9 +110,12 @@ __all__ = [ "AppAnnotationHitHistory", "AppAnnotationSetting", "AppDatasetJoin", - "AppMCPServer", # Added + "AppMCPServer", "AppMode", "AppModelConfig", + "AppTrigger", + "AppTriggerStatus", + "AppTriggerType", "BuiltinToolProvider", "CeleryTask", "CeleryTaskSet", @@ -168,6 +176,9 @@ __all__ = [ "ToolLabelBinding", "ToolModelInvoke", "TraceAppConfig", + "TriggerOAuthSystemClient", + "TriggerOAuthTenantClient", + "TriggerSubscription", "UploadFile", "UserFrom", "Whitelist", @@ -179,6 +190,7 @@ __all__ = [ "WorkflowNodeExecutionTriggeredFrom", "WorkflowRun", "WorkflowRunTriggeredFrom", + "WorkflowSchedulePlan", "WorkflowToolProvider", "WorkflowType", ] diff --git a/api/models/enums.py b/api/models/enums.py index 0be7567c80..ec7db77f8a 100644 --- a/api/models/enums.py +++ b/api/models/enums.py @@ -16,6 +16,9 @@ class WorkflowRunTriggeredFrom(StrEnum): APP_RUN = "app-run" RAG_PIPELINE_RUN = "rag-pipeline-run" RAG_PIPELINE_DEBUGGING = "rag-pipeline-debugging" + WEBHOOK = "webhook" + SCHEDULE = "schedule" + PLUGIN = "plugin" class DraftVariableType(StrEnum): diff --git a/api/models/provider_ids.py b/api/models/provider_ids.py index 98dc67f2f3..0be6a3dc98 100644 --- a/api/models/provider_ids.py +++ b/api/models/provider_ids.py @@ -57,3 +57,8 @@ class ToolProviderID(GenericProviderID): class DatasourceProviderID(GenericProviderID): def __init__(self, value: str, is_hardcoded: bool = False) -> None: super().__init__(value, is_hardcoded) + + +class TriggerProviderID(GenericProviderID): + def __init__(self, value: str, is_hardcoded: bool = False) -> None: + super().__init__(value, is_hardcoded) diff --git a/api/models/trigger.py b/api/models/trigger.py new file mode 100644 index 0000000000..08dc53d82f --- /dev/null +++ b/api/models/trigger.py @@ -0,0 +1,139 @@ +import json +import time +from datetime import datetime +from 
typing import cast + +import sqlalchemy as sa +from sqlalchemy import DateTime, Index, Integer, String, UniqueConstraint, func +from sqlalchemy.orm import Mapped, mapped_column + +from core.plugin.entities.plugin_daemon import CredentialType +from core.trigger.entities.api_entities import TriggerProviderSubscriptionApiEntity +from core.trigger.entities.entities import Subscription +from core.trigger.utils.endpoint import parse_endpoint_id +from models.base import Base +from models.types import StringUUID + + +class TriggerSubscription(Base): + """ + Trigger provider model for managing credentials + Supports multiple credential instances per provider + """ + + __tablename__ = "trigger_subscriptions" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="trigger_provider_pkey"), + Index("idx_trigger_providers_tenant_provider", "tenant_id", "provider_id"), + # Primary index for O(1) lookup by endpoint + Index("idx_trigger_providers_endpoint", "endpoint_id", unique=True), + # Composite index for tenant-specific queries (optional, kept for compatibility) + Index("idx_trigger_providers_tenant_endpoint", "tenant_id", "endpoint_id"), + UniqueConstraint("tenant_id", "provider_id", "name", name="unique_trigger_provider"), + ) + + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + name: Mapped[str] = mapped_column(String(255), nullable=False, comment="Subscription instance name") + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + provider_id: Mapped[str] = mapped_column( + String(255), nullable=False, comment="Provider identifier (e.g., plugin_id/provider_name)" + ) + endpoint_id: Mapped[str] = mapped_column(String(255), nullable=False, comment="Subscription endpoint") + parameters: Mapped[dict] = mapped_column(sa.JSON, nullable=False, comment="Subscription parameters JSON") + properties: Mapped[dict] = mapped_column(sa.JSON, nullable=False, comment="Subscription properties JSON") + + credentials: Mapped[dict] = mapped_column(sa.JSON, nullable=False, comment="Subscription credentials JSON") + credential_type: Mapped[str] = mapped_column(String(50), nullable=False, comment="oauth or api_key") + credential_expires_at: Mapped[int] = mapped_column( + Integer, default=-1, comment="OAuth token expiration timestamp, -1 for never" + ) + expires_at: Mapped[int] = mapped_column( + Integer, default=-1, comment="Subscription instance expiration timestamp, -1 for never" + ) + + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + server_onupdate=func.current_timestamp(), + ) + + def is_credential_expired(self) -> bool: + """Check if credential is expired""" + if self.credential_expires_at == -1: + return False + # Check if token expires in next 3 minutes + return (self.credential_expires_at - 180) < int(time.time()) + + def to_entity(self) -> Subscription: + return Subscription( + expires_at=self.expires_at, + endpoint=parse_endpoint_id(self.endpoint_id), + properties=self.properties, + ) + + def to_api_entity(self) -> TriggerProviderSubscriptionApiEntity: + return TriggerProviderSubscriptionApiEntity( + id=self.id, + name=self.name, + provider=self.provider_id, + endpoint=parse_endpoint_id(self.endpoint_id), + parameters=self.parameters, + properties=self.properties, + 
credential_type=CredentialType(self.credential_type), + credentials=self.credentials, + workflows_in_use=-1, + ) + + +# system level trigger oauth client params +class TriggerOAuthSystemClient(Base): + __tablename__ = "trigger_oauth_system_clients" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="trigger_oauth_system_client_pkey"), + sa.UniqueConstraint("plugin_id", "provider", name="trigger_oauth_system_client_plugin_id_provider_idx"), + ) + + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + plugin_id: Mapped[str] = mapped_column(String(512), nullable=False) + provider: Mapped[str] = mapped_column(String(255), nullable=False) + # oauth params of the trigger provider + encrypted_oauth_params: Mapped[str] = mapped_column(sa.Text, nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + server_onupdate=func.current_timestamp(), + ) + + +# tenant level trigger oauth client params (client_id, client_secret, etc.) +class TriggerOAuthTenantClient(Base): + __tablename__ = "trigger_oauth_tenant_clients" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="trigger_oauth_tenant_client_pkey"), + sa.UniqueConstraint("tenant_id", "plugin_id", "provider", name="unique_trigger_oauth_tenant_client"), + ) + + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + # tenant id + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + plugin_id: Mapped[str] = mapped_column(String(512), nullable=False) + provider: Mapped[str] = mapped_column(String(255), nullable=False) + enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true")) + # oauth params of the trigger provider + encrypted_oauth_params: Mapped[str] = mapped_column(sa.Text, nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + server_onupdate=func.current_timestamp(), + ) + + @property + def oauth_params(self) -> dict: + return cast(dict, json.loads(self.encrypted_oauth_params or "{}")) diff --git a/api/models/workflow.py b/api/models/workflow.py index e61005953e..d0118d1b7b 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -1,6 +1,6 @@ import json import logging -from collections.abc import Mapping, Sequence +from collections.abc import Generator, Mapping, Sequence from datetime import datetime from enum import StrEnum from typing import TYPE_CHECKING, Any, Optional, Union, cast @@ -322,6 +322,54 @@ class Workflow(Base): return variables + def walk_nodes( + self, specific_node_type: NodeType | None = None + ) -> Generator[tuple[str, Mapping[str, Any]], None, None]: + """ + Walk through the workflow nodes, yield each node configuration. + + Each node configuration is a tuple containing the node's id and the node's properties. 
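As a usage sketch, a caller can filter by node type and receive (node_id, node_data) pairs straight from the graph JSON; the helper below is illustrative and assumes a published Workflow instance with a populated graph:

```python
from collections.abc import Mapping
from typing import Any

from core.workflow.nodes import NodeType
from models import Workflow


def schedule_trigger_nodes(workflow: Workflow) -> list[tuple[str, Mapping[str, Any]]]:
    """Collect (node_id, node_data) pairs for schedule trigger nodes of a published workflow."""
    # walk_nodes filters on node["data"]["type"] when a specific NodeType is given.
    return list(workflow.walk_nodes(specific_node_type=NodeType.TRIGGER_SCHEDULE))
```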
+ + Node properties example: + { + "type": "llm", + "title": "LLM", + "desc": "", + "variables": [], + "model": + { + "provider": "langgenius/openai/openai", + "name": "gpt-4", + "mode": "chat", + "completion_params": { "temperature": 0.7 }, + }, + "prompt_template": [{ "role": "system", "text": "" }], + "context": { "enabled": false, "variable_selector": [] }, + "vision": { "enabled": false }, + "memory": + { + "window": { "enabled": false, "size": 10 }, + "query_prompt_template": "{{#sys.query#}}\n\n{{#sys.files#}}", + "role_prefix": { "user": "", "assistant": "" }, + }, + "selected": false, + } + + For specific node type, refer to `core.workflow.nodes` + """ + graph_dict = self.graph_dict + if "nodes" not in graph_dict: + raise WorkflowDataError("nodes not found in workflow graph") + + if specific_node_type: + yield from ( + (node["id"], node["data"]) + for node in graph_dict["nodes"] + if node["data"]["type"] == specific_node_type.value + ) + else: + yield from ((node["id"], node["data"]) for node in graph_dict["nodes"]) + def rag_pipeline_user_input_form(self) -> list: # get user_input_form from start node variables: list[Any] = self.rag_pipeline_variables @@ -1579,3 +1627,320 @@ class WorkflowDraftVariableFile(Base): def is_system_variable_editable(name: str) -> bool: return name in _EDITABLE_SYSTEM_VARIABLE + + +class WorkflowTriggerStatus(StrEnum): + """Workflow Trigger Execution Status""" + + PENDING = "pending" + QUEUED = "queued" + RUNNING = "running" + SUCCEEDED = "succeeded" + FAILED = "failed" + RATE_LIMITED = "rate_limited" + RETRYING = "retrying" + + +class WorkflowTriggerLog(Base): + """ + Workflow Trigger Log + + Track async trigger workflow runs with re-invocation capability + + Attributes: + - id (uuid) Trigger Log ID (used as workflow_trigger_log_id) + - tenant_id (uuid) Workspace ID + - app_id (uuid) App ID + - workflow_id (uuid) Workflow ID + - workflow_run_id (uuid) Optional - Associated workflow run ID when execution starts + - root_node_id (string) Optional - Custom starting node ID for workflow execution + - trigger_type (string) Type of trigger: webhook, schedule, plugin + - trigger_data (text) Full trigger data including inputs (JSON) + - inputs (text) Input parameters (JSON) + - outputs (text) Optional - Output content (JSON) + - status (string) Execution status + - error (text) Optional - Error message if failed + - queue_name (string) Celery queue used + - celery_task_id (string) Optional - Celery task ID for tracking + - retry_count (int) Number of retry attempts + - elapsed_time (float) Optional - Time consumption in seconds + - total_tokens (int) Optional - Total tokens used + - created_by_role (string) Creator role: account, end_user + - created_by (string) Creator ID + - created_at (timestamp) Creation time + - triggered_at (timestamp) Optional - When actually triggered + - finished_at (timestamp) Optional - Completion time + """ + + __tablename__ = "workflow_trigger_logs" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="workflow_trigger_log_pkey"), + sa.Index("workflow_trigger_log_tenant_app_idx", "tenant_id", "app_id"), + sa.Index("workflow_trigger_log_status_idx", "status"), + sa.Index("workflow_trigger_log_created_at_idx", "created_at"), + sa.Index("workflow_trigger_log_workflow_run_idx", "workflow_run_id"), + sa.Index("workflow_trigger_log_workflow_id_idx", "workflow_id"), + ) + + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + app_id: 
Mapped[str] = mapped_column(StringUUID, nullable=False) + workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + workflow_run_id: Mapped[Optional[str]] = mapped_column(StringUUID, nullable=True) + root_node_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + + trigger_type: Mapped[str] = mapped_column(String(50), nullable=False) + trigger_data: Mapped[str] = mapped_column(sa.Text, nullable=False) # Full TriggerData as JSON + inputs: Mapped[str] = mapped_column(sa.Text, nullable=False) # Just inputs for easy viewing + outputs: Mapped[Optional[str]] = mapped_column(sa.Text, nullable=True) + + status: Mapped[str] = mapped_column(String(50), nullable=False, default=WorkflowTriggerStatus.PENDING) + error: Mapped[Optional[str]] = mapped_column(sa.Text, nullable=True) + + queue_name: Mapped[str] = mapped_column(String(100), nullable=False) + celery_task_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + retry_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0) + + elapsed_time: Mapped[Optional[float]] = mapped_column(sa.Float, nullable=True) + total_tokens: Mapped[Optional[int]] = mapped_column(sa.Integer, nullable=True) + + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + created_by_role: Mapped[str] = mapped_column(String(255), nullable=False) + created_by: Mapped[str] = mapped_column(String(255), nullable=False) + + triggered_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) + finished_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) + + @property + def created_by_account(self): + created_by_role = CreatorUserRole(self.created_by_role) + return db.session.get(Account, self.created_by) if created_by_role == CreatorUserRole.ACCOUNT else None + + @property + def created_by_end_user(self): + from models.model import EndUser + + created_by_role = CreatorUserRole(self.created_by_role) + return db.session.get(EndUser, self.created_by) if created_by_role == CreatorUserRole.END_USER else None + + def to_dict(self) -> dict: + """Convert to dictionary for API responses""" + return { + "id": self.id, + "tenant_id": self.tenant_id, + "app_id": self.app_id, + "workflow_id": self.workflow_id, + "workflow_run_id": self.workflow_run_id, + "trigger_type": self.trigger_type, + "trigger_data": json.loads(self.trigger_data), + "inputs": json.loads(self.inputs), + "outputs": json.loads(self.outputs) if self.outputs else None, + "status": self.status, + "error": self.error, + "queue_name": self.queue_name, + "celery_task_id": self.celery_task_id, + "retry_count": self.retry_count, + "elapsed_time": self.elapsed_time, + "total_tokens": self.total_tokens, + "created_by_role": self.created_by_role, + "created_by": self.created_by, + "created_at": self.created_at.isoformat() if self.created_at else None, + "triggered_at": self.triggered_at.isoformat() if self.triggered_at else None, + "finished_at": self.finished_at.isoformat() if self.finished_at else None, + } + + +class WorkflowWebhookTrigger(Base): + """ + Workflow Webhook Trigger + + Attributes: + - id (uuid) Primary key + - app_id (uuid) App ID to bind to a specific app + - node_id (varchar) Node ID which node in the workflow + - tenant_id (uuid) Workspace ID + - webhook_id (varchar) Webhook ID for URL: https://api.dify.ai/triggers/webhook/:webhook_id + - created_by (varchar) User ID of the creator + - created_at (timestamp) Creation time + - updated_at (timestamp) Last update 
time + """ + + __tablename__ = "workflow_webhook_triggers" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="workflow_webhook_trigger_pkey"), + sa.Index("workflow_webhook_trigger_tenant_idx", "tenant_id"), + sa.UniqueConstraint("app_id", "node_id", name="uniq_node"), + sa.UniqueConstraint("webhook_id", name="uniq_webhook_id"), + ) + + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + node_id: Mapped[str] = mapped_column(String(64), nullable=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + webhook_id: Mapped[str] = mapped_column(String(24), nullable=False) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + server_onupdate=func.current_timestamp(), + ) + + +class WorkflowPluginTrigger(Base): + """ + Workflow Plugin Trigger + + Maps plugin triggers to workflow nodes, similar to WorkflowWebhookTrigger + + Attributes: + - id (uuid) Primary key + - app_id (uuid) App ID to bind to a specific app + - node_id (varchar) Node ID which node in the workflow + - tenant_id (uuid) Workspace ID + - provider_id (varchar) Plugin provider ID + - trigger_name (varchar) trigger name + - subscription_id (varchar) Subscription ID + - created_at (timestamp) Creation time + - updated_at (timestamp) Last update time + """ + + __tablename__ = "workflow_plugin_triggers" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="workflow_plugin_trigger_pkey"), + sa.Index("workflow_plugin_trigger_tenant_subscription_idx", "tenant_id", "subscription_id", "trigger_name"), + sa.UniqueConstraint("app_id", "node_id", name="uniq_app_node_subscription"), + ) + + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + node_id: Mapped[str] = mapped_column(String(64), nullable=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + provider_id: Mapped[str] = mapped_column(String(512), nullable=False) + trigger_name: Mapped[str] = mapped_column(String(255), nullable=False) + subscription_id: Mapped[str] = mapped_column(String(255), nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=func.current_timestamp(), + server_onupdate=func.current_timestamp(), + ) + + +class AppTriggerType(StrEnum): + """App Trigger Type Enum""" + + TRIGGER_WEBHOOK = "trigger-webhook" + TRIGGER_SCHEDULE = "trigger-schedule" + TRIGGER_PLUGIN = "trigger-plugin" + + +class AppTriggerStatus(StrEnum): + """App Trigger Status Enum""" + + ENABLED = "enabled" + DISABLED = "disabled" + UNAUTHORIZED = "unauthorized" + + +class AppTrigger(Base): + """ + App Trigger + + Manages multiple triggers for an app with enable/disable and authorization states. 
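The status column is what gates dispatch for each trigger. A hedged sketch of flipping one trigger to enabled, with service-layer permission checks and error handling omitted:

```python
from sqlalchemy import select
from sqlalchemy.orm import Session

from extensions.ext_database import db
from models import AppTrigger, AppTriggerStatus


def enable_trigger(tenant_id: str, app_id: str, node_id: str) -> bool:
    """Flip a single app trigger to ENABLED; returns False if no such trigger exists."""
    with Session(db.engine) as session:
        trigger = session.scalar(
            select(AppTrigger).where(
                AppTrigger.tenant_id == tenant_id,
                AppTrigger.app_id == app_id,
                AppTrigger.node_id == node_id,
            )
        )
        if trigger is None:
            return False
        trigger.status = AppTriggerStatus.ENABLED
        session.commit()
        return True
```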
+ + Attributes: + - id (uuid) Primary key + - tenant_id (uuid) Workspace ID + - app_id (uuid) App ID + - trigger_type (string) Type: webhook, schedule, plugin + - title (string) Trigger title + + - status (string) Status: enabled, disabled, unauthorized, error + - node_id (string) Optional workflow node ID + - created_at (timestamp) Creation time + - updated_at (timestamp) Last update time + """ + + __tablename__ = "app_triggers" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="app_trigger_pkey"), + sa.Index("app_trigger_tenant_app_idx", "tenant_id", "app_id"), + ) + + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + node_id: Mapped[Optional[str]] = mapped_column(String(64), nullable=False) + trigger_type: Mapped[str] = mapped_column(EnumText(AppTriggerType, length=50), nullable=False) + title: Mapped[str] = mapped_column(String(255), nullable=False) + provider_name: Mapped[str] = mapped_column(String(255), server_default="", nullable=True) + status: Mapped[str] = mapped_column( + EnumText(AppTriggerStatus, length=50), nullable=False, default=AppTriggerStatus.DISABLED + ) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + default=naive_utc_now(), + server_onupdate=func.current_timestamp(), + ) + + +class WorkflowSchedulePlan(Base): + """ + Workflow Schedule Configuration + + Store schedule configurations for time-based workflow triggers. + Uses cron expressions with timezone support for flexible scheduling. + + Attributes: + - id (uuid) Primary key + - app_id (uuid) App ID to bind to a specific app + - node_id (varchar) Starting node ID for workflow execution + - tenant_id (uuid) Workspace ID for multi-tenancy + - cron_expression (varchar) Cron expression defining schedule pattern + - timezone (varchar) Timezone for cron evaluation (e.g., 'Asia/Shanghai') + - next_run_at (timestamp) Next scheduled execution time + - created_at (timestamp) Creation timestamp + - updated_at (timestamp) Last update timestamp + """ + + __tablename__ = "workflow_schedule_plans" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="workflow_schedule_plan_pkey"), + sa.UniqueConstraint("app_id", "node_id", name="uniq_app_node"), + sa.Index("workflow_schedule_plan_next_idx", "next_run_at"), + ) + + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + node_id: Mapped[str] = mapped_column(String(64), nullable=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + + # Schedule configuration + cron_expression: Mapped[str] = mapped_column(String(255), nullable=False) + timezone: Mapped[str] = mapped_column(String(64), nullable=False) + + # Schedule control + next_run_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() + ) + + def to_dict(self) -> dict: + """Convert to dictionary representation""" + return { + "id": self.id, + "app_id": self.app_id, + "node_id": self.node_id, + "tenant_id": self.tenant_id, 
+ "cron_expression": self.cron_expression, + "timezone": self.timezone, + "next_run_at": self.next_run_at.isoformat() if self.next_run_at else None, + "created_at": self.created_at.isoformat(), + "updated_at": self.updated_at.isoformat(), + } diff --git a/api/pyproject.toml b/api/pyproject.toml index c193da937e..6e6923eccc 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -87,8 +87,9 @@ dependencies = [ "sseclient-py~=1.8.0", "httpx-sse~=0.4.0", "sendgrid~=6.12.3", - "flask-restx~=1.3.0", + "flask-restx>=1.3.0", "packaging~=23.2", + "croniter>=6.0.0", ] # Before adding new dependency, consider place it in # alphabet order (a-z) and suitable group. diff --git a/api/repositories/sqlalchemy_workflow_trigger_log_repository.py b/api/repositories/sqlalchemy_workflow_trigger_log_repository.py new file mode 100644 index 0000000000..1276686cd8 --- /dev/null +++ b/api/repositories/sqlalchemy_workflow_trigger_log_repository.py @@ -0,0 +1,198 @@ +""" +SQLAlchemy implementation of WorkflowTriggerLogRepository. +""" + +from collections.abc import Sequence +from datetime import datetime, timedelta +from typing import Any, Optional + +from sqlalchemy import and_, delete, func, select, update +from sqlalchemy.orm import Session + +from models.workflow import WorkflowTriggerLog, WorkflowTriggerStatus +from repositories.workflow_trigger_log_repository import TriggerLogOrderBy, WorkflowTriggerLogRepository + + +class SQLAlchemyWorkflowTriggerLogRepository(WorkflowTriggerLogRepository): + """ + SQLAlchemy implementation of WorkflowTriggerLogRepository. + + Optimized for large table operations with proper indexing and batch processing. + """ + + def __init__(self, session: Session): + self.session = session + + def create(self, trigger_log: WorkflowTriggerLog) -> WorkflowTriggerLog: + """Create a new trigger log entry.""" + self.session.add(trigger_log) + self.session.flush() + return trigger_log + + def update(self, trigger_log: WorkflowTriggerLog) -> WorkflowTriggerLog: + """Update an existing trigger log entry.""" + self.session.merge(trigger_log) + self.session.flush() + return trigger_log + + def get_by_id(self, trigger_log_id: str, tenant_id: Optional[str] = None) -> Optional[WorkflowTriggerLog]: + """Get a trigger log by its ID.""" + query = select(WorkflowTriggerLog).where(WorkflowTriggerLog.id == trigger_log_id) + + if tenant_id: + query = query.where(WorkflowTriggerLog.tenant_id == tenant_id) + + return self.session.scalar(query) + + def get_by_status( + self, + tenant_id: str, + app_id: str, + status: WorkflowTriggerStatus, + limit: int = 100, + offset: int = 0, + order_by: TriggerLogOrderBy = TriggerLogOrderBy.CREATED_AT, + order_desc: bool = True, + ) -> Sequence[WorkflowTriggerLog]: + """Get trigger logs by status with pagination.""" + query = select(WorkflowTriggerLog).where( + and_( + WorkflowTriggerLog.tenant_id == tenant_id, + WorkflowTriggerLog.app_id == app_id, + WorkflowTriggerLog.status == status, + ) + ) + + # Apply ordering + order_column = getattr(WorkflowTriggerLog, order_by.value) + if order_desc: + query = query.order_by(order_column.desc()) + else: + query = query.order_by(order_column.asc()) + + # Apply pagination + query = query.limit(limit).offset(offset) + + return list(self.session.scalars(query).all()) + + def get_failed_for_retry( + self, tenant_id: str, max_retry_count: int = 3, limit: int = 100 + ) -> Sequence[WorkflowTriggerLog]: + """Get failed trigger logs eligible for retry.""" + query = ( + select(WorkflowTriggerLog) + .where( + and_( + 
WorkflowTriggerLog.tenant_id == tenant_id, + WorkflowTriggerLog.status.in_([WorkflowTriggerStatus.FAILED, WorkflowTriggerStatus.RATE_LIMITED]), + WorkflowTriggerLog.retry_count < max_retry_count, + ) + ) + .order_by(WorkflowTriggerLog.created_at.asc()) + .limit(limit) + ) + + return list(self.session.scalars(query).all()) + + def get_recent_logs( + self, tenant_id: str, app_id: str, hours: int = 24, limit: int = 100, offset: int = 0 + ) -> Sequence[WorkflowTriggerLog]: + """Get recent trigger logs within specified hours.""" + since = datetime.utcnow() - timedelta(hours=hours) + + query = ( + select(WorkflowTriggerLog) + .where( + and_( + WorkflowTriggerLog.tenant_id == tenant_id, + WorkflowTriggerLog.app_id == app_id, + WorkflowTriggerLog.created_at >= since, + ) + ) + .order_by(WorkflowTriggerLog.created_at.desc()) + .limit(limit) + .offset(offset) + ) + + return list(self.session.scalars(query).all()) + + def count_by_status( + self, + tenant_id: str, + app_id: str, + status: Optional[WorkflowTriggerStatus] = None, + since: Optional[datetime] = None, + ) -> int: + """Count trigger logs by status.""" + query = select(func.count(WorkflowTriggerLog.id)).where( + and_(WorkflowTriggerLog.tenant_id == tenant_id, WorkflowTriggerLog.app_id == app_id) + ) + + if status: + query = query.where(WorkflowTriggerLog.status == status) + + if since: + query = query.where(WorkflowTriggerLog.created_at >= since) + + return self.session.scalar(query) or 0 + + def delete_expired_logs(self, tenant_id: str, before_date: datetime, batch_size: int = 1000) -> int: + """Delete expired trigger logs in batches.""" + total_deleted = 0 + + while True: + # Get batch of IDs to delete + subquery = ( + select(WorkflowTriggerLog.id) + .where(and_(WorkflowTriggerLog.tenant_id == tenant_id, WorkflowTriggerLog.created_at < before_date)) + .limit(batch_size) + ) + + # Delete the batch + result = self.session.execute(delete(WorkflowTriggerLog).where(WorkflowTriggerLog.id.in_(subquery))) + + deleted = result.rowcount + total_deleted += deleted + + if deleted < batch_size: + break + + self.session.commit() + + return total_deleted + + def archive_completed_logs( + self, tenant_id: str, before_date: datetime, batch_size: int = 1000 + ) -> Sequence[WorkflowTriggerLog]: + """Get completed logs for archival.""" + query = ( + select(WorkflowTriggerLog) + .where( + and_( + WorkflowTriggerLog.tenant_id == tenant_id, + WorkflowTriggerLog.status == WorkflowTriggerStatus.SUCCEEDED, + WorkflowTriggerLog.finished_at < before_date, + ) + ) + .limit(batch_size) + ) + + return list(self.session.scalars(query).all()) + + def update_status_batch( + self, trigger_log_ids: Sequence[str], new_status: WorkflowTriggerStatus, error_message: Optional[str] = None + ) -> int: + """Update status for multiple trigger logs.""" + update_data: dict[str, Any] = {"status": new_status} + + if error_message is not None: + update_data["error"] = error_message + + if new_status in [WorkflowTriggerStatus.SUCCEEDED, WorkflowTriggerStatus.FAILED]: + update_data["finished_at"] = datetime.utcnow() + + result = self.session.execute( + update(WorkflowTriggerLog).where(WorkflowTriggerLog.id.in_(trigger_log_ids)).values(**update_data) + ) + + return result.rowcount diff --git a/api/repositories/workflow_trigger_log_repository.py b/api/repositories/workflow_trigger_log_repository.py new file mode 100644 index 0000000000..46e945b892 --- /dev/null +++ b/api/repositories/workflow_trigger_log_repository.py @@ -0,0 +1,206 @@ +""" +Repository protocol for WorkflowTriggerLog 
operations. + +This module provides a protocol interface for operations on WorkflowTriggerLog, +designed to efficiently handle a potentially large volume of trigger logs with +proper indexing and batch operations. +""" + +from collections.abc import Sequence +from datetime import datetime +from enum import StrEnum +from typing import Optional, Protocol + +from models.workflow import WorkflowTriggerLog, WorkflowTriggerStatus + + +class TriggerLogOrderBy(StrEnum): + """Fields available for ordering trigger logs""" + + CREATED_AT = "created_at" + TRIGGERED_AT = "triggered_at" + FINISHED_AT = "finished_at" + STATUS = "status" + + +class WorkflowTriggerLogRepository(Protocol): + """ + Protocol for operations on WorkflowTriggerLog. + + This repository provides efficient access patterns for the trigger log table, + which is expected to grow large over time. It includes: + - Batch operations for cleanup + - Efficient queries with proper indexing + - Pagination support + - Status-based filtering + + Implementation notes: + - Leverage database indexes on (tenant_id, app_id), status, and created_at + - Use batch operations for deletions to avoid locking + - Support pagination for large result sets + """ + + def create(self, trigger_log: WorkflowTriggerLog) -> WorkflowTriggerLog: + """ + Create a new trigger log entry. + + Args: + trigger_log: The WorkflowTriggerLog instance to create + + Returns: + The created WorkflowTriggerLog with generated ID + """ + ... + + def update(self, trigger_log: WorkflowTriggerLog) -> WorkflowTriggerLog: + """ + Update an existing trigger log entry. + + Args: + trigger_log: The WorkflowTriggerLog instance to update + + Returns: + The updated WorkflowTriggerLog + """ + ... + + def get_by_id(self, trigger_log_id: str, tenant_id: Optional[str] = None) -> Optional[WorkflowTriggerLog]: + """ + Get a trigger log by its ID. + + Args: + trigger_log_id: The trigger log identifier + tenant_id: Optional tenant identifier for additional security + + Returns: + The WorkflowTriggerLog if found, None otherwise + """ + ... + + def get_by_status( + self, + tenant_id: str, + app_id: str, + status: WorkflowTriggerStatus, + limit: int = 100, + offset: int = 0, + order_by: TriggerLogOrderBy = TriggerLogOrderBy.CREATED_AT, + order_desc: bool = True, + ) -> Sequence[WorkflowTriggerLog]: + """ + Get trigger logs by status with pagination. + + Args: + tenant_id: The tenant identifier + app_id: The application identifier + status: The workflow trigger status to filter by + limit: Maximum number of results + offset: Number of results to skip + order_by: Field to order results by + order_desc: Whether to order descending (True) or ascending (False) + + Returns: + A sequence of WorkflowTriggerLog instances + """ + ... + + def get_failed_for_retry( + self, tenant_id: str, max_retry_count: int = 3, limit: int = 100 + ) -> Sequence[WorkflowTriggerLog]: + """ + Get failed trigger logs that are eligible for retry. + + Args: + tenant_id: The tenant identifier + max_retry_count: Maximum retry count to consider + limit: Maximum number of results + + Returns: + A sequence of WorkflowTriggerLog instances eligible for retry + """ + ... + + def get_recent_logs( + self, tenant_id: str, app_id: str, hours: int = 24, limit: int = 100, offset: int = 0 + ) -> Sequence[WorkflowTriggerLog]: + """ + Get recent trigger logs within specified hours. 
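As a usage sketch of how this protocol pairs with the SQLAlchemy implementation above (the session wiring, tenant ID, and 30-day retention window are assumptions for illustration, not part of this change):

from datetime import datetime, timedelta

from sqlalchemy.orm import Session

from extensions.ext_database import db
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
from repositories.workflow_trigger_log_repository import WorkflowTriggerLogRepository

with Session(db.engine) as session:
    repo: WorkflowTriggerLogRepository = SQLAlchemyWorkflowTriggerLogRepository(session)

    # Failed or rate-limited logs still under the retry budget.
    retryable = repo.get_failed_for_retry(tenant_id="tenant-id", max_retry_count=3, limit=50)

    # Purge logs older than an assumed 30-day retention window, in batches.
    deleted = repo.delete_expired_logs(
        tenant_id="tenant-id",
        before_date=datetime.utcnow() - timedelta(days=30),
        batch_size=1000,
    )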
+ + Args: + tenant_id: The tenant identifier + app_id: The application identifier + hours: Number of hours to look back + limit: Maximum number of results + offset: Number of results to skip + + Returns: + A sequence of recent WorkflowTriggerLog instances + """ + ... + + def count_by_status( + self, + tenant_id: str, + app_id: str, + status: Optional[WorkflowTriggerStatus] = None, + since: Optional[datetime] = None, + ) -> int: + """ + Count trigger logs by status. + + Args: + tenant_id: The tenant identifier + app_id: The application identifier + status: Optional status filter + since: Optional datetime to count from + + Returns: + Count of matching trigger logs + """ + ... + + def delete_expired_logs(self, tenant_id: str, before_date: datetime, batch_size: int = 1000) -> int: + """ + Delete expired trigger logs in batches. + + Args: + tenant_id: The tenant identifier + before_date: Delete logs created before this date + batch_size: Number of logs to delete per batch + + Returns: + Total number of logs deleted + """ + ... + + def archive_completed_logs( + self, tenant_id: str, before_date: datetime, batch_size: int = 1000 + ) -> Sequence[WorkflowTriggerLog]: + """ + Get completed logs for archival before deletion. + + Args: + tenant_id: The tenant identifier + before_date: Get logs completed before this date + batch_size: Number of logs to retrieve + + Returns: + A sequence of WorkflowTriggerLog instances for archival + """ + ... + + def update_status_batch( + self, trigger_log_ids: Sequence[str], new_status: WorkflowTriggerStatus, error_message: Optional[str] = None + ) -> int: + """ + Update status for multiple trigger logs at once. + + Args: + trigger_log_ids: List of trigger log IDs to update + new_status: The new status to set + error_message: Optional error message to set + + Returns: + Number of logs updated + """ + ... diff --git a/api/schedule/workflow_schedule_task.py b/api/schedule/workflow_schedule_task.py new file mode 100644 index 0000000000..30e00ee27c --- /dev/null +++ b/api/schedule/workflow_schedule_task.py @@ -0,0 +1,127 @@ +import logging + +from celery import group, shared_task +from sqlalchemy import and_, select +from sqlalchemy.orm import Session, sessionmaker + +from configs import dify_config +from extensions.ext_database import db +from libs.datetime_utils import naive_utc_now +from libs.schedule_utils import calculate_next_run_at +from models.workflow import AppTrigger, AppTriggerStatus, AppTriggerType, WorkflowSchedulePlan +from services.workflow.queue_dispatcher import QueueDispatcherManager +from tasks.workflow_schedule_tasks import run_schedule_trigger + +logger = logging.getLogger(__name__) + + +@shared_task(queue="schedule_poller") +def poll_workflow_schedules() -> None: + """ + Poll and process due workflow schedules. + + Streaming flow: + 1. Fetch due schedules in batches + 2. Process each batch until all due schedules are handled + 3. 
Optional: Limit total dispatches per tick as a circuit breaker + """ + session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) + + with session_factory() as session: + total_dispatched = 0 + total_rate_limited = 0 + + # Process in batches until we've handled all due schedules or hit the limit + while True: + due_schedules = _fetch_due_schedules(session) + + if not due_schedules: + break + + dispatched_count, rate_limited_count = _process_schedules(session, due_schedules) + total_dispatched += dispatched_count + total_rate_limited += rate_limited_count + + logger.debug("Batch processed: %d dispatched, %d rate limited", dispatched_count, rate_limited_count) + + # Circuit breaker: check if we've hit the per-tick limit (if enabled) + if ( + dify_config.WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK > 0 + and total_dispatched >= dify_config.WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK + ): + logger.warning( + "Circuit breaker activated: reached dispatch limit (%d), will continue next tick", + dify_config.WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK, + ) + break + + if total_dispatched > 0 or total_rate_limited > 0: + logger.info("Total processed: %d dispatched, %d rate limited", total_dispatched, total_rate_limited) + + +def _fetch_due_schedules(session: Session) -> list[WorkflowSchedulePlan]: + """ + Fetch a batch of due schedules, sorted by most overdue first. + + Returns up to WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE schedules per call. + Used in a loop to progressively process all due schedules. + """ + now = naive_utc_now() + + due_schedules = session.scalars( + ( + select(WorkflowSchedulePlan) + .join( + AppTrigger, + and_( + AppTrigger.app_id == WorkflowSchedulePlan.app_id, + AppTrigger.node_id == WorkflowSchedulePlan.node_id, + AppTrigger.trigger_type == AppTriggerType.TRIGGER_SCHEDULE, + ), + ) + .where( + WorkflowSchedulePlan.next_run_at <= now, + WorkflowSchedulePlan.next_run_at.isnot(None), + AppTrigger.status == AppTriggerStatus.ENABLED, + ) + ) + .order_by(WorkflowSchedulePlan.next_run_at.asc()) + .with_for_update(skip_locked=True) + .limit(dify_config.WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE) + ) + + return list(due_schedules) + + +def _process_schedules(session: Session, schedules: list[WorkflowSchedulePlan]) -> tuple[int, int]: + """Process schedules: check quota, update next run time and dispatch to Celery in parallel.""" + if not schedules: + return 0, 0 + + dispatcher_manager = QueueDispatcherManager() + tasks_to_dispatch = [] + rate_limited_count = 0 + + for schedule in schedules: + next_run_at = calculate_next_run_at( + schedule.cron_expression, + schedule.timezone, + ) + schedule.next_run_at = next_run_at + + dispatcher = dispatcher_manager.get_dispatcher(schedule.tenant_id) + if not dispatcher.check_daily_quota(schedule.tenant_id): + logger.info("Tenant %s rate limited, skipping schedule_plan %s", schedule.tenant_id, schedule.id) + rate_limited_count += 1 + else: + tasks_to_dispatch.append(schedule.id) + + if tasks_to_dispatch: + job = group(run_schedule_trigger.s(schedule_id) for schedule_id in tasks_to_dispatch) + job.apply_async() + + logger.debug("Dispatched %d tasks in parallel", len(tasks_to_dispatch)) + + session.commit() + + return len(tasks_to_dispatch), rate_limited_count diff --git a/api/services/app_dsl_service.py b/api/services/app_dsl_service.py index 8701fe4f4e..d3ef5283c3 100644 --- a/api/services/app_dsl_service.py +++ b/api/services/app_dsl_service.py @@ -26,6 +26,7 @@ from core.workflow.nodes.llm.entities import LLMNodeData from 
core.workflow.nodes.parameter_extractor.entities import ParameterExtractorNodeData from core.workflow.nodes.question_classifier.entities import QuestionClassifierNodeData from core.workflow.nodes.tool.entities import ToolNodeData +from core.workflow.nodes.trigger_schedule.trigger_schedule_node import TriggerScheduleNode from events.app_event import app_model_config_was_updated, app_was_created from extensions.ext_redis import redis_client from factories import variable_factory @@ -597,6 +598,13 @@ class AppDslService: if not include_secret and data_type == NodeType.AGENT.value: for tool in node_data.get("agent_parameters", {}).get("tools", {}).get("value", []): tool.pop("credential_id", None) + if data_type == NodeType.TRIGGER_SCHEDULE.value: + # override the config with the default config + node_data["config"] = TriggerScheduleNode.get_default_config()["config"] + if data_type == NodeType.TRIGGER_WEBHOOK.value: + # clear the webhook_url + node_data["webhook_url"] = "" + node_data["webhook_debug_url"] = "" export_data["workflow"] = workflow_dict dependencies = cls._extract_dependencies_from_workflow(workflow) diff --git a/api/services/async_workflow_service.py b/api/services/async_workflow_service.py new file mode 100644 index 0000000000..58b75fc07f --- /dev/null +++ b/api/services/async_workflow_service.py @@ -0,0 +1,320 @@ +""" +Universal async workflow execution service. + +This service provides a centralized entry point for triggering workflows asynchronously +with support for different subscription tiers, rate limiting, and execution tracking. +""" + +import json +from datetime import UTC, datetime +from typing import Optional, Union + +from celery.result import AsyncResult +from sqlalchemy import select +from sqlalchemy.orm import Session + +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from models.account import Account +from models.enums import CreatorUserRole +from models.model import App, EndUser +from models.workflow import Workflow, WorkflowTriggerLog, WorkflowTriggerStatus +from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository +from services.errors.app import InvokeDailyRateLimitError, WorkflowNotFoundError +from services.workflow.entities import AsyncTriggerResponse, TriggerData, WorkflowTaskData +from services.workflow.queue_dispatcher import QueueDispatcherManager, QueuePriority +from services.workflow.rate_limiter import TenantDailyRateLimiter +from services.workflow_service import WorkflowService +from tasks.async_workflow_tasks import ( + execute_workflow_professional, + execute_workflow_sandbox, + execute_workflow_team, +) + + +class AsyncWorkflowService: + """ + Universal entry point for async workflow execution - ALL METHODS ARE NON-BLOCKING + + This service handles: + - Trigger data validation and processing + - Queue routing based on subscription tier + - Daily rate limiting with timezone support + - Execution tracking and logging + - Retry mechanisms for failed executions + + Important: All trigger methods return immediately after queuing tasks. + Actual workflow execution happens asynchronously in background Celery workers. + Use trigger log IDs to monitor execution status and results. 
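    Illustrative call flow (the TriggerData constructor fields are inferred from how they are read below; the IDs, the user object, and the trigger_type value are placeholders):

from sqlalchemy.orm import Session

from extensions.ext_database import db
from services.async_workflow_service import AsyncWorkflowService
from services.workflow.entities import TriggerData

with Session(db.engine) as session:
    response = AsyncWorkflowService.trigger_workflow_async(
        session=session,
        user=current_account,  # an Account or EndUser instance (placeholder)
        trigger_data=TriggerData(
            tenant_id="tenant-id",
            app_id="app-id",
            root_node_id="start",
            trigger_type="webhook",  # assumed value
            inputs={"query": "hello"},
        ),
    )

# Returns immediately with status="queued"; poll the trigger log for the outcome.
log = AsyncWorkflowService.get_trigger_log(response.workflow_trigger_log_id)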
+ """ + + @classmethod + def trigger_workflow_async( + cls, session: Session, user: Union[Account, EndUser], trigger_data: TriggerData + ) -> AsyncTriggerResponse: + """ + Universal entry point for async workflow execution - THIS METHOD WILL NOT BLOCK + + Creates a trigger log and dispatches to appropriate queue based on subscription tier. + The workflow execution happens asynchronously in the background via Celery workers. + This method returns immediately after queuing the task, not after execution completion. + + Args: + session: Database session to use for operations + user: User (Account or EndUser) who initiated the workflow trigger + trigger_data: Validated Pydantic model containing trigger information + + Returns: + AsyncTriggerResponse with workflow_trigger_log_id, task_id, status="queued", and queue + Note: The actual workflow execution status must be checked separately via workflow_trigger_log_id + + Raises: + WorkflowNotFoundError: If app or workflow not found + InvokeDailyRateLimitError: If daily rate limit exceeded + + Behavior: + - Non-blocking: Returns immediately after queuing + - Asynchronous: Actual execution happens in background Celery workers + - Status tracking: Use workflow_trigger_log_id to monitor progress + - Queue-based: Routes to different queues based on subscription tier + """ + trigger_log_repo = SQLAlchemyWorkflowTriggerLogRepository(session) + dispatcher_manager = QueueDispatcherManager() + workflow_service = WorkflowService() + rate_limiter = TenantDailyRateLimiter(redis_client) + + # 1. Validate app exists + app_model = session.scalar(select(App).where(App.id == trigger_data.app_id)) + if not app_model: + raise WorkflowNotFoundError(f"App not found: {trigger_data.app_id}") + + # 2. Get workflow + workflow = cls._get_workflow(workflow_service, app_model, trigger_data.workflow_id) + + # 3. Get dispatcher based on tenant subscription + dispatcher = dispatcher_manager.get_dispatcher(trigger_data.tenant_id) + + # 4. Rate limiting check will be done without timezone first + + # 5. Determine user role and ID + if isinstance(user, Account): + created_by_role = CreatorUserRole.ACCOUNT + created_by = user.id + else: # EndUser + created_by_role = CreatorUserRole.END_USER + created_by = user.id + + # 6. Create trigger log entry first (for tracking) + trigger_log = WorkflowTriggerLog( + tenant_id=trigger_data.tenant_id, + app_id=trigger_data.app_id, + workflow_id=workflow.id, + root_node_id=trigger_data.root_node_id, + trigger_type=trigger_data.trigger_type, + trigger_data=trigger_data.model_dump_json(), + inputs=json.dumps(dict(trigger_data.inputs)), + status=WorkflowTriggerStatus.PENDING, + queue_name=dispatcher.get_queue_name(), + retry_count=0, + created_by_role=created_by_role, + created_by=created_by, + ) + + trigger_log = trigger_log_repo.create(trigger_log) + session.commit() + + # 7. Check and consume daily quota + if not dispatcher.consume_quota(trigger_data.tenant_id): + # Update trigger log status + trigger_log.status = WorkflowTriggerStatus.RATE_LIMITED + trigger_log.error = f"Daily limit reached for {dispatcher.get_queue_name()}" + trigger_log_repo.update(trigger_log) + session.commit() + + tenant_owner_tz = rate_limiter._get_tenant_owner_timezone(trigger_data.tenant_id) + + remaining = rate_limiter.get_remaining_quota(trigger_data.tenant_id, dispatcher.get_daily_limit()) + + reset_time = rate_limiter.get_quota_reset_time(trigger_data.tenant_id, tenant_owner_tz) + + raise InvokeDailyRateLimitError( + f"Daily workflow execution limit reached. 
" + f"Limit resets at {reset_time.strftime('%Y-%m-%d %H:%M:%S %Z')}. " + f"Remaining quota: {remaining}" + ) + + # 8. Create task data + queue_name = dispatcher.get_queue_name() + + task_data = WorkflowTaskData(workflow_trigger_log_id=trigger_log.id) + + # 9. Dispatch to appropriate queue + task_data_dict = task_data.model_dump(mode="json") + + task: AsyncResult | None = None + if queue_name == QueuePriority.PROFESSIONAL: + task = execute_workflow_professional.delay(task_data_dict) # type: ignore + elif queue_name == QueuePriority.TEAM: + task = execute_workflow_team.delay(task_data_dict) # type: ignore + else: # SANDBOX + task = execute_workflow_sandbox.delay(task_data_dict) # type: ignore + + if not task: + raise ValueError(f"Failed to queue task for queue: {queue_name}") + + # 10. Update trigger log with task info + trigger_log.status = WorkflowTriggerStatus.QUEUED + trigger_log.celery_task_id = task.id + trigger_log.triggered_at = datetime.now(UTC) + trigger_log_repo.update(trigger_log) + session.commit() + + return AsyncTriggerResponse( + workflow_trigger_log_id=trigger_log.id, + task_id=task.id, # type: ignore + status="queued", + queue=queue_name, + ) + + @classmethod + def reinvoke_trigger( + cls, session: Session, user: Union[Account, EndUser], workflow_trigger_log_id: str + ) -> AsyncTriggerResponse: + """ + Re-invoke a previously failed or rate-limited trigger - THIS METHOD WILL NOT BLOCK + + Updates the existing trigger log to retry status and creates a new async execution. + Returns immediately after queuing the retry, not after execution completion. + + Args: + session: Database session to use for operations + user: User (Account or EndUser) who initiated the retry + workflow_trigger_log_id: ID of the trigger log to re-invoke + + Returns: + AsyncTriggerResponse with new execution information (status="queued") + Note: This creates a new trigger log entry for the retry attempt + + Raises: + ValueError: If trigger log not found + + Behavior: + - Non-blocking: Returns immediately after queuing retry + - Creates new trigger log: Original log marked as retrying, new log for execution + - Preserves original trigger data: Uses same inputs and configuration + """ + trigger_log_repo = SQLAlchemyWorkflowTriggerLogRepository(session) + + trigger_log = trigger_log_repo.get_by_id(workflow_trigger_log_id) + + if not trigger_log: + raise ValueError(f"Trigger log not found: {workflow_trigger_log_id}") + + # Reconstruct trigger data from log + trigger_data = TriggerData.model_validate_json(trigger_log.trigger_data) + + # Reset log for retry + trigger_log.status = WorkflowTriggerStatus.RETRYING + trigger_log.retry_count += 1 + trigger_log.error = None + trigger_log.triggered_at = datetime.now(UTC) + trigger_log_repo.update(trigger_log) + session.commit() + + # Re-trigger workflow (this will create a new trigger log) + return cls.trigger_workflow_async(session, user, trigger_data) + + @classmethod + def get_trigger_log(cls, workflow_trigger_log_id: str, tenant_id: Optional[str] = None) -> Optional[dict]: + """ + Get trigger log by ID + + Args: + workflow_trigger_log_id: ID of the trigger log + tenant_id: Optional tenant ID for security check + + Returns: + Trigger log as dictionary or None if not found + """ + with Session(db.engine) as session: + trigger_log_repo = SQLAlchemyWorkflowTriggerLogRepository(session) + trigger_log = trigger_log_repo.get_by_id(workflow_trigger_log_id, tenant_id) + + if not trigger_log: + return None + + return trigger_log.to_dict() + + @classmethod + def 
get_recent_logs( + cls, tenant_id: str, app_id: str, hours: int = 24, limit: int = 100, offset: int = 0 + ) -> list[dict]: + """ + Get recent trigger logs + + Args: + tenant_id: Tenant ID + app_id: Application ID + hours: Number of hours to look back + limit: Maximum number of results + offset: Number of results to skip + + Returns: + List of trigger logs as dictionaries + """ + with Session(db.engine) as session: + trigger_log_repo = SQLAlchemyWorkflowTriggerLogRepository(session) + logs = trigger_log_repo.get_recent_logs( + tenant_id=tenant_id, app_id=app_id, hours=hours, limit=limit, offset=offset + ) + + return [log.to_dict() for log in logs] + + @classmethod + def get_failed_logs_for_retry(cls, tenant_id: str, max_retry_count: int = 3, limit: int = 100) -> list[dict]: + """ + Get failed logs eligible for retry + + Args: + tenant_id: Tenant ID + max_retry_count: Maximum retry count + limit: Maximum number of results + + Returns: + List of failed trigger logs as dictionaries + """ + with Session(db.engine) as session: + trigger_log_repo = SQLAlchemyWorkflowTriggerLogRepository(session) + logs = trigger_log_repo.get_failed_for_retry( + tenant_id=tenant_id, max_retry_count=max_retry_count, limit=limit + ) + + return [log.to_dict() for log in logs] + + @staticmethod + def _get_workflow(workflow_service: WorkflowService, app_model: App, workflow_id: Optional[str] = None) -> Workflow: + """ + Get workflow for the app + + Args: + app_model: App model instance + workflow_id: Optional specific workflow ID + + Returns: + Workflow instance + + Raises: + WorkflowNotFoundError: If workflow not found + """ + if workflow_id: + # Get specific published workflow + workflow = workflow_service.get_published_workflow_by_id(app_model, workflow_id) + if not workflow: + raise WorkflowNotFoundError(f"Published workflow not found: {workflow_id}") + else: + # Get default published workflow + workflow = workflow_service.get_published_workflow(app_model) + if not workflow: + raise WorkflowNotFoundError(f"No published workflow found for app: {app_model.id}") + + return workflow diff --git a/api/services/datasource_provider_service.py b/api/services/datasource_provider_service.py index 89a5d89f61..ffa60b9055 100644 --- a/api/services/datasource_provider_service.py +++ b/api/services/datasource_provider_service.py @@ -12,9 +12,9 @@ from core.helper import encrypter from core.helper.name_generator import generate_incremental_name from core.helper.provider_cache import NoOpProviderCredentialCache from core.model_runtime.entities.provider_entities import FormType +from core.plugin.entities.plugin_daemon import CredentialType from core.plugin.impl.datasource import PluginDatasourceManager from core.plugin.impl.oauth import OAuthHandler -from core.tools.entities.tool_entities import CredentialType from core.tools.utils.encryption import ProviderConfigCache, ProviderConfigEncrypter, create_provider_encrypter from extensions.ext_database import db from extensions.ext_redis import redis_client diff --git a/api/services/errors/app.py b/api/services/errors/app.py index 390716a47f..338636d9b6 100644 --- a/api/services/errors/app.py +++ b/api/services/errors/app.py @@ -16,3 +16,9 @@ class WorkflowNotFoundError(Exception): class WorkflowIdFormatError(Exception): pass + + +class InvokeDailyRateLimitError(Exception): + """Raised when daily rate limit is exceeded for workflow invocations.""" + + pass diff --git a/api/services/plugin/oauth_service.py b/api/services/plugin/oauth_service.py index 057b20428f..3c76c1869f 100644 --- 
a/api/services/plugin/oauth_service.py +++ b/api/services/plugin/oauth_service.py @@ -17,6 +17,7 @@ class OAuthProxyService(BasePluginClient): plugin_id: str, provider: str, credential_id: str | None = None, + extra_data: dict = {} ): """ Create a proxy context for an OAuth 2.0 authorization request. @@ -32,6 +33,7 @@ class OAuthProxyService(BasePluginClient): """ context_id = str(uuid.uuid4()) data = { + **extra_data, "user_id": user_id, "plugin_id": plugin_id, "tenant_id": tenant_id, diff --git a/api/services/plugin/plugin_parameter_service.py b/api/services/plugin/plugin_parameter_service.py index 00b59dacb3..17fda44150 100644 --- a/api/services/plugin/plugin_parameter_service.py +++ b/api/services/plugin/plugin_parameter_service.py @@ -4,11 +4,18 @@ from typing import Any, Literal from sqlalchemy.orm import Session from core.plugin.entities.parameters import PluginParameterOption +from core.plugin.entities.plugin_daemon import CredentialType from core.plugin.impl.dynamic_select import DynamicSelectClient from core.tools.tool_manager import ToolManager from core.tools.utils.encryption import create_tool_provider_encrypter +from core.trigger.entities.api_entities import TriggerProviderSubscriptionApiEntity +from core.trigger.entities.entities import SubscriptionBuilder +from core.trigger.trigger_manager import TriggerManager from extensions.ext_database import db +from models.provider_ids import TriggerProviderID from models.tools import BuiltinToolProvider +from services.trigger.trigger_provider_service import TriggerProviderService +from services.trigger.trigger_subscription_builder_service import TriggerSubscriptionBuilderService class PluginParameterService: @@ -20,7 +27,8 @@ class PluginParameterService: provider: str, action: str, parameter: str, - provider_type: Literal["tool"], + credential_id: str | None, + provider_type: Literal["tool", "trigger"], ) -> Sequence[PluginParameterOption]: """ Get dynamic select options for a plugin parameter. @@ -33,7 +41,7 @@ class PluginParameterService: parameter: The parameter name. 
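        A hedged call sketch for the new "trigger" branch; the leading parameters (tenant_id, user_id, plugin_id) are inferred from the surrounding hunk and may not match the full signature exactly:

options = PluginParameterService.get_dynamic_select_options(
    tenant_id="tenant-id",
    user_id="user-id",
    plugin_id="org/plugin",          # assumed parameter, hidden by the diff context
    provider="org/plugin/provider",
    action="on_issue_created",       # placeholder trigger name
    parameter="repository",
    credential_id=subscription_id,   # None falls back to the first/default credential
    provider_type="trigger",
)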
""" credentials: Mapping[str, Any] = {} - + credential_type: str = CredentialType.UNAUTHORIZED.value match provider_type: case "tool": provider_controller = ToolManager.get_builtin_provider(provider, tenant_id) @@ -49,24 +57,56 @@ class PluginParameterService: else: # fetch credentials from db with Session(db.engine) as session: - db_record = ( - session.query(BuiltinToolProvider) - .where( - BuiltinToolProvider.tenant_id == tenant_id, - BuiltinToolProvider.provider == provider, + if credential_id: + db_record = ( + session.query(BuiltinToolProvider) + .where( + BuiltinToolProvider.tenant_id == tenant_id, + BuiltinToolProvider.provider == provider, + BuiltinToolProvider.id == credential_id, + ) + .first() + ) + else: + db_record = ( + session.query(BuiltinToolProvider) + .where( + BuiltinToolProvider.tenant_id == tenant_id, + BuiltinToolProvider.provider == provider, + ) + .order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc()) + .first() ) - .first() - ) if db_record is None: raise ValueError(f"Builtin provider {provider} not found when fetching credentials") credentials = encrypter.decrypt(db_record.credentials) + credential_type = db_record.credential_type + case "trigger": + provider_controller = TriggerManager.get_trigger_provider(tenant_id, TriggerProviderID(provider)) + if credential_id: + subscription: TriggerProviderSubscriptionApiEntity | SubscriptionBuilder | None = ( + TriggerSubscriptionBuilderService.get_subscription_builder(credential_id) + or TriggerProviderService.get_subscription_by_id(tenant_id, credential_id) + ) + else: + subscription: TriggerProviderSubscriptionApiEntity | SubscriptionBuilder | None = ( + TriggerProviderService.get_subscription_by_id(tenant_id) + ) + + if subscription is None: + raise ValueError(f"Subscription {credential_id} not found") + + credentials = subscription.credentials + credential_type = subscription.credential_type or CredentialType.UNAUTHORIZED case _: raise ValueError(f"Invalid provider type: {provider_type}") return ( DynamicSelectClient() - .fetch_dynamic_select_options(tenant_id, user_id, plugin_id, provider, action, credentials, parameter) + .fetch_dynamic_select_options( + tenant_id, user_id, plugin_id, provider, action, credentials, credential_type, parameter + ) .options ) diff --git a/api/services/plugin/plugin_service.py b/api/services/plugin/plugin_service.py index 604adeb7b5..96268af812 100644 --- a/api/services/plugin/plugin_service.py +++ b/api/services/plugin/plugin_service.py @@ -3,6 +3,7 @@ from collections.abc import Mapping, Sequence from mimetypes import guess_type from pydantic import BaseModel +from yarl import URL from configs import dify_config from core.helper import marketplace @@ -175,6 +176,13 @@ class PluginService: manager = PluginInstaller() return manager.fetch_plugin_installation_by_ids(tenant_id, ids) + @classmethod + def get_plugin_icon_url(cls, tenant_id: str, filename: str) -> str: + url_prefix = ( + URL(dify_config.CONSOLE_API_URL or "/") / "console" / "api" / "workspaces" / "current" / "plugin" / "icon" + ) + return str(url_prefix % {"tenant_id": tenant_id, "filename": filename}) + @staticmethod def get_asset(tenant_id: str, asset_file: str) -> tuple[bytes, str]: """ diff --git a/api/services/schedule_service.py b/api/services/schedule_service.py new file mode 100644 index 0000000000..333eeb2cc4 --- /dev/null +++ b/api/services/schedule_service.py @@ -0,0 +1,274 @@ +import json +import logging +from datetime import datetime +from typing import Optional + +from 
sqlalchemy import select +from sqlalchemy.orm import Session + +from core.workflow.nodes import NodeType +from core.workflow.nodes.trigger_schedule.entities import ScheduleConfig, SchedulePlanUpdate, VisualConfig +from core.workflow.nodes.trigger_schedule.exc import ScheduleConfigError, ScheduleNotFoundError +from libs.schedule_utils import calculate_next_run_at, convert_12h_to_24h +from models.account import Account, TenantAccountJoin +from models.workflow import Workflow, WorkflowSchedulePlan + +logger = logging.getLogger(__name__) + + +class ScheduleService: + @staticmethod + def create_schedule( + session: Session, + tenant_id: str, + app_id: str, + config: ScheduleConfig, + ) -> WorkflowSchedulePlan: + """ + Create a new schedule with validated configuration. + + Args: + session: Database session + tenant_id: Tenant ID + app_id: Application ID + config: Validated schedule configuration + + Returns: + Created WorkflowSchedulePlan instance + """ + next_run_at = calculate_next_run_at( + config.cron_expression, + config.timezone, + ) + + schedule = WorkflowSchedulePlan( + tenant_id=tenant_id, + app_id=app_id, + node_id=config.node_id, + cron_expression=config.cron_expression, + timezone=config.timezone, + next_run_at=next_run_at, + ) + + session.add(schedule) + session.flush() + + return schedule + + @staticmethod + def update_schedule( + session: Session, + schedule_id: str, + updates: SchedulePlanUpdate, + ) -> WorkflowSchedulePlan: + """ + Update an existing schedule with validated configuration. + + Args: + session: Database session + schedule_id: Schedule ID to update + updates: Validated update configuration + + Raises: + ScheduleNotFoundError: If schedule not found + + Returns: + Updated WorkflowSchedulePlan instance + """ + schedule = session.get(WorkflowSchedulePlan, schedule_id) + if not schedule: + raise ScheduleNotFoundError(f"Schedule not found: {schedule_id}") + + # If time-related fields are updated, synchronously update the next_run_at. + time_fields_updated = False + + if updates.node_id is not None: + schedule.node_id = updates.node_id + + if updates.cron_expression is not None: + schedule.cron_expression = updates.cron_expression + time_fields_updated = True + + if updates.timezone is not None: + schedule.timezone = updates.timezone + time_fields_updated = True + + if time_fields_updated: + schedule.next_run_at = calculate_next_run_at( + schedule.cron_expression, + schedule.timezone, + ) + + session.flush() + return schedule + + @staticmethod + def delete_schedule( + session: Session, + schedule_id: str, + ) -> None: + """ + Delete a schedule plan. + + Args: + session: Database session + schedule_id: Schedule ID to delete + """ + schedule = session.get(WorkflowSchedulePlan, schedule_id) + if not schedule: + raise ScheduleNotFoundError(f"Schedule not found: {schedule_id}") + + session.delete(schedule) + session.flush() + + @staticmethod + def get_tenant_owner(session: Session, tenant_id: str) -> Optional[Account]: + """ + Returns an account to execute scheduled workflows on behalf of the tenant. + Prioritizes owner over admin to ensure proper authorization hierarchy. 
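        Illustrative lookup when attributing a scheduled run (session wiring and tenant ID are placeholders):

from sqlalchemy.orm import Session

from extensions.ext_database import db

with Session(db.engine) as session:
    executor = ScheduleService.get_tenant_owner(session, tenant_id="tenant-id")
    if executor is None:
        # Neither an owner nor an admin exists; the scheduled run cannot be attributed.
        ...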
+ """ + result = session.execute( + select(TenantAccountJoin) + .where(TenantAccountJoin.tenant_id == tenant_id, TenantAccountJoin.role == "owner") + .limit(1) + ).scalar_one_or_none() + + if not result: + # Owner may not exist in some tenant configurations, fallback to admin + result = session.execute( + select(TenantAccountJoin) + .where(TenantAccountJoin.tenant_id == tenant_id, TenantAccountJoin.role == "admin") + .limit(1) + ).scalar_one_or_none() + + if result: + return session.get(Account, result.account_id) + + @staticmethod + def update_next_run_at( + session: Session, + schedule_id: str, + ) -> datetime: + """ + Advances the schedule to its next execution time after a successful trigger. + Uses current time as base to prevent missing executions during delays. + """ + schedule = session.get(WorkflowSchedulePlan, schedule_id) + if not schedule: + raise ScheduleNotFoundError(f"Schedule not found: {schedule_id}") + + # Base on current time to handle execution delays gracefully + next_run_at = calculate_next_run_at( + schedule.cron_expression, + schedule.timezone, + ) + + schedule.next_run_at = next_run_at + session.flush() + return next_run_at + + @staticmethod + def extract_schedule_config(workflow: Workflow) -> Optional[ScheduleConfig]: + """ + Extracts schedule configuration from workflow graph. + + Searches for the first schedule trigger node in the workflow and converts + its configuration (either visual or cron mode) into a unified ScheduleConfig. + + Args: + workflow: The workflow containing the graph definition + + Returns: + ScheduleConfig if a valid schedule node is found, None if no schedule node exists + + Raises: + ScheduleConfigError: If graph parsing fails or schedule configuration is invalid + + Note: + Currently only returns the first schedule node found. + Multiple schedule nodes in the same workflow are not supported. + """ + try: + graph_data = workflow.graph_dict + except (json.JSONDecodeError, TypeError, AttributeError) as e: + raise ScheduleConfigError(f"Failed to parse workflow graph: {e}") + + if not graph_data: + raise ScheduleConfigError("Workflow graph is empty") + + nodes = graph_data.get("nodes", []) + for node in nodes: + node_data = node.get("data", {}) + + if node_data.get("type") != NodeType.TRIGGER_SCHEDULE.value: + continue + + mode = node_data.get("mode", "visual") + timezone = node_data.get("timezone", "UTC") + node_id = node.get("id", "start") + + cron_expression = None + if mode == "cron": + cron_expression = node_data.get("cron_expression") + if not cron_expression: + raise ScheduleConfigError("Cron expression is required for cron mode") + elif mode == "visual": + frequency = node_data.get("frequency") + visual_config_dict = node_data.get("visual_config", {}) + visual_config = VisualConfig(**visual_config_dict) + cron_expression = ScheduleService.visual_to_cron(frequency, visual_config) + else: + raise ScheduleConfigError(f"Invalid schedule mode: {mode}") + + return ScheduleConfig(node_id=node_id, cron_expression=cron_expression, timezone=timezone) + + @staticmethod + def visual_to_cron(frequency: str, visual_config: VisualConfig) -> str: + """ + Converts user-friendly visual schedule settings to cron expression. + Maintains consistency with frontend UI expectations while supporting croniter's extended syntax. 
+ """ + if frequency == "hourly": + if visual_config.on_minute is None: + raise ScheduleConfigError("on_minute is required for hourly schedules") + return f"{visual_config.on_minute} * * * *" + + elif frequency == "daily": + if not visual_config.time: + raise ScheduleConfigError("time is required for daily schedules") + hour, minute = convert_12h_to_24h(visual_config.time) + return f"{minute} {hour} * * *" + + elif frequency == "weekly": + if not visual_config.time: + raise ScheduleConfigError("time is required for weekly schedules") + if not visual_config.weekdays: + raise ScheduleConfigError("Weekdays are required for weekly schedules") + hour, minute = convert_12h_to_24h(visual_config.time) + weekday_map = {"sun": "0", "mon": "1", "tue": "2", "wed": "3", "thu": "4", "fri": "5", "sat": "6"} + cron_weekdays = [weekday_map[day] for day in visual_config.weekdays] + return f"{minute} {hour} * * {','.join(sorted(cron_weekdays))}" + + elif frequency == "monthly": + if not visual_config.time: + raise ScheduleConfigError("time is required for monthly schedules") + if not visual_config.monthly_days: + raise ScheduleConfigError("Monthly days are required for monthly schedules") + hour, minute = convert_12h_to_24h(visual_config.time) + + numeric_days = [] + has_last = False + for day in visual_config.monthly_days: + if day == "last": + has_last = True + else: + numeric_days.append(day) + + result_days = [str(d) for d in sorted(set(numeric_days))] + if has_last: + result_days.append("L") + + return f"{minute} {hour} {','.join(result_days)} * *" + + else: + raise ScheduleConfigError(f"Unsupported frequency: {frequency}") diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py index 6b0b6b0f0e..17d1785b5e 100644 --- a/api/services/tools/builtin_tools_manage_service.py +++ b/api/services/tools/builtin_tools_manage_service.py @@ -2,7 +2,7 @@ import json import logging from collections.abc import Mapping from pathlib import Path -from typing import Any +from typing import Any, Optional from sqlalchemy import exists, select from sqlalchemy.orm import Session @@ -12,6 +12,7 @@ from constants import HIDDEN_VALUE, UNKNOWN_VALUE from core.helper.name_generator import generate_incremental_name from core.helper.position_helper import is_filtered from core.helper.provider_cache import NoOpProviderCredentialCache, ToolProviderCredentialsCache +from core.plugin.entities.plugin_daemon import CredentialType from core.tools.builtin_tool.provider import BuiltinToolProviderController from core.tools.builtin_tool.providers._positions import BuiltinToolProviderSort from core.tools.entities.api_entities import ( @@ -20,7 +21,6 @@ from core.tools.entities.api_entities import ( ToolProviderCredentialApiEntity, ToolProviderCredentialInfoApiEntity, ) -from core.tools.entities.tool_entities import CredentialType from core.tools.errors import ToolProviderNotFoundError from core.tools.plugin_tool.provider import PluginToolProviderController from core.tools.tool_label_manager import ToolLabelManager @@ -39,7 +39,6 @@ logger = logging.getLogger(__name__) class BuiltinToolManageService: __MAX_BUILTIN_TOOL_PROVIDER_COUNT__ = 100 - __DEFAULT_EXPIRES_AT__ = 2147483647 @staticmethod def delete_custom_oauth_client_params(tenant_id: str, provider: str): @@ -278,9 +277,7 @@ class BuiltinToolManageService: encrypted_credentials=json.dumps(encrypter.encrypt(credentials)), credential_type=api_type.value, name=name, - expires_at=expires_at - if expires_at is not None - else 
BuiltinToolManageService.__DEFAULT_EXPIRES_AT__, + expires_at=expires_at if expires_at is not None else -1, ) session.add(db_provider) @@ -643,8 +640,8 @@ class BuiltinToolManageService: def save_custom_oauth_client_params( tenant_id: str, provider: str, - client_params: dict | None = None, - enable_oauth_custom_client: bool | None = None, + client_params: Optional[dict] = None, + enable_oauth_custom_client: Optional[bool] = None, ): """ setup oauth custom client diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index 6b36ed0eb7..40a0d69272 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -8,7 +8,7 @@ from yarl import URL from configs import dify_config from core.helper.provider_cache import ToolProviderCredentialsCache from core.mcp.types import Tool as MCPTool -from core.plugin.entities.plugin_daemon import PluginDatasourceProviderEntity +from core.plugin.entities.plugin_daemon import CredentialType, PluginDatasourceProviderEntity from core.tools.__base.tool import Tool from core.tools.__base.tool_runtime import ToolRuntime from core.tools.builtin_tool.provider import BuiltinToolProviderController @@ -18,7 +18,6 @@ from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ( ApiProviderAuthType, - CredentialType, ToolParameter, ToolProviderType, ) @@ -27,18 +26,13 @@ from core.tools.utils.encryption import create_provider_encrypter, create_tool_p from core.tools.workflow_as_tool.provider import WorkflowToolProviderController from core.tools.workflow_as_tool.tool import WorkflowTool from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider +from services.plugin.plugin_service import PluginService logger = logging.getLogger(__name__) class ToolTransformService: - @classmethod - def get_plugin_icon_url(cls, tenant_id: str, filename: str) -> str: - url_prefix = ( - URL(dify_config.CONSOLE_API_URL or "/") / "console" / "api" / "workspaces" / "current" / "plugin" / "icon" - ) - return str(url_prefix % {"tenant_id": tenant_id, "filename": filename}) - + @classmethod def get_tool_provider_icon_url( cls, provider_type: str, provider_name: str, icon: str | Mapping[str, str] @@ -78,11 +72,9 @@ class ToolTransformService: elif isinstance(provider, ToolProviderApiEntity): if provider.plugin_id: if isinstance(provider.icon, str): - provider.icon = ToolTransformService.get_plugin_icon_url( - tenant_id=tenant_id, filename=provider.icon - ) + provider.icon = PluginService.get_plugin_icon_url(tenant_id=tenant_id, filename=provider.icon) if isinstance(provider.icon_dark, str) and provider.icon_dark: - provider.icon_dark = ToolTransformService.get_plugin_icon_url( + provider.icon_dark = PluginService.get_plugin_icon_url( tenant_id=tenant_id, filename=provider.icon_dark ) else: @@ -96,7 +88,7 @@ class ToolTransformService: elif isinstance(provider, PluginDatasourceProviderEntity): if provider.plugin_id: if isinstance(provider.declaration.identity.icon, str): - provider.declaration.identity.icon = ToolTransformService.get_plugin_icon_url( + provider.declaration.identity.icon = PluginService.get_plugin_icon_url( tenant_id=tenant_id, filename=provider.declaration.identity.icon ) diff --git a/api/services/trigger/trigger_provider_service.py b/api/services/trigger/trigger_provider_service.py new file mode 100644 index 0000000000..fb7fdf81d1 --- 
/dev/null +++ b/api/services/trigger/trigger_provider_service.py @@ -0,0 +1,530 @@ +import json +import logging +import uuid +from collections.abc import Mapping +from typing import Any, Optional + +from sqlalchemy import desc, func +from sqlalchemy.orm import Session + +from configs import dify_config +from constants import HIDDEN_VALUE, UNKNOWN_VALUE +from core.helper.provider_cache import NoOpProviderCredentialCache +from core.helper.provider_encryption import create_provider_encrypter +from core.plugin.entities.plugin_daemon import CredentialType +from core.plugin.impl.oauth import OAuthHandler +from core.tools.utils.system_oauth_encryption import decrypt_system_oauth_params +from core.trigger.entities.api_entities import ( + TriggerProviderApiEntity, + TriggerProviderSubscriptionApiEntity, +) +from core.trigger.trigger_manager import TriggerManager +from core.trigger.utils.encryption import ( + create_trigger_provider_encrypter_for_properties, + create_trigger_provider_encrypter_for_subscription, + delete_cache_for_subscription, +) +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from models.provider_ids import TriggerProviderID +from models.trigger import TriggerOAuthSystemClient, TriggerOAuthTenantClient, TriggerSubscription +from models.workflow import WorkflowPluginTrigger +from services.plugin.plugin_service import PluginService + +logger = logging.getLogger(__name__) + + +class TriggerProviderService: + """Service for managing trigger providers and credentials""" + + ########################## + # Trigger provider + ########################## + __MAX_TRIGGER_PROVIDER_COUNT__ = 10 + + @classmethod + def get_trigger_provider(cls, tenant_id: str, provider: TriggerProviderID) -> TriggerProviderApiEntity: + """Get info for a trigger provider""" + return TriggerManager.get_trigger_provider(tenant_id, provider).to_api_entity() + + @classmethod + def list_trigger_providers(cls, tenant_id: str) -> list[TriggerProviderApiEntity]: + """List all trigger providers for the current tenant""" + return [provider.to_api_entity() for provider in TriggerManager.list_all_trigger_providers(tenant_id)] + + @classmethod + def list_trigger_provider_subscriptions( + cls, tenant_id: str, provider_id: TriggerProviderID + ) -> list[TriggerProviderSubscriptionApiEntity]: + """List all trigger subscriptions for the current tenant""" + subscriptions: list[TriggerProviderSubscriptionApiEntity] = [] + workflows_in_use_map: dict[str, int] = {} + with Session(db.engine, expire_on_commit=False) as session: + # Get all subscriptions + subscriptions_db = ( + session.query(TriggerSubscription) + .filter_by(tenant_id=tenant_id, provider_id=str(provider_id)) + .order_by(desc(TriggerSubscription.created_at)) + .all() + ) + subscriptions = [subscription.to_api_entity() for subscription in subscriptions_db] + if not subscriptions: + return [] + usage_counts = ( + session.query( + WorkflowPluginTrigger.subscription_id, + func.count(func.distinct(WorkflowPluginTrigger.app_id)).label("app_count"), + ) + .filter( + WorkflowPluginTrigger.tenant_id == tenant_id, + WorkflowPluginTrigger.subscription_id.in_([s.id for s in subscriptions]), + ) + .group_by(WorkflowPluginTrigger.subscription_id) + .all() + ) + workflows_in_use_map = {str(row.subscription_id): int(row.app_count) for row in usage_counts} + + provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + for subscription in subscriptions: + encrypter, _ = create_trigger_provider_encrypter_for_subscription( + 
tenant_id=tenant_id, + controller=provider_controller, + subscription=subscription, + ) + subscription.credentials = encrypter.mask_credentials(subscription.credentials) + count = workflows_in_use_map.get(subscription.id) + subscription.workflows_in_use = count if count is not None else 0 + + return subscriptions + + @classmethod + def add_trigger_subscription( + cls, + tenant_id: str, + user_id: str, + name: str, + provider_id: TriggerProviderID, + endpoint_id: str, + credential_type: CredentialType, + parameters: Mapping[str, Any], + properties: Mapping[str, Any], + credentials: Mapping[str, str], + subscription_id: Optional[str] = None, + credential_expires_at: int = -1, + expires_at: int = -1, + ) -> dict: + """ + Add a new trigger provider with credentials. + Supports multiple credential instances per provider. + + :param tenant_id: Tenant ID + :param provider_id: Provider identifier (e.g., "plugin_id/provider_name") + :param credential_type: Type of credential (oauth or api_key) + :param credentials: Credential data to encrypt and store + :param name: Optional name for this credential instance + :param expires_at: OAuth token expiration timestamp + :return: Success response + """ + try: + provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + with Session(db.engine, expire_on_commit=False) as session: + # Use distributed lock to prevent race conditions + lock_key = f"trigger_provider_create_lock:{tenant_id}_{provider_id}" + with redis_client.lock(lock_key, timeout=20): + # Check provider count limit + provider_count = ( + session.query(TriggerSubscription) + .filter_by(tenant_id=tenant_id, provider_id=str(provider_id)) + .count() + ) + + if provider_count >= cls.__MAX_TRIGGER_PROVIDER_COUNT__: + raise ValueError( + f"Maximum number of providers ({cls.__MAX_TRIGGER_PROVIDER_COUNT__}) " + f"reached for {provider_id}" + ) + + # Check if name already exists + existing = ( + session.query(TriggerSubscription) + .filter_by(tenant_id=tenant_id, provider_id=str(provider_id), name=name) + .first() + ) + if existing: + raise ValueError(f"Credential name '{name}' already exists for this provider") + + credential_encrypter, _ = create_provider_encrypter( + tenant_id=tenant_id, + config=provider_controller.get_credential_schema_config(credential_type), + cache=NoOpProviderCredentialCache(), + ) + + properties_encrypter, _ = create_provider_encrypter( + tenant_id=tenant_id, + config=provider_controller.get_properties_schema(), + cache=NoOpProviderCredentialCache(), + ) + + # Create provider record + db_provider = TriggerSubscription( + id=subscription_id or str(uuid.uuid4()), + tenant_id=tenant_id, + user_id=user_id, + name=name, + endpoint_id=endpoint_id, + provider_id=str(provider_id), + parameters=parameters, + properties=properties_encrypter.encrypt(dict(properties)), + credentials=credential_encrypter.encrypt(dict(credentials)), + credential_type=credential_type.value, + credential_expires_at=credential_expires_at, + expires_at=expires_at, + ) + + session.add(db_provider) + session.commit() + + return {"result": "success", "id": str(db_provider.id)} + + except Exception as e: + logger.exception("Failed to add trigger provider") + raise ValueError(str(e)) + + @classmethod + def get_subscription_by_id( + cls, tenant_id: str, subscription_id: str | None = None + ) -> TriggerProviderSubscriptionApiEntity | None: + """ + Get a trigger subscription by the ID. 
+ """ + with Session(db.engine, expire_on_commit=False) as session: + subscription: TriggerSubscription | None = None + if subscription_id: + subscription = ( + session.query(TriggerSubscription).filter_by(tenant_id=tenant_id, id=subscription_id).first() + ) + else: + subscription = session.query(TriggerSubscription).filter_by(tenant_id=tenant_id).first() + if subscription: + provider_controller = TriggerManager.get_trigger_provider( + tenant_id, TriggerProviderID(subscription.provider_id) + ) + encrypter, _ = create_trigger_provider_encrypter_for_subscription( + tenant_id=tenant_id, + controller=provider_controller, + subscription=subscription, + ) + subscription.credentials = encrypter.decrypt(subscription.credentials) + return subscription.to_api_entity() + return None + + @classmethod + def delete_trigger_provider(cls, session: Session, tenant_id: str, subscription_id: str): + """ + Delete a trigger provider subscription within an existing session. + + :param session: Database session + :param tenant_id: Tenant ID + :param subscription_id: Subscription instance ID + :return: Success response + """ + db_provider = session.query(TriggerSubscription).filter_by(tenant_id=tenant_id, id=subscription_id).first() + if not db_provider: + raise ValueError(f"Trigger provider subscription {subscription_id} not found") + + # Clear cache + session.delete(db_provider) + delete_cache_for_subscription( + tenant_id=tenant_id, + provider_id=db_provider.provider_id, + subscription_id=db_provider.id, + ) + + @classmethod + def refresh_oauth_token( + cls, + tenant_id: str, + subscription_id: str, + ) -> dict: + """ + Refresh OAuth token for a trigger provider. + + :param tenant_id: Tenant ID + :param subscription_id: Subscription instance ID + :return: New token info + """ + with Session(db.engine) as session: + db_provider = session.query(TriggerSubscription).filter_by(tenant_id=tenant_id, id=subscription_id).first() + + if not db_provider: + raise ValueError(f"Trigger provider subscription {subscription_id} not found") + + if db_provider.credential_type != CredentialType.OAUTH2.value: + raise ValueError("Only OAuth credentials can be refreshed") + + provider_id = TriggerProviderID(db_provider.provider_id) + provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + # Create encrypter + encrypter, cache = create_provider_encrypter( + tenant_id=tenant_id, + config=[x.to_basic_provider_config() for x in provider_controller.get_oauth_client_schema()], + cache=NoOpProviderCredentialCache(), + ) + + # Decrypt current credentials + current_credentials = encrypter.decrypt(db_provider.credentials) + + # Get OAuth client configuration + redirect_uri = ( + f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{db_provider.provider_id}/trigger/callback" + ) + system_credentials = cls.get_oauth_client(tenant_id, provider_id) + + # Refresh token + oauth_handler = OAuthHandler() + refreshed_credentials = oauth_handler.refresh_credentials( + tenant_id=tenant_id, + user_id=db_provider.user_id, + plugin_id=provider_id.plugin_id, + provider=provider_id.provider_name, + redirect_uri=redirect_uri, + system_credentials=system_credentials or {}, + credentials=current_credentials, + ) + + # Update credentials + db_provider.credentials = encrypter.encrypt(dict(refreshed_credentials.credentials)) + db_provider.expires_at = refreshed_credentials.expires_at + session.commit() + + # Clear cache + cache.delete() + + return { + "result": "success", + "expires_at": refreshed_credentials.expires_at, + } + + 
@classmethod + def get_oauth_client(cls, tenant_id: str, provider_id: TriggerProviderID) -> Optional[Mapping[str, Any]]: + """ + Get OAuth client configuration for a provider. + First tries tenant-level OAuth, then falls back to system OAuth. + + :param tenant_id: Tenant ID + :param provider_id: Provider identifier + :return: OAuth client configuration or None + """ + provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + with Session(db.engine, expire_on_commit=False) as session: + tenant_client: TriggerOAuthTenantClient | None = ( + session.query(TriggerOAuthTenantClient) + .filter_by( + tenant_id=tenant_id, + provider=provider_id.provider_name, + plugin_id=provider_id.plugin_id, + enabled=True, + ) + .first() + ) + + oauth_params: Mapping[str, Any] | None = None + if tenant_client: + encrypter, _ = create_provider_encrypter( + tenant_id=tenant_id, + config=[x.to_basic_provider_config() for x in provider_controller.get_oauth_client_schema()], + cache=NoOpProviderCredentialCache(), + ) + oauth_params = encrypter.decrypt(tenant_client.oauth_params) + return oauth_params + + is_verified = PluginService.is_plugin_verified(tenant_id, provider_id.plugin_id) + if not is_verified: + return oauth_params + + # Check for system-level OAuth client + system_client: TriggerOAuthSystemClient | None = ( + session.query(TriggerOAuthSystemClient) + .filter_by(plugin_id=provider_id.plugin_id, provider=provider_id.provider_name) + .first() + ) + + if system_client: + try: + oauth_params = decrypt_system_oauth_params(system_client.encrypted_oauth_params) + except Exception as e: + raise ValueError(f"Error decrypting system oauth params: {e}") + + return oauth_params + + @classmethod + def save_custom_oauth_client_params( + cls, + tenant_id: str, + provider_id: TriggerProviderID, + client_params: Optional[dict] = None, + enabled: Optional[bool] = None, + ) -> dict: + """ + Save or update custom OAuth client parameters for a trigger provider. + + :param tenant_id: Tenant ID + :param provider_id: Provider identifier + :param client_params: OAuth client parameters (client_id, client_secret, etc.) 
+ :param enabled: Enable/disable the custom OAuth client + :return: Success response + """ + if client_params is None and enabled is None: + return {"result": "success"} + + # Get provider controller to access schema + provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + + with Session(db.engine) as session: + # Find existing custom client params + custom_client = ( + session.query(TriggerOAuthTenantClient) + .filter_by( + tenant_id=tenant_id, + plugin_id=provider_id.plugin_id, + provider=provider_id.provider_name, + ) + .first() + ) + + # Create new record if doesn't exist + if custom_client is None: + custom_client = TriggerOAuthTenantClient( + tenant_id=tenant_id, + plugin_id=provider_id.plugin_id, + provider=provider_id.provider_name, + ) + session.add(custom_client) + + # Update client params if provided + if client_params is not None: + encrypter, cache = create_provider_encrypter( + tenant_id=tenant_id, + config=[x.to_basic_provider_config() for x in provider_controller.get_oauth_client_schema()], + cache=NoOpProviderCredentialCache(), + ) + + # Handle hidden values + original_params = encrypter.decrypt(custom_client.oauth_params) + new_params: dict = { + key: value if value != HIDDEN_VALUE else original_params.get(key, UNKNOWN_VALUE) + for key, value in client_params.items() + } + custom_client.encrypted_oauth_params = json.dumps(encrypter.encrypt(new_params)) + cache.delete() + + # Update enabled status if provided + if enabled is not None: + custom_client.enabled = enabled + + session.commit() + + return {"result": "success"} + + @classmethod + def get_custom_oauth_client_params(cls, tenant_id: str, provider_id: TriggerProviderID) -> dict: + """ + Get custom OAuth client parameters for a trigger provider. + + :param tenant_id: Tenant ID + :param provider_id: Provider identifier + :return: Masked OAuth client parameters + """ + with Session(db.engine) as session: + custom_client = ( + session.query(TriggerOAuthTenantClient) + .filter_by( + tenant_id=tenant_id, + plugin_id=provider_id.plugin_id, + provider=provider_id.provider_name, + ) + .first() + ) + + if custom_client is None: + return {} + + # Get provider controller to access schema + provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + + # Create encrypter to decrypt and mask values + encrypter, _ = create_provider_encrypter( + tenant_id=tenant_id, + config=[x.to_basic_provider_config() for x in provider_controller.get_oauth_client_schema()], + cache=NoOpProviderCredentialCache(), + ) + + return encrypter.mask_tool_credentials(encrypter.decrypt(custom_client.oauth_params)) + + @classmethod + def delete_custom_oauth_client_params(cls, tenant_id: str, provider_id: TriggerProviderID) -> dict: + """ + Delete custom OAuth client parameters for a trigger provider. + + :param tenant_id: Tenant ID + :param provider_id: Provider identifier + :return: Success response + """ + with Session(db.engine) as session: + session.query(TriggerOAuthTenantClient).filter_by( + tenant_id=tenant_id, + provider=provider_id.provider_name, + plugin_id=provider_id.plugin_id, + ).delete() + session.commit() + + return {"result": "success"} + + @classmethod + def is_oauth_custom_client_enabled(cls, tenant_id: str, provider_id: TriggerProviderID) -> bool: + """ + Check if custom OAuth client is enabled for a trigger provider. 
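        A hedged sketch tying these tenant-level OAuth client helpers together; the provider ID string and client parameter keys are placeholders that must match the provider's OAuth schema:

provider_id = TriggerProviderID("org/plugin/provider")  # placeholder identifier
TriggerProviderService.save_custom_oauth_client_params(
    tenant_id="tenant-id",
    provider_id=provider_id,
    client_params={"client_id": "abc", "client_secret": "xyz"},
    enabled=True,
)
masked = TriggerProviderService.get_custom_oauth_client_params("tenant-id", provider_id)
is_enabled = TriggerProviderService.is_oauth_custom_client_enabled("tenant-id", provider_id)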
+ + :param tenant_id: Tenant ID + :param provider_id: Provider identifier + :return: True if enabled, False otherwise + """ + with Session(db.engine, expire_on_commit=False) as session: + custom_client = ( + session.query(TriggerOAuthTenantClient) + .filter_by( + tenant_id=tenant_id, + plugin_id=provider_id.plugin_id, + provider=provider_id.provider_name, + enabled=True, + ) + .first() + ) + return custom_client is not None + + @classmethod + def get_subscription_by_endpoint(cls, endpoint_id: str) -> TriggerSubscription | None: + """ + Get a trigger subscription by the endpoint ID. + """ + with Session(db.engine, expire_on_commit=False) as session: + subscription = session.query(TriggerSubscription).filter_by(endpoint_id=endpoint_id).first() + if not subscription: + return None + provider_controller = TriggerManager.get_trigger_provider( + subscription.tenant_id, TriggerProviderID(subscription.provider_id) + ) + credential_encrypter, _ = create_trigger_provider_encrypter_for_subscription( + tenant_id=subscription.tenant_id, + controller=provider_controller, + subscription=subscription, + ) + subscription.credentials = credential_encrypter.decrypt(subscription.credentials) + + properties_encrypter, _ = create_trigger_provider_encrypter_for_properties( + tenant_id=subscription.tenant_id, + controller=provider_controller, + subscription=subscription, + ) + subscription.properties = properties_encrypter.decrypt(subscription.properties) + return subscription diff --git a/api/services/trigger/trigger_subscription_builder_service.py b/api/services/trigger/trigger_subscription_builder_service.py new file mode 100644 index 0000000000..7f9334e91e --- /dev/null +++ b/api/services/trigger/trigger_subscription_builder_service.py @@ -0,0 +1,319 @@ +import json +import logging +import uuid +from collections.abc import Mapping +from datetime import datetime +from typing import Any + +from flask import Request, Response + +from core.plugin.entities.plugin_daemon import CredentialType +from core.tools.errors import ToolProviderCredentialValidationError +from core.trigger.entities.api_entities import SubscriptionBuilderApiEntity +from core.trigger.entities.entities import ( + RequestLog, + SubscriptionBuilder, + SubscriptionBuilderUpdater, +) +from core.trigger.provider import PluginTriggerProviderController +from core.trigger.trigger_manager import TriggerManager +from core.trigger.utils.encryption import masked_credentials +from core.trigger.utils.endpoint import parse_endpoint_id +from extensions.ext_redis import redis_client +from models.provider_ids import TriggerProviderID +from services.trigger.trigger_provider_service import TriggerProviderService + +logger = logging.getLogger(__name__) + + +class TriggerSubscriptionBuilderService: + """Service for managing trigger providers and credentials""" + + ########################## + # Trigger provider + ########################## + __MAX_TRIGGER_PROVIDER_COUNT__ = 10 + + ########################## + # Builder endpoint + ########################## + __BUILDER_CACHE_EXPIRE_SECONDS__ = 30 * 60 + + __VALIDATION_REQUEST_CACHE_COUNT__ = 10 + __VALIDATION_REQUEST_CACHE_EXPIRE_SECONDS__ = 30 * 60 + + @classmethod + def encode_cache_key(cls, subscription_id: str) -> str: + return f"trigger:subscription:builder:{subscription_id}" + + @classmethod + def verify_trigger_subscription_builder( + cls, + tenant_id: str, + user_id: str, + provider_id: TriggerProviderID, + subscription_builder_id: str, + ) -> Mapping[str, Any]: + """Verify a trigger subscription builder""" + 
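+        # Added commentary (not part of the original change), summarizing the checks below:
+        # OAuth2 builders count as verified once credentials are present, API-key
+        # builders are validated against the provider plugin, and builders with any
+        # other credential type are accepted without further validation.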
provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + if not provider_controller: + raise ValueError(f"Provider {provider_id} not found") + + subscription_builder = cls.get_subscription_builder(subscription_builder_id) + if not subscription_builder: + raise ValueError(f"Subscription builder {subscription_builder_id} not found") + + if subscription_builder.credential_type == CredentialType.OAUTH2: + return {"verified": bool(subscription_builder.credentials)} + + if subscription_builder.credential_type == CredentialType.API_KEY: + credentials_to_validate = subscription_builder.credentials + try: + provider_controller.validate_credentials(user_id, credentials_to_validate) + except ToolProviderCredentialValidationError as e: + raise ValueError(f"Invalid credentials: {e}") + return {"verified": True} + + return {"verified": True} + + @classmethod + def build_trigger_subscription_builder( + cls, tenant_id: str, user_id: str, provider_id: TriggerProviderID, subscription_builder_id: str + ) -> None: + """Build a trigger subscription builder""" + provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + if not provider_controller: + raise ValueError(f"Provider {provider_id} not found") + + subscription_builder = cls.get_subscription_builder(subscription_builder_id) + if not subscription_builder: + raise ValueError(f"Subscription builder {subscription_builder_id} not found") + + if not subscription_builder.name: + raise ValueError("Subscription builder name is required") + + credential_type = CredentialType.of(subscription_builder.credential_type or CredentialType.UNAUTHORIZED.value) + if credential_type == CredentialType.UNAUTHORIZED: + # manually create + TriggerProviderService.add_trigger_subscription( + subscription_id=subscription_builder.id, + tenant_id=tenant_id, + user_id=user_id, + name=subscription_builder.name, + provider_id=provider_id, + endpoint_id=subscription_builder.endpoint_id, + parameters=subscription_builder.parameters, + properties=subscription_builder.properties, + credential_expires_at=subscription_builder.credential_expires_at or -1, + expires_at=subscription_builder.expires_at, + credentials=subscription_builder.credentials, + credential_type=credential_type, + ) + else: + # automatically create + subscription = TriggerManager.subscribe_trigger( + tenant_id=tenant_id, + user_id=user_id, + provider_id=provider_id, + endpoint=parse_endpoint_id(subscription_builder.endpoint_id), + parameters=subscription_builder.parameters, + credentials=subscription_builder.credentials, + ) + + TriggerProviderService.add_trigger_subscription( + subscription_id=subscription_builder.id, + tenant_id=tenant_id, + user_id=user_id, + name=subscription_builder.name, + provider_id=provider_id, + endpoint_id=subscription_builder.endpoint_id, + parameters=subscription_builder.parameters, + properties=subscription.properties, + credentials=subscription_builder.credentials, + credential_type=credential_type, + credential_expires_at=subscription_builder.credential_expires_at or -1, + expires_at=subscription_builder.expires_at, + ) + + cls.delete_trigger_subscription_builder(subscription_builder_id) + + @classmethod + def create_trigger_subscription_builder( + cls, + tenant_id: str, + user_id: str, + provider_id: TriggerProviderID, + credential_type: CredentialType, + ) -> SubscriptionBuilderApiEntity: + """ + Add a new trigger subscription validation. 
+ """ + provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + if not provider_controller: + raise ValueError(f"Provider {provider_id} not found") + + subscription_schema = provider_controller.get_subscription_schema() + subscription_id = str(uuid.uuid4()) + subscription_builder = SubscriptionBuilder( + id=subscription_id, + name=None, + endpoint_id=subscription_id, + tenant_id=tenant_id, + user_id=user_id, + provider_id=str(provider_id), + parameters=subscription_schema.get_default_parameters(), + properties=subscription_schema.get_default_properties(), + credentials={}, + credential_type=credential_type, + credential_expires_at=-1, + expires_at=-1, + ) + cache_key = cls.encode_cache_key(subscription_id) + redis_client.setex( + cache_key, cls.__BUILDER_CACHE_EXPIRE_SECONDS__, subscription_builder.model_dump_json() + ) + return cls.builder_to_api_entity(controller=provider_controller, entity=subscription_builder) + + @classmethod + def update_trigger_subscription_builder( + cls, + tenant_id: str, + provider_id: TriggerProviderID, + subscription_builder_id: str, + subscription_builder_updater: SubscriptionBuilderUpdater, + ) -> SubscriptionBuilderApiEntity: + """ + Update a trigger subscription validation. + """ + subscription_id = subscription_builder_id + provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id) + if not provider_controller: + raise ValueError(f"Provider {provider_id} not found") + + cache_key = cls.encode_cache_key(subscription_id) + subscription_builder_cache = cls.get_subscription_builder(subscription_builder_id) + if not subscription_builder_cache or subscription_builder_cache.tenant_id != tenant_id: + raise ValueError(f"Subscription {subscription_id} expired or not found") + + subscription_builder_updater.update(subscription_builder_cache) + + redis_client.setex( + cache_key, cls.__BUILDER_CACHE_EXPIRE_SECONDS__, subscription_builder_cache.model_dump_json() + ) + return cls.builder_to_api_entity(controller=provider_controller, entity=subscription_builder_cache) + + @classmethod + def builder_to_api_entity( + cls, controller: PluginTriggerProviderController, entity: SubscriptionBuilder + ) -> SubscriptionBuilderApiEntity: + credential_type = CredentialType.of(entity.credential_type or CredentialType.UNAUTHORIZED.value) + return SubscriptionBuilderApiEntity( + id=entity.id, + name=entity.name or "", + provider=entity.provider_id, + endpoint=parse_endpoint_id(entity.endpoint_id), + parameters=entity.parameters, + properties=entity.properties, + credential_type=credential_type, + credentials=masked_credentials( + schemas=controller.get_credentials_schema(credential_type), + credentials=entity.credentials, + ), + ) + + @classmethod + def delete_trigger_subscription_builder(cls, subscription_id: str) -> None: + """ + Delete a trigger subscription validation. + """ + cache_key = cls.encode_cache_key(subscription_id) + redis_client.delete(cache_key) + + @classmethod + def get_subscription_builder(cls, endpoint_id: str) -> SubscriptionBuilder | None: + """ + Get a trigger subscription by the endpoint ID. 
+ """ + cache_key = cls.encode_cache_key(endpoint_id) + subscription_cache = redis_client.get(cache_key) + if subscription_cache: + return SubscriptionBuilder.model_validate(json.loads(subscription_cache)) + + return None + + @classmethod + def append_log(cls, endpoint_id: str, request: Request, response: Response) -> None: + """Append validation request log to Redis.""" + log = RequestLog( + id=str(uuid.uuid4()), + endpoint=endpoint_id, + request={ + "method": request.method, + "url": request.url, + "headers": dict(request.headers), + "data": request.get_data(as_text=True), + }, + response={ + "status_code": response.status_code, + "headers": dict(response.headers), + "data": response.get_data(as_text=True), + }, + created_at=datetime.now(), + ) + + key = f"trigger:subscription:builder:logs:{endpoint_id}" + logs = json.loads(redis_client.get(key) or "[]") + logs.append(log.model_dump(mode="json")) + + # Keep last N logs + logs = logs[-cls.__VALIDATION_REQUEST_CACHE_COUNT__ :] + redis_client.setex(key, cls.__VALIDATION_REQUEST_CACHE_EXPIRE_SECONDS__, json.dumps(logs, default=str)) + + @classmethod + def list_logs(cls, endpoint_id: str) -> list[RequestLog]: + """List request logs for validation endpoint.""" + key = f"trigger:subscription:builder:logs:{endpoint_id}" + logs_json = redis_client.get(key) + if not logs_json: + return [] + return [RequestLog.model_validate(log) for log in json.loads(logs_json)] + + @classmethod + def process_builder_validation_endpoint(cls, endpoint_id: str, request: Request) -> Response | None: + """ + Process a temporary endpoint request. + + :param endpoint_id: The endpoint identifier + :param request: The Flask request object + :return: The Flask response object + """ + # check if validation endpoint exists + subscription_builder = cls.get_subscription_builder(endpoint_id) + if not subscription_builder: + return None + + # response to validation endpoint + controller = TriggerManager.get_trigger_provider( + subscription_builder.tenant_id, TriggerProviderID(subscription_builder.provider_id) + ) + response = controller.dispatch( + user_id=subscription_builder.user_id, + request=request, + subscription=subscription_builder.to_subscription(), + ) + # append the request log + cls.append_log(endpoint_id, request, response.response) + return response.response + + @classmethod + def get_subscription_builder_by_id(cls, subscription_builder_id: str) -> SubscriptionBuilderApiEntity: + """Get a trigger subscription builder API entity.""" + subscription_builder = cls.get_subscription_builder(subscription_builder_id) + if not subscription_builder: + raise ValueError(f"Subscription builder {subscription_builder_id} not found") + return cls.builder_to_api_entity( + controller=TriggerManager.get_trigger_provider( + subscription_builder.tenant_id, TriggerProviderID(subscription_builder.provider_id) + ), + entity=subscription_builder, + ) diff --git a/api/services/trigger_debug_service.py b/api/services/trigger_debug_service.py new file mode 100644 index 0000000000..956ce7132f --- /dev/null +++ b/api/services/trigger_debug_service.py @@ -0,0 +1,126 @@ +"""Trigger debug service for webhook debugging in draft workflows.""" + +import hashlib +import logging +from typing import Optional + +from pydantic import BaseModel +from redis import RedisError + +from extensions.ext_redis import redis_client + +logger = logging.getLogger(__name__) + +TRIGGER_DEBUG_EVENT_TTL = 300 + + +class TriggerDebugEvent(BaseModel): + subscription_id: str + request_id: str + timestamp: int + + +class 
TriggerDebugService: + """ + Redis-based trigger debug service with polling support. + Uses {tenant_id} hash tags for Redis Cluster compatibility. + """ + + # LUA_SELECT: Atomic poll or register for event + # KEYS[1] = trigger_debug_inbox:{tenant_id}:{address_id} + # KEYS[2] = trigger_debug_waiting_pool:{tenant_id}:{subscription_id}:{trigger} + # ARGV[1] = address_id + # compressed lua code, you can use LLM to uncompress it + LUA_SELECT = ( + "local v=redis.call('GET',KEYS[1]);" + "if v then redis.call('DEL',KEYS[1]);return v end;" + "redis.call('SADD',KEYS[2],ARGV[1]);" + f"redis.call('EXPIRE',KEYS[2],{TRIGGER_DEBUG_EVENT_TTL});" + "return false" + ) + + # LUA_DISPATCH: Dispatch event to all waiting addresses + # KEYS[1] = trigger_debug_waiting_pool:{tenant_id}:{subscription_id}:{trigger} + # ARGV[1] = tenant_id + # ARGV[2] = event_json + # compressed lua code, you can use LLM to uncompress it + LUA_DISPATCH = ( + "local a=redis.call('SMEMBERS',KEYS[1]);" + "if #a==0 then return 0 end;" + "redis.call('DEL',KEYS[1]);" + "for i=1,#a do " + f"redis.call('SET','trigger_debug_inbox:{{'..ARGV[1]..'}}'..':'..a[i],ARGV[2],'EX',{TRIGGER_DEBUG_EVENT_TTL});" + "end;" + "return #a" + ) + + @classmethod + def address(cls, tenant_id: str, user_id: str, app_id: str, node_id: str) -> str: + address_id = hashlib.sha1(f"{user_id}|{app_id}|{node_id}".encode()).hexdigest() + return f"trigger_debug_inbox:{{{tenant_id}}}:{address_id}" + + @classmethod + def waiting_pool(cls, tenant_id: str, subscription_id: str, trigger_name: str) -> str: + return f"trigger_debug_waiting_pool:{{{tenant_id}}}:{subscription_id}:{trigger_name}" + + @classmethod + def dispatch_debug_event( + cls, + tenant_id: str, + subscription_id: str, + triggers: list[str], + request_id: str, + timestamp: int, + ) -> int: + event_json = TriggerDebugEvent( + subscription_id=subscription_id, + request_id=request_id, + timestamp=timestamp, + ).model_dump_json() + + dispatched = 0 + if len(triggers) > 10: + logger.warning( + "Too many triggers to dispatch at once: %d triggers tenant: %s subscription: %s", + len(triggers), + tenant_id, + subscription_id, + ) + + for trigger_name in triggers: + try: + dispatched += redis_client.eval( + cls.LUA_DISPATCH, + 1, + cls.waiting_pool(tenant_id, subscription_id, trigger_name), + tenant_id, + event_json, + ) + except RedisError: + logger.exception("Failed to dispatch for trigger: %s", trigger_name) + return dispatched + + @classmethod + def poll_event( + cls, + tenant_id: str, + user_id: str, + app_id: str, + subscription_id: str, + node_id: str, + trigger_name: str, + ) -> Optional[TriggerDebugEvent]: + address_id = hashlib.sha1(f"{user_id}|{app_id}|{node_id}".encode()).hexdigest() + + try: + event = redis_client.eval( + cls.LUA_SELECT, + 2, + cls.address(tenant_id, user_id, app_id, node_id), + cls.waiting_pool(tenant_id, subscription_id, trigger_name), + address_id, + ) + return TriggerDebugEvent.model_validate_json(event) if event else None + except RedisError: + logger.exception("Failed to poll debug event") + return None diff --git a/api/services/trigger_service.py b/api/services/trigger_service.py new file mode 100644 index 0000000000..9370b14398 --- /dev/null +++ b/api/services/trigger_service.py @@ -0,0 +1,192 @@ +import logging +import time +import uuid + +from flask import Request, Response +from sqlalchemy import select +from sqlalchemy.orm import Session + +from core.plugin.utils.http_parser import serialize_request +from core.trigger.entities.entities import TriggerEntity, TriggerInputs +from 
core.trigger.trigger_manager import TriggerManager +from extensions.ext_database import db +from extensions.ext_storage import storage +from models.account import Account, TenantAccountJoin, TenantAccountRole +from models.enums import WorkflowRunTriggeredFrom +from models.provider_ids import TriggerProviderID +from models.trigger import TriggerSubscription +from models.workflow import Workflow, WorkflowPluginTrigger +from services.async_workflow_service import AsyncWorkflowService +from services.trigger.trigger_provider_service import TriggerProviderService +from services.workflow.entities import PluginTriggerData, PluginTriggerDispatchData + +logger = logging.getLogger(__name__) + + +class TriggerService: + __TEMPORARY_ENDPOINT_EXPIRE_MS__ = 5 * 60 * 1000 + __ENDPOINT_REQUEST_CACHE_COUNT__ = 10 + __ENDPOINT_REQUEST_CACHE_EXPIRE_MS__ = 5 * 60 * 1000 + + __WEBHOOK_NODE_CACHE_KEY__ = "webhook_nodes" + + @classmethod + def dispatch_triggered_workflows( + cls, subscription: TriggerSubscription, trigger: TriggerEntity, request_id: str + ) -> int: + """Process triggered workflows. + + Args: + subscription: The trigger subscription + trigger: The trigger entity that was activated + request_id: The ID of the stored request in storage system + """ + + subscribers = cls.get_subscriber_triggers( + tenant_id=subscription.tenant_id, subscription_id=subscription.id, trigger_name=trigger.identity.name + ) + if not subscribers: + logger.warning( + "No workflows found for trigger '%s' in subscription '%s'", + trigger.identity.name, + subscription.id, + ) + return 0 + + with Session(db.engine) as session: + # Get tenant owner for workflow execution + tenant_owner = session.scalar( + select(Account) + .join(TenantAccountJoin, TenantAccountJoin.account_id == Account.id) + .where( + TenantAccountJoin.tenant_id == subscription.tenant_id, + TenantAccountJoin.role == TenantAccountRole.OWNER, + ) + ) + + if not tenant_owner: + logger.error("Tenant owner not found for tenant %s", subscription.tenant_id) + return 0 + dispatched_count = 0 + for plugin_trigger in subscribers: + # Get workflow + workflow = session.scalar( + select(Workflow) + .where( + Workflow.app_id == plugin_trigger.app_id, + Workflow.version != Workflow.VERSION_DRAFT, + ) + .order_by(Workflow.created_at.desc()) + ) + + if not workflow: + logger.error( + "Workflow not found for app %s", + plugin_trigger.app_id, + ) + continue + + # Create trigger inputs using new structure + trigger_inputs = TriggerInputs.from_trigger_entity( + request_id=request_id, subscription_id=subscription.id, trigger=trigger + ) + + # Create trigger data for async execution + trigger_data = PluginTriggerData( + app_id=plugin_trigger.app_id, + tenant_id=subscription.tenant_id, + workflow_id=workflow.id, + root_node_id=plugin_trigger.node_id, + trigger_type=WorkflowRunTriggeredFrom.PLUGIN, + plugin_id=subscription.provider_id, + endpoint_id=subscription.endpoint_id, + inputs=trigger_inputs.to_dict(), + ) + + # Trigger async workflow + try: + AsyncWorkflowService.trigger_workflow_async(session, tenant_owner, trigger_data) + dispatched_count += 1 + logger.info( + "Triggered workflow for app %s with trigger %s", + plugin_trigger.app_id, + trigger.identity.name, + ) + except Exception: + logger.exception( + "Failed to trigger workflow for app %s", + plugin_trigger.app_id, + ) + + return dispatched_count + + @classmethod + def process_endpoint(cls, endpoint_id: str, request: Request) -> Response | None: + """ + Extract and process data from incoming endpoint request. 
+ + Args: + endpoint_id: Endpoint ID + request: Request + """ + timestamp = int(time.time()) + subscription = TriggerProviderService.get_subscription_by_endpoint(endpoint_id) + if not subscription: + return None + + provider_id = TriggerProviderID(subscription.provider_id) + controller = TriggerManager.get_trigger_provider(subscription.tenant_id, provider_id) + if not controller: + return None + + dispatch_response = controller.dispatch( + user_id=subscription.user_id, request=request, subscription=subscription.to_entity() + ) + + if dispatch_response.triggers: + request_id = f"trigger_request_{uuid.uuid4().hex}" + serialized_request = serialize_request(request) + storage.save(f"triggers/{request_id}", serialized_request) + + # Production dispatch + from tasks.trigger_processing_tasks import dispatch_triggered_workflows_async + plugin_trigger_dispatch_data = PluginTriggerDispatchData( + endpoint_id=endpoint_id, + provider_id=subscription.provider_id, + subscription_id=subscription.id, + timestamp=timestamp, + triggers=list(dispatch_response.triggers), + request_id=request_id, + ) + dispatch_data = plugin_trigger_dispatch_data.model_dump(mode="json") + dispatch_triggered_workflows_async.delay(dispatch_data) + + logger.info( + "Queued async dispatching for %d triggers on endpoint %s with request_id %s", + len(dispatch_response.triggers), + endpoint_id, + request_id, + ) + return dispatch_response.response + + @classmethod + def get_subscriber_triggers( + cls, tenant_id: str, subscription_id: str, trigger_name: str + ) -> list[WorkflowPluginTrigger]: + """ + Get WorkflowPluginTriggers for a subscription and trigger. + + Args: + tenant_id: Tenant ID + subscription_id: Subscription ID + trigger_name: Trigger name + """ + with Session(db.engine, expire_on_commit=False) as session: + subscribers = session.scalars( + select(WorkflowPluginTrigger).where( + WorkflowPluginTrigger.tenant_id == tenant_id, + WorkflowPluginTrigger.subscription_id == subscription_id, + WorkflowPluginTrigger.trigger_name == trigger_name, + ) + ).all() + return list(subscribers) diff --git a/api/services/webhook_service.py b/api/services/webhook_service.py new file mode 100644 index 0000000000..bf699c58ac --- /dev/null +++ b/api/services/webhook_service.py @@ -0,0 +1,697 @@ +import json +import logging +import mimetypes +import secrets +from collections.abc import Mapping +from typing import Any + +from flask import request +from pydantic import BaseModel +from sqlalchemy import select +from sqlalchemy.orm import Session +from werkzeug.exceptions import RequestEntityTooLarge + +from configs import dify_config +from core.file.models import FileTransferMethod +from core.tools.tool_file_manager import ToolFileManager +from core.variables.types import SegmentType +from core.workflow.enums import NodeType +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from factories import file_factory +from models.account import Account, TenantAccountJoin, TenantAccountRole +from models.enums import WorkflowRunTriggeredFrom +from models.model import App +from models.workflow import AppTrigger, AppTriggerStatus, AppTriggerType, Workflow, WorkflowWebhookTrigger +from services.async_workflow_service import AsyncWorkflowService +from services.workflow.entities import TriggerData + +logger = logging.getLogger(__name__) + + +class WebhookService: + """Service for handling webhook operations.""" + + __WEBHOOK_NODE_CACHE_KEY__ = "webhook_nodes" + MAX_WEBHOOK_NODES_PER_WORKFLOW = 5 # Maximum allowed webhook nodes 
per workflow + + @classmethod + def get_webhook_trigger_and_workflow( + cls, webhook_id: str + ) -> tuple[WorkflowWebhookTrigger, Workflow, Mapping[str, Any]]: + """Get webhook trigger, workflow, and node configuration.""" + with Session(db.engine) as session: + # Get webhook trigger + webhook_trigger = ( + session.query(WorkflowWebhookTrigger).filter(WorkflowWebhookTrigger.webhook_id == webhook_id).first() + ) + if not webhook_trigger: + raise ValueError(f"Webhook not found: {webhook_id}") + + # Check if the corresponding AppTrigger is enabled + app_trigger = ( + session.query(AppTrigger) + .filter( + AppTrigger.app_id == webhook_trigger.app_id, + AppTrigger.node_id == webhook_trigger.node_id, + AppTrigger.trigger_type == AppTriggerType.TRIGGER_WEBHOOK, + ) + .first() + ) + + if not app_trigger: + raise ValueError(f"App trigger not found for webhook {webhook_id}") + + if app_trigger.status != AppTriggerStatus.ENABLED: + raise ValueError(f"Webhook trigger is disabled for webhook {webhook_id}") + + # Get workflow + workflow = ( + session.query(Workflow) + .filter( + Workflow.app_id == webhook_trigger.app_id, + Workflow.version != Workflow.VERSION_DRAFT, + ) + .order_by(Workflow.created_at.desc()) + .first() + ) + if not workflow: + raise ValueError(f"Workflow not found for app {webhook_trigger.app_id}") + + node_config = workflow.get_node_config_by_id(webhook_trigger.node_id) + + return webhook_trigger, workflow, node_config + + @classmethod + def extract_webhook_data(cls, webhook_trigger: WorkflowWebhookTrigger) -> dict[str, Any]: + """Extract and process data from incoming webhook request.""" + cls._validate_content_length() + + data = { + "method": request.method, + "headers": dict(request.headers), + "query_params": dict(request.args), + "body": {}, + "files": {}, + } + + # Extract and normalize content type + content_type = cls._extract_content_type(dict(request.headers)) + + # Route to appropriate extractor based on content type + extractors = { + "application/json": cls._extract_json_body, + "application/x-www-form-urlencoded": cls._extract_form_body, + "multipart/form-data": lambda: cls._extract_multipart_body(webhook_trigger), + "application/octet-stream": lambda: cls._extract_octet_stream_body(webhook_trigger), + "text/plain": cls._extract_text_body, + } + + extractor = extractors.get(content_type) + if not extractor: + # Default to text/plain for unknown content types + logger.warning("Unknown Content-Type: %s, treating as text/plain", content_type) + extractor = cls._extract_text_body + + # Extract body and files + body_data, files_data = extractor() + data["body"] = body_data + data["files"] = files_data + + return data + + @classmethod + def _validate_content_length(cls) -> None: + """Validate request content length against maximum allowed size.""" + content_length = request.content_length + if content_length and content_length > dify_config.WEBHOOK_REQUEST_BODY_MAX_SIZE: + raise RequestEntityTooLarge( + f"Webhook request too large: {content_length} bytes exceeds maximum allowed size " + f"of {dify_config.WEBHOOK_REQUEST_BODY_MAX_SIZE} bytes" + ) + + @classmethod + def _extract_json_body(cls) -> tuple[dict[str, Any], dict[str, Any]]: + """Extract JSON body from request.""" + try: + body = request.get_json() or {} + except Exception: + logger.warning("Failed to parse JSON body") + body = {} + return body, {} + + @classmethod + def _extract_form_body(cls) -> tuple[dict[str, Any], dict[str, Any]]: + """Extract form-urlencoded body from request.""" + return dict(request.form), {} 
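+    # Illustrative sketch (added commentary, not part of the original change): for a
+    # JSON request such as
+    #   POST /webhook/<webhook_id>   Content-Type: application/json   {"user": "a", "count": 2}
+    # extract_webhook_data() above would return roughly:
+    #   {"method": "POST", "headers": {...}, "query_params": {},
+    #    "body": {"user": "a", "count": 2}, "files": {}}
+    # which is the shape the _validate_* helpers below operate on.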
+ + @classmethod + def _extract_multipart_body(cls, webhook_trigger: WorkflowWebhookTrigger) -> tuple[dict[str, Any], dict[str, Any]]: + """Extract multipart/form-data body and files from request.""" + body = dict(request.form) + files = cls._process_file_uploads(request.files, webhook_trigger) if request.files else {} + return body, files + + @classmethod + def _extract_octet_stream_body( + cls, webhook_trigger: WorkflowWebhookTrigger + ) -> tuple[dict[str, Any], dict[str, Any]]: + """Extract binary data as file from request.""" + try: + file_content = request.get_data() + if file_content: + file_obj = cls._create_file_from_binary(file_content, "application/octet-stream", webhook_trigger) + return {"raw": file_obj.to_dict()}, {} + else: + return {"raw": None}, {} + except Exception: + logger.exception("Failed to process octet-stream data") + return {"raw": None}, {} + + @classmethod + def _extract_text_body(cls) -> tuple[dict[str, Any], dict[str, Any]]: + """Extract text/plain body from request.""" + try: + body = {"raw": request.get_data(as_text=True)} + except Exception: + logger.warning("Failed to extract text body") + body = {"raw": ""} + return body, {} + + @classmethod + def _process_file_uploads(cls, files, webhook_trigger: WorkflowWebhookTrigger) -> dict[str, Any]: + """Process file uploads using ToolFileManager.""" + processed_files = {} + + for name, file in files.items(): + if file and file.filename: + try: + file_content = file.read() + mimetype = file.content_type or mimetypes.guess_type(file.filename)[0] or "application/octet-stream" + file_obj = cls._create_file_from_binary(file_content, mimetype, webhook_trigger) + processed_files[name] = file_obj.to_dict() + except Exception: + logger.exception("Failed to process file upload '%s'", name) + # Continue processing other files + + return processed_files + + @classmethod + def _create_file_from_binary( + cls, file_content: bytes, mimetype: str, webhook_trigger: WorkflowWebhookTrigger + ) -> Any: + """Create a file object from binary content using ToolFileManager.""" + tool_file_manager = ToolFileManager() + + # Create file using ToolFileManager + tool_file = tool_file_manager.create_file_by_raw( + user_id=webhook_trigger.created_by, + tenant_id=webhook_trigger.tenant_id, + conversation_id=None, + file_binary=file_content, + mimetype=mimetype, + ) + + # Build File object + mapping = { + "tool_file_id": tool_file.id, + "transfer_method": FileTransferMethod.TOOL_FILE.value, + } + return file_factory.build_from_mapping( + mapping=mapping, + tenant_id=webhook_trigger.tenant_id, + ) + + @classmethod + def validate_webhook_request(cls, webhook_data: dict[str, Any], node_config: Mapping[str, Any]) -> dict[str, Any]: + """Validate webhook request against node configuration.""" + if node_config is None: + return cls._validation_error("Validation failed: Invalid node configuration") + + node_data = node_config.get("data", {}) + + # Early validation of HTTP method and content-type + validation_result = cls._validate_http_metadata(webhook_data, node_data) + if not validation_result["valid"]: + return validation_result + + # Validate headers and query params + validation_result = cls._validate_headers_and_params(webhook_data, node_data) + if not validation_result["valid"]: + return validation_result + + # Validate body based on content type + configured_content_type = node_data.get("content_type", "application/json").lower() + return cls._validate_body_by_content_type(webhook_data, node_data, configured_content_type) + + @classmethod + def 
_validate_http_metadata(cls, webhook_data: dict[str, Any], node_data: dict[str, Any]) -> dict[str, Any]: + """Validate HTTP method and content-type.""" + # Validate HTTP method + configured_method = node_data.get("method", "get").upper() + request_method = webhook_data["method"].upper() + if configured_method != request_method: + return cls._validation_error(f"HTTP method mismatch. Expected {configured_method}, got {request_method}") + + # Validate Content-type + configured_content_type = node_data.get("content_type", "application/json").lower() + request_content_type = cls._extract_content_type(webhook_data["headers"]) + + if configured_content_type != request_content_type: + return cls._validation_error( + f"Content-type mismatch. Expected {configured_content_type}, got {request_content_type}" + ) + + return {"valid": True} + + @classmethod + def _extract_content_type(cls, headers: dict[str, Any]) -> str: + """Extract and normalize content-type from headers.""" + content_type = headers.get("Content-Type", "").lower() + if not content_type: + content_type = headers.get("content-type", "application/json").lower() + # Extract the main content type (ignore parameters like boundary) + return content_type.split(";")[0].strip() + + @classmethod + def _validate_headers_and_params(cls, webhook_data: dict[str, Any], node_data: dict[str, Any]) -> dict[str, Any]: + """Validate required headers and query parameters.""" + # Validate required headers (case-insensitive) + webhook_headers_lower = {k.lower(): v for k, v in webhook_data["headers"].items()} + for header in node_data.get("headers", []): + if header.get("required", False): + header_name = header.get("name", "") + if header_name.lower() not in webhook_headers_lower: + return cls._validation_error(f"Required header missing: {header_name}") + + # Validate required query parameters + for param in node_data.get("params", []): + if param.get("required", False): + param_name = param.get("name", "") + if param_name not in webhook_data["query_params"]: + return cls._validation_error(f"Required query parameter missing: {param_name}") + + return {"valid": True} + + @classmethod + def _validate_body_by_content_type( + cls, webhook_data: dict[str, Any], node_data: dict[str, Any], content_type: str + ) -> dict[str, Any]: + """Route body validation to appropriate validator based on content type.""" + validators = { + "text/plain": cls._validate_text_plain_body, + "application/octet-stream": cls._validate_octet_stream_body, + "application/json": cls._validate_json_body, + "application/x-www-form-urlencoded": cls._validate_form_urlencoded_body, + "multipart/form-data": cls._validate_multipart_body, + } + + validator = validators.get(content_type) + if not validator: + raise ValueError(f"Unsupported Content-Type for validation: {content_type}") + + return validator(webhook_data, node_data) + + @classmethod + def _validate_text_plain_body(cls, webhook_data: dict[str, Any], node_data: dict[str, Any]) -> dict[str, Any]: + """Validate text/plain body.""" + body_params = node_data.get("body", []) + if body_params and any(param.get("required", False) for param in body_params): + body_data = webhook_data.get("body", {}) + raw_content = body_data.get("raw", "") + if not raw_content or not isinstance(raw_content, str): + return cls._validation_error("Required body content missing for text/plain request") + return {"valid": True} + + @classmethod + def _validate_octet_stream_body(cls, webhook_data: dict[str, Any], node_data: dict[str, Any]) -> dict[str, Any]: + 
"""Validate application/octet-stream body.""" + body_params = node_data.get("body", []) + if body_params and any(param.get("required", False) for param in body_params): + body_data = webhook_data.get("body", {}) + raw_content = body_data.get("raw", "") + if not raw_content or not isinstance(raw_content, bytes): + return cls._validation_error("Required body content missing for application/octet-stream request") + return {"valid": True} + + @classmethod + def _validate_json_body(cls, webhook_data: dict[str, Any], node_data: dict[str, Any]) -> dict[str, Any]: + """Validate application/json body.""" + body_params = node_data.get("body", []) + body_data = webhook_data.get("body", {}) + + for body_param in body_params: + param_name = body_param.get("name", "") + param_type = body_param.get("type", SegmentType.STRING) + is_required = body_param.get("required", False) + + param_exists = param_name in body_data + + if is_required and not param_exists: + return cls._validation_error(f"Required body parameter missing: {param_name}") + + if param_exists: + param_value = body_data[param_name] + validation_result = cls._validate_json_parameter_type(param_name, param_value, param_type) + if not validation_result["valid"]: + return validation_result + + return {"valid": True} + + @classmethod + def _validate_form_urlencoded_body(cls, webhook_data: dict[str, Any], node_data: dict[str, Any]) -> dict[str, Any]: + """Validate application/x-www-form-urlencoded body.""" + body_params = node_data.get("body", []) + body_data = webhook_data.get("body", {}) + + for body_param in body_params: + param_name = body_param.get("name", "") + param_type = body_param.get("type", SegmentType.STRING) + is_required = body_param.get("required", False) + + param_exists = param_name in body_data + if is_required and not param_exists: + return cls._validation_error(f"Required body parameter missing: {param_name}") + + if param_exists and param_type != SegmentType.STRING: + param_value = body_data[param_name] + validation_result = cls._validate_form_parameter_type(param_name, param_value, param_type) + if not validation_result["valid"]: + return validation_result + + return {"valid": True} + + @classmethod + def _validate_multipart_body(cls, webhook_data: dict[str, Any], node_data: dict[str, Any]) -> dict[str, Any]: + """Validate multipart/form-data body.""" + body_params = node_data.get("body", []) + body_data = webhook_data.get("body", {}) + + for body_param in body_params: + param_name = body_param.get("name", "") + param_type = body_param.get("type", SegmentType.STRING) + is_required = body_param.get("required", False) + + if param_type == SegmentType.FILE: + file_obj = webhook_data.get("files", {}).get(param_name) + if is_required and not file_obj: + return cls._validation_error(f"Required file parameter missing: {param_name}") + else: + param_exists = param_name in body_data + + if is_required and not param_exists: + return cls._validation_error(f"Required body parameter missing: {param_name}") + + if param_exists and param_type != SegmentType.STRING: + param_value = body_data[param_name] + validation_result = cls._validate_form_parameter_type(param_name, param_value, param_type) + if not validation_result["valid"]: + return validation_result + + return {"valid": True} + + @classmethod + def _validation_error(cls, error_message: str) -> dict[str, Any]: + """Create a standard validation error response.""" + return {"valid": False, "error": error_message} + + @classmethod + def _validate_json_parameter_type(cls, param_name: 
str, param_value: Any, param_type: str) -> dict[str, Any]: + """Validate JSON parameter type against expected type.""" + try: + # Define type validators + type_validators = { + SegmentType.STRING: (lambda v: isinstance(v, str), "string"), + SegmentType.NUMBER: (lambda v: isinstance(v, (int, float)), "number"), + SegmentType.BOOLEAN: (lambda v: isinstance(v, bool), "boolean"), + SegmentType.OBJECT: (lambda v: isinstance(v, dict), "object"), + SegmentType.ARRAY_STRING: ( + lambda v: isinstance(v, list) and all(isinstance(item, str) for item in v), + "array of strings", + ), + SegmentType.ARRAY_NUMBER: ( + lambda v: isinstance(v, list) and all(isinstance(item, (int, float)) for item in v), + "array of numbers", + ), + SegmentType.ARRAY_BOOLEAN: ( + lambda v: isinstance(v, list) and all(isinstance(item, bool) for item in v), + "array of booleans", + ), + SegmentType.ARRAY_OBJECT: ( + lambda v: isinstance(v, list) and all(isinstance(item, dict) for item in v), + "array of objects", + ), + } + + # Get validator for the type + validator_info = type_validators.get(SegmentType(param_type)) + if not validator_info: + logger.warning("Unknown parameter type: %s for parameter %s", param_type, param_name) + return {"valid": True} + + validator, expected_type = validator_info + + # Validate the parameter + if not validator(param_value): + # Check if it's an array type first + if param_type.startswith("array") and not isinstance(param_value, list): + actual_type = type(param_value).__name__ + error_msg = f"Parameter '{param_name}' must be an array, got {actual_type}" + else: + actual_type = type(param_value).__name__ + # Format error message based on expected type + if param_type.startswith("array"): + error_msg = f"Parameter '{param_name}' must be an {expected_type}" + elif expected_type in ["string", "number", "boolean"]: + error_msg = f"Parameter '{param_name}' must be a {expected_type}, got {actual_type}" + else: + error_msg = f"Parameter '{param_name}' must be an {expected_type}, got {actual_type}" + + return {"valid": False, "error": error_msg} + + return {"valid": True} + + except Exception: + logger.exception("Type validation error for parameter %s", param_name) + return {"valid": False, "error": f"Type validation failed for parameter '{param_name}'"} + + @classmethod + def _validate_form_parameter_type(cls, param_name: str, param_value: str, param_type: str) -> dict[str, Any]: + """Validate form parameter type against expected type. 
Form data are always strings but can be converted.""" + try: + # Define form type converters and validators + form_validators = { + SegmentType.STRING: (lambda _: True, None), # String is always valid + SegmentType.NUMBER: (lambda v: cls._can_convert_to_number(v), "a valid number"), + SegmentType.BOOLEAN: ( + lambda v: v.lower() in ["true", "false", "1", "0", "yes", "no"], + "a boolean value", + ), + } + + # Get validator for the type + validator_info = form_validators.get(SegmentType(param_type)) + if not validator_info: + # Unsupported type for form data + return { + "valid": False, + "error": f"Parameter '{param_name}' type '{param_type}' is not supported for form data.", + } + + validator, expected_format = validator_info + + # Validate the parameter + if not validator(param_value): + return { + "valid": False, + "error": f"Parameter '{param_name}' must be {expected_format}, got '{param_value}'", + } + + return {"valid": True} + + except Exception: + logger.exception("Form type validation error for parameter %s", param_name) + return {"valid": False, "error": f"Form type validation failed for parameter '{param_name}'"} + + @classmethod + def _can_convert_to_number(cls, value: str) -> bool: + """Check if a string can be converted to a number.""" + try: + float(value) + return True + except ValueError: + return False + + @classmethod + def trigger_workflow_execution( + cls, webhook_trigger: WorkflowWebhookTrigger, webhook_data: dict[str, Any], workflow: Workflow + ) -> None: + """Trigger workflow execution via AsyncWorkflowService.""" + try: + with Session(db.engine) as session: + # Get tenant owner as the user for webhook execution + tenant_owner = session.scalar( + select(Account) + .join(TenantAccountJoin, TenantAccountJoin.account_id == Account.id) + .where( + TenantAccountJoin.tenant_id == webhook_trigger.tenant_id, + TenantAccountJoin.role == TenantAccountRole.OWNER, + ) + ) + + if not tenant_owner: + logger.error("Tenant owner not found for tenant %s", webhook_trigger.tenant_id) + raise ValueError("Tenant owner not found") + + # Prepare inputs for the webhook node + # The webhook node expects webhook_data in the inputs + workflow_inputs = { + "webhook_data": webhook_data, + "webhook_headers": webhook_data.get("headers", {}), + "webhook_query_params": webhook_data.get("query_params", {}), + "webhook_body": webhook_data.get("body", {}), + } + + # Create trigger data + trigger_data = TriggerData( + app_id=webhook_trigger.app_id, + workflow_id=workflow.id, + root_node_id=webhook_trigger.node_id, # Start from the webhook node + trigger_type=WorkflowRunTriggeredFrom.WEBHOOK, + inputs=workflow_inputs, + tenant_id=webhook_trigger.tenant_id, + ) + + # Trigger workflow execution asynchronously + AsyncWorkflowService.trigger_workflow_async( + session, + tenant_owner, + trigger_data, + ) + + except Exception: + logger.exception("Failed to trigger workflow for webhook %s", webhook_trigger.webhook_id) + raise + + @classmethod + def generate_webhook_response(cls, node_config: Mapping[str, Any]) -> tuple[dict[str, Any], int]: + """Generate HTTP response based on node configuration.""" + node_data = node_config.get("data", {}) + + # Get configured status code and response body + status_code = node_data.get("status_code", 200) + response_body = node_data.get("response_body", "") + + # Parse response body as JSON if it's valid JSON, otherwise return as text + try: + if response_body: + try: + response_data = ( + json.loads(response_body) + if response_body.strip().startswith(("{", "[")) + else 
{"message": response_body} + ) + except json.JSONDecodeError: + response_data = {"message": response_body} + else: + response_data = {"status": "success", "message": "Webhook processed successfully"} + except: + response_data = {"message": response_body or "Webhook processed successfully"} + + return response_data, status_code + + @classmethod + def sync_webhook_relationships(cls, app: App, workflow: Workflow): + """ + Sync webhook relationships in DB. + + 1. Check if the workflow has any webhook trigger nodes + 2. Fetch the nodes from DB, see if there were any webhook records already + 3. Diff the nodes and the webhook records, create/update/delete the webhook records as needed + + Approach: + Frequent DB operations may cause performance issues, using Redis to cache it instead. + If any record exists, cache it. + + Limits: + - Maximum 5 webhook nodes per workflow + """ + + class Cache(BaseModel): + """ + Cache model for webhook nodes + """ + + record_id: str + node_id: str + webhook_id: str + + nodes_id_in_graph = [node_id for node_id, _ in workflow.walk_nodes(NodeType.TRIGGER_WEBHOOK)] + + # Check webhook node limit + if len(nodes_id_in_graph) > cls.MAX_WEBHOOK_NODES_PER_WORKFLOW: + raise ValueError( + f"Workflow exceeds maximum webhook node limit. " + f"Found {len(nodes_id_in_graph)} webhook nodes, maximum allowed is {cls.MAX_WEBHOOK_NODES_PER_WORKFLOW}" + ) + + not_found_in_cache: list[str] = [] + for node_id in nodes_id_in_graph: + # firstly check if the node exists in cache + if not redis_client.get(f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:{node_id}"): + not_found_in_cache.append(node_id) + continue + + with Session(db.engine) as session: + try: + # lock the concurrent webhook trigger creation + redis_client.lock(f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:apps:{app.id}:lock", timeout=10) + # fetch the non-cached nodes from DB + all_records = session.scalars( + select(WorkflowWebhookTrigger).where( + WorkflowWebhookTrigger.app_id == app.id, + WorkflowWebhookTrigger.tenant_id == app.tenant_id, + ) + ).all() + + nodes_id_in_db = {node.node_id: node for node in all_records} + + # get the nodes not found both in cache and DB + nodes_not_found = [node_id for node_id in not_found_in_cache if node_id not in nodes_id_in_db] + + # create new webhook records + for node_id in nodes_not_found: + webhook_record = WorkflowWebhookTrigger( + app_id=app.id, + tenant_id=app.tenant_id, + node_id=node_id, + webhook_id=cls.generate_webhook_id(), + created_by=app.created_by, + ) + session.add(webhook_record) + session.flush() + cache = Cache(record_id=webhook_record.id, node_id=node_id, webhook_id=webhook_record.webhook_id) + redis_client.set(f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:{node_id}", cache.model_dump_json(), ex=60 * 60) + session.commit() + + # delete the nodes not found in the graph + for node_id in nodes_id_in_db: + if node_id not in nodes_id_in_graph: + session.delete(nodes_id_in_db[node_id]) + redis_client.delete(f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:{node_id}") + session.commit() + except Exception: + logger.exception("Failed to sync webhook relationships for app %s", app.id) + raise + finally: + redis_client.delete(f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:apps:{app.id}:lock") + + @classmethod + def generate_webhook_id(cls) -> str: + """ + Generate unique 24-character webhook ID + + Deduplication is not needed, DB already has unique constraint on webhook_id. 
+ """ + # Generate 24-character random string + return secrets.token_urlsafe(18)[:24] # token_urlsafe gives base64url, take first 24 chars diff --git a/api/services/workflow/entities.py b/api/services/workflow/entities.py new file mode 100644 index 0000000000..0df407c986 --- /dev/null +++ b/api/services/workflow/entities.py @@ -0,0 +1,125 @@ +""" +Pydantic models for async workflow trigger system. +""" + +from collections.abc import Mapping, Sequence +from enum import StrEnum +from typing import Any, Optional + +from pydantic import BaseModel, ConfigDict, Field + +from models.enums import WorkflowRunTriggeredFrom + + +class AsyncTriggerStatus(StrEnum): + """Async trigger execution status""" + + COMPLETED = "completed" + FAILED = "failed" + TIMEOUT = "timeout" + + +class TriggerData(BaseModel): + """Base trigger data model for async workflow execution""" + + app_id: str + tenant_id: str + workflow_id: Optional[str] = None + root_node_id: str + inputs: Mapping[str, Any] + files: Sequence[Mapping[str, Any]] = Field(default_factory=list) + trigger_type: WorkflowRunTriggeredFrom + + model_config = ConfigDict(use_enum_values=True) + + +class WebhookTriggerData(TriggerData): + """Webhook-specific trigger data""" + + trigger_type: WorkflowRunTriggeredFrom = WorkflowRunTriggeredFrom.WEBHOOK + webhook_url: str + headers: Mapping[str, str] = Field(default_factory=dict) + method: str = "POST" + + +class ScheduleTriggerData(TriggerData): + """Schedule-specific trigger data""" + + trigger_type: WorkflowRunTriggeredFrom = WorkflowRunTriggeredFrom.SCHEDULE + schedule_id: str + cron_expression: str + + +class PluginTriggerData(TriggerData): + """Plugin webhook trigger data""" + + trigger_type: WorkflowRunTriggeredFrom = WorkflowRunTriggeredFrom.PLUGIN + plugin_id: str + endpoint_id: str + + +class PluginTriggerDispatchData(BaseModel): + """Plugin trigger dispatch data for Celery tasks""" + + endpoint_id: str + provider_id: str + subscription_id: str + timestamp: int + triggers: list[str] + request_id: str + + +class WorkflowTaskData(BaseModel): + """Lightweight data structure for Celery workflow tasks""" + + workflow_trigger_log_id: str # Primary tracking ID - all other data can be fetched from DB + + model_config = ConfigDict(arbitrary_types_allowed=True) + + +class AsyncTriggerExecutionResult(BaseModel): + """Result from async trigger-based workflow execution""" + + execution_id: str + status: AsyncTriggerStatus + result: Optional[Mapping[str, Any]] = None + error: Optional[str] = None + elapsed_time: Optional[float] = None + total_tokens: Optional[int] = None + + model_config = ConfigDict(use_enum_values=True) + + +class AsyncTriggerResponse(BaseModel): + """Response from triggering an async workflow""" + + workflow_trigger_log_id: str + task_id: str + status: str + queue: str + + model_config = ConfigDict(use_enum_values=True) + + +class TriggerLogResponse(BaseModel): + """Response model for trigger log data""" + + id: str + tenant_id: str + app_id: str + workflow_id: str + trigger_type: WorkflowRunTriggeredFrom + status: str + queue_name: str + retry_count: int + celery_task_id: Optional[str] = None + workflow_run_id: Optional[str] = None + error: Optional[str] = None + outputs: Optional[str] = None + elapsed_time: Optional[float] = None + total_tokens: Optional[int] = None + created_at: Optional[str] = None + triggered_at: Optional[str] = None + finished_at: Optional[str] = None + + model_config = ConfigDict(use_enum_values=True) diff --git a/api/services/workflow/queue_dispatcher.py 
b/api/services/workflow/queue_dispatcher.py new file mode 100644 index 0000000000..158e91dbc9 --- /dev/null +++ b/api/services/workflow/queue_dispatcher.py @@ -0,0 +1,151 @@ +""" +Queue dispatcher system for async workflow execution. + +Implements an ABC-based pattern for handling different subscription tiers +with appropriate queue routing and rate limiting. +""" + +from abc import ABC, abstractmethod +from enum import StrEnum + +from configs import dify_config +from extensions.ext_redis import redis_client +from services.billing_service import BillingService +from services.workflow.rate_limiter import TenantDailyRateLimiter + + +class QueuePriority(StrEnum): + """Queue priorities for different subscription tiers""" + + PROFESSIONAL = "workflow_professional" # Highest priority + TEAM = "workflow_team" + SANDBOX = "workflow_sandbox" # Free tier + + +class BaseQueueDispatcher(ABC): + """Abstract base class for queue dispatchers""" + + def __init__(self): + self.rate_limiter = TenantDailyRateLimiter(redis_client) + + @abstractmethod + def get_queue_name(self) -> str: + """Get the queue name for this dispatcher""" + pass + + @abstractmethod + def get_daily_limit(self) -> int: + """Get daily execution limit""" + pass + + @abstractmethod + def get_priority(self) -> int: + """Get task priority level""" + pass + + def check_daily_quota(self, tenant_id: str) -> bool: + """ + Check if tenant has remaining daily quota + + Args: + tenant_id: The tenant identifier + + Returns: + True if quota available, False otherwise + """ + # Check without consuming + remaining = self.rate_limiter.get_remaining_quota(tenant_id=tenant_id, max_daily_limit=self.get_daily_limit()) + return remaining > 0 + + def consume_quota(self, tenant_id: str) -> bool: + """ + Consume one execution from daily quota + + Args: + tenant_id: The tenant identifier + + Returns: + True if quota consumed successfully, False if limit reached + """ + return self.rate_limiter.check_and_consume(tenant_id=tenant_id, max_daily_limit=self.get_daily_limit()) + + +class ProfessionalQueueDispatcher(BaseQueueDispatcher): + """Dispatcher for professional tier""" + + def get_queue_name(self) -> str: + return QueuePriority.PROFESSIONAL + + def get_daily_limit(self) -> int: + return int(1e9) + + def get_priority(self) -> int: + return 100 + + +class TeamQueueDispatcher(BaseQueueDispatcher): + """Dispatcher for team tier""" + + def get_queue_name(self) -> str: + return QueuePriority.TEAM + + def get_daily_limit(self) -> int: + return int(1e9) + + def get_priority(self) -> int: + return 50 + + +class SandboxQueueDispatcher(BaseQueueDispatcher): + """Dispatcher for free/sandbox tier""" + + def get_queue_name(self) -> str: + return QueuePriority.SANDBOX + + def get_daily_limit(self) -> int: + return dify_config.APP_DAILY_RATE_LIMIT + + def get_priority(self) -> int: + return 10 + + +class QueueDispatcherManager: + """Factory for creating appropriate dispatcher based on tenant subscription""" + + # Mapping of billing plans to dispatchers + PLAN_DISPATCHER_MAP = { + "professional": ProfessionalQueueDispatcher, + "team": TeamQueueDispatcher, + "sandbox": SandboxQueueDispatcher, + # Add new tiers here as they're created + # For any unknown plan, default to sandbox + } + + @classmethod + def get_dispatcher(cls, tenant_id: str) -> BaseQueueDispatcher: + """ + Get dispatcher based on tenant's subscription plan + + Args: + tenant_id: The tenant identifier + + Returns: + Appropriate queue dispatcher instance + """ + if dify_config.BILLING_ENABLED: + try: + 
billing_info = BillingService.get_info(tenant_id) + plan = billing_info.get("subscription", {}).get("plan", "sandbox") + except Exception: + # If billing service fails, default to sandbox + plan = "sandbox" + else: + # If billing is disabled, use team tier as default + plan = "team" + + dispatcher_class = cls.PLAN_DISPATCHER_MAP.get( + plan, + SandboxQueueDispatcher, # Default to sandbox for unknown plans + ) + + return dispatcher_class() diff --git a/api/services/workflow/rate_limiter.py b/api/services/workflow/rate_limiter.py new file mode 100644 index 0000000000..dff284538a --- /dev/null +++ b/api/services/workflow/rate_limiter.py @@ -0,0 +1,182 @@ +""" +Day-based rate limiter for workflow executions. + +Implements UTC-based daily quotas that reset at midnight UTC for consistent rate limiting. +""" + +from datetime import datetime, time, timedelta +from typing import Union + +import pytz +from redis import Redis +from sqlalchemy import select + +from extensions.ext_database import db +from extensions.ext_redis import RedisClientWrapper +from models.account import Account, TenantAccountJoin, TenantAccountRole + + +class TenantDailyRateLimiter: + """ + Day-based rate limiter that resets at midnight UTC + + This class provides Redis-based rate limiting with the following features: + - Daily quotas that reset at midnight UTC for consistency + - Atomic check-and-consume operations + - Automatic cleanup of stale counters + - Timezone-aware error messages for better UX + """ + + def __init__(self, redis_client: Union[Redis, RedisClientWrapper]): + self.redis = redis_client + + def _get_tenant_owner_timezone(self, tenant_id: str) -> str: + """ + Get timezone of tenant owner + + Args: + tenant_id: The tenant identifier + + Returns: + Timezone string (e.g., 'America/New_York', 'UTC') + """ + # Query to get tenant owner's timezone using scalar and select + owner = db.session.scalar( + select(Account) + .join(TenantAccountJoin, TenantAccountJoin.account_id == Account.id) + .where(TenantAccountJoin.tenant_id == tenant_id, TenantAccountJoin.role == TenantAccountRole.OWNER) + ) + + if not owner: + return "UTC" + + return owner.timezone or "UTC" + + def _get_day_key(self, tenant_id: str) -> str: + """ + Get Redis key for current UTC day + + Args: + tenant_id: The tenant identifier + + Returns: + Redis key for the current UTC day + """ + utc_now = datetime.utcnow() + date_str = utc_now.strftime("%Y-%m-%d") + return f"workflow:daily_limit:{tenant_id}:{date_str}" + + def _get_ttl_seconds(self) -> int: + """ + Calculate seconds until UTC midnight + + Returns: + Number of seconds until UTC midnight + """ + utc_now = datetime.utcnow() + + # Get next midnight in UTC + next_midnight = datetime.combine(utc_now.date() + timedelta(days=1), time.min) + + return int((next_midnight - utc_now).total_seconds()) + + def check_and_consume(self, tenant_id: str, max_daily_limit: int) -> bool: + """ + Check if quota available and consume one execution + + Args: + tenant_id: The tenant identifier + max_daily_limit: Maximum daily limit + + Returns: + True if quota consumed successfully, False if limit reached + """ + key = self._get_day_key(tenant_id) + ttl = self._get_ttl_seconds() + + # Check current usage + current = self.redis.get(key) + + if current is None: + # First execution of the day - set to 1 + self.redis.setex(key, ttl, 1) + return True + + current_count = int(current) + if current_count < max_daily_limit: + # Within limit, increment + new_count = self.redis.incr(key) + # Update TTL + self.redis.expire(key, ttl) 
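+            # Added note: the GET above and this INCR are not atomic, so two workers
+            # can both pass the limit check; the roll-back below handles that case.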
+ + # Double-check in case of race condition + if new_count <= max_daily_limit: + return True + else: + # Race condition occurred, decrement back + self.redis.decr(key) + return False + else: + # Limit exceeded + return False + + def get_remaining_quota(self, tenant_id: str, max_daily_limit: int) -> int: + """ + Get remaining quota for the day + + Args: + tenant_id: The tenant identifier + max_daily_limit: Maximum daily limit + + Returns: + Number of remaining executions for the day + """ + key = self._get_day_key(tenant_id) + used = int(self.redis.get(key) or 0) + return max(0, max_daily_limit - used) + + def get_current_usage(self, tenant_id: str) -> int: + """ + Get current usage for the day + + Args: + tenant_id: The tenant identifier + + Returns: + Number of executions used today + """ + key = self._get_day_key(tenant_id) + return int(self.redis.get(key) or 0) + + def reset_quota(self, tenant_id: str) -> bool: + """ + Reset quota for testing purposes + + Args: + tenant_id: The tenant identifier + + Returns: + True if key was deleted, False if key didn't exist + """ + key = self._get_day_key(tenant_id) + return bool(self.redis.delete(key)) + + def get_quota_reset_time(self, tenant_id: str, timezone_str: str) -> datetime: + """ + Get the time when quota will reset (next UTC midnight in tenant's timezone) + + Args: + tenant_id: The tenant identifier + timezone_str: Tenant's timezone for display purposes + + Returns: + Datetime when quota resets (next UTC midnight in tenant's timezone) + """ + tz = pytz.timezone(timezone_str) + utc_now = datetime.utcnow() + + # Get next midnight in UTC, then convert to tenant's timezone + next_utc_midnight = datetime.combine(utc_now.date() + timedelta(days=1), time.min) + next_utc_midnight = pytz.UTC.localize(next_utc_midnight) + + return next_utc_midnight.astimezone(tz) diff --git a/api/services/workflow_plugin_trigger_service.py b/api/services/workflow_plugin_trigger_service.py new file mode 100644 index 0000000000..e1b1b0a291 --- /dev/null +++ b/api/services/workflow_plugin_trigger_service.py @@ -0,0 +1,546 @@ +from typing import Optional + +from pydantic import BaseModel +from sqlalchemy import select +from sqlalchemy.orm import Session +from werkzeug.exceptions import NotFound + +from core.workflow.enums import NodeType +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from models.model import App +from models.trigger import TriggerSubscription +from models.workflow import Workflow, WorkflowPluginTrigger + + +class WorkflowPluginTriggerService: + """Service for managing workflow plugin triggers""" + + __PLUGIN_TRIGGER_NODE_CACHE_KEY__ = "plugin_trigger_nodes" + MAX_PLUGIN_TRIGGER_NODES_PER_WORKFLOW = 5 # Maximum allowed plugin trigger nodes per workflow + + @classmethod + def create_plugin_trigger( + cls, + app_id: str, + tenant_id: str, + node_id: str, + provider_id: str, + trigger_name: str, + subscription_id: str, + ) -> WorkflowPluginTrigger: + """Create a new plugin trigger + + Args: + app_id: The app ID + tenant_id: The tenant ID + node_id: The node ID in the workflow + provider_id: The plugin provider ID + trigger_name: The trigger name + subscription_id: The subscription ID + + Returns: + The created WorkflowPluginTrigger instance + + Raises: + BadRequest: If plugin trigger already exists for this app and node + """ + with Session(db.engine) as session: + # Check if plugin trigger already exists for this app and node + # Based on unique constraint: uniq_app_node + existing_trigger = session.scalar( + 
select(WorkflowPluginTrigger).where( + WorkflowPluginTrigger.app_id == app_id, + WorkflowPluginTrigger.node_id == node_id, + ) + ) + + if existing_trigger: + raise ValueError("Plugin trigger already exists for this app and node") + + # Check if subscription exists + subscription = session.scalar( + select(TriggerSubscription).where( + TriggerSubscription.id == subscription_id, + ) + ) + + if not subscription: + raise NotFound("Subscription not found") + + # Create new plugin trigger + plugin_trigger = WorkflowPluginTrigger( + app_id=app_id, + node_id=node_id, + tenant_id=tenant_id, + provider_id=provider_id, + trigger_name=trigger_name, + subscription_id=subscription_id, + ) + + session.add(plugin_trigger) + session.commit() + session.refresh(plugin_trigger) + + return plugin_trigger + + @classmethod + def get_plugin_trigger( + cls, + app_id: str, + node_id: str, + ) -> WorkflowPluginTrigger: + """Get a plugin trigger by app_id and node_id + + Args: + app_id: The app ID + node_id: The node ID in the workflow + + Returns: + The WorkflowPluginTrigger instance + + Raises: + NotFound: If plugin trigger not found + """ + with Session(db.engine) as session: + # Find plugin trigger using unique constraint + plugin_trigger = session.scalar( + select(WorkflowPluginTrigger).where( + WorkflowPluginTrigger.app_id == app_id, + WorkflowPluginTrigger.node_id == node_id, + ) + ) + + if not plugin_trigger: + raise NotFound("Plugin trigger not found") + + return plugin_trigger + + @classmethod + def get_plugin_trigger_by_subscription( + cls, + tenant_id: str, + subscription_id: str, + ) -> WorkflowPluginTrigger: + """Get a plugin trigger by tenant_id and subscription_id + This is the primary query pattern, optimized with composite index + + Args: + tenant_id: The tenant ID + subscription_id: The subscription ID + + Returns: + The WorkflowPluginTrigger instance + + Raises: + NotFound: If plugin trigger not found + """ + with Session(db.engine) as session: + # Find plugin trigger using indexed columns + plugin_trigger = session.scalar( + select(WorkflowPluginTrigger).where( + WorkflowPluginTrigger.tenant_id == tenant_id, + WorkflowPluginTrigger.subscription_id == subscription_id, + ) + ) + + if not plugin_trigger: + raise NotFound("Plugin trigger not found") + + return plugin_trigger + + @classmethod + def list_plugin_triggers_by_tenant( + cls, + tenant_id: str, + ) -> list[WorkflowPluginTrigger]: + """List all plugin triggers for a tenant + + Args: + tenant_id: The tenant ID + + Returns: + List of WorkflowPluginTrigger instances + """ + with Session(db.engine) as session: + plugin_triggers = session.scalars( + select(WorkflowPluginTrigger) + .where(WorkflowPluginTrigger.tenant_id == tenant_id) + .order_by(WorkflowPluginTrigger.created_at.desc()) + ).all() + + return list(plugin_triggers) + + @classmethod + def list_plugin_triggers_by_subscription( + cls, + subscription_id: str, + ) -> list[WorkflowPluginTrigger]: + """List all plugin triggers for a subscription + + Args: + subscription_id: The subscription ID + + Returns: + List of WorkflowPluginTrigger instances + """ + with Session(db.engine) as session: + plugin_triggers = session.scalars( + select(WorkflowPluginTrigger) + .where(WorkflowPluginTrigger.subscription_id == subscription_id) + .order_by(WorkflowPluginTrigger.created_at.desc()) + ).all() + + return list(plugin_triggers) + + @classmethod + def update_plugin_trigger( + cls, + app_id: str, + node_id: str, + subscription_id: str, + ) -> WorkflowPluginTrigger: + """Update a plugin trigger + + Args: 
+ app_id: The app ID + node_id: The node ID in the workflow + subscription_id: The new subscription ID (optional) + + Returns: + The updated WorkflowPluginTrigger instance + + Raises: + NotFound: If plugin trigger not found + """ + with Session(db.engine) as session: + # Find plugin trigger using unique constraint + plugin_trigger = session.scalar( + select(WorkflowPluginTrigger).where( + WorkflowPluginTrigger.app_id == app_id, + WorkflowPluginTrigger.node_id == node_id, + ) + ) + + if not plugin_trigger: + raise NotFound("Plugin trigger not found") + + # Check if subscription exists + subscription = session.scalar( + select(TriggerSubscription).where( + TriggerSubscription.id == subscription_id, + ) + ) + + if not subscription: + raise NotFound("Subscription not found") + + # Update subscription ID + plugin_trigger.subscription_id = subscription_id + + session.commit() + session.refresh(plugin_trigger) + + return plugin_trigger + + @classmethod + def update_plugin_trigger_by_subscription( + cls, + tenant_id: str, + subscription_id: str, + provider_id: Optional[str] = None, + trigger_name: Optional[str] = None, + new_subscription_id: Optional[str] = None, + ) -> WorkflowPluginTrigger: + """Update a plugin trigger by tenant_id and subscription_id + + Args: + tenant_id: The tenant ID + subscription_id: The current subscription ID + provider_id: The new provider ID (optional) + trigger_name: The new trigger name (optional) + new_subscription_id: The new subscription ID (optional) + + Returns: + The updated WorkflowPluginTrigger instance + + Raises: + NotFound: If plugin trigger not found + """ + with Session(db.engine) as session: + # Find plugin trigger using indexed columns + plugin_trigger = session.scalar( + select(WorkflowPluginTrigger).where( + WorkflowPluginTrigger.tenant_id == tenant_id, + WorkflowPluginTrigger.subscription_id == subscription_id, + ) + ) + + if not plugin_trigger: + raise NotFound("Plugin trigger not found") + + # Update fields if provided + if provider_id: + plugin_trigger.provider_id = provider_id + + if trigger_name: + # Update trigger_id if provider_id or trigger_name changed + provider_id = provider_id or plugin_trigger.provider_id + plugin_trigger.trigger_name = f"{provider_id}:{trigger_name}" + + if new_subscription_id: + plugin_trigger.subscription_id = new_subscription_id + + session.commit() + session.refresh(plugin_trigger) + + return plugin_trigger + + @classmethod + def delete_plugin_trigger( + cls, + app_id: str, + node_id: str, + ) -> None: + """Delete a plugin trigger by app_id and node_id + + Args: + app_id: The app ID + node_id: The node ID in the workflow + + Raises: + NotFound: If plugin trigger not found + """ + with Session(db.engine) as session: + # Find plugin trigger using unique constraint + plugin_trigger = session.scalar( + select(WorkflowPluginTrigger).where( + WorkflowPluginTrigger.app_id == app_id, + WorkflowPluginTrigger.node_id == node_id, + ) + ) + + if not plugin_trigger: + raise NotFound("Plugin trigger not found") + + session.delete(plugin_trigger) + session.commit() + + @classmethod + def delete_plugin_trigger_by_subscription( + cls, + session: Session, + tenant_id: str, + subscription_id: str, + ) -> None: + """Delete a plugin trigger by tenant_id and subscription_id within an existing session + + Args: + session: Database session + tenant_id: The tenant ID + subscription_id: The subscription ID + + Raises: + NotFound: If plugin trigger not found + """ + # Find plugin trigger using indexed columns + plugin_trigger = 
session.scalar( + select(WorkflowPluginTrigger).where( + WorkflowPluginTrigger.tenant_id == tenant_id, + WorkflowPluginTrigger.subscription_id == subscription_id, + ) + ) + + if not plugin_trigger: + return + + session.delete(plugin_trigger) + + @classmethod + def delete_all_by_subscription( + cls, + subscription_id: str, + ) -> int: + """Delete all plugin triggers for a subscription + Useful when a subscription is cancelled + + Args: + subscription_id: The subscription ID + + Returns: + Number of triggers deleted + """ + with Session(db.engine) as session: + # Find all plugin triggers for this subscription + plugin_triggers = session.scalars( + select(WorkflowPluginTrigger).where( + WorkflowPluginTrigger.subscription_id == subscription_id, + ) + ).all() + + count = len(plugin_triggers) + + for trigger in plugin_triggers: + session.delete(trigger) + + session.commit() + + return count + + @classmethod + def sync_plugin_trigger_relationships(cls, app: App, workflow: Workflow): + """ + Sync plugin trigger relationships in DB. + + 1. Check if the workflow has any plugin trigger nodes + 2. Fetch the nodes from DB, see if there were any plugin trigger records already + 3. Diff the nodes and the plugin trigger records, create/update/delete the records as needed + + Approach: + Frequent DB operations may cause performance issues, using Redis to cache it instead. + If any record exists, cache it. + + Limits: + - Maximum 5 plugin trigger nodes per workflow + """ + + class Cache(BaseModel): + """ + Cache model for plugin trigger nodes + """ + + record_id: str + node_id: str + provider_id: str + trigger_name: str + subscription_id: str + + # Walk nodes to find plugin triggers + nodes_in_graph = [] + for node_id, node_config in workflow.walk_nodes(NodeType.TRIGGER_PLUGIN): + # Extract plugin trigger configuration from node + plugin_id = node_config.get("plugin_id", "") + provider_id = node_config.get("provider_id", "") + trigger_name = node_config.get("trigger_name", "") + subscription_id = node_config.get("subscription_id", "") + + if not subscription_id: + continue + + nodes_in_graph.append( + { + "node_id": node_id, + "plugin_id": plugin_id, + "provider_id": provider_id, + "trigger_name": trigger_name, + "subscription_id": subscription_id, + } + ) + + # Check plugin trigger node limit + if len(nodes_in_graph) > cls.MAX_PLUGIN_TRIGGER_NODES_PER_WORKFLOW: + raise ValueError( + f"Workflow exceeds maximum plugin trigger node limit. 
" + f"Found {len(nodes_in_graph)} plugin trigger nodes, " + f"maximum allowed is {cls.MAX_PLUGIN_TRIGGER_NODES_PER_WORKFLOW}" + ) + + not_found_in_cache: list[dict] = [] + for node_info in nodes_in_graph: + node_id = node_info["node_id"] + # firstly check if the node exists in cache + if not redis_client.get(f"{cls.__PLUGIN_TRIGGER_NODE_CACHE_KEY__}:{node_id}"): + not_found_in_cache.append(node_info) + continue + + with Session(db.engine) as session: + try: + # lock the concurrent plugin trigger creation + redis_client.lock(f"{cls.__PLUGIN_TRIGGER_NODE_CACHE_KEY__}:apps:{app.id}:lock", timeout=10) + # fetch the non-cached nodes from DB + all_records = session.scalars( + select(WorkflowPluginTrigger).where( + WorkflowPluginTrigger.app_id == app.id, + WorkflowPluginTrigger.tenant_id == app.tenant_id, + ) + ).all() + + nodes_id_in_db = {node.node_id: node for node in all_records} + nodes_id_in_graph = {node["node_id"] for node in nodes_in_graph} + + # get the nodes not found both in cache and DB + nodes_not_found = [ + node_info for node_info in not_found_in_cache if node_info["node_id"] not in nodes_id_in_db + ] + + # create new plugin trigger records + for node_info in nodes_not_found: + plugin_trigger = WorkflowPluginTrigger( + app_id=app.id, + tenant_id=app.tenant_id, + node_id=node_info["node_id"], + provider_id=node_info["provider_id"], + trigger_name=node_info["trigger_name"], + subscription_id=node_info["subscription_id"], + ) + session.add(plugin_trigger) + session.flush() # Get the ID for caching + + cache = Cache( + record_id=plugin_trigger.id, + node_id=node_info["node_id"], + provider_id=node_info["provider_id"], + trigger_name=node_info["trigger_name"], + subscription_id=node_info["subscription_id"], + ) + redis_client.set( + f"{cls.__PLUGIN_TRIGGER_NODE_CACHE_KEY__}:{node_info['node_id']}", + cache.model_dump_json(), + ex=60 * 60, + ) + session.commit() + + # Update existing records if subscription_id changed + for node_info in nodes_in_graph: + node_id = node_info["node_id"] + if node_id in nodes_id_in_db: + existing_record = nodes_id_in_db[node_id] + if ( + existing_record.subscription_id != node_info["subscription_id"] + or existing_record.provider_id != node_info["provider_id"] + or existing_record.trigger_name != node_info["trigger_name"] + ): + existing_record.subscription_id = node_info["subscription_id"] + existing_record.provider_id = node_info["provider_id"] + existing_record.trigger_name = node_info["trigger_name"] + session.add(existing_record) + + # Update cache + cache = Cache( + record_id=existing_record.id, + node_id=node_id, + provider_id=node_info["provider_id"], + trigger_name=node_info["trigger_name"], + subscription_id=node_info["subscription_id"], + ) + redis_client.set( + f"{cls.__PLUGIN_TRIGGER_NODE_CACHE_KEY__}:{node_id}", + cache.model_dump_json(), + ex=60 * 60, + ) + session.commit() + + # delete the nodes not found in the graph + for node_id in nodes_id_in_db: + if node_id not in nodes_id_in_graph: + session.delete(nodes_id_in_db[node_id]) + redis_client.delete(f"{cls.__PLUGIN_TRIGGER_NODE_CACHE_KEY__}:{node_id}") + session.commit() + except Exception: + import logging + + logger = logging.getLogger(__name__) + logger.exception("Failed to sync plugin trigger relationships for app %s", app.id) + raise + finally: + redis_client.delete(f"{cls.__PLUGIN_TRIGGER_NODE_CACHE_KEY__}:apps:{app.id}:lock") diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index 97168c462b..9466b3df82 100644 --- a/api/services/workflow_service.py 
+++ b/api/services/workflow_service.py @@ -621,7 +621,7 @@ class WorkflowService: node_config = draft_workflow.get_node_config_by_id(node_id) node_type = Workflow.get_node_type_from_node_config(node_config) node_data = node_config.get("data", {}) - if node_type == NodeType.START: + if node_type.is_start_node: with Session(bind=db.engine) as session, session.begin(): draft_var_srv = WorkflowDraftVariableService(session) conversation_id = draft_var_srv.get_or_create_conversation( diff --git a/api/tasks/async_workflow_tasks.py b/api/tasks/async_workflow_tasks.py new file mode 100644 index 0000000000..6fc96f5155 --- /dev/null +++ b/api/tasks/async_workflow_tasks.py @@ -0,0 +1,203 @@ +""" +Celery tasks for async workflow execution. + +These tasks handle workflow execution for different subscription tiers +with appropriate retry policies and error handling. +""" + +import json +from datetime import UTC, datetime + +from celery import shared_task +from sqlalchemy import select +from sqlalchemy.orm import Session, sessionmaker + +from configs import dify_config +from core.app.apps.workflow.app_generator import WorkflowAppGenerator +from core.app.entities.app_invoke_entities import InvokeFrom +from extensions.ext_database import db +from models.account import Account +from models.enums import CreatorUserRole +from models.model import App, EndUser, Tenant +from models.workflow import Workflow, WorkflowTriggerLog, WorkflowTriggerStatus +from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository +from services.errors.app import WorkflowNotFoundError +from services.workflow.entities import AsyncTriggerExecutionResult, AsyncTriggerStatus, TriggerData, WorkflowTaskData + +# Determine queue names based on edition +if dify_config.EDITION == "CLOUD": + # Cloud edition: separate queues for different tiers + PROFESSIONAL_QUEUE = "workflow_professional" + TEAM_QUEUE = "workflow_team" + SANDBOX_QUEUE = "workflow_sandbox" +else: + # Community edition: single workflow queue (not dataset) + PROFESSIONAL_QUEUE = "workflow" + TEAM_QUEUE = "workflow" + SANDBOX_QUEUE = "workflow" + + +@shared_task(queue=PROFESSIONAL_QUEUE) +def execute_workflow_professional(task_data_dict: dict) -> dict: + """Execute workflow for professional tier with highest priority""" + task_data = WorkflowTaskData.model_validate(task_data_dict) + return _execute_workflow_common(task_data).model_dump() + + +@shared_task(queue=TEAM_QUEUE) +def execute_workflow_team(task_data_dict: dict) -> dict: + """Execute workflow for team tier""" + task_data = WorkflowTaskData.model_validate(task_data_dict) + return _execute_workflow_common(task_data).model_dump() + + +@shared_task(queue=SANDBOX_QUEUE) +def execute_workflow_sandbox(task_data_dict: dict) -> dict: + """Execute workflow for free tier with lower retry limit""" + task_data = WorkflowTaskData.model_validate(task_data_dict) + return _execute_workflow_common(task_data).model_dump() + + +def _execute_workflow_common(task_data: WorkflowTaskData) -> AsyncTriggerExecutionResult: + """ + Common workflow execution logic with trigger log updates + + Args: + task_data: Validated Pydantic model with task data + + Returns: + AsyncTriggerExecutionResult: Pydantic model with execution results + """ + # Create a new session for this task + session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) + + with session_factory() as session: + trigger_log_repo = SQLAlchemyWorkflowTriggerLogRepository(session) + + # Get trigger log + trigger_log = 
trigger_log_repo.get_by_id(task_data.workflow_trigger_log_id) + + if not trigger_log: + # This should not happen, but handle gracefully + return AsyncTriggerExecutionResult( + execution_id=task_data.workflow_trigger_log_id, + status=AsyncTriggerStatus.FAILED, + error=f"Trigger log not found: {task_data.workflow_trigger_log_id}", + ) + + # Reconstruct execution data from trigger log + trigger_data = TriggerData.model_validate_json(trigger_log.trigger_data) + + # Update status to running + trigger_log.status = WorkflowTriggerStatus.RUNNING + trigger_log_repo.update(trigger_log) + session.commit() + + start_time = datetime.now(UTC) + + try: + # Get app and workflow models + app_model = session.scalar(select(App).where(App.id == trigger_log.app_id)) + + if not app_model: + raise WorkflowNotFoundError(f"App not found: {trigger_log.app_id}") + + workflow = session.scalar(select(Workflow).where(Workflow.id == trigger_log.workflow_id)) + if not workflow: + raise WorkflowNotFoundError(f"Workflow not found: {trigger_log.workflow_id}") + + user = _get_user(session, trigger_log) + + # Execute workflow using WorkflowAppGenerator + generator = WorkflowAppGenerator() + + # Prepare args matching AppGenerateService.generate format + args = {"inputs": dict(trigger_data.inputs), "files": list(trigger_data.files)} + + # If workflow_id was specified, add it to args + if trigger_data.workflow_id: + args["workflow_id"] = trigger_data.workflow_id + + # Execute the workflow with the trigger type + result = generator.generate( + app_model=app_model, + workflow=workflow, + user=user, + args=args, + invoke_from=InvokeFrom.SERVICE_API, + streaming=False, + call_depth=0, + workflow_thread_pool_id=None, + triggered_from=trigger_data.trigger_type, + root_node_id=trigger_data.root_node_id, + ) + + # Calculate elapsed time + elapsed_time = (datetime.now(UTC) - start_time).total_seconds() + + # Extract relevant data from result + if isinstance(result, dict): + workflow_run_id = result.get("workflow_run_id") + total_tokens = result.get("total_tokens") + outputs = result + else: + # Handle generator result - collect all data + workflow_run_id = None + total_tokens = None + outputs = {"data": "streaming_result"} + + # Update trigger log with success + trigger_log.status = WorkflowTriggerStatus.SUCCEEDED + trigger_log.workflow_run_id = workflow_run_id + trigger_log.outputs = json.dumps(outputs) + trigger_log.elapsed_time = elapsed_time + trigger_log.total_tokens = total_tokens + trigger_log.finished_at = datetime.now(UTC) + trigger_log_repo.update(trigger_log) + session.commit() + + return AsyncTriggerExecutionResult( + execution_id=trigger_log.id, + status=AsyncTriggerStatus.COMPLETED, + result=outputs, + elapsed_time=elapsed_time, + total_tokens=total_tokens, + ) + + except Exception as e: + # Calculate elapsed time for failed execution + elapsed_time = (datetime.now(UTC) - start_time).total_seconds() + + # Update trigger log with failure + trigger_log.status = WorkflowTriggerStatus.FAILED + trigger_log.error = str(e) + trigger_log.finished_at = datetime.now(UTC) + trigger_log.elapsed_time = elapsed_time + trigger_log_repo.update(trigger_log) + + # Final failure - no retry logic (simplified like RAG tasks) + session.commit() + + return AsyncTriggerExecutionResult( + execution_id=trigger_log.id, status=AsyncTriggerStatus.FAILED, error=str(e), elapsed_time=elapsed_time + ) + + +def _get_user(session: Session, trigger_log: WorkflowTriggerLog) -> Account | EndUser: + """Compose user from trigger log""" + tenant = 
session.scalar(select(Tenant).where(Tenant.id == trigger_log.tenant_id)) + if not tenant: + raise ValueError(f"Tenant not found: {trigger_log.tenant_id}") + + # Get user from trigger log + if trigger_log.created_by_role == CreatorUserRole.ACCOUNT: + user = session.scalar(select(Account).where(Account.id == trigger_log.created_by)) + if user: + user.current_tenant = tenant + else: # CreatorUserRole.END_USER + user = session.scalar(select(EndUser).where(EndUser.id == trigger_log.created_by)) + + if not user: + raise ValueError(f"User not found: {trigger_log.created_by} (role: {trigger_log.created_by_role})") + + return user diff --git a/api/tasks/remove_app_and_related_data_task.py b/api/tasks/remove_app_and_related_data_task.py index f8f39583ac..770bdd6676 100644 --- a/api/tasks/remove_app_and_related_data_task.py +++ b/api/tasks/remove_app_and_related_data_task.py @@ -69,6 +69,7 @@ def remove_app_and_related_data_task(self, tenant_id: str, app_id: str): _delete_trace_app_configs(tenant_id, app_id) _delete_conversation_variables(app_id=app_id) _delete_draft_variables(app_id) + _delete_app_plugin_triggers(tenant_id, app_id) end_at = time.perf_counter() logger.info(click.style(f"App and related data deleted: {app_id} latency: {end_at - start_at}", fg="green")) @@ -501,3 +502,13 @@ def _delete_records(query_sql: str, params: dict, delete_func: Callable, name: s logger.exception("Error occurred while deleting %s %s", name, record_id) continue rs.close() + + +def _delete_app_plugin_triggers(tenant_id: str, app_id: str): + with db.engine.begin() as conn: + result = conn.execute( + sa.text("DELETE FROM workflow_plugin_triggers WHERE app_id = :app_id"), {"app_id": app_id} + ) + deleted_count = result.rowcount + if deleted_count > 0: + logger.info(click.style(f"Deleted {deleted_count} workflow plugin triggers for app {app_id}", fg="green")) diff --git a/api/tasks/trigger_processing_tasks.py b/api/tasks/trigger_processing_tasks.py new file mode 100644 index 0000000000..ed646863d2 --- /dev/null +++ b/api/tasks/trigger_processing_tasks.py @@ -0,0 +1,156 @@ +""" +Celery tasks for async trigger processing. + +These tasks handle trigger workflow execution asynchronously +to avoid blocking the main request thread. +""" + +import logging + +from celery import shared_task +from sqlalchemy.orm import Session + +from core.trigger.trigger_manager import TriggerManager +from extensions.ext_database import db +from extensions.ext_storage import storage +from models.provider_ids import TriggerProviderID +from models.trigger import TriggerSubscription +from services.trigger_debug_service import TriggerDebugService +from services.trigger_service import TriggerService +from services.workflow.entities import PluginTriggerDispatchData + +logger = logging.getLogger(__name__) + +# Use workflow queue for trigger processing +TRIGGER_QUEUE = "triggered_workflow_dispatcher" + + +@shared_task(queue=TRIGGER_QUEUE) +def dispatch_triggered_workflows_async( + dispatch_data: dict, +) -> dict: + """ + Dispatch triggers asynchronously. 
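Before the task body continues below, a hedged sketch of how a caller would typically enqueue this dispatcher task; the `PluginTriggerDispatchData` field names mirror how the payload is unpacked in the task, while the concrete values and field types (such as the numeric timestamp) are assumptions.

```python
# Illustrative only: hand a received trigger event to the dispatcher queue.
from services.workflow.entities import PluginTriggerDispatchData
from tasks.trigger_processing_tasks import dispatch_triggered_workflows_async

payload = PluginTriggerDispatchData(
    endpoint_id="ep-123",               # invented example values
    provider_id="org/plugin/provider",
    subscription_id="sub-456",
    timestamp=1735689600,
    triggers=["issue_opened"],
    request_id="req-789",
)
# The task re-validates this dict back into PluginTriggerDispatchData.
dispatch_triggered_workflows_async.delay(payload.model_dump())
```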
+
+    Args:
+        dispatch_data: Serialized PluginTriggerDispatchData payload containing
+            endpoint_id, provider_id, subscription_id, event timestamp, trigger
+            names, and the request_id of the stored raw request
+
+    Returns:
+        dict: Execution result with status and dispatched trigger count
+    """
+    dispatch_params: PluginTriggerDispatchData = PluginTriggerDispatchData.model_validate(
+        dispatch_data
+    )
+    endpoint_id = dispatch_params.endpoint_id
+    provider_id = dispatch_params.provider_id
+    subscription_id = dispatch_params.subscription_id
+    timestamp = dispatch_params.timestamp
+    triggers = dispatch_params.triggers
+    request_id = dispatch_params.request_id
+
+    try:
+        logger.info(
+            "Starting async trigger dispatching for endpoint=%s, triggers=%s, request_id=%s, timestamp=%s",
+            endpoint_id,
+            triggers,
+            request_id,
+            timestamp,
+        )
+
+        # Verify request exists in storage
+        try:
+            serialized_request = storage.load_once(f"triggers/{request_id}")
+            # Just verify it exists, we don't need to deserialize it here
+            if not serialized_request:
+                raise ValueError("Request not found in storage")
+        except Exception as e:
+            logger.exception("Failed to load request %s", request_id, exc_info=e)
+            return {"status": "failed", "error": f"Failed to load request: {str(e)}"}
+
+        with Session(db.engine) as session:
+            # Get subscription
+            subscription = session.query(TriggerSubscription).filter_by(id=subscription_id).first()
+            if not subscription:
+                logger.error("Subscription not found: %s", subscription_id)
+                return {"status": "failed", "error": "Subscription not found"}
+
+            # Get controller
+            controller = TriggerManager.get_trigger_provider(subscription.tenant_id, TriggerProviderID(provider_id))
+            if not controller:
+                logger.error("Controller not found for provider: %s", provider_id)
+                return {"status": "failed", "error": "Controller not found"}
+
+            # Dispatch each trigger
+            dispatched_count = 0
+            for trigger_name in triggers:
+                try:
+                    trigger = controller.get_trigger(trigger_name)
+                    if trigger is None:
+                        logger.error(
+                            "Trigger '%s' not found in provider '%s'",
+                            trigger_name,
+                            provider_id,
+                        )
+                        continue
+
+                    dispatched_count += TriggerService.dispatch_triggered_workflows(
+                        subscription=subscription,
+                        trigger=trigger,
+                        request_id=request_id,
+                    )
+
+                except Exception:
+                    logger.exception(
+                        "Failed to dispatch trigger '%s' for subscription %s",
+                        trigger_name,
+                        subscription_id,
+                    )
+                    # Continue processing other triggers even if one fails
+                    continue
+
+            # Dispatch to debug sessions after processing all triggers
+            debug_dispatched = 0
+            try:
+                debug_dispatched = TriggerDebugService.dispatch_debug_event(
+                    tenant_id=subscription.tenant_id,
+                    subscription_id=subscription_id,
+                    triggers=triggers,
+                    timestamp=timestamp,
+                    request_id=request_id,
+                )
+            except Exception:
+                # Silent failure for debug dispatch; keep a zero count for the summary below
+                logger.exception("Failed to dispatch to debug sessions")
+
+            logger.info(
+                "Completed async trigger dispatching: processed %d/%d triggers",
+                dispatched_count,
+                len(triggers),
+            )
+
+            # Note: Stored request is not deleted here. It should be handled by:
+            # 1. Storage system's lifecycle policy (e.g., S3 lifecycle rules for triggers/* prefix)
+            # 2.
Or periodic cleanup job if using local/persistent storage + # This ensures request data is available for debugging/retry purposes + + return { + "status": "completed", + "total_count": len(triggers), + "dispatched_count": dispatched_count, + "debug_dispatched_count": debug_dispatched, + } + + except Exception as e: + logger.exception( + "Error in async trigger dispatching for endpoint %s data %s", + endpoint_id, + dispatch_data, + ) + return { + "status": "failed", + "error": str(e), + } diff --git a/api/tasks/workflow_schedule_tasks.py b/api/tasks/workflow_schedule_tasks.py new file mode 100644 index 0000000000..17f4e0751c --- /dev/null +++ b/api/tasks/workflow_schedule_tasks.py @@ -0,0 +1,69 @@ +import logging +from datetime import UTC, datetime +from zoneinfo import ZoneInfo + +from celery import shared_task +from sqlalchemy.orm import sessionmaker + +from core.workflow.nodes.trigger_schedule.exc import ( + ScheduleExecutionError, + ScheduleNotFoundError, + TenantOwnerNotFoundError, +) +from extensions.ext_database import db +from models.enums import WorkflowRunTriggeredFrom +from models.workflow import WorkflowSchedulePlan +from services.async_workflow_service import AsyncWorkflowService +from services.schedule_service import ScheduleService +from services.workflow.entities import TriggerData + +logger = logging.getLogger(__name__) + + +@shared_task(queue="schedule_executor") +def run_schedule_trigger(schedule_id: str) -> None: + """ + Execute a scheduled workflow trigger. + + Note: No retry logic needed as schedules will run again at next interval. + The execution result is tracked via WorkflowTriggerLog. + + Raises: + ScheduleNotFoundError: If schedule doesn't exist + TenantOwnerNotFoundError: If no owner/admin for tenant + ScheduleExecutionError: If workflow trigger fails + """ + session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) + + with session_factory() as session: + schedule = session.get(WorkflowSchedulePlan, schedule_id) + if not schedule: + raise ScheduleNotFoundError(f"Schedule {schedule_id} not found") + + tenant_owner = ScheduleService.get_tenant_owner(session, schedule.tenant_id) + if not tenant_owner: + raise TenantOwnerNotFoundError(f"No owner or admin found for tenant {schedule.tenant_id}") + + try: + current_utc = datetime.now(UTC) + schedule_tz = ZoneInfo(schedule.timezone) if schedule.timezone else UTC + current_in_tz = current_utc.astimezone(schedule_tz) + inputs = {"current_time": current_in_tz.isoformat()} + + response = AsyncWorkflowService.trigger_workflow_async( + session=session, + user=tenant_owner, + trigger_data=TriggerData( + app_id=schedule.app_id, + root_node_id=schedule.node_id, + trigger_type=WorkflowRunTriggeredFrom.SCHEDULE, + inputs=inputs, + tenant_id=schedule.tenant_id, + ), + ) + logger.info("Schedule %s triggered workflow: %s", schedule_id, response.workflow_trigger_log_id) + + except Exception as e: + raise ScheduleExecutionError( + f"Failed to trigger workflow for schedule {schedule_id}, app {schedule.app_id}" + ) from e diff --git a/api/tests/integration_tests/.env.example b/api/tests/integration_tests/.env.example index 92df93fb13..58145bd610 100644 --- a/api/tests/integration_tests/.env.example +++ b/api/tests/integration_tests/.env.example @@ -144,6 +144,9 @@ HTTP_REQUEST_MAX_WRITE_TIMEOUT=600 HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760 HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576 +# Webhook configuration +WEBHOOK_REQUEST_BODY_MAX_SIZE=10485760 + # Respect X-* headers to redirect clients 
RESPECT_XFORWARD_HEADERS_ENABLED=false diff --git a/api/tests/test_containers_integration_tests/services/test_webhook_service.py b/api/tests/test_containers_integration_tests/services/test_webhook_service.py new file mode 100644 index 0000000000..264a12947f --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_webhook_service.py @@ -0,0 +1,497 @@ +import json +from io import BytesIO +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker +from flask import Flask +from werkzeug.datastructures import FileStorage + +from models.model import App +from models.workflow import Workflow, WorkflowWebhookTrigger +from services.account_service import AccountService, TenantService +from services.webhook_service import WebhookService + + +class TestWebhookService: + """Integration tests for WebhookService using testcontainers.""" + + @pytest.fixture + def mock_external_dependencies(self): + """Mock external service dependencies.""" + with ( + patch("services.webhook_service.AsyncWorkflowService") as mock_async_service, + patch("services.webhook_service.ToolFileManager") as mock_tool_file_manager, + patch("services.webhook_service.file_factory") as mock_file_factory, + patch("services.account_service.FeatureService") as mock_feature_service, + ): + # Mock ToolFileManager + mock_tool_file_instance = MagicMock() + mock_tool_file_manager.return_value = mock_tool_file_instance + + # Mock file creation + mock_tool_file = MagicMock() + mock_tool_file.id = "test_file_id" + mock_tool_file_instance.create_file_by_raw.return_value = mock_tool_file + + # Mock file factory + mock_file_obj = MagicMock() + mock_file_factory.build_from_mapping.return_value = mock_file_obj + + # Mock feature service + mock_feature_service.get_system_features.return_value.is_allow_register = True + mock_feature_service.get_system_features.return_value.is_allow_create_workspace = True + + yield { + "async_service": mock_async_service, + "tool_file_manager": mock_tool_file_manager, + "file_factory": mock_file_factory, + "tool_file": mock_tool_file, + "file_obj": mock_file_obj, + "feature_service": mock_feature_service, + } + + @pytest.fixture + def test_data(self, db_session_with_containers, mock_external_dependencies): + """Create test data for webhook service tests.""" + fake = Faker() + + # Create account and tenant + account = AccountService.create_account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + password=fake.password(length=12), + ) + TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) + tenant = account.current_tenant + + # Create app + app = App( + tenant_id=tenant.id, + name=fake.company(), + description=fake.text(), + mode="workflow", + icon="", + icon_background="", + enable_site=True, + enable_api=True, + ) + db_session_with_containers.add(app) + db_session_with_containers.flush() + + # Create workflow + workflow_data = { + "nodes": [ + { + "id": "webhook_node", + "type": "webhook", + "data": { + "title": "Test Webhook", + "method": "post", + "content_type": "application/json", + "headers": [ + {"name": "Authorization", "required": True}, + {"name": "Content-Type", "required": False}, + ], + "params": [{"name": "version", "required": True}, {"name": "format", "required": False}], + "body": [ + {"name": "message", "type": "string", "required": True}, + {"name": "count", "type": "number", "required": False}, + {"name": "upload", "type": "file", "required": False}, + ], + "status_code": 200, + "response_body": 
'{"status": "success"}', + "timeout": 30, + }, + } + ], + "edges": [], + } + + workflow = Workflow( + tenant_id=tenant.id, + app_id=app.id, + type="workflow", + graph=json.dumps(workflow_data), + features=json.dumps({}), + created_by=account.id, + environment_variables=[], + conversation_variables=[], + version="1.0", + ) + db_session_with_containers.add(workflow) + db_session_with_containers.flush() + + # Create webhook trigger + webhook_id = fake.uuid4()[:16] + webhook_trigger = WorkflowWebhookTrigger( + app_id=app.id, + node_id="webhook_node", + tenant_id=tenant.id, + webhook_id=webhook_id, + created_by=account.id, + ) + db_session_with_containers.add(webhook_trigger) + db_session_with_containers.commit() + + return { + "tenant": tenant, + "account": account, + "app": app, + "workflow": workflow, + "webhook_trigger": webhook_trigger, + "webhook_id": webhook_id, + } + + def test_get_webhook_trigger_and_workflow_success(self, test_data, flask_app_with_containers): + """Test successful retrieval of webhook trigger and workflow.""" + webhook_id = test_data["webhook_id"] + + with flask_app_with_containers.app_context(): + webhook_trigger, workflow, node_config = WebhookService.get_webhook_trigger_and_workflow(webhook_id) + + assert webhook_trigger is not None + assert webhook_trigger.webhook_id == webhook_id + assert workflow is not None + assert workflow.app_id == test_data["app"].id + assert node_config is not None + assert node_config["id"] == "webhook_node" + assert node_config["data"]["title"] == "Test Webhook" + + def test_get_webhook_trigger_and_workflow_not_found(self, flask_app_with_containers): + """Test webhook trigger not found scenario.""" + with flask_app_with_containers.app_context(): + with pytest.raises(ValueError, match="Webhook not found"): + WebhookService.get_webhook_trigger_and_workflow("nonexistent_webhook") + + def test_extract_webhook_data_json(self): + """Test webhook data extraction from JSON request.""" + app = Flask(__name__) + + with app.test_request_context( + "/webhook", + method="POST", + headers={"Content-Type": "application/json", "Authorization": "Bearer token"}, + query_string="version=1&format=json", + json={"message": "hello", "count": 42}, + ): + webhook_trigger = MagicMock() + webhook_data = WebhookService.extract_webhook_data(webhook_trigger) + + assert webhook_data["method"] == "POST" + assert webhook_data["headers"]["Authorization"] == "Bearer token" + assert webhook_data["query_params"]["version"] == "1" + assert webhook_data["query_params"]["format"] == "json" + assert webhook_data["body"]["message"] == "hello" + assert webhook_data["body"]["count"] == 42 + assert webhook_data["files"] == {} + + def test_extract_webhook_data_form_urlencoded(self): + """Test webhook data extraction from form URL encoded request.""" + app = Flask(__name__) + + with app.test_request_context( + "/webhook", + method="POST", + headers={"Content-Type": "application/x-www-form-urlencoded"}, + data={"username": "test", "password": "secret"}, + ): + webhook_trigger = MagicMock() + webhook_data = WebhookService.extract_webhook_data(webhook_trigger) + + assert webhook_data["method"] == "POST" + assert webhook_data["body"]["username"] == "test" + assert webhook_data["body"]["password"] == "secret" + + def test_extract_webhook_data_multipart_with_files(self, mock_external_dependencies): + """Test webhook data extraction from multipart form with files.""" + app = Flask(__name__) + + # Create a mock file + file_content = b"test file content" + file_storage = 
FileStorage(stream=BytesIO(file_content), filename="test.txt", content_type="text/plain") + + with app.test_request_context( + "/webhook", + method="POST", + headers={"Content-Type": "multipart/form-data"}, + data={"message": "test", "upload": file_storage}, + ): + webhook_trigger = MagicMock() + webhook_trigger.tenant_id = "test_tenant" + + webhook_data = WebhookService.extract_webhook_data(webhook_trigger) + + assert webhook_data["method"] == "POST" + assert webhook_data["body"]["message"] == "test" + assert "upload" in webhook_data["files"] + + # Verify file processing was called + mock_external_dependencies["tool_file_manager"].assert_called_once() + mock_external_dependencies["file_factory"].build_from_mapping.assert_called_once() + + def test_extract_webhook_data_raw_text(self): + """Test webhook data extraction from raw text request.""" + app = Flask(__name__) + + with app.test_request_context( + "/webhook", method="POST", headers={"Content-Type": "text/plain"}, data="raw text content" + ): + webhook_trigger = MagicMock() + webhook_data = WebhookService.extract_webhook_data(webhook_trigger) + + assert webhook_data["method"] == "POST" + assert webhook_data["body"]["raw"] == "raw text content" + + def test_validate_webhook_request_success(self): + """Test successful webhook request validation.""" + webhook_data = { + "method": "POST", + "headers": {"Authorization": "Bearer token", "Content-Type": "application/json"}, + "query_params": {"version": "1"}, + "body": {"message": "hello"}, + "files": {}, + } + + node_config = { + "data": { + "method": "post", + "headers": [{"name": "Authorization", "required": True}, {"name": "Content-Type", "required": False}], + "params": [{"name": "version", "required": True}], + "body": [{"name": "message", "type": "string", "required": True}], + } + } + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is True + + def test_validate_webhook_request_method_mismatch(self): + """Test webhook validation with HTTP method mismatch.""" + webhook_data = {"method": "GET", "headers": {}, "query_params": {}, "body": {}, "files": {}} + + node_config = {"data": {"method": "post"}} + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is False + assert "HTTP method mismatch" in result["error"] + + def test_validate_webhook_request_missing_required_header(self): + """Test webhook validation with missing required header.""" + webhook_data = {"method": "POST", "headers": {}, "query_params": {}, "body": {}, "files": {}} + + node_config = {"data": {"method": "post", "headers": [{"name": "Authorization", "required": True}]}} + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is False + assert "Required header missing: Authorization" in result["error"] + + def test_validate_webhook_request_case_insensitive_headers(self): + """Test webhook validation with case-insensitive header matching.""" + webhook_data = { + "method": "POST", + "headers": {"authorization": "Bearer token"}, # lowercase + "query_params": {}, + "body": {}, + "files": {}, + } + + node_config = { + "data": { + "method": "post", + "headers": [ + {"name": "Authorization", "required": True} # Pascal case + ], + } + } + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is True + + def test_validate_webhook_request_missing_required_param(self): + """Test webhook validation with missing required 
query parameter.""" + webhook_data = {"method": "POST", "headers": {}, "query_params": {}, "body": {}, "files": {}} + + node_config = {"data": {"method": "post", "params": [{"name": "version", "required": True}]}} + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is False + assert "Required query parameter missing: version" in result["error"] + + def test_validate_webhook_request_missing_required_body_param(self): + """Test webhook validation with missing required body parameter.""" + webhook_data = {"method": "POST", "headers": {}, "query_params": {}, "body": {}, "files": {}} + + node_config = {"data": {"method": "post", "body": [{"name": "message", "type": "string", "required": True}]}} + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is False + assert "Required body parameter missing: message" in result["error"] + + def test_validate_webhook_request_missing_required_file(self): + """Test webhook validation with missing required file parameter.""" + webhook_data = {"method": "POST", "headers": {}, "query_params": {}, "body": {}, "files": {}} + + node_config = {"data": {"method": "post", "body": [{"name": "upload", "type": "file", "required": True}]}} + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is False + assert "Required file parameter missing: upload" in result["error"] + + def test_trigger_workflow_execution_success(self, test_data, mock_external_dependencies, flask_app_with_containers): + """Test successful workflow execution trigger.""" + webhook_data = { + "method": "POST", + "headers": {"Authorization": "Bearer token"}, + "query_params": {"version": "1"}, + "body": {"message": "hello"}, + "files": {}, + } + + with flask_app_with_containers.app_context(): + # Mock tenant owner lookup to return the test account + with patch("services.webhook_service.select") as mock_select: + mock_query = MagicMock() + mock_select.return_value.join.return_value.where.return_value = mock_query + + # Mock the session to return our test account + with patch("services.webhook_service.Session") as mock_session: + mock_session_instance = MagicMock() + mock_session.return_value.__enter__.return_value = mock_session_instance + mock_session_instance.scalar.return_value = test_data["account"] + + # Should not raise any exceptions + WebhookService.trigger_workflow_execution( + test_data["webhook_trigger"], webhook_data, test_data["workflow"] + ) + + # Verify AsyncWorkflowService was called + mock_external_dependencies["async_service"].trigger_workflow_async.assert_called_once() + + def test_trigger_workflow_execution_no_tenant_owner( + self, test_data, mock_external_dependencies, flask_app_with_containers + ): + """Test workflow execution trigger when tenant owner not found.""" + webhook_data = {"method": "POST", "headers": {}, "query_params": {}, "body": {}, "files": {}} + + with flask_app_with_containers.app_context(): + # Mock tenant owner lookup to return None + with ( + patch("services.webhook_service.select") as mock_select, + patch("services.webhook_service.Session") as mock_session, + ): + mock_session_instance = MagicMock() + mock_session.return_value.__enter__.return_value = mock_session_instance + mock_session_instance.scalar.return_value = None + + with pytest.raises(ValueError, match="Tenant owner not found"): + WebhookService.trigger_workflow_execution( + test_data["webhook_trigger"], webhook_data, test_data["workflow"] + 
) + + def test_generate_webhook_response_default(self): + """Test webhook response generation with default values.""" + node_config = {"data": {}} + + response_data, status_code = WebhookService.generate_webhook_response(node_config) + + assert status_code == 200 + assert response_data["status"] == "success" + assert "Webhook processed successfully" in response_data["message"] + + def test_generate_webhook_response_custom_json(self): + """Test webhook response generation with custom JSON response.""" + node_config = {"data": {"status_code": 201, "response_body": '{"result": "created", "id": 123}'}} + + response_data, status_code = WebhookService.generate_webhook_response(node_config) + + assert status_code == 201 + assert response_data["result"] == "created" + assert response_data["id"] == 123 + + def test_generate_webhook_response_custom_text(self): + """Test webhook response generation with custom text response.""" + node_config = {"data": {"status_code": 202, "response_body": "Request accepted for processing"}} + + response_data, status_code = WebhookService.generate_webhook_response(node_config) + + assert status_code == 202 + assert response_data["message"] == "Request accepted for processing" + + def test_generate_webhook_response_invalid_json(self): + """Test webhook response generation with invalid JSON response.""" + node_config = {"data": {"status_code": 400, "response_body": '{"invalid": json}'}} + + response_data, status_code = WebhookService.generate_webhook_response(node_config) + + assert status_code == 400 + assert response_data["message"] == '{"invalid": json}' + + def test_process_file_uploads_success(self, mock_external_dependencies): + """Test successful file upload processing.""" + # Create mock files + files = { + "file1": MagicMock(filename="test1.txt", content_type="text/plain"), + "file2": MagicMock(filename="test2.jpg", content_type="image/jpeg"), + } + + # Mock file reads + files["file1"].read.return_value = b"content1" + files["file2"].read.return_value = b"content2" + + webhook_trigger = MagicMock() + webhook_trigger.tenant_id = "test_tenant" + + result = WebhookService._process_file_uploads(files, webhook_trigger) + + assert len(result) == 2 + assert "file1" in result + assert "file2" in result + + # Verify file processing was called for each file + assert mock_external_dependencies["tool_file_manager"].call_count == 2 + assert mock_external_dependencies["file_factory"].build_from_mapping.call_count == 2 + + def test_process_file_uploads_with_errors(self, mock_external_dependencies): + """Test file upload processing with errors.""" + # Create mock files, one will fail + files = { + "good_file": MagicMock(filename="test.txt", content_type="text/plain"), + "bad_file": MagicMock(filename="test.bad", content_type="text/plain"), + } + + files["good_file"].read.return_value = b"content" + files["bad_file"].read.side_effect = Exception("Read error") + + webhook_trigger = MagicMock() + webhook_trigger.tenant_id = "test_tenant" + + result = WebhookService._process_file_uploads(files, webhook_trigger) + + # Should process the good file and skip the bad one + assert len(result) == 1 + assert "good_file" in result + assert "bad_file" not in result + + def test_process_file_uploads_empty_filename(self, mock_external_dependencies): + """Test file upload processing with empty filename.""" + files = { + "no_filename": MagicMock(filename="", content_type="text/plain"), + "none_filename": MagicMock(filename=None, content_type="text/plain"), + } + + webhook_trigger = MagicMock() 
+ webhook_trigger.tenant_id = "test_tenant" + + result = WebhookService._process_file_uploads(files, webhook_trigger) + + # Should skip files without filenames + assert len(result) == 0 + mock_external_dependencies["tool_file_manager"].assert_not_called() diff --git a/api/tests/unit_tests/core/plugin/utils/test_http_parser.py b/api/tests/unit_tests/core/plugin/utils/test_http_parser.py new file mode 100644 index 0000000000..1c2e0c96f8 --- /dev/null +++ b/api/tests/unit_tests/core/plugin/utils/test_http_parser.py @@ -0,0 +1,655 @@ +import pytest +from flask import Request, Response + +from core.plugin.utils.http_parser import ( + deserialize_request, + deserialize_response, + serialize_request, + serialize_response, +) + + +class TestSerializeRequest: + def test_serialize_simple_get_request(self): + # Create a simple GET request + environ = { + "REQUEST_METHOD": "GET", + "PATH_INFO": "/api/test", + "QUERY_STRING": "", + "SERVER_NAME": "localhost", + "SERVER_PORT": "8000", + "wsgi.input": None, + "wsgi.url_scheme": "http", + } + request = Request(environ) + + raw_data = serialize_request(request) + + assert raw_data.startswith(b"GET /api/test HTTP/1.1\r\n") + assert b"\r\n\r\n" in raw_data # Empty line between headers and body + + def test_serialize_request_with_query_params(self): + # Create a GET request with query parameters + environ = { + "REQUEST_METHOD": "GET", + "PATH_INFO": "/api/search", + "QUERY_STRING": "q=test&limit=10", + "SERVER_NAME": "localhost", + "SERVER_PORT": "8000", + "wsgi.input": None, + "wsgi.url_scheme": "http", + } + request = Request(environ) + + raw_data = serialize_request(request) + + assert raw_data.startswith(b"GET /api/search?q=test&limit=10 HTTP/1.1\r\n") + + def test_serialize_post_request_with_body(self): + # Create a POST request with body + from io import BytesIO + + body = b'{"name": "test", "value": 123}' + environ = { + "REQUEST_METHOD": "POST", + "PATH_INFO": "/api/data", + "QUERY_STRING": "", + "SERVER_NAME": "localhost", + "SERVER_PORT": "8000", + "wsgi.input": BytesIO(body), + "wsgi.url_scheme": "http", + "CONTENT_LENGTH": str(len(body)), + "CONTENT_TYPE": "application/json", + "HTTP_CONTENT_TYPE": "application/json", + } + request = Request(environ) + + raw_data = serialize_request(request) + + assert b"POST /api/data HTTP/1.1\r\n" in raw_data + assert b"Content-Type: application/json" in raw_data + assert raw_data.endswith(body) + + def test_serialize_request_with_custom_headers(self): + # Create a request with custom headers + environ = { + "REQUEST_METHOD": "GET", + "PATH_INFO": "/api/test", + "QUERY_STRING": "", + "SERVER_NAME": "localhost", + "SERVER_PORT": "8000", + "wsgi.input": None, + "wsgi.url_scheme": "http", + "HTTP_AUTHORIZATION": "Bearer token123", + "HTTP_X_CUSTOM_HEADER": "custom-value", + } + request = Request(environ) + + raw_data = serialize_request(request) + + assert b"Authorization: Bearer token123" in raw_data + assert b"X-Custom-Header: custom-value" in raw_data + + +class TestDeserializeRequest: + def test_deserialize_simple_get_request(self): + raw_data = b"GET /api/test HTTP/1.1\r\nHost: localhost:8000\r\n\r\n" + + request = deserialize_request(raw_data) + + assert request.method == "GET" + assert request.path == "/api/test" + assert request.headers.get("Host") == "localhost:8000" + + def test_deserialize_request_with_query_params(self): + raw_data = b"GET /api/search?q=test&limit=10 HTTP/1.1\r\nHost: example.com\r\n\r\n" + + request = deserialize_request(raw_data) + + assert request.method == "GET" + assert 
request.path == "/api/search" + assert request.query_string == b"q=test&limit=10" + assert request.args.get("q") == "test" + assert request.args.get("limit") == "10" + + def test_deserialize_post_request_with_body(self): + body = b'{"name": "test", "value": 123}' + raw_data = ( + b"POST /api/data HTTP/1.1\r\n" + b"Host: localhost\r\n" + b"Content-Type: application/json\r\n" + b"Content-Length: " + str(len(body)).encode() + b"\r\n" + b"\r\n" + body + ) + + request = deserialize_request(raw_data) + + assert request.method == "POST" + assert request.path == "/api/data" + assert request.content_type == "application/json" + assert request.get_data() == body + + def test_deserialize_request_with_custom_headers(self): + raw_data = ( + b"GET /api/protected HTTP/1.1\r\n" + b"Host: api.example.com\r\n" + b"Authorization: Bearer token123\r\n" + b"X-Custom-Header: custom-value\r\n" + b"User-Agent: TestClient/1.0\r\n" + b"\r\n" + ) + + request = deserialize_request(raw_data) + + assert request.method == "GET" + assert request.headers.get("Authorization") == "Bearer token123" + assert request.headers.get("X-Custom-Header") == "custom-value" + assert request.headers.get("User-Agent") == "TestClient/1.0" + + def test_deserialize_request_with_multiline_body(self): + body = b"line1\r\nline2\r\nline3" + raw_data = b"PUT /api/text HTTP/1.1\r\nHost: localhost\r\nContent-Type: text/plain\r\n\r\n" + body + + request = deserialize_request(raw_data) + + assert request.method == "PUT" + assert request.get_data() == body + + def test_deserialize_invalid_request_line(self): + raw_data = b"INVALID\r\n\r\n" # Only one part, should fail + + with pytest.raises(ValueError, match="Invalid request line"): + deserialize_request(raw_data) + + def test_roundtrip_request(self): + # Test that serialize -> deserialize produces equivalent request + from io import BytesIO + + body = b"test body content" + environ = { + "REQUEST_METHOD": "POST", + "PATH_INFO": "/api/echo", + "QUERY_STRING": "format=json", + "SERVER_NAME": "localhost", + "SERVER_PORT": "8080", + "wsgi.input": BytesIO(body), + "wsgi.url_scheme": "http", + "CONTENT_LENGTH": str(len(body)), + "CONTENT_TYPE": "text/plain", + "HTTP_CONTENT_TYPE": "text/plain", + "HTTP_X_REQUEST_ID": "req-123", + } + original_request = Request(environ) + + # Serialize and deserialize + raw_data = serialize_request(original_request) + restored_request = deserialize_request(raw_data) + + # Verify key properties are preserved + assert restored_request.method == original_request.method + assert restored_request.path == original_request.path + assert restored_request.query_string == original_request.query_string + assert restored_request.get_data() == body + assert restored_request.headers.get("X-Request-Id") == "req-123" + + +class TestSerializeResponse: + def test_serialize_simple_response(self): + response = Response("Hello, World!", status=200) + + raw_data = serialize_response(response) + + assert raw_data.startswith(b"HTTP/1.1 200 OK\r\n") + assert b"\r\n\r\n" in raw_data + assert raw_data.endswith(b"Hello, World!") + + def test_serialize_response_with_headers(self): + response = Response( + '{"status": "success"}', + status=201, + headers={ + "Content-Type": "application/json", + "X-Request-Id": "req-456", + }, + ) + + raw_data = serialize_response(response) + + assert b"HTTP/1.1 201 CREATED\r\n" in raw_data + assert b"Content-Type: application/json" in raw_data + assert b"X-Request-Id: req-456" in raw_data + assert raw_data.endswith(b'{"status": "success"}') + + def 
test_serialize_error_response(self): + response = Response( + "Not Found", + status=404, + headers={"Content-Type": "text/plain"}, + ) + + raw_data = serialize_response(response) + + assert b"HTTP/1.1 404 NOT FOUND\r\n" in raw_data + assert b"Content-Type: text/plain" in raw_data + assert raw_data.endswith(b"Not Found") + + def test_serialize_response_without_body(self): + response = Response(status=204) # No Content + + raw_data = serialize_response(response) + + assert b"HTTP/1.1 204 NO CONTENT\r\n" in raw_data + assert raw_data.endswith(b"\r\n\r\n") # Should end with empty line + + def test_serialize_response_with_binary_body(self): + binary_data = b"\x00\x01\x02\x03\x04\x05" + response = Response( + binary_data, + status=200, + headers={"Content-Type": "application/octet-stream"}, + ) + + raw_data = serialize_response(response) + + assert b"HTTP/1.1 200 OK\r\n" in raw_data + assert b"Content-Type: application/octet-stream" in raw_data + assert raw_data.endswith(binary_data) + + +class TestDeserializeResponse: + def test_deserialize_simple_response(self): + raw_data = b"HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\nHello, World!" + + response = deserialize_response(raw_data) + + assert response.status_code == 200 + assert response.get_data() == b"Hello, World!" + assert response.headers.get("Content-Type") == "text/plain" + + def test_deserialize_response_with_json(self): + body = b'{"result": "success", "data": [1, 2, 3]}' + raw_data = ( + b"HTTP/1.1 201 Created\r\n" + b"Content-Type: application/json\r\n" + b"Content-Length: " + str(len(body)).encode() + b"\r\n" + b"X-Custom-Header: test-value\r\n" + b"\r\n" + body + ) + + response = deserialize_response(raw_data) + + assert response.status_code == 201 + assert response.get_data() == body + assert response.headers.get("Content-Type") == "application/json" + assert response.headers.get("X-Custom-Header") == "test-value" + + def test_deserialize_error_response(self): + raw_data = b"HTTP/1.1 404 Not Found\r\nContent-Type: text/html\r\n\r\nPage not found" + + response = deserialize_response(raw_data) + + assert response.status_code == 404 + assert response.get_data() == b"Page not found" + + def test_deserialize_response_without_body(self): + raw_data = b"HTTP/1.1 204 No Content\r\n\r\n" + + response = deserialize_response(raw_data) + + assert response.status_code == 204 + assert response.get_data() == b"" + + def test_deserialize_response_with_multiline_body(self): + body = b"Line 1\r\nLine 2\r\nLine 3" + raw_data = b"HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\n" + body + + response = deserialize_response(raw_data) + + assert response.status_code == 200 + assert response.get_data() == body + + def test_deserialize_response_minimal_status_line(self): + # Test with minimal status line (no status text) + raw_data = b"HTTP/1.1 200\r\n\r\nOK" + + response = deserialize_response(raw_data) + + assert response.status_code == 200 + assert response.get_data() == b"OK" + + def test_deserialize_invalid_status_line(self): + raw_data = b"INVALID\r\n\r\n" + + with pytest.raises(ValueError, match="Invalid status line"): + deserialize_response(raw_data) + + def test_roundtrip_response(self): + # Test that serialize -> deserialize produces equivalent response + original_response = Response( + '{"message": "test"}', + status=200, + headers={ + "Content-Type": "application/json", + "X-Request-Id": "abc-123", + "Cache-Control": "no-cache", + }, + ) + + # Serialize and deserialize + raw_data = serialize_response(original_response) + 
restored_response = deserialize_response(raw_data) + + # Verify key properties are preserved + assert restored_response.status_code == original_response.status_code + assert restored_response.get_data() == original_response.get_data() + assert restored_response.headers.get("Content-Type") == "application/json" + assert restored_response.headers.get("X-Request-Id") == "abc-123" + assert restored_response.headers.get("Cache-Control") == "no-cache" + + +class TestEdgeCases: + def test_request_with_empty_headers(self): + raw_data = b"GET / HTTP/1.1\r\n\r\n" + + request = deserialize_request(raw_data) + + assert request.method == "GET" + assert request.path == "/" + + def test_response_with_empty_headers(self): + raw_data = b"HTTP/1.1 200 OK\r\n\r\nSuccess" + + response = deserialize_response(raw_data) + + assert response.status_code == 200 + assert response.get_data() == b"Success" + + def test_request_with_special_characters_in_path(self): + raw_data = b"GET /api/test%20path?key=%26value HTTP/1.1\r\n\r\n" + + request = deserialize_request(raw_data) + + assert request.method == "GET" + assert "/api/test%20path" in request.full_path + + def test_response_with_binary_content(self): + binary_body = bytes(range(256)) # All possible byte values + raw_data = b"HTTP/1.1 200 OK\r\nContent-Type: application/octet-stream\r\n\r\n" + binary_body + + response = deserialize_response(raw_data) + + assert response.status_code == 200 + assert response.get_data() == binary_body + + +class TestFileUploads: + def test_serialize_request_with_text_file_upload(self): + # Test multipart/form-data request with text file + from io import BytesIO + + boundary = "----WebKitFormBoundary7MA4YWxkTrZu0gW" + text_content = "Hello, this is a test file content!\nWith multiple lines." + body = ( + f"------{boundary}\r\n" + f'Content-Disposition: form-data; name="file"; filename="test.txt"\r\n' + f"Content-Type: text/plain\r\n" + f"\r\n" + f"{text_content}\r\n" + f"------{boundary}\r\n" + f'Content-Disposition: form-data; name="description"\r\n' + f"\r\n" + f"Test file upload\r\n" + f"------{boundary}--\r\n" + ).encode() + + environ = { + "REQUEST_METHOD": "POST", + "PATH_INFO": "/api/upload", + "QUERY_STRING": "", + "SERVER_NAME": "localhost", + "SERVER_PORT": "8000", + "wsgi.input": BytesIO(body), + "wsgi.url_scheme": "http", + "CONTENT_LENGTH": str(len(body)), + "CONTENT_TYPE": f"multipart/form-data; boundary={boundary}", + "HTTP_CONTENT_TYPE": f"multipart/form-data; boundary={boundary}", + } + request = Request(environ) + + raw_data = serialize_request(request) + + assert b"POST /api/upload HTTP/1.1\r\n" in raw_data + assert f"Content-Type: multipart/form-data; boundary={boundary}".encode() in raw_data + assert b'Content-Disposition: form-data; name="file"; filename="test.txt"' in raw_data + assert text_content.encode() in raw_data + + def test_deserialize_request_with_text_file_upload(self): + # Test deserializing multipart/form-data request with text file + boundary = "----WebKitFormBoundary7MA4YWxkTrZu0gW" + text_content = "Sample text file content\nLine 2\nLine 3" + body = ( + f"------{boundary}\r\n" + f'Content-Disposition: form-data; name="document"; filename="document.txt"\r\n' + f"Content-Type: text/plain\r\n" + f"\r\n" + f"{text_content}\r\n" + f"------{boundary}\r\n" + f'Content-Disposition: form-data; name="title"\r\n' + f"\r\n" + f"My Document\r\n" + f"------{boundary}--\r\n" + ).encode() + + raw_data = ( + b"POST /api/documents HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Content-Type: multipart/form-data; 
boundary=" + boundary.encode() + b"\r\n" + b"Content-Length: " + str(len(body)).encode() + b"\r\n" + b"\r\n" + body + ) + + request = deserialize_request(raw_data) + + assert request.method == "POST" + assert request.path == "/api/documents" + assert "multipart/form-data" in request.content_type + # The body should contain the multipart data + request_body = request.get_data() + assert b"document.txt" in request_body + assert text_content.encode() in request_body + + def test_serialize_request_with_binary_file_upload(self): + # Test multipart/form-data request with binary file (e.g., image) + from io import BytesIO + + boundary = "----BoundaryString123" + # Simulate a small PNG file header + binary_content = b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10" + + # Build multipart body + body_parts = [] + body_parts.append(f"------{boundary}".encode()) + body_parts.append(b'Content-Disposition: form-data; name="image"; filename="test.png"') + body_parts.append(b"Content-Type: image/png") + body_parts.append(b"") + body_parts.append(binary_content) + body_parts.append(f"------{boundary}".encode()) + body_parts.append(b'Content-Disposition: form-data; name="caption"') + body_parts.append(b"") + body_parts.append(b"Test image") + body_parts.append(f"------{boundary}--".encode()) + + body = b"\r\n".join(body_parts) + + environ = { + "REQUEST_METHOD": "POST", + "PATH_INFO": "/api/images", + "QUERY_STRING": "", + "SERVER_NAME": "localhost", + "SERVER_PORT": "8000", + "wsgi.input": BytesIO(body), + "wsgi.url_scheme": "http", + "CONTENT_LENGTH": str(len(body)), + "CONTENT_TYPE": f"multipart/form-data; boundary={boundary}", + "HTTP_CONTENT_TYPE": f"multipart/form-data; boundary={boundary}", + } + request = Request(environ) + + raw_data = serialize_request(request) + + assert b"POST /api/images HTTP/1.1\r\n" in raw_data + assert f"Content-Type: multipart/form-data; boundary={boundary}".encode() in raw_data + assert b'filename="test.png"' in raw_data + assert b"Content-Type: image/png" in raw_data + assert binary_content in raw_data + + def test_deserialize_request_with_binary_file_upload(self): + # Test deserializing multipart/form-data request with binary file + boundary = "----BoundaryABC123" + # Simulate a small JPEG file header + binary_content = b"\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\x00\x00\x01\x00\x01\x00\x00" + + body_parts = [] + body_parts.append(f"------{boundary}".encode()) + body_parts.append(b'Content-Disposition: form-data; name="photo"; filename="photo.jpg"') + body_parts.append(b"Content-Type: image/jpeg") + body_parts.append(b"") + body_parts.append(binary_content) + body_parts.append(f"------{boundary}".encode()) + body_parts.append(b'Content-Disposition: form-data; name="album"') + body_parts.append(b"") + body_parts.append(b"Vacation 2024") + body_parts.append(f"------{boundary}--".encode()) + + body = b"\r\n".join(body_parts) + + raw_data = ( + b"POST /api/photos HTTP/1.1\r\n" + b"Host: api.example.com\r\n" + b"Content-Type: multipart/form-data; boundary=" + boundary.encode() + b"\r\n" + b"Content-Length: " + str(len(body)).encode() + b"\r\n" + b"Accept: application/json\r\n" + b"\r\n" + body + ) + + request = deserialize_request(raw_data) + + assert request.method == "POST" + assert request.path == "/api/photos" + assert "multipart/form-data" in request.content_type + assert request.headers.get("Accept") == "application/json" + + # Verify the binary content is preserved + request_body = request.get_data() + assert b"photo.jpg" in request_body + assert 
b"image/jpeg" in request_body + assert binary_content in request_body + assert b"Vacation 2024" in request_body + + def test_serialize_request_with_multiple_files(self): + # Test request with multiple file uploads + from io import BytesIO + + boundary = "----MultiFilesBoundary" + text_file = b"Text file contents" + binary_file = b"\x00\x01\x02\x03\x04\x05" + + body_parts = [] + # First file (text) + body_parts.append(f"------{boundary}".encode()) + body_parts.append(b'Content-Disposition: form-data; name="files"; filename="doc.txt"') + body_parts.append(b"Content-Type: text/plain") + body_parts.append(b"") + body_parts.append(text_file) + # Second file (binary) + body_parts.append(f"------{boundary}".encode()) + body_parts.append(b'Content-Disposition: form-data; name="files"; filename="data.bin"') + body_parts.append(b"Content-Type: application/octet-stream") + body_parts.append(b"") + body_parts.append(binary_file) + # Additional form field + body_parts.append(f"------{boundary}".encode()) + body_parts.append(b'Content-Disposition: form-data; name="folder"') + body_parts.append(b"") + body_parts.append(b"uploads/2024") + body_parts.append(f"------{boundary}--".encode()) + + body = b"\r\n".join(body_parts) + + environ = { + "REQUEST_METHOD": "POST", + "PATH_INFO": "/api/batch-upload", + "QUERY_STRING": "", + "SERVER_NAME": "localhost", + "SERVER_PORT": "8000", + "wsgi.input": BytesIO(body), + "wsgi.url_scheme": "https", + "CONTENT_LENGTH": str(len(body)), + "CONTENT_TYPE": f"multipart/form-data; boundary={boundary}", + "HTTP_CONTENT_TYPE": f"multipart/form-data; boundary={boundary}", + "HTTP_X_FORWARDED_PROTO": "https", + } + request = Request(environ) + + raw_data = serialize_request(request) + + assert b"POST /api/batch-upload HTTP/1.1\r\n" in raw_data + assert b"doc.txt" in raw_data + assert b"data.bin" in raw_data + assert text_file in raw_data + assert binary_file in raw_data + assert b"uploads/2024" in raw_data + + def test_roundtrip_file_upload_request(self): + # Test that file upload request survives serialize -> deserialize + from io import BytesIO + + boundary = "----RoundTripBoundary" + file_content = b"This is my file content with special chars: \xf0\x9f\x98\x80" + + body_parts = [] + body_parts.append(f"------{boundary}".encode()) + body_parts.append(b'Content-Disposition: form-data; name="upload"; filename="emoji.txt"') + body_parts.append(b"Content-Type: text/plain; charset=utf-8") + body_parts.append(b"") + body_parts.append(file_content) + body_parts.append(f"------{boundary}".encode()) + body_parts.append(b'Content-Disposition: form-data; name="metadata"') + body_parts.append(b"") + body_parts.append(b'{"encoding": "utf-8", "size": 42}') + body_parts.append(f"------{boundary}--".encode()) + + body = b"\r\n".join(body_parts) + + environ = { + "REQUEST_METHOD": "PUT", + "PATH_INFO": "/api/files/123", + "QUERY_STRING": "version=2", + "SERVER_NAME": "storage.example.com", + "SERVER_PORT": "443", + "wsgi.input": BytesIO(body), + "wsgi.url_scheme": "https", + "CONTENT_LENGTH": str(len(body)), + "CONTENT_TYPE": f"multipart/form-data; boundary={boundary}", + "HTTP_CONTENT_TYPE": f"multipart/form-data; boundary={boundary}", + "HTTP_AUTHORIZATION": "Bearer token123", + "HTTP_X_FORWARDED_PROTO": "https", + } + original_request = Request(environ) + + # Serialize and deserialize + raw_data = serialize_request(original_request) + restored_request = deserialize_request(raw_data) + + # Verify the request is preserved + assert restored_request.method == "PUT" + assert 
restored_request.path == "/api/files/123" + assert restored_request.query_string == b"version=2" + assert "multipart/form-data" in restored_request.content_type + assert boundary in restored_request.content_type + + # Verify file content is preserved + restored_body = restored_request.get_data() + assert b"emoji.txt" in restored_body + assert file_content in restored_body + assert b'{"encoding": "utf-8", "size": 42}' in restored_body diff --git a/api/tests/unit_tests/core/tools/utils/test_encryption.py b/api/tests/unit_tests/core/tools/utils/test_encryption.py index 6425ab0b8d..3b7c1f5678 100644 --- a/api/tests/unit_tests/core/tools/utils/test_encryption.py +++ b/api/tests/unit_tests/core/tools/utils/test_encryption.py @@ -70,7 +70,7 @@ def test_encrypt_only_secret_is_encrypted_and_non_secret_unchanged(encrypter_obj data_in = {"username": "alice", "password": "plain_pwd"} data_copy = copy.deepcopy(data_in) - with patch("core.tools.utils.encryption.encrypter.encrypt_token", return_value="CIPHERTEXT") as mock_encrypt: + with patch("core.helper.provider_encryption.encrypter.encrypt_token", return_value="CIPHERTEXT") as mock_encrypt: out = encrypter_obj.encrypt(data_in) assert out["username"] == "alice" @@ -81,7 +81,7 @@ def test_encrypt_only_secret_is_encrypted_and_non_secret_unchanged(encrypter_obj def test_encrypt_missing_secret_key_is_ok(encrypter_obj): """If secret field missing in input, no error and no encryption called.""" - with patch("core.tools.utils.encryption.encrypter.encrypt_token") as mock_encrypt: + with patch("core.helper.provider_encryption.encrypter.encrypt_token") as mock_encrypt: out = encrypter_obj.encrypt({"username": "alice"}) assert out["username"] == "alice" mock_encrypt.assert_not_called() @@ -151,7 +151,7 @@ def test_decrypt_normal_flow(encrypter_obj): data_in = {"username": "alice", "password": "ENC"} data_copy = copy.deepcopy(data_in) - with patch("core.tools.utils.encryption.encrypter.decrypt_token", return_value="PLAIN") as mock_decrypt: + with patch("core.helper.provider_encryption.encrypter.decrypt_token", return_value="PLAIN") as mock_decrypt: out = encrypter_obj.decrypt(data_in) assert out["username"] == "alice" @@ -163,7 +163,7 @@ def test_decrypt_normal_flow(encrypter_obj): @pytest.mark.parametrize("empty_val", ["", None]) def test_decrypt_skip_empty_values(encrypter_obj, empty_val): """Skip decrypt if value is empty or None, keep original.""" - with patch("core.tools.utils.encryption.encrypter.decrypt_token") as mock_decrypt: + with patch("core.helper.provider_encryption.encrypter.decrypt_token") as mock_decrypt: out = encrypter_obj.decrypt({"password": empty_val}) mock_decrypt.assert_not_called() @@ -175,7 +175,7 @@ def test_decrypt_swallow_exception_and_keep_original(encrypter_obj): If decrypt_token raises, exception should be swallowed, and original value preserved. 
""" - with patch("core.tools.utils.encryption.encrypter.decrypt_token", side_effect=Exception("boom")): + with patch("core.helper.provider_encryption.encrypter.decrypt_token", side_effect=Exception("boom")): out = encrypter_obj.decrypt({"password": "ENC_ERR"}) assert out["password"] == "ENC_ERR" diff --git a/api/tests/unit_tests/core/workflow/nodes/webhook/__init__.py b/api/tests/unit_tests/core/workflow/nodes/webhook/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/core/workflow/nodes/webhook/test_entities.py b/api/tests/unit_tests/core/workflow/nodes/webhook/test_entities.py new file mode 100644 index 0000000000..97e2a59578 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/nodes/webhook/test_entities.py @@ -0,0 +1,294 @@ +import pytest +from pydantic import ValidationError + +from core.workflow.nodes.trigger_webhook.entities import ( + ContentType, + Method, + WebhookBodyParameter, + WebhookData, + WebhookParameter, +) + + +def test_method_enum(): + """Test Method enum values.""" + assert Method.GET == "get" + assert Method.POST == "post" + assert Method.HEAD == "head" + assert Method.PATCH == "patch" + assert Method.PUT == "put" + assert Method.DELETE == "delete" + + # Test all enum values are strings + for method in Method: + assert isinstance(method.value, str) + + +def test_content_type_enum(): + """Test ContentType enum values.""" + assert ContentType.JSON == "application/json" + assert ContentType.FORM_DATA == "multipart/form-data" + assert ContentType.FORM_URLENCODED == "application/x-www-form-urlencoded" + assert ContentType.TEXT == "text/plain" + assert ContentType.FORM == "form" + + # Test all enum values are strings + for content_type in ContentType: + assert isinstance(content_type.value, str) + + +def test_webhook_parameter_creation(): + """Test WebhookParameter model creation and validation.""" + # Test with all fields + param = WebhookParameter(name="api_key", required=True) + assert param.name == "api_key" + assert param.required is True + + # Test with defaults + param_default = WebhookParameter(name="optional_param") + assert param_default.name == "optional_param" + assert param_default.required is False + + # Test validation - name is required + with pytest.raises(ValidationError): + WebhookParameter() + + +def test_webhook_body_parameter_creation(): + """Test WebhookBodyParameter model creation and validation.""" + # Test with all fields + body_param = WebhookBodyParameter( + name="user_data", + type="object", + required=True, + ) + assert body_param.name == "user_data" + assert body_param.type == "object" + assert body_param.required is True + + # Test with defaults + body_param_default = WebhookBodyParameter(name="message") + assert body_param_default.name == "message" + assert body_param_default.type == "string" # Default type + assert body_param_default.required is False + + # Test validation - name is required + with pytest.raises(ValidationError): + WebhookBodyParameter() + + +def test_webhook_body_parameter_types(): + """Test WebhookBodyParameter type validation.""" + valid_types = ["string", "number", "boolean", "object", "array", "file"] + + for param_type in valid_types: + param = WebhookBodyParameter(name="test", type=param_type) + assert param.type == param_type + + # Test invalid type + with pytest.raises(ValidationError): + WebhookBodyParameter(name="test", type="invalid_type") + + +def test_webhook_data_creation_minimal(): + """Test WebhookData creation with minimal required fields.""" + data = 
WebhookData(title="Test Webhook") + + assert data.title == "Test Webhook" + assert data.method == Method.GET # Default + assert data.content_type == ContentType.JSON # Default + assert data.headers == [] # Default + assert data.params == [] # Default + assert data.body == [] # Default + assert data.status_code == 200 # Default + assert data.response_body == "" # Default + assert data.webhook_id is None # Default + assert data.timeout == 30 # Default + + +def test_webhook_data_creation_full(): + """Test WebhookData creation with all fields.""" + headers = [ + WebhookParameter(name="Authorization", required=True), + WebhookParameter(name="Content-Type", required=False), + ] + params = [ + WebhookParameter(name="version", required=True), + WebhookParameter(name="format", required=False), + ] + body = [ + WebhookBodyParameter(name="message", type="string", required=True), + WebhookBodyParameter(name="count", type="number", required=False), + WebhookBodyParameter(name="upload", type="file", required=True), + ] + + # Use the alias for content_type to test it properly + data = WebhookData( + title="Full Webhook Test", + desc="A comprehensive webhook test", + method=Method.POST, + **{"content-type": ContentType.FORM_DATA}, + headers=headers, + params=params, + body=body, + status_code=201, + response_body='{"success": true}', + webhook_id="webhook_123", + timeout=60, + ) + + assert data.title == "Full Webhook Test" + assert data.desc == "A comprehensive webhook test" + assert data.method == Method.POST + assert data.content_type == ContentType.FORM_DATA + assert len(data.headers) == 2 + assert len(data.params) == 2 + assert len(data.body) == 3 + assert data.status_code == 201 + assert data.response_body == '{"success": true}' + assert data.webhook_id == "webhook_123" + assert data.timeout == 60 + + +def test_webhook_data_content_type_alias(): + """Test WebhookData content_type field alias.""" + # Test using the alias "content-type" + data1 = WebhookData(title="Test", **{"content-type": "application/json"}) + assert data1.content_type == ContentType.JSON + + # Test using the alias with enum value + data2 = WebhookData(title="Test", **{"content-type": ContentType.FORM_DATA}) + assert data2.content_type == ContentType.FORM_DATA + + # Test both approaches result in same field + assert hasattr(data1, "content_type") + assert hasattr(data2, "content_type") + + +def test_webhook_data_model_dump(): + """Test WebhookData model serialization.""" + data = WebhookData( + title="Test Webhook", + method=Method.POST, + content_type=ContentType.JSON, + headers=[WebhookParameter(name="Authorization", required=True)], + params=[WebhookParameter(name="version", required=False)], + body=[WebhookBodyParameter(name="message", type="string", required=True)], + status_code=200, + response_body="OK", + timeout=30, + ) + + dumped = data.model_dump() + + assert dumped["title"] == "Test Webhook" + assert dumped["method"] == "post" + assert dumped["content_type"] == "application/json" + assert len(dumped["headers"]) == 1 + assert dumped["headers"][0]["name"] == "Authorization" + assert dumped["headers"][0]["required"] is True + assert len(dumped["params"]) == 1 + assert len(dumped["body"]) == 1 + assert dumped["body"][0]["type"] == "string" + + +def test_webhook_data_model_dump_with_alias(): + """Test WebhookData model serialization includes alias.""" + data = WebhookData( + title="Test Webhook", + **{"content-type": ContentType.FORM_DATA}, + ) + + dumped = data.model_dump(by_alias=True) + assert "content-type" in dumped + 
assert dumped["content-type"] == "multipart/form-data" + + +def test_webhook_data_validation_errors(): + """Test WebhookData validation errors.""" + # Title is required (inherited from BaseNodeData) + with pytest.raises(ValidationError): + WebhookData() + + # Invalid method + with pytest.raises(ValidationError): + WebhookData(title="Test", method="invalid_method") + + # Invalid content_type via alias + with pytest.raises(ValidationError): + WebhookData(title="Test", **{"content-type": "invalid/type"}) + + # Invalid status_code (should be int) - use non-numeric string + with pytest.raises(ValidationError): + WebhookData(title="Test", status_code="invalid") + + # Invalid timeout (should be int) - use non-numeric string + with pytest.raises(ValidationError): + WebhookData(title="Test", timeout="invalid") + + # Valid cases that should NOT raise errors + # These should work fine (pydantic converts string numbers to int) + valid_data = WebhookData(title="Test", status_code="200", timeout="30") + assert valid_data.status_code == 200 + assert valid_data.timeout == 30 + + +def test_webhook_data_sequence_fields(): + """Test WebhookData sequence field behavior.""" + # Test empty sequences + data = WebhookData(title="Test") + assert data.headers == [] + assert data.params == [] + assert data.body == [] + + # Test immutable sequences + headers = [WebhookParameter(name="test")] + data = WebhookData(title="Test", headers=headers) + + # Original list shouldn't affect the model + headers.append(WebhookParameter(name="test2")) + assert len(data.headers) == 1 # Should still be 1 + + +def test_webhook_data_sync_mode(): + """Test WebhookData SyncMode nested enum.""" + # Test that SyncMode enum exists and has expected value + assert hasattr(WebhookData, "SyncMode") + assert WebhookData.SyncMode.SYNC == "async" # Note: confusingly named but correct + + +def test_webhook_parameter_edge_cases(): + """Test WebhookParameter edge cases.""" + # Test with special characters in name + param = WebhookParameter(name="X-Custom-Header-123", required=True) + assert param.name == "X-Custom-Header-123" + + # Test with empty string name (should be valid if pydantic allows it) + param_empty = WebhookParameter(name="", required=False) + assert param_empty.name == "" + + +def test_webhook_body_parameter_edge_cases(): + """Test WebhookBodyParameter edge cases.""" + # Test file type parameter + file_param = WebhookBodyParameter(name="upload", type="file", required=True) + assert file_param.type == "file" + assert file_param.required is True + + # Test all valid types + for param_type in ["string", "number", "boolean", "object", "array", "file"]: + param = WebhookBodyParameter(name=f"test_{param_type}", type=param_type) + assert param.type == param_type + + +def test_webhook_data_inheritance(): + """Test WebhookData inherits from BaseNodeData correctly.""" + from core.workflow.nodes.base import BaseNodeData + + # Test that WebhookData is a subclass of BaseNodeData + assert issubclass(WebhookData, BaseNodeData) + + # Test that instances have BaseNodeData properties + data = WebhookData(title="Test") + assert hasattr(data, "title") + assert hasattr(data, "desc") # Inherited from BaseNodeData diff --git a/api/tests/unit_tests/core/workflow/nodes/webhook/test_exceptions.py b/api/tests/unit_tests/core/workflow/nodes/webhook/test_exceptions.py new file mode 100644 index 0000000000..f59b6bd1ba --- /dev/null +++ b/api/tests/unit_tests/core/workflow/nodes/webhook/test_exceptions.py @@ -0,0 +1,195 @@ +import pytest + +from 
core.workflow.nodes.base.exc import BaseNodeError +from core.workflow.nodes.trigger_webhook.exc import ( + WebhookConfigError, + WebhookNodeError, + WebhookNotFoundError, + WebhookTimeoutError, +) + + +def test_webhook_node_error_inheritance(): + """Test WebhookNodeError inherits from BaseNodeError.""" + assert issubclass(WebhookNodeError, BaseNodeError) + + # Test instantiation + error = WebhookNodeError("Test error message") + assert str(error) == "Test error message" + assert isinstance(error, BaseNodeError) + + +def test_webhook_timeout_error(): + """Test WebhookTimeoutError functionality.""" + # Test inheritance + assert issubclass(WebhookTimeoutError, WebhookNodeError) + assert issubclass(WebhookTimeoutError, BaseNodeError) + + # Test instantiation with message + error = WebhookTimeoutError("Webhook request timed out") + assert str(error) == "Webhook request timed out" + + # Test instantiation without message + error_no_msg = WebhookTimeoutError() + assert isinstance(error_no_msg, WebhookTimeoutError) + + +def test_webhook_not_found_error(): + """Test WebhookNotFoundError functionality.""" + # Test inheritance + assert issubclass(WebhookNotFoundError, WebhookNodeError) + assert issubclass(WebhookNotFoundError, BaseNodeError) + + # Test instantiation with message + error = WebhookNotFoundError("Webhook trigger not found") + assert str(error) == "Webhook trigger not found" + + # Test instantiation without message + error_no_msg = WebhookNotFoundError() + assert isinstance(error_no_msg, WebhookNotFoundError) + + +def test_webhook_config_error(): + """Test WebhookConfigError functionality.""" + # Test inheritance + assert issubclass(WebhookConfigError, WebhookNodeError) + assert issubclass(WebhookConfigError, BaseNodeError) + + # Test instantiation with message + error = WebhookConfigError("Invalid webhook configuration") + assert str(error) == "Invalid webhook configuration" + + # Test instantiation without message + error_no_msg = WebhookConfigError() + assert isinstance(error_no_msg, WebhookConfigError) + + +def test_webhook_error_hierarchy(): + """Test the complete webhook error hierarchy.""" + # All webhook errors should inherit from WebhookNodeError + webhook_errors = [ + WebhookTimeoutError, + WebhookNotFoundError, + WebhookConfigError, + ] + + for error_class in webhook_errors: + assert issubclass(error_class, WebhookNodeError) + assert issubclass(error_class, BaseNodeError) + + +def test_webhook_error_instantiation_with_args(): + """Test webhook error instantiation with various arguments.""" + # Test with single string argument + error1 = WebhookNodeError("Simple error message") + assert str(error1) == "Simple error message" + + # Test with multiple arguments + error2 = WebhookTimeoutError("Timeout after", 30, "seconds") + # Note: The exact string representation depends on Exception.__str__ implementation + assert "Timeout after" in str(error2) + + # Test with keyword arguments (if supported by base Exception) + error3 = WebhookConfigError("Config error in field: timeout") + assert "Config error in field: timeout" in str(error3) + + +def test_webhook_error_as_exceptions(): + """Test that webhook errors can be raised and caught properly.""" + # Test raising and catching WebhookNodeError + with pytest.raises(WebhookNodeError) as exc_info: + raise WebhookNodeError("Base webhook error") + assert str(exc_info.value) == "Base webhook error" + + # Test raising and catching specific errors + with pytest.raises(WebhookTimeoutError) as exc_info: + raise WebhookTimeoutError("Request 
timeout") + assert str(exc_info.value) == "Request timeout" + + with pytest.raises(WebhookNotFoundError) as exc_info: + raise WebhookNotFoundError("Webhook not found") + assert str(exc_info.value) == "Webhook not found" + + with pytest.raises(WebhookConfigError) as exc_info: + raise WebhookConfigError("Invalid config") + assert str(exc_info.value) == "Invalid config" + + +def test_webhook_error_catching_hierarchy(): + """Test that webhook errors can be caught by their parent classes.""" + # WebhookTimeoutError should be catchable as WebhookNodeError + with pytest.raises(WebhookNodeError): + raise WebhookTimeoutError("Timeout error") + + # WebhookNotFoundError should be catchable as WebhookNodeError + with pytest.raises(WebhookNodeError): + raise WebhookNotFoundError("Not found error") + + # WebhookConfigError should be catchable as WebhookNodeError + with pytest.raises(WebhookNodeError): + raise WebhookConfigError("Config error") + + # All webhook errors should be catchable as BaseNodeError + with pytest.raises(BaseNodeError): + raise WebhookTimeoutError("Timeout as base error") + + with pytest.raises(BaseNodeError): + raise WebhookNotFoundError("Not found as base error") + + with pytest.raises(BaseNodeError): + raise WebhookConfigError("Config as base error") + + +def test_webhook_error_attributes(): + """Test webhook error class attributes.""" + # Test that all error classes have proper __name__ + assert WebhookNodeError.__name__ == "WebhookNodeError" + assert WebhookTimeoutError.__name__ == "WebhookTimeoutError" + assert WebhookNotFoundError.__name__ == "WebhookNotFoundError" + assert WebhookConfigError.__name__ == "WebhookConfigError" + + # Test that all error classes have proper __module__ + expected_module = "core.workflow.nodes.webhook.exc" + assert WebhookNodeError.__module__ == expected_module + assert WebhookTimeoutError.__module__ == expected_module + assert WebhookNotFoundError.__module__ == expected_module + assert WebhookConfigError.__module__ == expected_module + + +def test_webhook_error_docstrings(): + """Test webhook error class docstrings.""" + assert WebhookNodeError.__doc__ == "Base webhook node error." + assert WebhookTimeoutError.__doc__ == "Webhook timeout error." + assert WebhookNotFoundError.__doc__ == "Webhook not found error." + assert WebhookConfigError.__doc__ == "Webhook configuration error." 
+ + +def test_webhook_error_repr_and_str(): + """Test webhook error string representations.""" + error = WebhookNodeError("Test message") + + # Test __str__ method + assert str(error) == "Test message" + + # Test __repr__ method (should include class name) + repr_str = repr(error) + assert "WebhookNodeError" in repr_str + assert "Test message" in repr_str + + +def test_webhook_error_with_no_message(): + """Test webhook errors with no message.""" + # Test that errors can be instantiated without messages + errors = [ + WebhookNodeError(), + WebhookTimeoutError(), + WebhookNotFoundError(), + WebhookConfigError(), + ] + + for error in errors: + # Should be instances of their respective classes + assert isinstance(error, type(error)) + # Should be able to be raised + with pytest.raises(type(error)): + raise error diff --git a/api/tests/unit_tests/core/workflow/nodes/webhook/test_webhook_node.py b/api/tests/unit_tests/core/workflow/nodes/webhook/test_webhook_node.py new file mode 100644 index 0000000000..627b9d73da --- /dev/null +++ b/api/tests/unit_tests/core/workflow/nodes/webhook/test_webhook_node.py @@ -0,0 +1,481 @@ +import pytest + +from core.app.entities.app_invoke_entities import InvokeFrom +from core.file import File, FileTransferMethod, FileType +from core.variables import StringVariable +from core.workflow.entities.variable_pool import VariablePool +from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus +from core.workflow.graph_engine import Graph, GraphInitParams, GraphRuntimeState +from core.workflow.nodes.answer import AnswerStreamGenerateRoute +from core.workflow.nodes.end import EndStreamParam +from core.workflow.nodes.trigger_webhook.entities import ( + ContentType, + Method, + WebhookBodyParameter, + WebhookData, + WebhookParameter, +) +from core.workflow.nodes.trigger_webhook.node import TriggerWebhookNode +from core.workflow.system_variable import SystemVariable +from models.enums import UserFrom +from models.workflow import WorkflowType + + +def create_webhook_node(webhook_data: WebhookData, variable_pool: VariablePool) -> TriggerWebhookNode: + """Helper function to create a webhook node with proper initialization.""" + node_config = { + "id": "1", + "data": webhook_data.model_dump(), + } + + node = TriggerWebhookNode( + id="1", + config=node_config, + graph_init_params=GraphInitParams( + tenant_id="1", + app_id="1", + workflow_type=WorkflowType.WORKFLOW, + workflow_id="1", + graph_config={}, + user_id="1", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.SERVICE_API, + call_depth=0, + ), + graph=Graph( + root_node_id="1", + answer_stream_generate_routes=AnswerStreamGenerateRoute( + answer_dependencies={}, + answer_generate_route={}, + ), + end_stream_param=EndStreamParam( + end_dependencies={}, + end_stream_variable_selector_mapping={}, + ), + ), + graph_runtime_state=GraphRuntimeState( + variable_pool=variable_pool, + start_at=0, + ), + ) + + node.init_node_data(node_config["data"]) + return node + + +def test_webhook_node_basic_initialization(): + """Test basic webhook node initialization and configuration.""" + data = WebhookData( + title="Test Webhook", + method=Method.POST, + content_type=ContentType.JSON, + headers=[WebhookParameter(name="X-API-Key", required=True)], + params=[WebhookParameter(name="version", required=False)], + body=[WebhookBodyParameter(name="message", type="string", required=True)], + status_code=200, + response_body="OK", + timeout=30, + ) + + variable_pool = VariablePool( + 
system_variables=SystemVariable.empty(), + user_inputs={}, + ) + + node = create_webhook_node(data, variable_pool) + + assert node._node_type.value == "webhook" + assert node.version() == "1" + assert node._get_title() == "Test Webhook" + assert node._node_data.method == Method.POST + assert node._node_data.content_type == ContentType.JSON + assert len(node._node_data.headers) == 1 + assert len(node._node_data.params) == 1 + assert len(node._node_data.body) == 1 + + +def test_webhook_node_default_config(): + """Test webhook node default configuration.""" + config = TriggerWebhookNode.get_default_config() + + assert config["type"] == "webhook" + assert config["config"]["method"] == "get" + assert config["config"]["content-type"] == "application/json" + assert config["config"]["headers"] == [] + assert config["config"]["params"] == [] + assert config["config"]["body"] == [] + assert config["config"]["async_mode"] is True + assert config["config"]["status_code"] == 200 + assert config["config"]["response_body"] == "" + assert config["config"]["timeout"] == 30 + + +def test_webhook_node_run_with_headers(): + """Test webhook node execution with header extraction.""" + data = WebhookData( + title="Test Webhook", + headers=[ + WebhookParameter(name="Authorization", required=True), + WebhookParameter(name="Content-Type", required=False), + ], + ) + + variable_pool = VariablePool( + system_variables=SystemVariable.empty(), + user_inputs={ + "webhook_data": { + "headers": { + "Authorization": "Bearer token123", + "content-type": "application/json", # Different case + "X-Custom": "custom-value", + }, + "query_params": {}, + "body": {}, + "files": {}, + } + }, + ) + + node = create_webhook_node(data, variable_pool) + result = node._run() + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert result.outputs["Authorization"] == "Bearer token123" + assert result.outputs["Content-Type"] == "application/json" # Case-insensitive match + assert "_webhook_raw" in result.outputs + + +def test_webhook_node_run_with_query_params(): + """Test webhook node execution with query parameter extraction.""" + data = WebhookData( + title="Test Webhook", + params=[ + WebhookParameter(name="page", required=True), + WebhookParameter(name="limit", required=False), + WebhookParameter(name="missing", required=False), + ], + ) + + variable_pool = VariablePool( + system_variables=SystemVariable.empty(), + user_inputs={ + "webhook_data": { + "headers": {}, + "query_params": { + "page": "1", + "limit": "10", + }, + "body": {}, + "files": {}, + } + }, + ) + + node = create_webhook_node(data, variable_pool) + result = node._run() + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert result.outputs["page"] == "1" + assert result.outputs["limit"] == "10" + assert result.outputs["missing"] is None # Missing parameter should be None + + +def test_webhook_node_run_with_body_params(): + """Test webhook node execution with body parameter extraction.""" + data = WebhookData( + title="Test Webhook", + body=[ + WebhookBodyParameter(name="message", type="string", required=True), + WebhookBodyParameter(name="count", type="number", required=False), + WebhookBodyParameter(name="active", type="boolean", required=False), + WebhookBodyParameter(name="metadata", type="object", required=False), + ], + ) + + variable_pool = VariablePool( + system_variables=SystemVariable.empty(), + user_inputs={ + "webhook_data": { + "headers": {}, + "query_params": {}, + "body": { + "message": "Hello World", + "count": 42, + 
"active": True, + "metadata": {"key": "value"}, + }, + "files": {}, + } + }, + ) + + node = create_webhook_node(data, variable_pool) + result = node._run() + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert result.outputs["message"] == "Hello World" + assert result.outputs["count"] == 42 + assert result.outputs["active"] is True + assert result.outputs["metadata"] == {"key": "value"} + + +def test_webhook_node_run_with_file_params(): + """Test webhook node execution with file parameter extraction.""" + # Create mock file objects + file1 = File( + tenant_id="1", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.LOCAL_FILE, + related_id="file1", + filename="image.jpg", + mime_type="image/jpeg", + storage_key="", + ) + + file2 = File( + tenant_id="1", + type=FileType.DOCUMENT, + transfer_method=FileTransferMethod.LOCAL_FILE, + related_id="file2", + filename="document.pdf", + mime_type="application/pdf", + storage_key="", + ) + + data = WebhookData( + title="Test Webhook", + body=[ + WebhookBodyParameter(name="upload", type="file", required=True), + WebhookBodyParameter(name="document", type="file", required=False), + WebhookBodyParameter(name="missing_file", type="file", required=False), + ], + ) + + variable_pool = VariablePool( + system_variables=SystemVariable.empty(), + user_inputs={ + "webhook_data": { + "headers": {}, + "query_params": {}, + "body": {}, + "files": { + "upload": file1, + "document": file2, + }, + } + }, + ) + + node = create_webhook_node(data, variable_pool) + result = node._run() + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert result.outputs["upload"] == file1 + assert result.outputs["document"] == file2 + assert result.outputs["missing_file"] is None + + +def test_webhook_node_run_mixed_parameters(): + """Test webhook node execution with mixed parameter types.""" + file_obj = File( + tenant_id="1", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.LOCAL_FILE, + related_id="file1", + filename="test.jpg", + mime_type="image/jpeg", + storage_key="", + ) + + data = WebhookData( + title="Test Webhook", + headers=[WebhookParameter(name="Authorization", required=True)], + params=[WebhookParameter(name="version", required=False)], + body=[ + WebhookBodyParameter(name="message", type="string", required=True), + WebhookBodyParameter(name="upload", type="file", required=False), + ], + ) + + variable_pool = VariablePool( + system_variables=SystemVariable.empty(), + user_inputs={ + "webhook_data": { + "headers": {"Authorization": "Bearer token"}, + "query_params": {"version": "v1"}, + "body": {"message": "Test message"}, + "files": {"upload": file_obj}, + } + }, + ) + + node = create_webhook_node(data, variable_pool) + result = node._run() + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert result.outputs["Authorization"] == "Bearer token" + assert result.outputs["version"] == "v1" + assert result.outputs["message"] == "Test message" + assert result.outputs["upload"] == file_obj + assert "_webhook_raw" in result.outputs + + +def test_webhook_node_run_empty_webhook_data(): + """Test webhook node execution with empty webhook data.""" + data = WebhookData( + title="Test Webhook", + headers=[WebhookParameter(name="Authorization", required=False)], + params=[WebhookParameter(name="page", required=False)], + body=[WebhookBodyParameter(name="message", type="string", required=False)], + ) + + variable_pool = VariablePool( + system_variables=SystemVariable.empty(), + user_inputs={}, # No 
webhook_data + ) + + node = create_webhook_node(data, variable_pool) + result = node._run() + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert result.outputs["Authorization"] is None + assert result.outputs["page"] is None + assert result.outputs["message"] is None + assert result.outputs["_webhook_raw"] == {} + + +def test_webhook_node_run_case_insensitive_headers(): + """Test webhook node header extraction is case-insensitive.""" + data = WebhookData( + title="Test Webhook", + headers=[ + WebhookParameter(name="Content-Type", required=True), + WebhookParameter(name="X-API-KEY", required=True), + WebhookParameter(name="authorization", required=True), + ], + ) + + variable_pool = VariablePool( + system_variables=SystemVariable.empty(), + user_inputs={ + "webhook_data": { + "headers": { + "content-type": "application/json", # lowercase + "x-api-key": "key123", # lowercase + "Authorization": "Bearer token", # different case + }, + "query_params": {}, + "body": {}, + "files": {}, + } + }, + ) + + node = create_webhook_node(data, variable_pool) + result = node._run() + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert result.outputs["Content-Type"] == "application/json" + assert result.outputs["X-API-KEY"] == "key123" + assert result.outputs["authorization"] == "Bearer token" + + +def test_webhook_node_variable_pool_user_inputs(): + """Test that webhook node uses user_inputs from variable pool correctly.""" + data = WebhookData(title="Test Webhook") + + # Add some additional variables to the pool + variable_pool = VariablePool( + system_variables=SystemVariable.empty(), + user_inputs={ + "webhook_data": {"headers": {}, "query_params": {}, "body": {}, "files": {}}, + "other_var": "should_be_included", + }, + ) + variable_pool.add(["node1", "extra"], StringVariable(name="extra", value="extra_value")) + + node = create_webhook_node(data, variable_pool) + result = node._run() + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + # Check that all user_inputs are included in the inputs (they get converted to dict) + inputs_dict = dict(result.inputs) + assert "webhook_data" in inputs_dict + assert "other_var" in inputs_dict + assert inputs_dict["other_var"] == "should_be_included" + + +@pytest.mark.parametrize( + "method", + [Method.GET, Method.POST, Method.PUT, Method.DELETE, Method.PATCH, Method.HEAD], +) +def test_webhook_node_different_methods(method): + """Test webhook node with different HTTP methods.""" + data = WebhookData( + title="Test Webhook", + method=method, + ) + + variable_pool = VariablePool( + system_variables=SystemVariable.empty(), + user_inputs={ + "webhook_data": { + "headers": {}, + "query_params": {}, + "body": {}, + "files": {}, + } + }, + ) + + node = create_webhook_node(data, variable_pool) + result = node._run() + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert node._node_data.method == method + + +def test_webhook_data_alias_content_type(): + """Test that content-type field alias works correctly.""" + # Test both ways of setting content_type + data1 = WebhookData(title="Test", **{"content-type": "application/json"}) + assert data1.content_type == ContentType.JSON + + data2 = WebhookData(title="Test", **{"content-type": ContentType.FORM_DATA}) + assert data2.content_type == ContentType.FORM_DATA + + +def test_webhook_parameter_models(): + """Test webhook parameter model validation.""" + # Test WebhookParameter + param = WebhookParameter(name="test_param", required=True) + assert 
param.name == "test_param" + assert param.required is True + + param_default = WebhookParameter(name="test_param") + assert param_default.required is False + + # Test WebhookBodyParameter + body_param = WebhookBodyParameter(name="test_body", type="string", required=True) + assert body_param.name == "test_body" + assert body_param.type == "string" + assert body_param.required is True + + body_param_default = WebhookBodyParameter(name="test_body") + assert body_param_default.type == "string" # Default type + assert body_param_default.required is False + + +def test_webhook_data_field_defaults(): + """Test webhook data model field defaults.""" + data = WebhookData(title="Minimal Webhook") + + assert data.method == Method.GET + assert data.content_type == ContentType.JSON + assert data.headers == [] + assert data.params == [] + assert data.body == [] + assert data.status_code == 200 + assert data.response_body == "" + assert data.webhook_id is None + assert data.timeout == 30 diff --git a/api/tests/unit_tests/extensions/test_celery_ssl.py b/api/tests/unit_tests/extensions/test_celery_ssl.py index bc46fe8322..d33b7eaf23 100644 --- a/api/tests/unit_tests/extensions/test_celery_ssl.py +++ b/api/tests/unit_tests/extensions/test_celery_ssl.py @@ -131,6 +131,10 @@ class TestCelerySSLConfiguration: mock_config.ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK = False mock_config.ENABLE_DATASETS_QUEUE_MONITOR = False mock_config.ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK = False + mock_config.ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK = False + mock_config.WORKFLOW_SCHEDULE_POLLER_INTERVAL = 1 + mock_config.WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE = 100 + mock_config.WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK = 0 with patch("extensions.ext_celery.dify_config", mock_config): from dify_app import DifyApp diff --git a/api/tests/unit_tests/libs/test_cron_compatibility.py b/api/tests/unit_tests/libs/test_cron_compatibility.py new file mode 100644 index 0000000000..b696b32505 --- /dev/null +++ b/api/tests/unit_tests/libs/test_cron_compatibility.py @@ -0,0 +1,386 @@ +""" +Enhanced cron syntax compatibility tests for croniter backend. + +This test suite mirrors the frontend cron-parser tests to ensure +complete compatibility between frontend and backend cron processing. 
+""" +import unittest +from datetime import UTC, datetime, timedelta + +import pytest +import pytz +from croniter import CroniterBadCronError + +from libs.schedule_utils import calculate_next_run_at + + +class TestCronCompatibility(unittest.TestCase): + """Test enhanced cron syntax compatibility with frontend.""" + + def setUp(self): + """Set up test environment with fixed time.""" + self.base_time = datetime(2024, 1, 15, 10, 0, 0, tzinfo=UTC) + + def test_enhanced_dayofweek_syntax(self): + """Test enhanced day-of-week syntax compatibility.""" + test_cases = [ + ("0 9 * * 7", 0), # Sunday as 7 + ("0 9 * * 0", 0), # Sunday as 0 + ("0 9 * * MON", 1), # Monday abbreviation + ("0 9 * * TUE", 2), # Tuesday abbreviation + ("0 9 * * WED", 3), # Wednesday abbreviation + ("0 9 * * THU", 4), # Thursday abbreviation + ("0 9 * * FRI", 5), # Friday abbreviation + ("0 9 * * SAT", 6), # Saturday abbreviation + ("0 9 * * SUN", 0), # Sunday abbreviation + ] + + for expr, expected_weekday in test_cases: + with self.subTest(expr=expr): + next_time = calculate_next_run_at(expr, "UTC", self.base_time) + assert next_time is not None + assert (next_time.weekday() + 1 if next_time.weekday() < 6 else 0) == expected_weekday + assert next_time.hour == 9 + assert next_time.minute == 0 + + def test_enhanced_month_syntax(self): + """Test enhanced month syntax compatibility.""" + test_cases = [ + ("0 9 1 JAN *", 1), # January abbreviation + ("0 9 1 FEB *", 2), # February abbreviation + ("0 9 1 MAR *", 3), # March abbreviation + ("0 9 1 APR *", 4), # April abbreviation + ("0 9 1 MAY *", 5), # May abbreviation + ("0 9 1 JUN *", 6), # June abbreviation + ("0 9 1 JUL *", 7), # July abbreviation + ("0 9 1 AUG *", 8), # August abbreviation + ("0 9 1 SEP *", 9), # September abbreviation + ("0 9 1 OCT *", 10), # October abbreviation + ("0 9 1 NOV *", 11), # November abbreviation + ("0 9 1 DEC *", 12), # December abbreviation + ] + + for expr, expected_month in test_cases: + with self.subTest(expr=expr): + next_time = calculate_next_run_at(expr, "UTC", self.base_time) + assert next_time is not None + assert next_time.month == expected_month + assert next_time.day == 1 + assert next_time.hour == 9 + + def test_predefined_expressions(self): + """Test predefined cron expressions compatibility.""" + test_cases = [ + ("@yearly", lambda dt: dt.month == 1 and dt.day == 1 and dt.hour == 0), + ("@annually", lambda dt: dt.month == 1 and dt.day == 1 and dt.hour == 0), + ("@monthly", lambda dt: dt.day == 1 and dt.hour == 0), + ("@weekly", lambda dt: dt.weekday() == 6 and dt.hour == 0), # Sunday = 6 in weekday() + ("@daily", lambda dt: dt.hour == 0 and dt.minute == 0), + ("@midnight", lambda dt: dt.hour == 0 and dt.minute == 0), + ("@hourly", lambda dt: dt.minute == 0), + ] + + for expr, validator in test_cases: + with self.subTest(expr=expr): + next_time = calculate_next_run_at(expr, "UTC", self.base_time) + assert next_time is not None + assert validator(next_time), f"Validator failed for {expr}: {next_time}" + + def test_special_characters(self): + """Test special characters in cron expressions.""" + test_cases = [ + "0 9 ? * 1", # ? 
wildcard + "0 12 * * 7", # Sunday as 7 + "0 15 L * *", # Last day of month + ] + + for expr in test_cases: + with self.subTest(expr=expr): + try: + next_time = calculate_next_run_at(expr, "UTC", self.base_time) + assert next_time is not None + assert next_time > self.base_time + except Exception as e: + self.fail(f"Expression '{expr}' should be valid but raised: {e}") + + def test_range_and_list_syntax(self): + """Test range and list syntax with abbreviations.""" + test_cases = [ + "0 9 * * MON-FRI", # Weekday range with abbreviations + "0 9 * JAN-MAR *", # Month range with abbreviations + "0 9 * * SUN,WED,FRI", # Weekday list with abbreviations + "0 9 1 JAN,JUN,DEC *", # Month list with abbreviations + ] + + for expr in test_cases: + with self.subTest(expr=expr): + try: + next_time = calculate_next_run_at(expr, "UTC", self.base_time) + assert next_time is not None + assert next_time > self.base_time + except Exception as e: + self.fail(f"Expression '{expr}' should be valid but raised: {e}") + + def test_invalid_enhanced_syntax(self): + """Test that invalid enhanced syntax is properly rejected.""" + invalid_expressions = [ + "0 12 * JANUARY *", # Full month name (not supported) + "0 12 * * MONDAY", # Full day name (not supported) + "0 12 32 JAN *", # Invalid day with valid month + "15 10 1 * 8", # Invalid day of week + "15 10 1 INVALID *", # Invalid month abbreviation + "15 10 1 * INVALID", # Invalid day abbreviation + "@invalid", # Invalid predefined expression + ] + + for expr in invalid_expressions: + with self.subTest(expr=expr): + with pytest.raises((CroniterBadCronError, ValueError)): + calculate_next_run_at(expr, "UTC", self.base_time) + + def test_edge_cases_with_enhanced_syntax(self): + """Test edge cases with enhanced syntax.""" + test_cases = [ + ("0 0 29 FEB *", lambda dt: dt.month == 2 and dt.day == 29), # Feb 29 with month abbreviation + ] + + for expr, validator in test_cases: + with self.subTest(expr=expr): + try: + next_time = calculate_next_run_at(expr, "UTC", self.base_time) + if next_time: # Some combinations might not occur soon + assert validator(next_time), f"Validator failed for {expr}: {next_time}" + except (CroniterBadCronError, ValueError): + # Some edge cases might be valid but not have upcoming occurrences + pass + + # Test complex expressions that have specific constraints + complex_expr = "59 23 31 DEC SAT" # December 31st at 23:59 on Saturday + try: + next_time = calculate_next_run_at(complex_expr, "UTC", self.base_time) + if next_time: + # The next occurrence might not be exactly Dec 31 if it's not a Saturday + # Just verify it's a valid result + assert next_time is not None + assert next_time.hour == 23 + assert next_time.minute == 59 + except Exception: + # Complex date constraints might not have near-future occurrences + pass + + +class TestTimezoneCompatibility(unittest.TestCase): + """Test timezone compatibility between frontend and backend.""" + + def setUp(self): + """Set up test environment.""" + self.base_time = datetime(2024, 1, 15, 10, 0, 0, tzinfo=UTC) + + def test_timezone_consistency(self): + """Test that calculations are consistent across different timezones.""" + timezones = [ + "UTC", + "America/New_York", + "Europe/London", + "Asia/Tokyo", + "Asia/Kolkata", + "Australia/Sydney", + ] + + expression = "0 12 * * *" # Daily at noon + + for timezone in timezones: + with self.subTest(timezone=timezone): + next_time = calculate_next_run_at(expression, timezone, self.base_time) + assert next_time is not None + + # Convert back to the target 
timezone to verify it's noon + tz = pytz.timezone(timezone) + local_time = next_time.astimezone(tz) + assert local_time.hour == 12 + assert local_time.minute == 0 + + def test_dst_handling(self): + """Test DST boundary handling.""" + # Test around DST spring forward (March 2024) + dst_base = datetime(2024, 3, 8, 10, 0, 0, tzinfo=UTC) + expression = "0 2 * * *" # 2 AM daily (problematic during DST) + timezone = "America/New_York" + + try: + next_time = calculate_next_run_at(expression, timezone, dst_base) + assert next_time is not None + + # During DST spring forward, 2 AM becomes 3 AM - both are acceptable + tz = pytz.timezone(timezone) + local_time = next_time.astimezone(tz) + assert local_time.hour in [2, 3] # Either 2 AM or 3 AM is acceptable + except Exception as e: + self.fail(f"DST handling failed: {e}") + + def test_half_hour_timezones(self): + """Test timezones with half-hour offsets.""" + timezones_with_offsets = [ + ("Asia/Kolkata", 17, 30), # UTC+5:30 -> 12:00 UTC = 17:30 IST + ("Australia/Adelaide", 22, 30), # UTC+10:30 -> 12:00 UTC = 22:30 ACDT (summer time) + ] + + expression = "0 12 * * *" # Noon UTC + + for timezone, expected_hour, expected_minute in timezones_with_offsets: + with self.subTest(timezone=timezone): + try: + next_time = calculate_next_run_at(expression, timezone, self.base_time) + assert next_time is not None + + tz = pytz.timezone(timezone) + local_time = next_time.astimezone(tz) + assert local_time.hour == expected_hour + assert local_time.minute == expected_minute + except Exception: + # Some complex timezone calculations might vary + pass + + def test_invalid_timezone_handling(self): + """Test handling of invalid timezones.""" + expression = "0 12 * * *" + invalid_timezone = "Invalid/Timezone" + + with pytest.raises((ValueError, Exception)): # Should raise an exception + calculate_next_run_at(expression, invalid_timezone, self.base_time) + + +class TestFrontendBackendIntegration(unittest.TestCase): + """Test integration patterns that mirror frontend usage.""" + + def setUp(self): + """Set up test environment.""" + self.base_time = datetime(2024, 1, 15, 10, 0, 0, tzinfo=UTC) + + def test_execution_time_calculator_pattern(self): + """Test the pattern used by execution-time-calculator.ts.""" + # This mirrors the exact usage from execution-time-calculator.ts:47 + test_data = { + "cron_expression": "30 14 * * 1-5", # 2:30 PM weekdays + "timezone": "America/New_York" + } + + # Get next 5 execution times (like the frontend does) + execution_times = [] + current_base = self.base_time + + for _ in range(5): + next_time = calculate_next_run_at( + test_data["cron_expression"], + test_data["timezone"], + current_base + ) + assert next_time is not None + execution_times.append(next_time) + current_base = next_time + timedelta(seconds=1) # Move slightly forward + + assert len(execution_times) == 5 + + # Validate each execution time + for exec_time in execution_times: + # Convert to local timezone + tz = pytz.timezone(test_data["timezone"]) + local_time = exec_time.astimezone(tz) + + # Should be weekdays (1-5) + assert local_time.weekday() in [0, 1, 2, 3, 4] # Mon-Fri in Python weekday + + # Should be 2:30 PM in local time + assert local_time.hour == 14 + assert local_time.minute == 30 + assert local_time.second == 0 + + def test_schedule_service_integration(self): + """Test integration with ScheduleService patterns.""" + from core.workflow.nodes.trigger_schedule.entities import VisualConfig + from services.schedule_service import ScheduleService + + # Test enhanced 
syntax through visual config conversion
+        visual_configs = [
+            # Test with month abbreviations
+            {
+                "frequency": "monthly",
+                "config": VisualConfig(time="9:00 AM", monthly_days=[1]),
+                "expected_cron": "0 9 1 * *"
+            },
+            # Test with weekday abbreviations
+            {
+                "frequency": "weekly",
+                "config": VisualConfig(time="2:30 PM", weekdays=["mon", "wed", "fri"]),
+                "expected_cron": "30 14 * * 1,3,5"
+            }
+        ]
+
+        for test_case in visual_configs:
+            with self.subTest(frequency=test_case["frequency"]):
+                cron_expr = ScheduleService.visual_to_cron(
+                    test_case["frequency"],
+                    test_case["config"]
+                )
+                assert cron_expr == test_case["expected_cron"]
+
+                # Verify the generated cron expression is valid
+                next_time = calculate_next_run_at(cron_expr, "UTC", self.base_time)
+                assert next_time is not None
+
+    def test_error_handling_consistency(self):
+        """Test that error handling matches frontend expectations."""
+        invalid_expressions = [
+            "60 10 1 * *",  # Invalid minute
+            "15 25 1 * *",  # Invalid hour
+            "15 10 32 * *",  # Invalid day
+            "15 10 1 13 *",  # Invalid month
+            "15 10 1",  # Too few fields
+            "15 10 1 * * *",  # 6 fields (not supported in frontend)
+            "0 15 10 1 * * *",  # 7 fields (not supported in frontend)
+            "invalid expression",  # Completely invalid
+        ]
+
+        for expr in invalid_expressions:
+            with self.subTest(expr=repr(expr)):
+                with pytest.raises((CroniterBadCronError, ValueError, Exception)):
+                    calculate_next_run_at(expr, "UTC", self.base_time)
+
+        # Note: Empty/whitespace expressions are not tested here as they are
+        # not expected in normal usage due to database constraints (nullable=False)
+
+    def test_performance_requirements(self):
+        """Test that complex expressions parse within reasonable time."""
+        import time
+
+        complex_expressions = [
+            "*/5 9-17 * * 1-5",  # Every 5 minutes, weekdays, business hours
+            "0 */2 1,15 * *",  # Every 2 hours on 1st and 15th
+            "30 14 * * 1,3,5",  # Mon, Wed, Fri at 14:30
+            "15,45 8-18 * * 1-5",  # 15 and 45 minutes past hour, weekdays
+            "0 9 * JAN-MAR MON-FRI",  # Enhanced syntax: Q1 weekdays at 9 AM
+            "0 12 ? * SUN",  # Enhanced syntax: Sundays at noon with ?
+        ]
+
+        start_time = time.time()
+
+        for expr in complex_expressions:
+            with self.subTest(expr=expr):
+                try:
+                    next_time = calculate_next_run_at(expr, "UTC", self.base_time)
+                    assert next_time is not None
+                except CroniterBadCronError:
+                    # Some enhanced syntax might not be supported, that's OK
+                    pass
+
+        end_time = time.time()
+        execution_time = (end_time - start_time) * 1000  # Convert to milliseconds
+
+        # Should complete within reasonable time (less than 150ms like frontend)
+        assert execution_time < 150, "Complex expressions should parse quickly"
+
+
+# timedelta is used by test_execution_time_calculator_pattern above; importing it at
+# module level ensures it is available when the tests are collected by pytest, not
+# only when this file is executed directly.
+from datetime import timedelta  # noqa: E402
+
+if __name__ == "__main__":
+    unittest.main()
\ No newline at end of file
diff --git a/api/tests/unit_tests/libs/test_schedule_utils_enhanced.py b/api/tests/unit_tests/libs/test_schedule_utils_enhanced.py
new file mode 100644
index 0000000000..aefcc83539
--- /dev/null
+++ b/api/tests/unit_tests/libs/test_schedule_utils_enhanced.py
@@ -0,0 +1,410 @@
+"""
+Enhanced schedule_utils tests for new cron syntax support.
+
+These tests verify that the backend schedule_utils functions properly support
+the enhanced cron syntax introduced in the frontend, ensuring full compatibility.
+""" +import unittest +from datetime import UTC, datetime, timedelta + +import pytest +import pytz +from croniter import CroniterBadCronError + +from libs.schedule_utils import calculate_next_run_at, convert_12h_to_24h + + +class TestEnhancedCronSyntax(unittest.TestCase): + """Test enhanced cron syntax in calculate_next_run_at.""" + + def setUp(self): + """Set up test with fixed time.""" + # Monday, January 15, 2024, 10:00 AM UTC + self.base_time = datetime(2024, 1, 15, 10, 0, 0, tzinfo=UTC) + + def test_month_abbreviations(self): + """Test month abbreviations (JAN, FEB, etc.).""" + test_cases = [ + ("0 12 1 JAN *", 1), # January + ("0 12 1 FEB *", 2), # February + ("0 12 1 MAR *", 3), # March + ("0 12 1 APR *", 4), # April + ("0 12 1 MAY *", 5), # May + ("0 12 1 JUN *", 6), # June + ("0 12 1 JUL *", 7), # July + ("0 12 1 AUG *", 8), # August + ("0 12 1 SEP *", 9), # September + ("0 12 1 OCT *", 10), # October + ("0 12 1 NOV *", 11), # November + ("0 12 1 DEC *", 12), # December + ] + + for expr, expected_month in test_cases: + with self.subTest(expr=expr): + result = calculate_next_run_at(expr, "UTC", self.base_time) + assert result is not None, f"Failed to parse: {expr}" + assert result.month == expected_month + assert result.day == 1 + assert result.hour == 12 + assert result.minute == 0 + + def test_weekday_abbreviations(self): + """Test weekday abbreviations (SUN, MON, etc.).""" + test_cases = [ + ("0 9 * * SUN", 6), # Sunday (weekday() = 6) + ("0 9 * * MON", 0), # Monday (weekday() = 0) + ("0 9 * * TUE", 1), # Tuesday + ("0 9 * * WED", 2), # Wednesday + ("0 9 * * THU", 3), # Thursday + ("0 9 * * FRI", 4), # Friday + ("0 9 * * SAT", 5), # Saturday + ] + + for expr, expected_weekday in test_cases: + with self.subTest(expr=expr): + result = calculate_next_run_at(expr, "UTC", self.base_time) + assert result is not None, f"Failed to parse: {expr}" + assert result.weekday() == expected_weekday + assert result.hour == 9 + assert result.minute == 0 + + def test_sunday_dual_representation(self): + """Test Sunday as both 0 and 7.""" + base_time = datetime(2024, 1, 14, 10, 0, 0, tzinfo=UTC) # Sunday + + # Both should give the same next Sunday + result_0 = calculate_next_run_at("0 10 * * 0", "UTC", base_time) + result_7 = calculate_next_run_at("0 10 * * 7", "UTC", base_time) + result_SUN = calculate_next_run_at("0 10 * * SUN", "UTC", base_time) + + assert result_0 is not None + assert result_7 is not None + assert result_SUN is not None + + # All should be Sundays + assert result_0.weekday() == 6 # Sunday = 6 in weekday() + assert result_7.weekday() == 6 + assert result_SUN.weekday() == 6 + + # Times should be identical + assert result_0 == result_7 + assert result_0 == result_SUN + + def test_predefined_expressions(self): + """Test predefined expressions (@daily, @weekly, etc.).""" + test_cases = [ + ("@yearly", lambda dt: dt.month == 1 and dt.day == 1 and dt.hour == 0 and dt.minute == 0), + ("@annually", lambda dt: dt.month == 1 and dt.day == 1 and dt.hour == 0 and dt.minute == 0), + ("@monthly", lambda dt: dt.day == 1 and dt.hour == 0 and dt.minute == 0), + ("@weekly", lambda dt: dt.weekday() == 6 and dt.hour == 0 and dt.minute == 0), # Sunday + ("@daily", lambda dt: dt.hour == 0 and dt.minute == 0), + ("@midnight", lambda dt: dt.hour == 0 and dt.minute == 0), + ("@hourly", lambda dt: dt.minute == 0), + ] + + for expr, validator in test_cases: + with self.subTest(expr=expr): + result = calculate_next_run_at(expr, "UTC", self.base_time) + assert result is not None, f"Failed to parse: {expr}" + 
assert validator(result), f"Validator failed for {expr}: {result}"
+
+    def test_question_mark_wildcard(self):
+        """Test ? wildcard character."""
+        # ? in day position with specific weekday
+        result_question = calculate_next_run_at("0 9 ? * 1", "UTC", self.base_time)  # Monday
+        result_star = calculate_next_run_at("0 9 * * 1", "UTC", self.base_time)  # Monday
+
+        assert result_question is not None
+        assert result_star is not None
+
+        # Both should return Mondays at 9:00
+        assert result_question.weekday() == 0  # Monday
+        assert result_star.weekday() == 0
+        assert result_question.hour == 9
+        assert result_star.hour == 9
+
+        # Results should be identical
+        assert result_question == result_star
+
+    def test_last_day_of_month(self):
+        """Test 'L' for last day of month."""
+        expr = "0 12 L * *"  # Last day of month at noon
+
+        # February 2024 falls in a leap year, so the last day of the month is the 29th
+        feb_base = datetime(2024, 2, 15, 10, 0, 0, tzinfo=UTC)
+        result = calculate_next_run_at(expr, "UTC", feb_base)
+        assert result is not None
+        assert result.month == 2
+        assert result.day == 29  # 2024 is a leap year
+        assert result.hour == 12
+
+    def test_range_with_abbreviations(self):
+        """Test ranges using abbreviations."""
+        test_cases = [
+            "0 9 * * MON-FRI",  # Weekday range
+            "0 12 * JAN-MAR *",  # Q1 months
+            "0 15 * APR-JUN *",  # Q2 months
+        ]
+
+        for expr in test_cases:
+            with self.subTest(expr=expr):
+                result = calculate_next_run_at(expr, "UTC", self.base_time)
+                assert result is not None, f"Failed to parse range expression: {expr}"
+                assert result > self.base_time
+
+    def test_list_with_abbreviations(self):
+        """Test lists using abbreviations."""
+        test_cases = [
+            ("0 9 * * SUN,WED,FRI", [6, 2, 4]),  # Specific weekdays
+            ("0 12 1 JAN,JUN,DEC *", [1, 6, 12]),  # Specific months
+        ]
+
+        for expr, expected_values in test_cases:
+            with self.subTest(expr=expr):
+                result = calculate_next_run_at(expr, "UTC", self.base_time)
+                assert result is not None, f"Failed to parse list expression: {expr}"
+
+                if "* *" in expr:  # Weekday test
+                    assert result.weekday() in expected_values
+                else:  # Month test
+                    assert result.month in expected_values
+
+    def test_mixed_syntax(self):
+        """Test mixed traditional and enhanced syntax."""
+        test_cases = [
+            "30 14 15 JAN,JUN,DEC *",  # Numbers + month abbreviations
+            "0 9 * JAN-MAR MON-FRI",  # Month range + weekday range
+            "45 8 1,15 * MON",  # Numbers + weekday abbreviation
+        ]
+
+        for expr in test_cases:
+            with self.subTest(expr=expr):
+                result = calculate_next_run_at(expr, "UTC", self.base_time)
+                assert result is not None, f"Failed to parse mixed syntax: {expr}"
+                assert result > self.base_time
+
+    def test_complex_enhanced_expressions(self):
+        """Test complex expressions with multiple enhanced features."""
+        # Note: Some of these might not be supported by croniter, that's OK
+        complex_expressions = [
+            "0 9 L JAN *",  # Last day of January
+            "30 14 * * FRI#1",  # First Friday of month (if supported)
+            "0 12 15 JAN-DEC/3 *",  # 15th of every 3rd month (quarterly)
+        ]
+
+        for expr in complex_expressions:
+            with self.subTest(expr=expr):
+                try:
+                    result = calculate_next_run_at(expr, "UTC", self.base_time)
+                    if result:  # If supported, should return valid result
+                        assert result > self.base_time
+                except Exception:
+                    # Some complex expressions might not be supported - that's acceptable
+                    pass
+
+
+class TestTimezoneHandlingEnhanced(unittest.TestCase):
+    """Test timezone handling with enhanced syntax."""
+
+    def setUp(self):
+        """Set up test with fixed time."""
+        self.base_time = 
datetime(2024, 1, 15, 10, 0, 0, tzinfo=UTC) + + def test_enhanced_syntax_with_timezones(self): + """Test enhanced syntax works correctly across timezones.""" + timezones = ["UTC", "America/New_York", "Asia/Tokyo", "Europe/London"] + expression = "0 12 * * MON" # Monday at noon + + for timezone in timezones: + with self.subTest(timezone=timezone): + result = calculate_next_run_at(expression, timezone, self.base_time) + assert result is not None + + # Convert to local timezone to verify it's Monday at noon + tz = pytz.timezone(timezone) + local_time = result.astimezone(tz) + assert local_time.weekday() == 0 # Monday + assert local_time.hour == 12 + assert local_time.minute == 0 + + def test_predefined_expressions_with_timezones(self): + """Test predefined expressions work with different timezones.""" + expression = "@daily" + timezones = ["UTC", "America/New_York", "Asia/Tokyo"] + + for timezone in timezones: + with self.subTest(timezone=timezone): + result = calculate_next_run_at(expression, timezone, self.base_time) + assert result is not None + + # Should be midnight in the specified timezone + tz = pytz.timezone(timezone) + local_time = result.astimezone(tz) + assert local_time.hour == 0 + assert local_time.minute == 0 + + def test_dst_with_enhanced_syntax(self): + """Test DST handling with enhanced syntax.""" + # DST spring forward date in 2024 + dst_base = datetime(2024, 3, 8, 10, 0, 0, tzinfo=UTC) + expression = "0 2 * * SUN" # Sunday at 2 AM (problematic during DST) + timezone = "America/New_York" + + result = calculate_next_run_at(expression, timezone, dst_base) + assert result is not None + + # Should handle DST transition gracefully + tz = pytz.timezone(timezone) + local_time = result.astimezone(tz) + assert local_time.weekday() == 6 # Sunday + + # During DST spring forward, 2 AM might become 3 AM + assert local_time.hour in [2, 3] + + +class TestErrorHandlingEnhanced(unittest.TestCase): + """Test error handling for enhanced syntax.""" + + def setUp(self): + """Set up test with fixed time.""" + self.base_time = datetime(2024, 1, 15, 10, 0, 0, tzinfo=UTC) + + def test_invalid_enhanced_syntax(self): + """Test that invalid enhanced syntax raises appropriate errors.""" + invalid_expressions = [ + "0 12 * JANUARY *", # Full month name + "0 12 * * MONDAY", # Full day name + "0 12 32 JAN *", # Invalid day with valid month + "0 12 * * MON-SUN-FRI", # Invalid range syntax + "0 12 * JAN- *", # Incomplete range + "0 12 * * ,MON", # Invalid list syntax + "@INVALID", # Invalid predefined + ] + + for expr in invalid_expressions: + with self.subTest(expr=expr): + with pytest.raises((CroniterBadCronError, ValueError)): + calculate_next_run_at(expr, "UTC", self.base_time) + + def test_boundary_values_with_enhanced_syntax(self): + """Test boundary values work with enhanced syntax.""" + # Valid boundary expressions + valid_expressions = [ + "0 0 1 JAN *", # Minimum: January 1st midnight + "59 23 31 DEC *", # Maximum: December 31st 23:59 + "0 12 29 FEB *", # Leap year boundary + ] + + for expr in valid_expressions: + with self.subTest(expr=expr): + try: + result = calculate_next_run_at(expr, "UTC", self.base_time) + if result: # Some dates might not occur soon + assert result > self.base_time + except Exception as e: + # Some boundary cases might be complex to calculate + self.fail(f"Valid boundary expression failed: {expr} - {e}") + + +class TestPerformanceEnhanced(unittest.TestCase): + """Test performance with enhanced syntax.""" + + def setUp(self): + """Set up test with fixed time.""" + 
self.base_time = datetime(2024, 1, 15, 10, 0, 0, tzinfo=UTC) + + def test_complex_expression_performance(self): + """Test that complex enhanced expressions parse within reasonable time.""" + import time + + complex_expressions = [ + "*/5 9-17 * * MON-FRI", # Every 5 min, weekdays, business hours + "0 9 * JAN-MAR MON-FRI", # Q1 weekdays at 9 AM + "30 14 1,15 * * ", # 1st and 15th at 14:30 + "0 12 ? * SUN", # Sundays at noon with ? + "@daily", # Predefined expression + ] + + start_time = time.time() + + for expr in complex_expressions: + with self.subTest(expr=expr): + try: + result = calculate_next_run_at(expr, "UTC", self.base_time) + assert result is not None + except Exception: + # Some expressions might not be supported - acceptable + pass + + end_time = time.time() + execution_time = (end_time - start_time) * 1000 # milliseconds + + # Should be fast (less than 100ms for all expressions) + assert execution_time < 100, "Enhanced expressions should parse quickly" + + def test_multiple_calculations_performance(self): + """Test performance when calculating multiple next times.""" + import time + + expression = "0 9 * * MON-FRI" # Weekdays at 9 AM + iterations = 20 + + start_time = time.time() + + current_time = self.base_time + for _ in range(iterations): + result = calculate_next_run_at(expression, "UTC", current_time) + assert result is not None + current_time = result + timedelta(seconds=1) # Move forward slightly + + end_time = time.time() + total_time = (end_time - start_time) * 1000 # milliseconds + avg_time = total_time / iterations + + # Average should be very fast (less than 5ms per calculation) + assert avg_time < 5, f"Average calculation time too slow: {avg_time}ms" + + +class TestRegressionEnhanced(unittest.TestCase): + """Regression tests to ensure enhanced syntax doesn't break existing functionality.""" + + def setUp(self): + """Set up test with fixed time.""" + self.base_time = datetime(2024, 1, 15, 10, 0, 0, tzinfo=UTC) + + def test_traditional_syntax_still_works(self): + """Ensure traditional cron syntax continues to work.""" + traditional_expressions = [ + "15 10 1 * *", # Monthly 1st at 10:15 + "0 0 * * 0", # Weekly Sunday midnight + "*/5 * * * *", # Every 5 minutes + "0 9-17 * * 1-5", # Business hours weekdays + "30 14 * * 1", # Monday 14:30 + "0 0 1,15 * *", # 1st and 15th midnight + ] + + for expr in traditional_expressions: + with self.subTest(expr=expr): + result = calculate_next_run_at(expr, "UTC", self.base_time) + assert result is not None, f"Traditional expression failed: {expr}" + assert result > self.base_time + + def test_convert_12h_to_24h_unchanged(self): + """Ensure convert_12h_to_24h function is unchanged.""" + test_cases = [ + ("12:00 AM", (0, 0)), # Midnight + ("12:00 PM", (12, 0)), # Noon + ("1:30 AM", (1, 30)), # Early morning + ("11:45 PM", (23, 45)), # Late evening + ("6:15 AM", (6, 15)), # Morning + ("3:30 PM", (15, 30)), # Afternoon + ] + + for time_str, expected in test_cases: + with self.subTest(time_str=time_str): + result = convert_12h_to_24h(time_str) + assert result == expected, f"12h conversion failed: {time_str}" + + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/api/tests/unit_tests/services/test_schedule_service.py b/api/tests/unit_tests/services/test_schedule_service.py new file mode 100644 index 0000000000..c342af7a26 --- /dev/null +++ b/api/tests/unit_tests/services/test_schedule_service.py @@ -0,0 +1,780 @@ +import unittest +from datetime import UTC, datetime +from unittest.mock import 
MagicMock, Mock, patch + +import pytest +from sqlalchemy.orm import Session + +from core.workflow.nodes.trigger_schedule.entities import ScheduleConfig, SchedulePlanUpdate, VisualConfig +from core.workflow.nodes.trigger_schedule.exc import ScheduleConfigError +from events.event_handlers.sync_workflow_schedule_when_app_published import ( + sync_schedule_from_workflow, +) +from libs.schedule_utils import calculate_next_run_at, convert_12h_to_24h +from models.account import Account, TenantAccountJoin +from models.workflow import Workflow, WorkflowSchedulePlan +from services.schedule_service import ScheduleService + + +class TestScheduleService(unittest.TestCase): + """Test cases for ScheduleService class.""" + + def test_calculate_next_run_at_valid_cron(self): + """Test calculating next run time with valid cron expression.""" + # Test daily cron at 10:30 AM + cron_expr = "30 10 * * *" + timezone = "UTC" + base_time = datetime(2025, 8, 29, 9, 0, 0, tzinfo=UTC) + + next_run = calculate_next_run_at(cron_expr, timezone, base_time) + + assert next_run is not None + assert next_run.hour == 10 + assert next_run.minute == 30 + assert next_run.day == 29 + + def test_calculate_next_run_at_with_timezone(self): + """Test calculating next run time with different timezone.""" + cron_expr = "0 9 * * *" # 9:00 AM + timezone = "America/New_York" + base_time = datetime(2025, 8, 29, 12, 0, 0, tzinfo=UTC) # 8:00 AM EDT + + next_run = calculate_next_run_at(cron_expr, timezone, base_time) + + assert next_run is not None + # 9:00 AM EDT = 13:00 UTC (during EDT) + expected_utc_hour = 13 + assert next_run.hour == expected_utc_hour + + def test_calculate_next_run_at_with_last_day_of_month(self): + """Test calculating next run time with 'L' (last day) syntax.""" + cron_expr = "0 10 L * *" # 10:00 AM on last day of month + timezone = "UTC" + base_time = datetime(2025, 2, 15, 9, 0, 0, tzinfo=UTC) + + next_run = calculate_next_run_at(cron_expr, timezone, base_time) + + assert next_run is not None + # February 2025 has 28 days + assert next_run.day == 28 + assert next_run.month == 2 + + def test_calculate_next_run_at_invalid_cron(self): + """Test calculating next run time with invalid cron expression.""" + from croniter import CroniterBadCronError + + cron_expr = "invalid cron" + timezone = "UTC" + + with pytest.raises(CroniterBadCronError): + calculate_next_run_at(cron_expr, timezone) + + def test_calculate_next_run_at_invalid_timezone(self): + """Test calculating next run time with invalid timezone.""" + from pytz import UnknownTimeZoneError + + cron_expr = "30 10 * * *" + timezone = "Invalid/Timezone" + + with pytest.raises(UnknownTimeZoneError): + calculate_next_run_at(cron_expr, timezone) + + @patch("libs.schedule_utils.calculate_next_run_at") + def test_create_schedule(self, mock_calculate_next_run): + """Test creating a new schedule.""" + mock_session = MagicMock(spec=Session) + mock_calculate_next_run.return_value = datetime(2025, 8, 30, 10, 30, 0, tzinfo=UTC) + + config = ScheduleConfig( + node_id="start", + cron_expression="30 10 * * *", + timezone="UTC", + ) + + schedule = ScheduleService.create_schedule( + session=mock_session, + tenant_id="test-tenant", + app_id="test-app", + config=config, + ) + + assert schedule is not None + assert schedule.tenant_id == "test-tenant" + assert schedule.app_id == "test-app" + assert schedule.node_id == "start" + assert schedule.cron_expression == "30 10 * * *" + assert schedule.timezone == "UTC" + assert schedule.next_run_at is not None + mock_session.add.assert_called_once() 
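+        # The two assertions around this point capture the assumed contract of
+        # create_schedule: it stages the new plan on the session (add) and flushes it so
+        # generated fields such as the id are available to the caller, while committing
+        # is presumably left to the calling service or event handler.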
+ mock_session.flush.assert_called_once() + + @patch("services.schedule_service.calculate_next_run_at") + def test_update_schedule(self, mock_calculate_next_run): + """Test updating an existing schedule.""" + mock_session = MagicMock(spec=Session) + mock_schedule = Mock(spec=WorkflowSchedulePlan) + mock_schedule.cron_expression = "0 12 * * *" + mock_schedule.timezone = "America/New_York" + mock_session.get.return_value = mock_schedule + mock_calculate_next_run.return_value = datetime(2025, 8, 30, 12, 0, 0, tzinfo=UTC) + + updates = SchedulePlanUpdate( + cron_expression="0 12 * * *", + timezone="America/New_York", + ) + + result = ScheduleService.update_schedule( + session=mock_session, + schedule_id="test-schedule-id", + updates=updates, + ) + + assert result is not None + assert result.cron_expression == "0 12 * * *" + assert result.timezone == "America/New_York" + mock_calculate_next_run.assert_called_once() + mock_session.flush.assert_called_once() + + def test_update_schedule_not_found(self): + """Test updating a non-existent schedule raises exception.""" + from core.workflow.nodes.trigger_schedule.exc import ScheduleNotFoundError + + mock_session = MagicMock(spec=Session) + mock_session.get.return_value = None + + updates = SchedulePlanUpdate( + cron_expression="0 12 * * *", + ) + + with pytest.raises(ScheduleNotFoundError) as context: + ScheduleService.update_schedule( + session=mock_session, + schedule_id="non-existent-id", + updates=updates, + ) + + assert "Schedule not found: non-existent-id" in str(context.value) + mock_session.flush.assert_not_called() + + def test_delete_schedule(self): + """Test deleting a schedule.""" + mock_session = MagicMock(spec=Session) + mock_schedule = Mock(spec=WorkflowSchedulePlan) + mock_session.get.return_value = mock_schedule + + # Should not raise exception and complete successfully + ScheduleService.delete_schedule( + session=mock_session, + schedule_id="test-schedule-id", + ) + + mock_session.delete.assert_called_once_with(mock_schedule) + mock_session.flush.assert_called_once() + + def test_delete_schedule_not_found(self): + """Test deleting a non-existent schedule raises exception.""" + from core.workflow.nodes.trigger_schedule.exc import ScheduleNotFoundError + + mock_session = MagicMock(spec=Session) + mock_session.get.return_value = None + + # Should raise ScheduleNotFoundError + with pytest.raises(ScheduleNotFoundError) as context: + ScheduleService.delete_schedule( + session=mock_session, + schedule_id="non-existent-id", + ) + + assert "Schedule not found: non-existent-id" in str(context.value) + mock_session.delete.assert_not_called() + + @patch("services.schedule_service.select") + def test_get_tenant_owner(self, mock_select): + """Test getting tenant owner account.""" + mock_session = MagicMock(spec=Session) + mock_account = Mock(spec=Account) + mock_account.id = "owner-account-id" + + # Mock owner query + mock_owner_result = Mock(spec=TenantAccountJoin) + mock_owner_result.account_id = "owner-account-id" + + mock_session.execute.return_value.scalar_one_or_none.return_value = mock_owner_result + mock_session.get.return_value = mock_account + + result = ScheduleService.get_tenant_owner( + session=mock_session, + tenant_id="test-tenant", + ) + + assert result is not None + assert result.id == "owner-account-id" + + @patch("services.schedule_service.select") + def test_get_tenant_owner_fallback_to_admin(self, mock_select): + """Test getting tenant owner falls back to admin if no owner.""" + mock_session = MagicMock(spec=Session) + 
mock_account = Mock(spec=Account) + mock_account.id = "admin-account-id" + + # Mock admin query (owner returns None) + mock_admin_result = Mock(spec=TenantAccountJoin) + mock_admin_result.account_id = "admin-account-id" + + mock_session.execute.return_value.scalar_one_or_none.side_effect = [None, mock_admin_result] + mock_session.get.return_value = mock_account + + result = ScheduleService.get_tenant_owner( + session=mock_session, + tenant_id="test-tenant", + ) + + assert result is not None + assert result.id == "admin-account-id" + + @patch("services.schedule_service.calculate_next_run_at") + def test_update_next_run_at(self, mock_calculate_next_run): + """Test updating next run time after schedule triggered.""" + mock_session = MagicMock(spec=Session) + mock_schedule = Mock(spec=WorkflowSchedulePlan) + mock_schedule.cron_expression = "30 10 * * *" + mock_schedule.timezone = "UTC" + mock_session.get.return_value = mock_schedule + + next_time = datetime(2025, 8, 31, 10, 30, 0, tzinfo=UTC) + mock_calculate_next_run.return_value = next_time + + result = ScheduleService.update_next_run_at( + session=mock_session, + schedule_id="test-schedule-id", + ) + + assert result == next_time + assert mock_schedule.next_run_at == next_time + mock_session.flush.assert_called_once() + + +class TestVisualToCron(unittest.TestCase): + """Test cases for visual configuration to cron conversion.""" + + def test_visual_to_cron_hourly(self): + """Test converting hourly visual config to cron.""" + visual_config = VisualConfig(on_minute=15) + result = ScheduleService.visual_to_cron("hourly", visual_config) + assert result == "15 * * * *" + + def test_visual_to_cron_daily(self): + """Test converting daily visual config to cron.""" + visual_config = VisualConfig(time="2:30 PM") + result = ScheduleService.visual_to_cron("daily", visual_config) + assert result == "30 14 * * *" + + def test_visual_to_cron_weekly(self): + """Test converting weekly visual config to cron.""" + visual_config = VisualConfig( + time="10:00 AM", + weekdays=["mon", "wed", "fri"], + ) + result = ScheduleService.visual_to_cron("weekly", visual_config) + assert result == "0 10 * * 1,3,5" + + def test_visual_to_cron_monthly_with_specific_days(self): + """Test converting monthly visual config with specific days.""" + visual_config = VisualConfig( + time="11:30 AM", + monthly_days=[1, 15], + ) + result = ScheduleService.visual_to_cron("monthly", visual_config) + assert result == "30 11 1,15 * *" + + def test_visual_to_cron_monthly_with_last_day(self): + """Test converting monthly visual config with last day using 'L' syntax.""" + visual_config = VisualConfig( + time="11:30 AM", + monthly_days=[1, "last"], + ) + result = ScheduleService.visual_to_cron("monthly", visual_config) + assert result == "30 11 1,L * *" + + def test_visual_to_cron_monthly_only_last_day(self): + """Test converting monthly visual config with only last day.""" + visual_config = VisualConfig( + time="9:00 PM", + monthly_days=["last"], + ) + result = ScheduleService.visual_to_cron("monthly", visual_config) + assert result == "0 21 L * *" + + def test_visual_to_cron_monthly_with_end_days_and_last(self): + """Test converting monthly visual config with days 29, 30, 31 and 'last'.""" + visual_config = VisualConfig( + time="3:45 PM", + monthly_days=[29, 30, 31, "last"], + ) + result = ScheduleService.visual_to_cron("monthly", visual_config) + # Should have 29,30,31,L - the L handles all possible last days + assert result == "45 15 29,30,31,L * *" + + def 
test_visual_to_cron_invalid_frequency(self): + """Test converting with invalid frequency.""" + with pytest.raises(ScheduleConfigError, match="Unsupported frequency: invalid"): + ScheduleService.visual_to_cron("invalid", VisualConfig()) + + def test_visual_to_cron_weekly_no_weekdays(self): + """Test converting weekly with no weekdays specified.""" + visual_config = VisualConfig(time="10:00 AM") + with pytest.raises(ScheduleConfigError, match="Weekdays are required for weekly schedules"): + ScheduleService.visual_to_cron("weekly", visual_config) + + def test_visual_to_cron_hourly_no_minute(self): + """Test converting hourly with no on_minute specified.""" + visual_config = VisualConfig() # on_minute defaults to 0 + result = ScheduleService.visual_to_cron("hourly", visual_config) + assert result == "0 * * * *" # Should use default value 0 + + def test_visual_to_cron_daily_no_time(self): + """Test converting daily with no time specified.""" + visual_config = VisualConfig(time=None) + with pytest.raises(ScheduleConfigError, match="time is required for daily schedules"): + ScheduleService.visual_to_cron("daily", visual_config) + + def test_visual_to_cron_weekly_no_time(self): + """Test converting weekly with no time specified.""" + visual_config = VisualConfig(weekdays=["mon"]) + visual_config.time = None # Override default + with pytest.raises(ScheduleConfigError, match="time is required for weekly schedules"): + ScheduleService.visual_to_cron("weekly", visual_config) + + def test_visual_to_cron_monthly_no_time(self): + """Test converting monthly with no time specified.""" + visual_config = VisualConfig(monthly_days=[1]) + visual_config.time = None # Override default + with pytest.raises(ScheduleConfigError, match="time is required for monthly schedules"): + ScheduleService.visual_to_cron("monthly", visual_config) + + def test_visual_to_cron_monthly_duplicate_days(self): + """Test monthly with duplicate days should be deduplicated.""" + visual_config = VisualConfig( + time="10:00 AM", + monthly_days=[1, 15, 1, 15, 31], # Duplicates + ) + result = ScheduleService.visual_to_cron("monthly", visual_config) + assert result == "0 10 1,15,31 * *" # Should be deduplicated + + def test_visual_to_cron_monthly_unsorted_days(self): + """Test monthly with unsorted days should be sorted.""" + visual_config = VisualConfig( + time="2:30 PM", + monthly_days=[20, 5, 15, 1, 10], # Unsorted + ) + result = ScheduleService.visual_to_cron("monthly", visual_config) + assert result == "30 14 1,5,10,15,20 * *" # Should be sorted + + def test_visual_to_cron_weekly_all_weekdays(self): + """Test weekly with all weekdays.""" + visual_config = VisualConfig( + time="8:00 AM", + weekdays=["sun", "mon", "tue", "wed", "thu", "fri", "sat"], + ) + result = ScheduleService.visual_to_cron("weekly", visual_config) + assert result == "0 8 * * 0,1,2,3,4,5,6" + + def test_visual_to_cron_hourly_boundary_values(self): + """Test hourly with boundary minute values.""" + # Minimum value + visual_config = VisualConfig(on_minute=0) + result = ScheduleService.visual_to_cron("hourly", visual_config) + assert result == "0 * * * *" + + # Maximum value + visual_config = VisualConfig(on_minute=59) + result = ScheduleService.visual_to_cron("hourly", visual_config) + assert result == "59 * * * *" + + def test_visual_to_cron_daily_midnight_noon(self): + """Test daily at special times (midnight and noon).""" + # Midnight + visual_config = VisualConfig(time="12:00 AM") + result = ScheduleService.visual_to_cron("daily", visual_config) + assert result == 
"0 0 * * *" + + # Noon + visual_config = VisualConfig(time="12:00 PM") + result = ScheduleService.visual_to_cron("daily", visual_config) + assert result == "0 12 * * *" + + def test_visual_to_cron_monthly_mixed_with_last_and_duplicates(self): + """Test monthly with mixed days, 'last', and duplicates.""" + visual_config = VisualConfig( + time="11:45 PM", + monthly_days=[15, 1, "last", 15, 30, 1, "last"], # Mixed with duplicates + ) + result = ScheduleService.visual_to_cron("monthly", visual_config) + assert result == "45 23 1,15,30,L * *" # Deduplicated and sorted with L at end + + def test_visual_to_cron_weekly_single_day(self): + """Test weekly with single weekday.""" + visual_config = VisualConfig( + time="6:30 PM", + weekdays=["sun"], + ) + result = ScheduleService.visual_to_cron("weekly", visual_config) + assert result == "30 18 * * 0" + + def test_visual_to_cron_monthly_all_possible_days(self): + """Test monthly with all 31 days plus 'last'.""" + all_days = list(range(1, 32)) + ["last"] + visual_config = VisualConfig( + time="12:01 AM", + monthly_days=all_days, + ) + result = ScheduleService.visual_to_cron("monthly", visual_config) + expected_days = ",".join([str(i) for i in range(1, 32)]) + ",L" + assert result == f"1 0 {expected_days} * *" + + def test_visual_to_cron_monthly_no_days(self): + """Test monthly without any days specified should raise error.""" + visual_config = VisualConfig(time="10:00 AM", monthly_days=[]) + with pytest.raises(ScheduleConfigError, match="Monthly days are required for monthly schedules"): + ScheduleService.visual_to_cron("monthly", visual_config) + + def test_visual_to_cron_weekly_empty_weekdays_list(self): + """Test weekly with empty weekdays list should raise error.""" + visual_config = VisualConfig(time="10:00 AM", weekdays=[]) + with pytest.raises(ScheduleConfigError, match="Weekdays are required for weekly schedules"): + ScheduleService.visual_to_cron("weekly", visual_config) + + +class TestParseTime(unittest.TestCase): + """Test cases for time parsing function.""" + + def test_parse_time_am(self): + """Test parsing AM time.""" + hour, minute = convert_12h_to_24h("9:30 AM") + assert hour == 9 + assert minute == 30 + + def test_parse_time_pm(self): + """Test parsing PM time.""" + hour, minute = convert_12h_to_24h("2:45 PM") + assert hour == 14 + assert minute == 45 + + def test_parse_time_noon(self): + """Test parsing 12:00 PM (noon).""" + hour, minute = convert_12h_to_24h("12:00 PM") + assert hour == 12 + assert minute == 0 + + def test_parse_time_midnight(self): + """Test parsing 12:00 AM (midnight).""" + hour, minute = convert_12h_to_24h("12:00 AM") + assert hour == 0 + assert minute == 0 + + def test_parse_time_invalid_format(self): + """Test parsing invalid time format.""" + with pytest.raises(ValueError, match="Invalid time format"): + convert_12h_to_24h("25:00") + + def test_parse_time_invalid_hour(self): + """Test parsing invalid hour.""" + with pytest.raises(ValueError, match="Invalid hour: 13"): + convert_12h_to_24h("13:00 PM") + + def test_parse_time_invalid_minute(self): + """Test parsing invalid minute.""" + with pytest.raises(ValueError, match="Invalid minute: 60"): + convert_12h_to_24h("10:60 AM") + + def test_parse_time_empty_string(self): + """Test parsing empty string.""" + with pytest.raises(ValueError, match="Time string cannot be empty"): + convert_12h_to_24h("") + + def test_parse_time_invalid_period(self): + """Test parsing invalid period.""" + with pytest.raises(ValueError, match="Invalid period"): + convert_12h_to_24h("10:30 
XM") + + +class TestExtractScheduleConfig(unittest.TestCase): + """Test cases for extracting schedule configuration from workflow.""" + + def test_extract_schedule_config_with_cron_mode(self): + """Test extracting schedule config in cron mode.""" + workflow = Mock(spec=Workflow) + workflow.graph_dict = { + "nodes": [ + { + "id": "schedule-node", + "data": { + "type": "trigger-schedule", + "mode": "cron", + "cron_expression": "0 10 * * *", + "timezone": "America/New_York", + }, + } + ] + } + + config = ScheduleService.extract_schedule_config(workflow) + + assert config is not None + assert config.node_id == "schedule-node" + assert config.cron_expression == "0 10 * * *" + assert config.timezone == "America/New_York" + + def test_extract_schedule_config_with_visual_mode(self): + """Test extracting schedule config in visual mode.""" + workflow = Mock(spec=Workflow) + workflow.graph_dict = { + "nodes": [ + { + "id": "schedule-node", + "data": { + "type": "trigger-schedule", + "mode": "visual", + "frequency": "daily", + "visual_config": {"time": "10:30 AM"}, + "timezone": "UTC", + }, + } + ] + } + + config = ScheduleService.extract_schedule_config(workflow) + + assert config is not None + assert config.node_id == "schedule-node" + assert config.cron_expression == "30 10 * * *" + assert config.timezone == "UTC" + + def test_extract_schedule_config_no_schedule_node(self): + """Test extracting config when no schedule node exists.""" + workflow = Mock(spec=Workflow) + workflow.graph_dict = { + "nodes": [ + { + "id": "other-node", + "data": {"type": "llm"}, + } + ] + } + + config = ScheduleService.extract_schedule_config(workflow) + assert config is None + + def test_extract_schedule_config_invalid_graph(self): + """Test extracting config with invalid graph data.""" + workflow = Mock(spec=Workflow) + workflow.graph_dict = None + + with pytest.raises(ScheduleConfigError, match="Workflow graph is empty"): + ScheduleService.extract_schedule_config(workflow) + + +class TestScheduleWithTimezone(unittest.TestCase): + """Test cases for schedule with timezone handling.""" + + def test_visual_schedule_with_timezone_integration(self): + """Test complete flow: visual config → cron → execution in different timezones. + + This test verifies that when a user in Shanghai sets a schedule for 10:30 AM, + it runs at 10:30 AM Shanghai time, not 10:30 AM UTC. + """ + # User in Shanghai wants to run a task at 10:30 AM local time + visual_config = VisualConfig( + time="10:30 AM", # This is Shanghai time + monthly_days=[1], + ) + + # Convert to cron expression + cron_expr = ScheduleService.visual_to_cron("monthly", visual_config) + assert cron_expr is not None + + assert cron_expr == "30 10 1 * *" # Direct conversion + + # Now test execution with Shanghai timezone + shanghai_tz = "Asia/Shanghai" + # Base time: 2025-01-01 00:00:00 UTC (08:00:00 Shanghai) + base_time = datetime(2025, 1, 1, 0, 0, 0, tzinfo=UTC) + + next_run = calculate_next_run_at(cron_expr, shanghai_tz, base_time) + + assert next_run is not None + + # Should run at 10:30 AM Shanghai time on Jan 1 + # 10:30 AM Shanghai = 02:30 AM UTC (Shanghai is UTC+8) + assert next_run.year == 2025 + assert next_run.month == 1 + assert next_run.day == 1 + assert next_run.hour == 2 # 02:30 UTC + assert next_run.minute == 30 + + def test_visual_schedule_different_timezones_same_local_time(self): + """Test that same visual config in different timezones runs at different UTC times. 
+ + This verifies that a schedule set for "9:00 AM" runs at 9 AM local time + regardless of the timezone. + """ + visual_config = VisualConfig( + time="9:00 AM", + weekdays=["mon"], + ) + + cron_expr = ScheduleService.visual_to_cron("weekly", visual_config) + assert cron_expr is not None + assert cron_expr == "0 9 * * 1" + + # Base time: Sunday 2025-01-05 12:00:00 UTC + base_time = datetime(2025, 1, 5, 12, 0, 0, tzinfo=UTC) + + # Test New York (UTC-5 in January) + ny_next = calculate_next_run_at(cron_expr, "America/New_York", base_time) + assert ny_next is not None + # Monday 9 AM EST = Monday 14:00 UTC + assert ny_next.day == 6 + assert ny_next.hour == 14 # 9 AM EST = 2 PM UTC + + # Test Tokyo (UTC+9) + tokyo_next = calculate_next_run_at(cron_expr, "Asia/Tokyo", base_time) + assert tokyo_next is not None + # Monday 9 AM JST = Monday 00:00 UTC + assert tokyo_next.day == 6 + assert tokyo_next.hour == 0 # 9 AM JST = 0 AM UTC + + def test_visual_schedule_daily_across_dst_change(self): + """Test that daily schedules adjust correctly during DST changes. + + A schedule set for "10:00 AM" should always run at 10 AM local time, + even when DST changes. + """ + visual_config = VisualConfig( + time="10:00 AM", + ) + + cron_expr = ScheduleService.visual_to_cron("daily", visual_config) + assert cron_expr is not None + + assert cron_expr == "0 10 * * *" + + # Test before DST (EST - UTC-5) + winter_base = datetime(2025, 2, 1, 0, 0, 0, tzinfo=UTC) + winter_next = calculate_next_run_at(cron_expr, "America/New_York", winter_base) + assert winter_next is not None + # 10 AM EST = 15:00 UTC + assert winter_next.hour == 15 + + # Test during DST (EDT - UTC-4) + summer_base = datetime(2025, 6, 1, 0, 0, 0, tzinfo=UTC) + summer_next = calculate_next_run_at(cron_expr, "America/New_York", summer_base) + assert summer_next is not None + # 10 AM EDT = 14:00 UTC + assert summer_next.hour == 14 + + +class TestSyncScheduleFromWorkflow(unittest.TestCase): + """Test cases for syncing schedule from workflow.""" + + @patch("events.event_handlers.sync_workflow_schedule_when_app_published.db") + @patch("events.event_handlers.sync_workflow_schedule_when_app_published.ScheduleService") + @patch("events.event_handlers.sync_workflow_schedule_when_app_published.select") + def test_sync_schedule_create_new(self, mock_select, mock_service, mock_db): + """Test creating new schedule when none exists.""" + mock_session = MagicMock() + mock_db.engine = MagicMock() + mock_session.__enter__ = MagicMock(return_value=mock_session) + mock_session.__exit__ = MagicMock(return_value=None) + Session = MagicMock(return_value=mock_session) + with patch("events.event_handlers.sync_workflow_schedule_when_app_published.Session", Session): + mock_session.scalar.return_value = None # No existing plan + + # Mock extract_schedule_config to return a ScheduleConfig object + mock_config = Mock(spec=ScheduleConfig) + mock_config.node_id = "start" + mock_config.cron_expression = "30 10 * * *" + mock_config.timezone = "UTC" + mock_service.extract_schedule_config.return_value = mock_config + + mock_new_plan = Mock(spec=WorkflowSchedulePlan) + mock_service.create_schedule.return_value = mock_new_plan + + workflow = Mock(spec=Workflow) + result = sync_schedule_from_workflow("tenant-id", "app-id", workflow) + + assert result == mock_new_plan + mock_service.create_schedule.assert_called_once() + mock_session.commit.assert_called_once() + + @patch("events.event_handlers.sync_workflow_schedule_when_app_published.db") + 
@patch("events.event_handlers.sync_workflow_schedule_when_app_published.ScheduleService") + @patch("events.event_handlers.sync_workflow_schedule_when_app_published.select") + def test_sync_schedule_update_existing(self, mock_select, mock_service, mock_db): + """Test updating existing schedule.""" + mock_session = MagicMock() + mock_db.engine = MagicMock() + mock_session.__enter__ = MagicMock(return_value=mock_session) + mock_session.__exit__ = MagicMock(return_value=None) + Session = MagicMock(return_value=mock_session) + + with patch("events.event_handlers.sync_workflow_schedule_when_app_published.Session", Session): + mock_existing_plan = Mock(spec=WorkflowSchedulePlan) + mock_existing_plan.id = "existing-plan-id" + mock_session.scalar.return_value = mock_existing_plan + + # Mock extract_schedule_config to return a ScheduleConfig object + mock_config = Mock(spec=ScheduleConfig) + mock_config.node_id = "start" + mock_config.cron_expression = "0 12 * * *" + mock_config.timezone = "America/New_York" + mock_service.extract_schedule_config.return_value = mock_config + + mock_updated_plan = Mock(spec=WorkflowSchedulePlan) + mock_service.update_schedule.return_value = mock_updated_plan + + workflow = Mock(spec=Workflow) + result = sync_schedule_from_workflow("tenant-id", "app-id", workflow) + + assert result == mock_updated_plan + mock_service.update_schedule.assert_called_once() + # Verify the arguments passed to update_schedule + call_args = mock_service.update_schedule.call_args + assert call_args.kwargs["session"] == mock_session + assert call_args.kwargs["schedule_id"] == "existing-plan-id" + updates_obj = call_args.kwargs["updates"] + assert isinstance(updates_obj, SchedulePlanUpdate) + assert updates_obj.node_id == "start" + assert updates_obj.cron_expression == "0 12 * * *" + assert updates_obj.timezone == "America/New_York" + mock_session.commit.assert_called_once() + + @patch("events.event_handlers.sync_workflow_schedule_when_app_published.db") + @patch("events.event_handlers.sync_workflow_schedule_when_app_published.ScheduleService") + @patch("events.event_handlers.sync_workflow_schedule_when_app_published.select") + def test_sync_schedule_remove_when_no_config(self, mock_select, mock_service, mock_db): + """Test removing schedule when no schedule config in workflow.""" + mock_session = MagicMock() + mock_db.engine = MagicMock() + mock_session.__enter__ = MagicMock(return_value=mock_session) + mock_session.__exit__ = MagicMock(return_value=None) + Session = MagicMock(return_value=mock_session) + + with patch("events.event_handlers.sync_workflow_schedule_when_app_published.Session", Session): + mock_existing_plan = Mock(spec=WorkflowSchedulePlan) + mock_existing_plan.id = "existing-plan-id" + mock_session.scalar.return_value = mock_existing_plan + + mock_service.extract_schedule_config.return_value = None # No schedule config + + workflow = Mock(spec=Workflow) + result = sync_schedule_from_workflow("tenant-id", "app-id", workflow) + + assert result is None + # Now using ScheduleService.delete_schedule instead of session.delete + mock_service.delete_schedule.assert_called_once_with(session=mock_session, schedule_id="existing-plan-id") + mock_session.commit.assert_called_once() + + +if __name__ == "__main__": + unittest.main() diff --git a/api/tests/unit_tests/services/test_webhook_service.py b/api/tests/unit_tests/services/test_webhook_service.py new file mode 100644 index 0000000000..dbb9092ac5 --- /dev/null +++ b/api/tests/unit_tests/services/test_webhook_service.py @@ -0,0 +1,640 @@ 
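+# Quick map of the WebhookService surface exercised by these tests (summarised from the
+# assertions below; the authoritative signatures live in services/webhook_service.py):
+#   extract_webhook_data(trigger)             -> dict with method/headers/query_params/body/files
+#   validate_webhook_request(data, node_cfg)  -> {"valid": bool} plus an "error" message when invalid
+#   generate_webhook_response(node_cfg)       -> (response_data, status_code)
+#   _process_file_uploads(files, trigger)     -> mapping of field name to built file object
+#   _validate_json_parameter_type(name, value, expected_type) -> {"valid": bool, "error": ...}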
+from io import BytesIO +from unittest.mock import MagicMock, patch + +from flask import Flask +from werkzeug.datastructures import FileStorage + +from services.webhook_service import WebhookService + + +class TestWebhookServiceUnit: + """Unit tests for WebhookService focusing on business logic without database dependencies.""" + + def test_extract_webhook_data_json(self): + """Test webhook data extraction from JSON request.""" + app = Flask(__name__) + + with app.test_request_context( + "/webhook", + method="POST", + headers={"Content-Type": "application/json", "Authorization": "Bearer token"}, + query_string="version=1&format=json", + json={"message": "hello", "count": 42}, + ): + webhook_trigger = MagicMock() + webhook_data = WebhookService.extract_webhook_data(webhook_trigger) + + assert webhook_data["method"] == "POST" + assert webhook_data["headers"]["Authorization"] == "Bearer token" + assert webhook_data["query_params"]["version"] == "1" + assert webhook_data["query_params"]["format"] == "json" + assert webhook_data["body"]["message"] == "hello" + assert webhook_data["body"]["count"] == 42 + assert webhook_data["files"] == {} + + def test_extract_webhook_data_form_urlencoded(self): + """Test webhook data extraction from form URL encoded request.""" + app = Flask(__name__) + + with app.test_request_context( + "/webhook", + method="POST", + headers={"Content-Type": "application/x-www-form-urlencoded"}, + data={"username": "test", "password": "secret"}, + ): + webhook_trigger = MagicMock() + webhook_data = WebhookService.extract_webhook_data(webhook_trigger) + + assert webhook_data["method"] == "POST" + assert webhook_data["body"]["username"] == "test" + assert webhook_data["body"]["password"] == "secret" + + def test_extract_webhook_data_multipart_with_files(self): + """Test webhook data extraction from multipart form with files.""" + app = Flask(__name__) + + # Create a mock file + file_content = b"test file content" + file_storage = FileStorage(stream=BytesIO(file_content), filename="test.txt", content_type="text/plain") + + with app.test_request_context( + "/webhook", + method="POST", + headers={"Content-Type": "multipart/form-data"}, + data={"message": "test", "upload": file_storage}, + ): + webhook_trigger = MagicMock() + webhook_trigger.tenant_id = "test_tenant" + + with patch.object(WebhookService, "_process_file_uploads") as mock_process_files: + mock_process_files.return_value = {"upload": "mocked_file_obj"} + + webhook_data = WebhookService.extract_webhook_data(webhook_trigger) + + assert webhook_data["method"] == "POST" + assert webhook_data["body"]["message"] == "test" + assert webhook_data["files"]["upload"] == "mocked_file_obj" + mock_process_files.assert_called_once() + + def test_extract_webhook_data_raw_text(self): + """Test webhook data extraction from raw text request.""" + app = Flask(__name__) + + with app.test_request_context( + "/webhook", method="POST", headers={"Content-Type": "text/plain"}, data="raw text content" + ): + webhook_trigger = MagicMock() + webhook_data = WebhookService.extract_webhook_data(webhook_trigger) + + assert webhook_data["method"] == "POST" + assert webhook_data["body"]["raw"] == "raw text content" + + def test_extract_webhook_data_invalid_json(self): + """Test webhook data extraction with invalid JSON.""" + app = Flask(__name__) + + with app.test_request_context( + "/webhook", method="POST", headers={"Content-Type": "application/json"}, data="invalid json" + ): + webhook_trigger = MagicMock() + webhook_data = 
WebhookService.extract_webhook_data(webhook_trigger) + + assert webhook_data["method"] == "POST" + assert webhook_data["body"] == {} # Should default to empty dict + + def test_validate_webhook_request_success(self): + """Test successful webhook request validation.""" + webhook_data = { + "method": "POST", + "headers": {"Authorization": "Bearer token", "Content-Type": "application/json"}, + "query_params": {"version": "1"}, + "body": {"message": "hello"}, + "files": {}, + } + + node_config = { + "data": { + "method": "post", + "headers": [{"name": "Authorization", "required": True}, {"name": "Content-Type", "required": False}], + "params": [{"name": "version", "required": True}], + "body": [{"name": "message", "type": "string", "required": True}], + } + } + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is True + + def test_validate_webhook_request_method_mismatch(self): + """Test webhook validation with HTTP method mismatch.""" + webhook_data = {"method": "GET", "headers": {}, "query_params": {}, "body": {}, "files": {}} + + node_config = {"data": {"method": "post"}} + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is False + assert "HTTP method mismatch" in result["error"] + assert "Expected POST, got GET" in result["error"] + + def test_validate_webhook_request_missing_required_header(self): + """Test webhook validation with missing required header.""" + webhook_data = {"method": "POST", "headers": {}, "query_params": {}, "body": {}, "files": {}} + + node_config = {"data": {"method": "post", "headers": [{"name": "Authorization", "required": True}]}} + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is False + assert "Required header missing: Authorization" in result["error"] + + def test_validate_webhook_request_case_insensitive_headers(self): + """Test webhook validation with case-insensitive header matching.""" + webhook_data = { + "method": "POST", + "headers": {"authorization": "Bearer token"}, # lowercase + "query_params": {}, + "body": {}, + "files": {}, + } + + node_config = { + "data": { + "method": "post", + "headers": [ + {"name": "Authorization", "required": True} # Pascal case + ], + } + } + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is True + + def test_validate_webhook_request_missing_required_param(self): + """Test webhook validation with missing required query parameter.""" + webhook_data = {"method": "POST", "headers": {}, "query_params": {}, "body": {}, "files": {}} + + node_config = {"data": {"method": "post", "params": [{"name": "version", "required": True}]}} + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is False + assert "Required query parameter missing: version" in result["error"] + + def test_validate_webhook_request_missing_required_body_param(self): + """Test webhook validation with missing required body parameter.""" + webhook_data = {"method": "POST", "headers": {}, "query_params": {}, "body": {}, "files": {}} + + node_config = {"data": {"method": "post", "body": [{"name": "message", "type": "string", "required": True}]}} + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + + assert result["valid"] is False + assert "Required body parameter missing: message" in result["error"] + + def 
test_validate_webhook_request_missing_required_file(self):
+        """Test webhook validation with missing required file parameter."""
+        webhook_data = {"method": "POST", "headers": {}, "query_params": {}, "body": {}, "files": {}}
+
+        node_config = {"data": {"method": "post", "body": [{"name": "upload", "type": "file", "required": True}]}}
+
+        result = WebhookService.validate_webhook_request(webhook_data, node_config)
+
+        assert result["valid"] is False
+        assert "Required body parameter missing: upload" in result["error"]
+
+    def test_validate_webhook_request_text_plain_with_required_body(self):
+        """Test webhook validation for text/plain content type with required body content."""
+        # Test case 1: text/plain with raw content - should pass
+        webhook_data = {
+            "method": "POST",
+            "headers": {"content-type": "text/plain"},
+            "query_params": {},
+            "body": {"raw": "Hello World"},
+            "files": {},
+        }
+
+        node_config = {
+            "data": {
+                "method": "post",
+                "content_type": "text/plain",
+                "body": [{"name": "message", "type": "string", "required": True}],
+            }
+        }
+
+        result = WebhookService.validate_webhook_request(webhook_data, node_config)
+        assert result["valid"] is True
+
+        # Test case 2: text/plain without raw content but required - should fail
+        webhook_data_no_body = {
+            "method": "POST",
+            "headers": {"content-type": "text/plain"},
+            "query_params": {},
+            "body": {},
+            "files": {},
+        }
+
+        result = WebhookService.validate_webhook_request(webhook_data_no_body, node_config)
+        assert result["valid"] is False
+        assert "Required body content missing for text/plain request" in result["error"]
+
+        # Test case 3: text/plain with empty raw content but required - should fail
+        webhook_data_empty_body = {
+            "method": "POST",
+            "headers": {"content-type": "text/plain"},
+            "query_params": {},
+            "body": {"raw": ""},
+            "files": {},
+        }
+
+        result = WebhookService.validate_webhook_request(webhook_data_empty_body, node_config)
+        assert result["valid"] is False
+        assert "Required body content missing for text/plain request" in result["error"]
+
+    def test_validate_webhook_request_text_plain_no_body_params(self):
+        """Test webhook validation for text/plain content type with no body params configured."""
+        webhook_data = {
+            "method": "POST",
+            "headers": {"content-type": "text/plain"},
+            "query_params": {},
+            "body": {"raw": "Hello World"},
+            "files": {},
+        }
+
+        node_config = {
+            "data": {
+                "method": "post",
+                "content_type": "text/plain",
+                "body": [],  # No body params configured
+            }
+        }
+
+        result = WebhookService.validate_webhook_request(webhook_data, node_config)
+        assert result["valid"] is True
+
+    def test_validate_webhook_request_validation_exception(self):
+        """Test webhook validation with exception handling."""
+        webhook_data = {"method": "POST", "headers": {}, "query_params": {}, "body": {}, "files": {}}
+
+        # Invalid node config that will cause an exception
+        node_config = None
+
+        result = WebhookService.validate_webhook_request(webhook_data, node_config)
+
+        assert result["valid"] is False
+        assert "Validation failed" in result["error"]
+
+    def test_generate_webhook_response_default(self):
+        """Test webhook response generation with default values."""
+        node_config = {"data": {}}
+
+        response_data, status_code = WebhookService.generate_webhook_response(node_config)
+
+        assert status_code == 200
+        assert response_data["status"] == "success"
+        assert "Webhook processed successfully" in response_data["message"]
+
+    def test_generate_webhook_response_custom_json(self):
+        """Test webhook response generation with custom JSON response."""
+        node_config = {"data": {"status_code": 201, "response_body": '{"result": "created", "id": 123}'}}
+
+        response_data, status_code = WebhookService.generate_webhook_response(node_config)
+
+        assert status_code == 201
+        assert response_data["result"] == "created"
+        assert response_data["id"] == 123
+
+    def test_generate_webhook_response_custom_text(self):
+        """Test webhook response generation with custom text response."""
+        node_config = {"data": {"status_code": 202, "response_body": "Request accepted for processing"}}
+
+        response_data, status_code = WebhookService.generate_webhook_response(node_config)
+
+        assert status_code == 202
+        assert response_data["message"] == "Request accepted for processing"
+
+    def test_generate_webhook_response_invalid_json(self):
+        """Test webhook response generation with invalid JSON response."""
+        node_config = {"data": {"status_code": 400, "response_body": '{"invalid": json}'}}
+
+        response_data, status_code = WebhookService.generate_webhook_response(node_config)
+
+        assert status_code == 400
+        assert response_data["message"] == '{"invalid": json}'
+
+    def test_generate_webhook_response_empty_response_body(self):
+        """Test webhook response generation with empty response body."""
+        node_config = {"data": {"status_code": 204, "response_body": ""}}
+
+        response_data, status_code = WebhookService.generate_webhook_response(node_config)
+
+        assert status_code == 204
+        assert response_data["status"] == "success"
+        assert "Webhook processed successfully" in response_data["message"]
+
+    def test_generate_webhook_response_array_json(self):
+        """Test webhook response generation with JSON array response."""
+        node_config = {"data": {"status_code": 200, "response_body": '[{"id": 1}, {"id": 2}]'}}
+
+        response_data, status_code = WebhookService.generate_webhook_response(node_config)
+
+        assert status_code == 200
+        assert isinstance(response_data, list)
+        assert len(response_data) == 2
+        assert response_data[0]["id"] == 1
+        assert response_data[1]["id"] == 2
+
+    @patch("services.webhook_service.ToolFileManager")
+    @patch("services.webhook_service.file_factory")
+    def test_process_file_uploads_success(self, mock_file_factory, mock_tool_file_manager):
+        """Test successful file upload processing."""
+        # Mock ToolFileManager
+        mock_tool_file_instance = MagicMock()
+        mock_tool_file_manager.return_value = mock_tool_file_instance
+
+        # Mock file creation
+        mock_tool_file = MagicMock()
+        mock_tool_file.id = "test_file_id"
+        mock_tool_file_instance.create_file_by_raw.return_value = mock_tool_file
+
+        # Mock file factory
+        mock_file_obj = MagicMock()
+        mock_file_factory.build_from_mapping.return_value = mock_file_obj
+
+        # Create mock files
+        files = {
+            "file1": MagicMock(filename="test1.txt", content_type="text/plain"),
+            "file2": MagicMock(filename="test2.jpg", content_type="image/jpeg"),
+        }
+
+        # Mock file reads
+        files["file1"].read.return_value = b"content1"
+        files["file2"].read.return_value = b"content2"
+
+        webhook_trigger = MagicMock()
+        webhook_trigger.tenant_id = "test_tenant"
+
+        result = WebhookService._process_file_uploads(files, webhook_trigger)
+
+        assert len(result) == 2
+        assert "file1" in result
+        assert "file2" in result
+
+        # Verify file processing was called for each file
+        assert mock_tool_file_manager.call_count == 2
+        assert mock_file_factory.build_from_mapping.call_count == 2
+
+    @patch("services.webhook_service.ToolFileManager")
+    @patch("services.webhook_service.file_factory")
+    def test_process_file_uploads_with_errors(self, mock_file_factory, mock_tool_file_manager):
+        """Test file upload processing with errors."""
+        # Mock ToolFileManager
+        mock_tool_file_instance = MagicMock()
+        mock_tool_file_manager.return_value = mock_tool_file_instance
+
+        # Mock file creation
+        mock_tool_file = MagicMock()
+        mock_tool_file.id = "test_file_id"
+        mock_tool_file_instance.create_file_by_raw.return_value = mock_tool_file
+
+        # Mock file factory
+        mock_file_obj = MagicMock()
+        mock_file_factory.build_from_mapping.return_value = mock_file_obj
+
+        # Create mock files, one will fail
+        files = {
+            "good_file": MagicMock(filename="test.txt", content_type="text/plain"),
+            "bad_file": MagicMock(filename="test.bad", content_type="text/plain"),
+        }
+
+        files["good_file"].read.return_value = b"content"
+        files["bad_file"].read.side_effect = Exception("Read error")
+
+        webhook_trigger = MagicMock()
+        webhook_trigger.tenant_id = "test_tenant"
+
+        result = WebhookService._process_file_uploads(files, webhook_trigger)
+
+        # Should process the good file and skip the bad one
+        assert len(result) == 1
+        assert "good_file" in result
+        assert "bad_file" not in result
+
+    def test_process_file_uploads_empty_filename(self):
+        """Test file upload processing with empty filename."""
+        files = {
+            "no_filename": MagicMock(filename="", content_type="text/plain"),
+            "none_filename": MagicMock(filename=None, content_type="text/plain"),
+        }
+
+        webhook_trigger = MagicMock()
+        webhook_trigger.tenant_id = "test_tenant"
+
+        result = WebhookService._process_file_uploads(files, webhook_trigger)
+
+        # Should skip files without filenames
+        assert len(result) == 0
+
+    def test_validate_json_parameter_type_string(self):
+        """Test JSON parameter type validation for string type."""
+        # Valid string
+        result = WebhookService._validate_json_parameter_type("name", "hello", "string")
+        assert result["valid"] is True
+
+        # Invalid string (number)
+        result = WebhookService._validate_json_parameter_type("name", 123, "string")
+        assert result["valid"] is False
+        assert "must be a string, got int" in result["error"]
+
+    def test_validate_json_parameter_type_number(self):
+        """Test JSON parameter type validation for number type."""
+        # Valid integer
+        result = WebhookService._validate_json_parameter_type("count", 42, "number")
+        assert result["valid"] is True
+
+        # Valid float
+        result = WebhookService._validate_json_parameter_type("price", 19.99, "number")
+        assert result["valid"] is True
+
+        # Invalid number (string)
+        result = WebhookService._validate_json_parameter_type("count", "42", "number")
+        assert result["valid"] is False
+        assert "must be a number, got str" in result["error"]
+
+    def test_validate_json_parameter_type_bool(self):
+        """Test JSON parameter type validation for boolean type."""
+        # Valid boolean
+        result = WebhookService._validate_json_parameter_type("enabled", True, "boolean")
+        assert result["valid"] is True
+
+        result = WebhookService._validate_json_parameter_type("enabled", False, "boolean")
+        assert result["valid"] is True
+
+        # Invalid boolean (string)
+        result = WebhookService._validate_json_parameter_type("enabled", "true", "boolean")
+        assert result["valid"] is False
+        assert "must be a boolean, got str" in result["error"]
+
+    def test_validate_json_parameter_type_object(self):
+        """Test JSON parameter type validation for object type."""
+        # Valid object
+        result = WebhookService._validate_json_parameter_type("user", {"name": "John", "age": 30}, "object")
+        assert result["valid"] is True
+
+        # Invalid object (string)
+        result = WebhookService._validate_json_parameter_type("user", "not_an_object", "object")
+        assert result["valid"] is False
+        assert "must be an object, got str" in result["error"]
+
+    def test_validate_json_parameter_type_array_string(self):
+        """Test JSON parameter type validation for array[string] type."""
+        # Valid array of strings
+        result = WebhookService._validate_json_parameter_type("tags", ["tag1", "tag2", "tag3"], "array[string]")
+        assert result["valid"] is True
+
+        # Invalid - not an array
+        result = WebhookService._validate_json_parameter_type("tags", "not_an_array", "array[string]")
+        assert result["valid"] is False
+        assert "must be an array, got str" in result["error"]
+
+        # Invalid - array with non-strings
+        result = WebhookService._validate_json_parameter_type("tags", ["tag1", 123, "tag3"], "array[string]")
+        assert result["valid"] is False
+        assert "must be an array of strings" in result["error"]
+
+    def test_validate_json_parameter_type_array_number(self):
+        """Test JSON parameter type validation for array[number] type."""
+        # Valid array of numbers
+        result = WebhookService._validate_json_parameter_type("scores", [1, 2.5, 3, 4.7], "array[number]")
+        assert result["valid"] is True
+
+        # Invalid - array with non-numbers
+        result = WebhookService._validate_json_parameter_type("scores", [1, "2", 3], "array[number]")
+        assert result["valid"] is False
+        assert "must be an array of numbers" in result["error"]
+
+    def test_validate_json_parameter_type_array_bool(self):
+        """Test JSON parameter type validation for array[bool] type."""
+        # Valid array of booleans
+        result = WebhookService._validate_json_parameter_type("flags", [True, False, True], "array[boolean]")
+        assert result["valid"] is True
+
+        # Invalid - array with non-booleans
+        result = WebhookService._validate_json_parameter_type("flags", [True, "false", True], "array[boolean]")
+        assert result["valid"] is False
+        assert "must be an array of booleans" in result["error"]
+
+    def test_validate_json_parameter_type_array_object(self):
+        """Test JSON parameter type validation for array[object] type."""
+        # Valid array of objects
+        result = WebhookService._validate_json_parameter_type(
+            "users", [{"name": "John"}, {"name": "Jane"}], "array[object]"
+        )
+        assert result["valid"] is True
+
+        # Invalid - array with non-objects
+        result = WebhookService._validate_json_parameter_type(
+            "users", [{"name": "John"}, "not_object"], "array[object]"
+        )
+        assert result["valid"] is False
+        assert "must be an array of objects" in result["error"]
+
+    def test_validate_json_parameter_type_unknown_type(self):
+        """Test JSON parameter type validation for unknown type."""
+        # Unknown type should return valid and log warning
+        result = WebhookService._validate_json_parameter_type("data", "anything", "unknown_type")
+        assert result["valid"] is True
+
+    def test_validate_webhook_request_json_type_validation(self):
+        """Test webhook validation with JSON parameter type validation."""
+        # Test valid JSON types
+        webhook_data = {
+            "method": "POST",
+            "headers": {"Content-Type": "application/json"},
+            "query_params": {},
+            "body": {
+                "name": "John",
+                "age": 30,
+                "active": True,
+                "profile": {"email": "john@example.com"},
+                "tags": ["developer", "python"],
+                "scores": [85, 92.5, 78],
+                "flags": [True, False],
+                "items": [{"id": 1}, {"id": 2}],
+            },
+            "files": {},
+        }
+
+        node_config = {
+            "data": {
+                "method": "post",
+                "content_type": "application/json",
+                "body": [
+                    {"name": "name", "type": "string", "required": 
True}, + {"name": "age", "type": "number", "required": True}, + {"name": "active", "type": "bool", "required": True}, + {"name": "profile", "type": "object", "required": True}, + {"name": "tags", "type": "array[string]", "required": True}, + {"name": "scores", "type": "array[number]", "required": True}, + {"name": "flags", "type": "array[bool]", "required": True}, + {"name": "items", "type": "array[object]", "required": True}, + ], + } + } + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + assert result["valid"] is True + + def test_validate_webhook_request_json_type_validation_invalid(self): + """Test webhook validation with invalid JSON parameter types.""" + webhook_data = { + "method": "POST", + "headers": {"Content-Type": "application/json"}, + "query_params": {}, + "body": { + "name": 123, # Should be string + "age": "thirty", # Should be number + }, + "files": {}, + } + + node_config = { + "data": { + "method": "post", + "content_type": "application/json", + "body": [ + {"name": "name", "type": "string", "required": True}, + {"name": "age", "type": "number", "required": True}, + ], + } + } + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + assert result["valid"] is False + assert "must be a string, got int" in result["error"] + + def test_validate_webhook_request_non_json_skip_type_validation(self): + """Test that type validation is skipped for non-JSON content types.""" + webhook_data = { + "method": "POST", + "headers": {"Content-Type": "application/x-www-form-urlencoded"}, + "query_params": {}, + "body": { + "name": 123, # Would be invalid for string if this was JSON + }, + "files": {}, + } + + node_config = { + "data": { + "method": "post", + "content_type": "application/x-www-form-urlencoded", + "body": [ + {"name": "name", "type": "string", "required": True}, + ], + } + } + + result = WebhookService.validate_webhook_request(webhook_data, node_config) + assert result["valid"] is True # Should pass because type validation is only for JSON diff --git a/api/uv.lock b/api/uv.lock index d696c9db21..c24b987a00 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.11, <3.13" resolution-markers = [ "python_full_version >= '3.12.4' and sys_platform == 'linux'", @@ -480,16 +480,16 @@ wheels = [ [[package]] name = "bce-python-sdk" -version = "0.9.45" +version = "0.9.46" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "future" }, { name = "pycryptodome" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/53/19/0f23aedecb980288e663ba9ce81fa1545d6331d62bd75262fca49678052d/bce_python_sdk-0.9.45.tar.gz", hash = "sha256:ba60d66e80fcd012a6362bf011fee18bca616b0005814d261aba3aa202f7025f", size = 252769, upload-time = "2025-08-28T10:24:54.303Z" } +sdist = { url = "https://files.pythonhosted.org/packages/53/57/f98bc15c12cc022ef195f689ee57ed61d8a8677bda3089c4d58fb1872d45/bce_python_sdk-0.9.46.tar.gz", hash = "sha256:4bf01b22e6d172ccd94aa201f8bc6f2a98d0da4784160e77cfacfcc71c2686be", size = 253806, upload-time = "2025-09-15T06:51:52.753Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/1f/d3fd91808a1f4881b4072424390d38e85707edd75ed5d9cea2a0299a7a7a/bce_python_sdk-0.9.45-py3-none-any.whl", hash = "sha256:cce3ca7ad4de8be2cc0722c1d6a7db7be6f2833f8d9ca7f892c572e6ff78a959", size = 352012, upload-time = "2025-08-28T10:24:52.387Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/f5/20e9ab324b22a77970c57bc8267e586e85e2aa1277d80f2c58ca8a39a13e/bce_python_sdk-0.9.46-py3-none-any.whl", hash = "sha256:655074da6592ce8b036f605d9a272bfdcd1f515eb2f8e3f0333bb7cc62f700cb", size = 352622, upload-time = "2025-09-15T06:51:50.811Z" }, ] [[package]] @@ -555,11 +555,11 @@ wheels = [ [[package]] name = "billiard" -version = "4.2.1" +version = "4.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/58/1546c970afcd2a2428b1bfafecf2371d8951cc34b46701bea73f4280989e/billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f", size = 155031, upload-time = "2024-09-21T13:40:22.491Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/6a/1405343016bce8354b29d90aad6b0bf6485b5e60404516e4b9a3a9646cf0/billiard-4.2.2.tar.gz", hash = "sha256:e815017a062b714958463e07ba15981d802dc53d41c5b69d28c5a7c238f8ecf3", size = 155592, upload-time = "2025-09-20T14:44:40.456Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/da/43b15f28fe5f9e027b41c539abc5469052e9d48fd75f8ff094ba2a0ae767/billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb", size = 86766, upload-time = "2024-09-21T13:40:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/a6/80/ef8dff49aae0e4430f81842f7403e14e0ca59db7bbaf7af41245b67c6b25/billiard-4.2.2-py3-none-any.whl", hash = "sha256:4bc05dcf0d1cc6addef470723aac2a6232f3c7ed7475b0b580473a9145829457", size = 86896, upload-time = "2025-09-20T14:44:39.157Z" }, ] [[package]] @@ -587,16 +587,16 @@ wheels = [ [[package]] name = "boto3-stubs" -version = "1.40.29" +version = "1.40.35" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore-stubs" }, { name = "types-s3transfer" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dd/35/0cdc62641577e8a0a6d4191ecc803fee16adf18de1e81280eb3d87c7d9e8/boto3_stubs-1.40.29.tar.gz", hash = "sha256:9fc7d24dcbcc786093daf42487a9ed4a58a6be7f1ccf28f5be0b2bad4a3edb11", size = 100996, upload-time = "2025-09-11T19:48:28.487Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/18/6a64ff9603845d635f6167b6d9a3f9a6e658d8a28eef36f8423eb5a99ae1/boto3_stubs-1.40.35.tar.gz", hash = "sha256:2d6f2dbe6e9b42deb7b8fbeed051461e7906903f26e99634d00be45cc40db41a", size = 100819, upload-time = "2025-09-19T19:42:36.372Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/a2/e47bf7595fadc6154ff2941e9ab9bb68173fba95f5ccdb24e5c13d16e5e5/boto3_stubs-1.40.29-py3-none-any.whl", hash = "sha256:1ad373b68b1c9a5e8e5deb243ef3a4c5b1d2c25c3477559eba1089ed4a0ee94e", size = 69769, upload-time = "2025-09-11T19:48:20.453Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d4/d744260908ad55903baefa086a3c9cabc50bfafd63c3f2d0e05688378013/boto3_stubs-1.40.35-py3-none-any.whl", hash = "sha256:2bb44e6c17831650a28e3e00bf5be0a6ba771fce08724ba978ffcd06a7bca7e3", size = 69689, upload-time = "2025-09-19T19:42:30.08Z" }, ] [package.optional-dependencies] @@ -620,14 +620,14 @@ wheels = [ [[package]] name = "botocore-stubs" -version = "1.40.29" +version = "1.40.33" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-awscrt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/32/5c/49b2860e2a26b7383d5915374e61d962a3853e3fd569e4370444f0b902c0/botocore_stubs-1.40.29.tar.gz", hash = 
"sha256:324669d5ed7b5f7271bf3c3ea7208191b1d183f17d7e73398f11fef4a31fdf6b", size = 42742, upload-time = "2025-09-11T20:22:35.451Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/94/16f8e1f41feaa38f1350aa5a4c60c5724b6c8524ca0e6c28523bf5070e74/botocore_stubs-1.40.33.tar.gz", hash = "sha256:89c51ae0b28d9d79fde8c497cf908ddf872ce027d2737d4d4ba473fde9cdaa82", size = 42742, upload-time = "2025-09-17T20:25:56.388Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/3c/f901ca6c4d66e0bebbfc56e614fc214416db72c613f768ee2fc84ffdbff4/botocore_stubs-1.40.29-py3-none-any.whl", hash = "sha256:84cbcc6328dddaa1f825830f7dec8fa0dcd3bac8002211322e8529cbfb5eaddd", size = 66843, upload-time = "2025-09-11T20:22:32.576Z" }, + { url = "https://files.pythonhosted.org/packages/af/7b/6d8fe12a955b16094460e89ea7c4e063f131f4b3bd461b96bcd625d0c79e/botocore_stubs-1.40.33-py3-none-any.whl", hash = "sha256:ad21fee32cbdc7ad4730f29baf88424c7086bf88a745f8e43660ca3e9a7e5f89", size = 66843, upload-time = "2025-09-17T20:25:54.052Z" }, ] [[package]] @@ -935,14 +935,14 @@ wheels = [ [[package]] name = "click" -version = "8.2.1" +version = "8.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, ] [[package]] @@ -1193,45 +1193,64 @@ version = "1.7" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/6b/b0/e595ce2a2527e169c3bcd6c33d2473c1918e0b7f6826a043ca1245dd4e5b/crcmod-1.7.tar.gz", hash = "sha256:dc7051a0db5f2bd48665a990d3ec1cc305a466a77358ca4492826f41f283601e", size = 89670, upload-time = "2010-06-27T14:35:29.538Z" } +[[package]] +name = "croniter" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dateutil" }, + { name = "pytz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/2f/44d1ae153a0e27be56be43465e5cb39b9650c781e001e7864389deb25090/croniter-6.0.0.tar.gz", hash = "sha256:37c504b313956114a983ece2c2b07790b1f1094fe9d81cc94739214748255577", size = 64481, upload-time = "2024-12-17T17:17:47.32Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/4b/290b4c3efd6417a8b0c284896de19b1d5855e6dbdb97d2a35e68fa42de85/croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368", size = 25468, 
upload-time = "2024-12-17T17:17:45.359Z" }, +] + [[package]] name = "cryptography" -version = "45.0.7" +version = "46.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/35/c495bffc2056f2dadb32434f1feedd79abde2a7f8363e1974afa9c33c7e2/cryptography-45.0.7.tar.gz", hash = "sha256:4b1654dfc64ea479c242508eb8c724044f1e964a47d1d1cacc5132292d851971", size = 744980, upload-time = "2025-09-01T11:15:03.146Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/62/e3664e6ffd7743e1694b244dde70b43a394f6f7fbcacf7014a8ff5197c73/cryptography-46.0.1.tar.gz", hash = "sha256:ed570874e88f213437f5cf758f9ef26cbfc3f336d889b1e592ee11283bb8d1c7", size = 749198, upload-time = "2025-09-17T00:10:35.797Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/91/925c0ac74362172ae4516000fe877912e33b5983df735ff290c653de4913/cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee", size = 7041105, upload-time = "2025-09-01T11:13:59.684Z" }, - { url = "https://files.pythonhosted.org/packages/fc/63/43641c5acce3a6105cf8bd5baeceeb1846bb63067d26dae3e5db59f1513a/cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6", size = 4205799, upload-time = "2025-09-01T11:14:02.517Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/c238dd9107f10bfde09a4d1c52fd38828b1aa353ced11f358b5dd2507d24/cryptography-45.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:577470e39e60a6cd7780793202e63536026d9b8641de011ed9d8174da9ca5339", size = 4430504, upload-time = "2025-09-01T11:14:04.522Z" }, - { url = "https://files.pythonhosted.org/packages/62/62/24203e7cbcc9bd7c94739428cd30680b18ae6b18377ae66075c8e4771b1b/cryptography-45.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4bd3e5c4b9682bc112d634f2c6ccc6736ed3635fc3319ac2bb11d768cc5a00d8", size = 4209542, upload-time = "2025-09-01T11:14:06.309Z" }, - { url = "https://files.pythonhosted.org/packages/cd/e3/e7de4771a08620eef2389b86cd87a2c50326827dea5528feb70595439ce4/cryptography-45.0.7-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:465ccac9d70115cd4de7186e60cfe989de73f7bb23e8a7aa45af18f7412e75bf", size = 3889244, upload-time = "2025-09-01T11:14:08.152Z" }, - { url = "https://files.pythonhosted.org/packages/96/b8/bca71059e79a0bb2f8e4ec61d9c205fbe97876318566cde3b5092529faa9/cryptography-45.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:16ede8a4f7929b4b7ff3642eba2bf79aa1d71f24ab6ee443935c0d269b6bc513", size = 4461975, upload-time = "2025-09-01T11:14:09.755Z" }, - { url = "https://files.pythonhosted.org/packages/58/67/3f5b26937fe1218c40e95ef4ff8d23c8dc05aa950d54200cc7ea5fb58d28/cryptography-45.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8978132287a9d3ad6b54fcd1e08548033cc09dc6aacacb6c004c73c3eb5d3ac3", size = 4209082, upload-time = "2025-09-01T11:14:11.229Z" }, - { url = "https://files.pythonhosted.org/packages/0e/e4/b3e68a4ac363406a56cf7b741eeb80d05284d8c60ee1a55cdc7587e2a553/cryptography-45.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b6a0e535baec27b528cb07a119f321ac024592388c5681a5ced167ae98e9fff3", size = 4460397, upload-time = "2025-09-01T11:14:12.924Z" }, - { url = 
"https://files.pythonhosted.org/packages/22/49/2c93f3cd4e3efc8cb22b02678c1fad691cff9dd71bb889e030d100acbfe0/cryptography-45.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a24ee598d10befaec178efdff6054bc4d7e883f615bfbcd08126a0f4931c83a6", size = 4337244, upload-time = "2025-09-01T11:14:14.431Z" }, - { url = "https://files.pythonhosted.org/packages/04/19/030f400de0bccccc09aa262706d90f2ec23d56bc4eb4f4e8268d0ddf3fb8/cryptography-45.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa26fa54c0a9384c27fcdc905a2fb7d60ac6e47d14bc2692145f2b3b1e2cfdbd", size = 4568862, upload-time = "2025-09-01T11:14:16.185Z" }, - { url = "https://files.pythonhosted.org/packages/29/56/3034a3a353efa65116fa20eb3c990a8c9f0d3db4085429040a7eef9ada5f/cryptography-45.0.7-cp311-abi3-win32.whl", hash = "sha256:bef32a5e327bd8e5af915d3416ffefdbe65ed975b646b3805be81b23580b57b8", size = 2936578, upload-time = "2025-09-01T11:14:17.638Z" }, - { url = "https://files.pythonhosted.org/packages/b3/61/0ab90f421c6194705a99d0fa9f6ee2045d916e4455fdbb095a9c2c9a520f/cryptography-45.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:3808e6b2e5f0b46d981c24d79648e5c25c35e59902ea4391a0dcb3e667bf7443", size = 3405400, upload-time = "2025-09-01T11:14:18.958Z" }, - { url = "https://files.pythonhosted.org/packages/63/e8/c436233ddf19c5f15b25ace33979a9dd2e7aa1a59209a0ee8554179f1cc0/cryptography-45.0.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bfb4c801f65dd61cedfc61a83732327fafbac55a47282e6f26f073ca7a41c3b2", size = 7021824, upload-time = "2025-09-01T11:14:20.954Z" }, - { url = "https://files.pythonhosted.org/packages/bc/4c/8f57f2500d0ccd2675c5d0cc462095adf3faa8c52294ba085c036befb901/cryptography-45.0.7-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:81823935e2f8d476707e85a78a405953a03ef7b7b4f55f93f7c2d9680e5e0691", size = 4202233, upload-time = "2025-09-01T11:14:22.454Z" }, - { url = "https://files.pythonhosted.org/packages/eb/ac/59b7790b4ccaed739fc44775ce4645c9b8ce54cbec53edf16c74fd80cb2b/cryptography-45.0.7-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3994c809c17fc570c2af12c9b840d7cea85a9fd3e5c0e0491f4fa3c029216d59", size = 4423075, upload-time = "2025-09-01T11:14:24.287Z" }, - { url = "https://files.pythonhosted.org/packages/b8/56/d4f07ea21434bf891faa088a6ac15d6d98093a66e75e30ad08e88aa2b9ba/cryptography-45.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dad43797959a74103cb59c5dac71409f9c27d34c8a05921341fb64ea8ccb1dd4", size = 4204517, upload-time = "2025-09-01T11:14:25.679Z" }, - { url = "https://files.pythonhosted.org/packages/e8/ac/924a723299848b4c741c1059752c7cfe09473b6fd77d2920398fc26bfb53/cryptography-45.0.7-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ce7a453385e4c4693985b4a4a3533e041558851eae061a58a5405363b098fcd3", size = 3882893, upload-time = "2025-09-01T11:14:27.1Z" }, - { url = "https://files.pythonhosted.org/packages/83/dc/4dab2ff0a871cc2d81d3ae6d780991c0192b259c35e4d83fe1de18b20c70/cryptography-45.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b04f85ac3a90c227b6e5890acb0edbaf3140938dbecf07bff618bf3638578cf1", size = 4450132, upload-time = "2025-09-01T11:14:28.58Z" }, - { url = "https://files.pythonhosted.org/packages/12/dd/b2882b65db8fc944585d7fb00d67cf84a9cef4e77d9ba8f69082e911d0de/cryptography-45.0.7-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:48c41a44ef8b8c2e80ca4527ee81daa4c527df3ecbc9423c41a420a9559d0e27", size = 4204086, upload-time = "2025-09-01T11:14:30.572Z" }, - { url = 
"https://files.pythonhosted.org/packages/5d/fa/1d5745d878048699b8eb87c984d4ccc5da4f5008dfd3ad7a94040caca23a/cryptography-45.0.7-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f3df7b3d0f91b88b2106031fd995802a2e9ae13e02c36c1fc075b43f420f3a17", size = 4449383, upload-time = "2025-09-01T11:14:32.046Z" }, - { url = "https://files.pythonhosted.org/packages/36/8b/fc61f87931bc030598e1876c45b936867bb72777eac693e905ab89832670/cryptography-45.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd342f085542f6eb894ca00ef70236ea46070c8a13824c6bde0dfdcd36065b9b", size = 4332186, upload-time = "2025-09-01T11:14:33.95Z" }, - { url = "https://files.pythonhosted.org/packages/0b/11/09700ddad7443ccb11d674efdbe9a832b4455dc1f16566d9bd3834922ce5/cryptography-45.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1993a1bb7e4eccfb922b6cd414f072e08ff5816702a0bdb8941c247a6b1b287c", size = 4561639, upload-time = "2025-09-01T11:14:35.343Z" }, - { url = "https://files.pythonhosted.org/packages/71/ed/8f4c1337e9d3b94d8e50ae0b08ad0304a5709d483bfcadfcc77a23dbcb52/cryptography-45.0.7-cp37-abi3-win32.whl", hash = "sha256:18fcf70f243fe07252dcb1b268a687f2358025ce32f9f88028ca5c364b123ef5", size = 2926552, upload-time = "2025-09-01T11:14:36.929Z" }, - { url = "https://files.pythonhosted.org/packages/bc/ff/026513ecad58dacd45d1d24ebe52b852165a26e287177de1d545325c0c25/cryptography-45.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:7285a89df4900ed3bfaad5679b1e668cb4b38a8de1ccbfc84b05f34512da0a90", size = 3392742, upload-time = "2025-09-01T11:14:38.368Z" }, - { url = "https://files.pythonhosted.org/packages/99/4e/49199a4c82946938a3e05d2e8ad9482484ba48bbc1e809e3d506c686d051/cryptography-45.0.7-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a862753b36620af6fc54209264f92c716367f2f0ff4624952276a6bbd18cbde", size = 3584634, upload-time = "2025-09-01T11:14:50.593Z" }, - { url = "https://files.pythonhosted.org/packages/16/ce/5f6ff59ea9c7779dba51b84871c19962529bdcc12e1a6ea172664916c550/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:06ce84dc14df0bf6ea84666f958e6080cdb6fe1231be2a51f3fc1267d9f3fb34", size = 4149533, upload-time = "2025-09-01T11:14:52.091Z" }, - { url = "https://files.pythonhosted.org/packages/ce/13/b3cfbd257ac96da4b88b46372e662009b7a16833bfc5da33bb97dd5631ae/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d0c5c6bac22b177bf8da7435d9d27a6834ee130309749d162b26c3105c0795a9", size = 4385557, upload-time = "2025-09-01T11:14:53.551Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c5/8c59d6b7c7b439ba4fc8d0cab868027fd095f215031bc123c3a070962912/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:2f641b64acc00811da98df63df7d59fd4706c0df449da71cb7ac39a0732b40ae", size = 4149023, upload-time = "2025-09-01T11:14:55.022Z" }, - { url = "https://files.pythonhosted.org/packages/55/32/05385c86d6ca9ab0b4d5bb442d2e3d85e727939a11f3e163fc776ce5eb40/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:f5414a788ecc6ee6bc58560e85ca624258a55ca434884445440a810796ea0e0b", size = 4385722, upload-time = "2025-09-01T11:14:57.319Z" }, - { url = "https://files.pythonhosted.org/packages/23/87/7ce86f3fa14bc11a5a48c30d8103c26e09b6465f8d8e9d74cf7a0714f043/cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63", size = 3332908, upload-time = "2025-09-01T11:14:58.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/8c/44ee01267ec01e26e43ebfdae3f120ec2312aa72fa4c0507ebe41a26739f/cryptography-46.0.1-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:1cd6d50c1a8b79af1a6f703709d8973845f677c8e97b1268f5ff323d38ce8475", size = 7285044, upload-time = "2025-09-17T00:08:36.807Z" }, + { url = "https://files.pythonhosted.org/packages/22/59/9ae689a25047e0601adfcb159ec4f83c0b4149fdb5c3030cc94cd218141d/cryptography-46.0.1-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0ff483716be32690c14636e54a1f6e2e1b7bf8e22ca50b989f88fa1b2d287080", size = 4308182, upload-time = "2025-09-17T00:08:39.388Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/ca6cc9df7118f2fcd142c76b1da0f14340d77518c05b1ebfbbabca6b9e7d/cryptography-46.0.1-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9873bf7c1f2a6330bdfe8621e7ce64b725784f9f0c3a6a55c3047af5849f920e", size = 4572393, upload-time = "2025-09-17T00:08:41.663Z" }, + { url = "https://files.pythonhosted.org/packages/7f/a3/0f5296f63815d8e985922b05c31f77ce44787b3127a67c0b7f70f115c45f/cryptography-46.0.1-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0dfb7c88d4462a0cfdd0d87a3c245a7bc3feb59de101f6ff88194f740f72eda6", size = 4308400, upload-time = "2025-09-17T00:08:43.559Z" }, + { url = "https://files.pythonhosted.org/packages/5d/8c/74fcda3e4e01be1d32775d5b4dd841acaac3c1b8fa4d0774c7ac8d52463d/cryptography-46.0.1-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e22801b61613ebdebf7deb18b507919e107547a1d39a3b57f5f855032dd7cfb8", size = 4015786, upload-time = "2025-09-17T00:08:45.758Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b8/85d23287baeef273b0834481a3dd55bbed3a53587e3b8d9f0898235b8f91/cryptography-46.0.1-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:757af4f6341ce7a1e47c326ca2a81f41d236070217e5fbbad61bbfe299d55d28", size = 4982606, upload-time = "2025-09-17T00:08:47.602Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d3/de61ad5b52433b389afca0bc70f02a7a1f074651221f599ce368da0fe437/cryptography-46.0.1-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f7a24ea78de345cfa7f6a8d3bde8b242c7fac27f2bd78fa23474ca38dfaeeab9", size = 4604234, upload-time = "2025-09-17T00:08:49.879Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1f/dbd4d6570d84748439237a7478d124ee0134bf166ad129267b7ed8ea6d22/cryptography-46.0.1-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e8776dac9e660c22241b6587fae51a67b4b0147daa4d176b172c3ff768ad736", size = 4307669, upload-time = "2025-09-17T00:08:52.321Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fd/ca0a14ce7f0bfe92fa727aacaf2217eb25eb7e4ed513b14d8e03b26e63ed/cryptography-46.0.1-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9f40642a140c0c8649987027867242b801486865277cbabc8c6059ddef16dc8b", size = 4947579, upload-time = "2025-09-17T00:08:54.697Z" }, + { url = "https://files.pythonhosted.org/packages/89/6b/09c30543bb93401f6f88fce556b3bdbb21e55ae14912c04b7bf355f5f96c/cryptography-46.0.1-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:449ef2b321bec7d97ef2c944173275ebdab78f3abdd005400cc409e27cd159ab", size = 4603669, upload-time = "2025-09-17T00:08:57.16Z" }, + { url = "https://files.pythonhosted.org/packages/23/9a/38cb01cb09ce0adceda9fc627c9cf98eb890fc8d50cacbe79b011df20f8a/cryptography-46.0.1-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2dd339ba3345b908fa3141ddba4025568fa6fd398eabce3ef72a29ac2d73ad75", size = 4435828, upload-time = 
"2025-09-17T00:08:59.606Z" }, + { url = "https://files.pythonhosted.org/packages/0f/53/435b5c36a78d06ae0bef96d666209b0ecd8f8181bfe4dda46536705df59e/cryptography-46.0.1-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7411c910fb2a412053cf33cfad0153ee20d27e256c6c3f14d7d7d1d9fec59fd5", size = 4709553, upload-time = "2025-09-17T00:09:01.832Z" }, + { url = "https://files.pythonhosted.org/packages/f5/c4/0da6e55595d9b9cd3b6eb5dc22f3a07ded7f116a3ea72629cab595abb804/cryptography-46.0.1-cp311-abi3-win32.whl", hash = "sha256:cbb8e769d4cac884bb28e3ff620ef1001b75588a5c83c9c9f1fdc9afbe7f29b0", size = 3058327, upload-time = "2025-09-17T00:09:03.726Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/cd29a35e0d6e78a0ee61793564c8cff0929c38391cb0de27627bdc7525aa/cryptography-46.0.1-cp311-abi3-win_amd64.whl", hash = "sha256:92e8cfe8bd7dd86eac0a677499894862cd5cc2fd74de917daa881d00871ac8e7", size = 3523893, upload-time = "2025-09-17T00:09:06.272Z" }, + { url = "https://files.pythonhosted.org/packages/f2/dd/eea390f3e78432bc3d2f53952375f8b37cb4d37783e626faa6a51e751719/cryptography-46.0.1-cp311-abi3-win_arm64.whl", hash = "sha256:db5597a4c7353b2e5fb05a8e6cb74b56a4658a2b7bf3cb6b1821ae7e7fd6eaa0", size = 2932145, upload-time = "2025-09-17T00:09:08.568Z" }, + { url = "https://files.pythonhosted.org/packages/98/e5/fbd632385542a3311915976f88e0dfcf09e62a3fc0aff86fb6762162a24d/cryptography-46.0.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:d84c40bdb8674c29fa192373498b6cb1e84f882889d21a471b45d1f868d8d44b", size = 7255677, upload-time = "2025-09-17T00:09:42.407Z" }, + { url = "https://files.pythonhosted.org/packages/56/3e/13ce6eab9ad6eba1b15a7bd476f005a4c1b3f299f4c2f32b22408b0edccf/cryptography-46.0.1-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9ed64e5083fa806709e74fc5ea067dfef9090e5b7a2320a49be3c9df3583a2d8", size = 4301110, upload-time = "2025-09-17T00:09:45.614Z" }, + { url = "https://files.pythonhosted.org/packages/a2/67/65dc233c1ddd688073cf7b136b06ff4b84bf517ba5529607c9d79720fc67/cryptography-46.0.1-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:341fb7a26bc9d6093c1b124b9f13acc283d2d51da440b98b55ab3f79f2522ead", size = 4562369, upload-time = "2025-09-17T00:09:47.601Z" }, + { url = "https://files.pythonhosted.org/packages/17/db/d64ae4c6f4e98c3dac5bf35dd4d103f4c7c345703e43560113e5e8e31b2b/cryptography-46.0.1-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6ef1488967e729948d424d09c94753d0167ce59afba8d0f6c07a22b629c557b2", size = 4302126, upload-time = "2025-09-17T00:09:49.335Z" }, + { url = "https://files.pythonhosted.org/packages/3d/19/5f1eea17d4805ebdc2e685b7b02800c4f63f3dd46cfa8d4c18373fea46c8/cryptography-46.0.1-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7823bc7cdf0b747ecfb096d004cc41573c2f5c7e3a29861603a2871b43d3ef32", size = 4009431, upload-time = "2025-09-17T00:09:51.239Z" }, + { url = "https://files.pythonhosted.org/packages/81/b5/229ba6088fe7abccbfe4c5edb96c7a5ad547fac5fdd0d40aa6ea540b2985/cryptography-46.0.1-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:f736ab8036796f5a119ff8211deda416f8c15ce03776db704a7a4e17381cb2ef", size = 4980739, upload-time = "2025-09-17T00:09:54.181Z" }, + { url = "https://files.pythonhosted.org/packages/3a/9c/50aa38907b201e74bc43c572f9603fa82b58e831bd13c245613a23cff736/cryptography-46.0.1-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:e46710a240a41d594953012213ea8ca398cd2448fbc5d0f1be8160b5511104a0", size = 4592289, upload-time = 
"2025-09-17T00:09:56.731Z" }, + { url = "https://files.pythonhosted.org/packages/5a/33/229858f8a5bb22f82468bb285e9f4c44a31978d5f5830bb4ea1cf8a4e454/cryptography-46.0.1-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:84ef1f145de5aee82ea2447224dc23f065ff4cc5791bb3b506615957a6ba8128", size = 4301815, upload-time = "2025-09-17T00:09:58.548Z" }, + { url = "https://files.pythonhosted.org/packages/52/cb/b76b2c87fbd6ed4a231884bea3ce073406ba8e2dae9defad910d33cbf408/cryptography-46.0.1-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9394c7d5a7565ac5f7d9ba38b2617448eba384d7b107b262d63890079fad77ca", size = 4943251, upload-time = "2025-09-17T00:10:00.475Z" }, + { url = "https://files.pythonhosted.org/packages/94/0f/f66125ecf88e4cb5b8017ff43f3a87ede2d064cb54a1c5893f9da9d65093/cryptography-46.0.1-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ed957044e368ed295257ae3d212b95456bd9756df490e1ac4538857f67531fcc", size = 4591247, upload-time = "2025-09-17T00:10:02.874Z" }, + { url = "https://files.pythonhosted.org/packages/f6/22/9f3134ae436b63b463cfdf0ff506a0570da6873adb4bf8c19b8a5b4bac64/cryptography-46.0.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f7de12fa0eee6234de9a9ce0ffcfa6ce97361db7a50b09b65c63ac58e5f22fc7", size = 4428534, upload-time = "2025-09-17T00:10:04.994Z" }, + { url = "https://files.pythonhosted.org/packages/89/39/e6042bcb2638650b0005c752c38ea830cbfbcbb1830e4d64d530000aa8dc/cryptography-46.0.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7fab1187b6c6b2f11a326f33b036f7168f5b996aedd0c059f9738915e4e8f53a", size = 4699541, upload-time = "2025-09-17T00:10:06.925Z" }, + { url = "https://files.pythonhosted.org/packages/68/46/753d457492d15458c7b5a653fc9a84a1c9c7a83af6ebdc94c3fc373ca6e8/cryptography-46.0.1-cp38-abi3-win32.whl", hash = "sha256:45f790934ac1018adeba46a0f7289b2b8fe76ba774a88c7f1922213a56c98bc1", size = 3043779, upload-time = "2025-09-17T00:10:08.951Z" }, + { url = "https://files.pythonhosted.org/packages/2f/50/b6f3b540c2f6ee712feeb5fa780bb11fad76634e71334718568e7695cb55/cryptography-46.0.1-cp38-abi3-win_amd64.whl", hash = "sha256:7176a5ab56fac98d706921f6416a05e5aff7df0e4b91516f450f8627cda22af3", size = 3517226, upload-time = "2025-09-17T00:10:10.769Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e8/77d17d00981cdd27cc493e81e1749a0b8bbfb843780dbd841e30d7f50743/cryptography-46.0.1-cp38-abi3-win_arm64.whl", hash = "sha256:efc9e51c3e595267ff84adf56e9b357db89ab2279d7e375ffcaf8f678606f3d9", size = 2923149, upload-time = "2025-09-17T00:10:13.236Z" }, + { url = "https://files.pythonhosted.org/packages/27/27/077e09fd92075dd1338ea0ffaf5cfee641535545925768350ad90d8c36ca/cryptography-46.0.1-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b9c79af2c3058430d911ff1a5b2b96bbfe8da47d5ed961639ce4681886614e70", size = 3722319, upload-time = "2025-09-17T00:10:20.273Z" }, + { url = "https://files.pythonhosted.org/packages/db/32/6fc7250280920418651640d76cee34d91c1e0601d73acd44364570cf041f/cryptography-46.0.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0ca4be2af48c24df689a150d9cd37404f689e2968e247b6b8ff09bff5bcd786f", size = 4249030, upload-time = "2025-09-17T00:10:22.396Z" }, + { url = "https://files.pythonhosted.org/packages/32/33/8d5398b2da15a15110b2478480ab512609f95b45ead3a105c9a9c76f9980/cryptography-46.0.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:13e67c4d3fb8b6bc4ef778a7ccdd8df4cd15b4bcc18f4239c8440891a11245cc", size = 4528009, upload-time = "2025-09-17T00:10:24.418Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/1c/4012edad2a8977ab386c36b6e21f5065974d37afa3eade83a9968cba4855/cryptography-46.0.1-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:15b5fd9358803b0d1cc42505a18d8bca81dabb35b5cfbfea1505092e13a9d96d", size = 4248902, upload-time = "2025-09-17T00:10:26.255Z" }, + { url = "https://files.pythonhosted.org/packages/58/a3/257cd5ae677302de8fa066fca9de37128f6729d1e63c04dd6a15555dd450/cryptography-46.0.1-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:e34da95e29daf8a71cb2841fd55df0511539a6cdf33e6f77c1e95e44006b9b46", size = 4527150, upload-time = "2025-09-17T00:10:28.28Z" }, + { url = "https://files.pythonhosted.org/packages/6a/cd/fe6b65e1117ec7631f6be8951d3db076bac3e1b096e3e12710ed071ffc3c/cryptography-46.0.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:34f04b7311174469ab3ac2647469743720f8b6c8b046f238e5cb27905695eb2a", size = 3448210, upload-time = "2025-09-17T00:10:30.145Z" }, ] [[package]] @@ -1291,7 +1310,7 @@ wheels = [ [[package]] name = "dify-api" -version = "2.0.0-beta2" +version = "2.0.0b2" source = { virtual = "." } dependencies = [ { name = "arize-phoenix-otel" }, @@ -1303,6 +1322,7 @@ dependencies = [ { name = "cachetools" }, { name = "celery" }, { name = "chardet" }, + { name = "croniter" }, { name = "flask" }, { name = "flask-compress" }, { name = "flask-cors" }, @@ -1498,13 +1518,14 @@ requires-dist = [ { name = "cachetools", specifier = "~=5.3.0" }, { name = "celery", specifier = "~=5.5.2" }, { name = "chardet", specifier = "~=5.1.0" }, + { name = "croniter", specifier = ">=6.0.0" }, { name = "flask", specifier = "~=3.1.2" }, { name = "flask-compress", specifier = "~=1.17" }, { name = "flask-cors", specifier = "~=6.0.0" }, { name = "flask-login", specifier = "~=0.6.3" }, { name = "flask-migrate", specifier = "~=4.0.7" }, { name = "flask-orjson", specifier = "~=2.0.0" }, - { name = "flask-restx", specifier = "~=1.3.0" }, + { name = "flask-restx", specifier = ">=1.3.0" }, { name = "flask-sqlalchemy", specifier = "~=3.1.1" }, { name = "gevent", specifier = "~=24.11.1" }, { name = "gmpy2", specifier = "~=2.2.1" }, @@ -1748,18 +1769,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" }, ] -[[package]] -name = "ecdsa" -version = "0.19.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" }, -] - [[package]] name = "elastic-transport" version = "8.17.1" @@ -1787,11 +1796,11 @@ wheels = [ [[package]] name = "emoji" -version = "2.14.1" +version = "2.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cb/7d/01cddcbb6f5cc0ba72e00ddf9b1fa206c802d557fd0a20b18e130edf1336/emoji-2.14.1.tar.gz", 
hash = "sha256:f8c50043d79a2c1410ebfae833ae1868d5941a67a6cd4d18377e2eb0bd79346b", size = 597182, upload-time = "2025-01-16T06:31:24.983Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/78/0d2db9382c92a163d7095fc08efff7800880f830a152cfced40161e7638d/emoji-2.15.0.tar.gz", hash = "sha256:eae4ab7d86456a70a00a985125a03263a5eac54cd55e51d7e184b1ed3b6757e4", size = 615483, upload-time = "2025-09-21T12:13:02.755Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/db/a0335710caaa6d0aebdaa65ad4df789c15d89b7babd9a30277838a7d9aac/emoji-2.14.1-py3-none-any.whl", hash = "sha256:35a8a486c1460addb1499e3bf7929d3889b2e2841a57401903699fef595e942b", size = 590617, upload-time = "2025-01-16T06:31:23.526Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/4b5aaaabddfacfe36ba7768817bd1f71a7a810a43705e531f3ae4c690767/emoji-2.15.0-py3-none-any.whl", hash = "sha256:205296793d66a89d88af4688fa57fd6496732eb48917a87175a023c8138995eb", size = 608433, upload-time = "2025-09-21T12:13:01.197Z" }, ] [[package]] @@ -1836,16 +1845,16 @@ wheels = [ [[package]] name = "fastapi" -version = "0.116.1" +version = "0.117.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/7e/d9788300deaf416178f61fb3c2ceb16b7d0dc9f82a08fdb87a5e64ee3cc7/fastapi-0.117.1.tar.gz", hash = "sha256:fb2d42082d22b185f904ca0ecad2e195b851030bd6c5e4c032d1c981240c631a", size = 307155, upload-time = "2025-09-20T20:16:56.663Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" }, + { url = "https://files.pythonhosted.org/packages/6d/45/d9d3e8eeefbe93be1c50060a9d9a9f366dba66f288bb518a9566a23a8631/fastapi-0.117.1-py3-none-any.whl", hash = "sha256:33c51a0d21cab2b9722d4e56dbb9316f3687155be6b276191790d8da03507552", size = 95959, upload-time = "2025-09-20T20:16:53.661Z" }, ] [[package]] @@ -2536,30 +2545,33 @@ wheels = [ [[package]] name = "grpcio" -version = "1.74.0" +version = "1.75.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/38/b4/35feb8f7cab7239c5b94bd2db71abb3d6adb5f335ad8f131abb6060840b6/grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1", size = 12756048, upload-time = "2025-07-24T18:54:23.039Z" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/88/fe2844eefd3d2188bc0d7a2768c6375b46dfd96469ea52d8aeee8587d7e0/grpcio-1.75.0.tar.gz", hash = "sha256:b989e8b09489478c2d19fecc744a298930f40d8b27c3638afbfe84d22f36ce4e", size = 12722485, upload-time = "2025-09-16T09:20:21.731Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/77/b2f06db9f240a5abeddd23a0e49eae2b6ac54d85f0e5267784ce02269c3b/grpcio-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31", size = 5487368, upload-time = 
"2025-07-24T18:53:03.548Z" }, - { url = "https://files.pythonhosted.org/packages/48/99/0ac8678a819c28d9a370a663007581744a9f2a844e32f0fa95e1ddda5b9e/grpcio-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4", size = 10999804, upload-time = "2025-07-24T18:53:05.095Z" }, - { url = "https://files.pythonhosted.org/packages/45/c6/a2d586300d9e14ad72e8dc211c7aecb45fe9846a51e558c5bca0c9102c7f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce", size = 5987667, upload-time = "2025-07-24T18:53:07.157Z" }, - { url = "https://files.pythonhosted.org/packages/c9/57/5f338bf56a7f22584e68d669632e521f0de460bb3749d54533fc3d0fca4f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3", size = 6655612, upload-time = "2025-07-24T18:53:09.244Z" }, - { url = "https://files.pythonhosted.org/packages/82/ea/a4820c4c44c8b35b1903a6c72a5bdccec92d0840cf5c858c498c66786ba5/grpcio-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182", size = 6219544, upload-time = "2025-07-24T18:53:11.221Z" }, - { url = "https://files.pythonhosted.org/packages/a4/17/0537630a921365928f5abb6d14c79ba4dcb3e662e0dbeede8af4138d9dcf/grpcio-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d", size = 6334863, upload-time = "2025-07-24T18:53:12.925Z" }, - { url = "https://files.pythonhosted.org/packages/e2/a6/85ca6cb9af3f13e1320d0a806658dca432ff88149d5972df1f7b51e87127/grpcio-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f", size = 7019320, upload-time = "2025-07-24T18:53:15.002Z" }, - { url = "https://files.pythonhosted.org/packages/4f/a7/fe2beab970a1e25d2eff108b3cf4f7d9a53c185106377a3d1989216eba45/grpcio-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4", size = 6514228, upload-time = "2025-07-24T18:53:16.999Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c2/2f9c945c8a248cebc3ccda1b7a1bf1775b9d7d59e444dbb18c0014e23da6/grpcio-1.74.0-cp311-cp311-win32.whl", hash = "sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b", size = 3817216, upload-time = "2025-07-24T18:53:20.564Z" }, - { url = "https://files.pythonhosted.org/packages/ff/d1/a9cf9c94b55becda2199299a12b9feef0c79946b0d9d34c989de6d12d05d/grpcio-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11", size = 4495380, upload-time = "2025-07-24T18:53:22.058Z" }, - { url = "https://files.pythonhosted.org/packages/4c/5d/e504d5d5c4469823504f65687d6c8fb97b7f7bf0b34873b7598f1df24630/grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8", size = 5445551, upload-time = "2025-07-24T18:53:23.641Z" }, - { url = "https://files.pythonhosted.org/packages/43/01/730e37056f96f2f6ce9f17999af1556df62ee8dab7fa48bceeaab5fd3008/grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6", size = 10979810, upload-time = "2025-07-24T18:53:25.349Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/3d/09fd100473ea5c47083889ca47ffd356576173ec134312f6aa0e13111dee/grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5", size = 5941946, upload-time = "2025-07-24T18:53:27.387Z" }, - { url = "https://files.pythonhosted.org/packages/8a/99/12d2cca0a63c874c6d3d195629dcd85cdf5d6f98a30d8db44271f8a97b93/grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49", size = 6621763, upload-time = "2025-07-24T18:53:29.193Z" }, - { url = "https://files.pythonhosted.org/packages/9d/2c/930b0e7a2f1029bbc193443c7bc4dc2a46fedb0203c8793dcd97081f1520/grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7", size = 6180664, upload-time = "2025-07-24T18:53:30.823Z" }, - { url = "https://files.pythonhosted.org/packages/db/d5/ff8a2442180ad0867717e670f5ec42bfd8d38b92158ad6bcd864e6d4b1ed/grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3", size = 6301083, upload-time = "2025-07-24T18:53:32.454Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ba/b361d390451a37ca118e4ec7dccec690422e05bc85fba2ec72b06cefec9f/grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707", size = 6994132, upload-time = "2025-07-24T18:53:34.506Z" }, - { url = "https://files.pythonhosted.org/packages/3b/0c/3a5fa47d2437a44ced74141795ac0251bbddeae74bf81df3447edd767d27/grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b", size = 6489616, upload-time = "2025-07-24T18:53:36.217Z" }, - { url = "https://files.pythonhosted.org/packages/ae/95/ab64703b436d99dc5217228babc76047d60e9ad14df129e307b5fec81fd0/grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c", size = 3807083, upload-time = "2025-07-24T18:53:37.911Z" }, - { url = "https://files.pythonhosted.org/packages/84/59/900aa2445891fc47a33f7d2f76e00ca5d6ae6584b20d19af9c06fa09bf9a/grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc", size = 4490123, upload-time = "2025-07-24T18:53:39.528Z" }, + { url = "https://files.pythonhosted.org/packages/95/b7/a6f42596fc367656970f5811e5d2d9912ca937aa90621d5468a11680ef47/grpcio-1.75.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:7f89d6d0cd43170a80ebb4605cad54c7d462d21dc054f47688912e8bf08164af", size = 5699769, upload-time = "2025-09-16T09:18:32.536Z" }, + { url = "https://files.pythonhosted.org/packages/c2/42/284c463a311cd2c5f804fd4fdbd418805460bd5d702359148dd062c1685d/grpcio-1.75.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:cb6c5b075c2d092f81138646a755f0dad94e4622300ebef089f94e6308155d82", size = 11480362, upload-time = "2025-09-16T09:18:35.562Z" }, + { url = "https://files.pythonhosted.org/packages/0b/10/60d54d5a03062c3ae91bddb6e3acefe71264307a419885f453526d9203ff/grpcio-1.75.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:494dcbade5606128cb9f530ce00331a90ecf5e7c5b243d373aebdb18e503c346", size = 6284753, upload-time = "2025-09-16T09:18:38.055Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/af/381a4bfb04de5e2527819452583e694df075c7a931e9bf1b2a603b593ab2/grpcio-1.75.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:050760fd29c8508844a720f06c5827bb00de8f5e02f58587eb21a4444ad706e5", size = 6944103, upload-time = "2025-09-16T09:18:40.844Z" }, + { url = "https://files.pythonhosted.org/packages/16/18/c80dd7e1828bd6700ce242c1616871927eef933ed0c2cee5c636a880e47b/grpcio-1.75.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:266fa6209b68a537b2728bb2552f970e7e78c77fe43c6e9cbbe1f476e9e5c35f", size = 6464036, upload-time = "2025-09-16T09:18:43.351Z" }, + { url = "https://files.pythonhosted.org/packages/79/3f/78520c7ed9ccea16d402530bc87958bbeb48c42a2ec8032738a7864d38f8/grpcio-1.75.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:06d22e1d8645e37bc110f4c589cb22c283fd3de76523065f821d6e81de33f5d4", size = 7097455, upload-time = "2025-09-16T09:18:45.465Z" }, + { url = "https://files.pythonhosted.org/packages/ad/69/3cebe4901a865eb07aefc3ee03a02a632e152e9198dadf482a7faf926f31/grpcio-1.75.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9880c323595d851292785966cadb6c708100b34b163cab114e3933f5773cba2d", size = 8037203, upload-time = "2025-09-16T09:18:47.878Z" }, + { url = "https://files.pythonhosted.org/packages/04/ed/1e483d1eba5032642c10caf28acf07ca8de0508244648947764956db346a/grpcio-1.75.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:55a2d5ae79cd0f68783fb6ec95509be23746e3c239290b2ee69c69a38daa961a", size = 7492085, upload-time = "2025-09-16T09:18:50.907Z" }, + { url = "https://files.pythonhosted.org/packages/ee/65/6ef676aa7dbd9578dfca990bb44d41a49a1e36344ca7d79de6b59733ba96/grpcio-1.75.0-cp311-cp311-win32.whl", hash = "sha256:352dbdf25495eef584c8de809db280582093bc3961d95a9d78f0dfb7274023a2", size = 3944697, upload-time = "2025-09-16T09:18:53.427Z" }, + { url = "https://files.pythonhosted.org/packages/0d/83/b753373098b81ec5cb01f71c21dfd7aafb5eb48a1566d503e9fd3c1254fe/grpcio-1.75.0-cp311-cp311-win_amd64.whl", hash = "sha256:678b649171f229fb16bda1a2473e820330aa3002500c4f9fd3a74b786578e90f", size = 4642235, upload-time = "2025-09-16T09:18:56.095Z" }, + { url = "https://files.pythonhosted.org/packages/0d/93/a1b29c2452d15cecc4a39700fbf54721a3341f2ddbd1bd883f8ec0004e6e/grpcio-1.75.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:fa35ccd9501ffdd82b861809cbfc4b5b13f4b4c5dc3434d2d9170b9ed38a9054", size = 5661861, upload-time = "2025-09-16T09:18:58.748Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ce/7280df197e602d14594e61d1e60e89dfa734bb59a884ba86cdd39686aadb/grpcio-1.75.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:0fcb77f2d718c1e58cc04ef6d3b51e0fa3b26cf926446e86c7eba105727b6cd4", size = 11459982, upload-time = "2025-09-16T09:19:01.211Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9b/37e61349771f89b543a0a0bbc960741115ea8656a2414bfb24c4de6f3dd7/grpcio-1.75.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:36764a4ad9dc1eb891042fab51e8cdf7cc014ad82cee807c10796fb708455041", size = 6239680, upload-time = "2025-09-16T09:19:04.443Z" }, + { url = "https://files.pythonhosted.org/packages/a6/66/f645d9d5b22ca307f76e71abc83ab0e574b5dfef3ebde4ec8b865dd7e93e/grpcio-1.75.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:725e67c010f63ef17fc052b261004942763c0b18dcd84841e6578ddacf1f9d10", size = 6908511, upload-time = "2025-09-16T09:19:07.884Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/9a/34b11cd62d03c01b99068e257595804c695c3c119596c7077f4923295e19/grpcio-1.75.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91fbfc43f605c5ee015c9056d580a70dd35df78a7bad97e05426795ceacdb59f", size = 6429105, upload-time = "2025-09-16T09:19:10.085Z" }, + { url = "https://files.pythonhosted.org/packages/1a/46/76eaceaad1f42c1e7e6a5b49a61aac40fc5c9bee4b14a1630f056ac3a57e/grpcio-1.75.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a9337ac4ce61c388e02019d27fa837496c4b7837cbbcec71b05934337e51531", size = 7060578, upload-time = "2025-09-16T09:19:12.283Z" }, + { url = "https://files.pythonhosted.org/packages/3d/82/181a0e3f1397b6d43239e95becbeb448563f236c0db11ce990f073b08d01/grpcio-1.75.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ee16e232e3d0974750ab5f4da0ab92b59d6473872690b5e40dcec9a22927f22e", size = 8003283, upload-time = "2025-09-16T09:19:15.601Z" }, + { url = "https://files.pythonhosted.org/packages/de/09/a335bca211f37a3239be4b485e3c12bf3da68d18b1f723affdff2b9e9680/grpcio-1.75.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:55dfb9122973cc69520b23d39867726722cafb32e541435707dc10249a1bdbc6", size = 7460319, upload-time = "2025-09-16T09:19:18.409Z" }, + { url = "https://files.pythonhosted.org/packages/aa/59/6330105cdd6bc4405e74c96838cd7e148c3653ae3996e540be6118220c79/grpcio-1.75.0-cp312-cp312-win32.whl", hash = "sha256:fb64dd62face3d687a7b56cd881e2ea39417af80f75e8b36f0f81dfd93071651", size = 3934011, upload-time = "2025-09-16T09:19:21.013Z" }, + { url = "https://files.pythonhosted.org/packages/ff/14/e1309a570b7ebdd1c8ca24c4df6b8d6690009fa8e0d997cb2c026ce850c9/grpcio-1.75.0-cp312-cp312-win_amd64.whl", hash = "sha256:6b365f37a9c9543a9e91c6b4103d68d38d5bcb9965b11d5092b3c157bd6a5ee7", size = 4637934, upload-time = "2025-09-16T09:19:23.19Z" }, ] [[package]] @@ -2641,17 +2653,17 @@ wheels = [ [[package]] name = "hf-xet" -version = "1.1.9" +version = "1.1.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/0f/5b60fc28ee7f8cc17a5114a584fd6b86e11c3e0a6e142a7f97a161e9640a/hf_xet-1.1.9.tar.gz", hash = "sha256:c99073ce404462e909f1d5839b2d14a3827b8fe75ed8aed551ba6609c026c803", size = 484242, upload-time = "2025-08-27T23:05:19.441Z" } +sdist = { url = "https://files.pythonhosted.org/packages/74/31/feeddfce1748c4a233ec1aa5b7396161c07ae1aa9b7bdbc9a72c3c7dd768/hf_xet-1.1.10.tar.gz", hash = "sha256:408aef343800a2102374a883f283ff29068055c111f003ff840733d3b715bb97", size = 487910, upload-time = "2025-09-12T20:10:27.12Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/12/56e1abb9a44cdef59a411fe8a8673313195711b5ecce27880eb9c8fa90bd/hf_xet-1.1.9-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:a3b6215f88638dd7a6ff82cb4e738dcbf3d863bf667997c093a3c990337d1160", size = 2762553, upload-time = "2025-08-27T23:05:15.153Z" }, - { url = "https://files.pythonhosted.org/packages/3a/e6/2d0d16890c5f21b862f5df3146519c182e7f0ae49b4b4bf2bd8a40d0b05e/hf_xet-1.1.9-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:9b486de7a64a66f9a172f4b3e0dfe79c9f0a93257c501296a2521a13495a698a", size = 2623216, upload-time = "2025-08-27T23:05:13.778Z" }, - { url = "https://files.pythonhosted.org/packages/81/42/7e6955cf0621e87491a1fb8cad755d5c2517803cea174229b0ec00ff0166/hf_xet-1.1.9-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c5a840c2c4e6ec875ed13703a60e3523bc7f48031dfd750923b2a4d1a5fc3c", size = 3186789, upload-time = 
"2025-08-27T23:05:12.368Z" }, - { url = "https://files.pythonhosted.org/packages/df/8b/759233bce05457f5f7ec062d63bbfd2d0c740b816279eaaa54be92aa452a/hf_xet-1.1.9-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:96a6139c9e44dad1c52c52520db0fffe948f6bce487cfb9d69c125f254bb3790", size = 3088747, upload-time = "2025-08-27T23:05:10.439Z" }, - { url = "https://files.pythonhosted.org/packages/6c/3c/28cc4db153a7601a996985bcb564f7b8f5b9e1a706c7537aad4b4809f358/hf_xet-1.1.9-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ad1022e9a998e784c97b2173965d07fe33ee26e4594770b7785a8cc8f922cd95", size = 3251429, upload-time = "2025-08-27T23:05:16.471Z" }, - { url = "https://files.pythonhosted.org/packages/84/17/7caf27a1d101bfcb05be85850d4aa0a265b2e1acc2d4d52a48026ef1d299/hf_xet-1.1.9-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:86754c2d6d5afb11b0a435e6e18911a4199262fe77553f8c50d75e21242193ea", size = 3354643, upload-time = "2025-08-27T23:05:17.828Z" }, - { url = "https://files.pythonhosted.org/packages/cd/50/0c39c9eed3411deadcc98749a6699d871b822473f55fe472fad7c01ec588/hf_xet-1.1.9-cp37-abi3-win_amd64.whl", hash = "sha256:5aad3933de6b725d61d51034e04174ed1dce7a57c63d530df0014dea15a40127", size = 2804797, upload-time = "2025-08-27T23:05:20.77Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a2/343e6d05de96908366bdc0081f2d8607d61200be2ac802769c4284cc65bd/hf_xet-1.1.10-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:686083aca1a6669bc85c21c0563551cbcdaa5cf7876a91f3d074a030b577231d", size = 2761466, upload-time = "2025-09-12T20:10:22.836Z" }, + { url = "https://files.pythonhosted.org/packages/31/f9/6215f948ac8f17566ee27af6430ea72045e0418ce757260248b483f4183b/hf_xet-1.1.10-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:71081925383b66b24eedff3013f8e6bbd41215c3338be4b94ba75fd75b21513b", size = 2623807, upload-time = "2025-09-12T20:10:21.118Z" }, + { url = "https://files.pythonhosted.org/packages/15/07/86397573efefff941e100367bbda0b21496ffcdb34db7ab51912994c32a2/hf_xet-1.1.10-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6bceb6361c80c1cc42b5a7b4e3efd90e64630bcf11224dcac50ef30a47e435", size = 3186960, upload-time = "2025-09-12T20:10:19.336Z" }, + { url = "https://files.pythonhosted.org/packages/01/a7/0b2e242b918cc30e1f91980f3c4b026ff2eedaf1e2ad96933bca164b2869/hf_xet-1.1.10-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eae7c1fc8a664e54753ffc235e11427ca61f4b0477d757cc4eb9ae374b69f09c", size = 3087167, upload-time = "2025-09-12T20:10:17.255Z" }, + { url = "https://files.pythonhosted.org/packages/4a/25/3e32ab61cc7145b11eee9d745988e2f0f4fafda81b25980eebf97d8cff15/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0a0005fd08f002180f7a12d4e13b22be277725bc23ed0529f8add5c7a6309c06", size = 3248612, upload-time = "2025-09-12T20:10:24.093Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3d/ab7109e607ed321afaa690f557a9ada6d6d164ec852fd6bf9979665dc3d6/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f900481cf6e362a6c549c61ff77468bd59d6dd082f3170a36acfef2eb6a6793f", size = 3353360, upload-time = "2025-09-12T20:10:25.563Z" }, + { url = "https://files.pythonhosted.org/packages/ee/0e/471f0a21db36e71a2f1752767ad77e92d8cde24e974e03d662931b1305ec/hf_xet-1.1.10-cp37-abi3-win_amd64.whl", hash = "sha256:5f54b19cc347c13235ae7ee98b330c26dd65ef1df47e5316ffb1e87713ca7045", size = 2804691, upload-time = "2025-09-12T20:10:28.433Z" }, ] [[package]] @@ -2796,7 +2808,7 @@ wheels = [ [[package]] name = "huggingface-hub" -version = "0.34.4" 
+version = "0.35.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -2808,9 +2820,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/c9/bdbe19339f76d12985bc03572f330a01a93c04dffecaaea3061bdd7fb892/huggingface_hub-0.34.4.tar.gz", hash = "sha256:a4228daa6fb001be3f4f4bdaf9a0db00e1739235702848df00885c9b5742c85c", size = 459768, upload-time = "2025-08-08T09:14:52.365Z" } +sdist = { url = "https://files.pythonhosted.org/packages/37/79/d71d40efa058e8c4a075158f8855bc2998037b5ff1c84f249f34435c1df7/huggingface_hub-0.35.0.tar.gz", hash = "sha256:ccadd2a78eef75effff184ad89401413629fabc52cefd76f6bbacb9b1c0676ac", size = 461486, upload-time = "2025-09-16T13:49:33.282Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/7b/bb06b061991107cd8783f300adff3e7b7f284e330fd82f507f2a1417b11d/huggingface_hub-0.34.4-py3-none-any.whl", hash = "sha256:9b365d781739c93ff90c359844221beef048403f1bc1f1c123c191257c3c890a", size = 561452, upload-time = "2025-08-08T09:14:50.159Z" }, + { url = "https://files.pythonhosted.org/packages/fe/85/a18508becfa01f1e4351b5e18651b06d210dbd96debccd48a452acccb901/huggingface_hub-0.35.0-py3-none-any.whl", hash = "sha256:f2e2f693bca9a26530b1c0b9bcd4c1495644dad698e6a0060f90e22e772c31e9", size = 563436, upload-time = "2025-09-16T13:49:30.627Z" }, ] [[package]] @@ -2836,15 +2848,15 @@ wheels = [ [[package]] name = "hypothesis" -version = "6.138.15" +version = "6.140.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "sortedcontainers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3b/68/adc338edec178cf6c08b4843ea2b2d639d47bed4b06ea9331433b71acc0a/hypothesis-6.138.15.tar.gz", hash = "sha256:6b0e1aa182eacde87110995a3543530d69ef411f642162a656efcd46c2823ad1", size = 466116, upload-time = "2025-09-08T05:34:15.956Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e6/4d/bc00079d02c6ffc014e9bcc0533f12b4f5e74b22698df01552c4e0dcf377/hypothesis-6.140.0.tar.gz", hash = "sha256:bf6f79f19bb4e262c33849739639d94f8656510261d50a9ab74f539bcec8b782", size = 466332, upload-time = "2025-09-22T03:43:16.972Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/49/911eb0cd17884a7a6f510e78acf0a70592e414d194695a0c7c1db91645b2/hypothesis-6.138.15-py3-none-any.whl", hash = "sha256:b7cf743d461c319eb251a13c8e1dcf00f4ef7085e4ab5bf5abf102b2a5ffd694", size = 533621, upload-time = "2025-09-08T05:34:12.272Z" }, + { url = "https://files.pythonhosted.org/packages/52/01/02849c14b948fdc5b7a6959c1f22ebeebc4002fd0f0db02f4a216353c114/hypothesis-6.140.0-py3-none-any.whl", hash = "sha256:ff251c41e4ffed6484720ffef68b31f705e62f50e22947f9f7ef1206b715f376", size = 534072, upload-time = "2025-09-22T03:43:13.951Z" }, ] [[package]] @@ -2858,16 +2870,16 @@ wheels = [ [[package]] name = "import-linter" -version = "2.4" +version = "2.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "grimp" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/db/33/e3c29beb4d8a33cfacdbe2858a3a4533694a0c1d0c060daaa761eff6d929/import_linter-2.4.tar.gz", hash = "sha256:4888fde83dd18bdbecd57ea1a98a1f3d52c6b6507d700f89f8678b44306c0ab4", size = 29942, upload-time = "2025-08-15T06:57:23.423Z" } +sdist = { url = "https://files.pythonhosted.org/packages/25/75/631f37f063121f102a629392e377618de5d8930450b22e4750842159c93e/import_linter-2.5.tar.gz", hash = 
"sha256:208916ca6cadf6a0f94bdc3e68cf6a084cce9d731803cc62e8d835bc2c7b79ea", size = 31056, upload-time = "2025-09-15T08:27:54.954Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/11/2c108fc1138e506762db332c4a7ebc589cb379bc443939a81ec738b4cf73/import_linter-2.4-py3-none-any.whl", hash = "sha256:2ad6d5a164cdcd5ebdda4172cf0169f73dde1a8925ef7216672c321cd38f8499", size = 42355, upload-time = "2025-08-15T06:57:22.221Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d0/1ba160b7d9d1e9d9407cdfb4b6dea11676901d29c82bc8f1f059616c8fa2/import_linter-2.5-py3-none-any.whl", hash = "sha256:0ac3920f40658d47439101cd782c10bd2de2b2721102ea18f65ddcc1ed9a5515", size = 44302, upload-time = "2025-09-15T08:27:53.774Z" }, ] [[package]] @@ -2947,34 +2959,35 @@ wheels = [ [[package]] name = "jiter" -version = "0.10.0" +version = "0.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759, upload-time = "2025-05-18T19:04:59.73Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094, upload-time = "2025-09-15T09:20:38.212Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/dd/6cefc6bd68b1c3c979cecfa7029ab582b57690a31cd2f346c4d0ce7951b6/jiter-0.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3bebe0c558e19902c96e99217e0b8e8b17d570906e72ed8a87170bc290b1e978", size = 317473, upload-time = "2025-05-18T19:03:25.942Z" }, - { url = "https://files.pythonhosted.org/packages/be/cf/fc33f5159ce132be1d8dd57251a1ec7a631c7df4bd11e1cd198308c6ae32/jiter-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:558cc7e44fd8e507a236bee6a02fa17199ba752874400a0ca6cd6e2196cdb7dc", size = 321971, upload-time = "2025-05-18T19:03:27.255Z" }, - { url = "https://files.pythonhosted.org/packages/68/a4/da3f150cf1d51f6c472616fb7650429c7ce053e0c962b41b68557fdf6379/jiter-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d613e4b379a07d7c8453c5712ce7014e86c6ac93d990a0b8e7377e18505e98d", size = 345574, upload-time = "2025-05-18T19:03:28.63Z" }, - { url = "https://files.pythonhosted.org/packages/84/34/6e8d412e60ff06b186040e77da5f83bc158e9735759fcae65b37d681f28b/jiter-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f62cf8ba0618eda841b9bf61797f21c5ebd15a7a1e19daab76e4e4b498d515b2", size = 371028, upload-time = "2025-05-18T19:03:30.292Z" }, - { url = "https://files.pythonhosted.org/packages/fb/d9/9ee86173aae4576c35a2f50ae930d2ccb4c4c236f6cb9353267aa1d626b7/jiter-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:919d139cdfa8ae8945112398511cb7fca58a77382617d279556b344867a37e61", size = 491083, upload-time = "2025-05-18T19:03:31.654Z" }, - { url = "https://files.pythonhosted.org/packages/d9/2c/f955de55e74771493ac9e188b0f731524c6a995dffdcb8c255b89c6fb74b/jiter-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ddbc6ae311175a3b03bd8994881bc4635c923754932918e18da841632349db", size = 388821, upload-time = "2025-05-18T19:03:33.184Z" }, - { url = 
"https://files.pythonhosted.org/packages/81/5a/0e73541b6edd3f4aada586c24e50626c7815c561a7ba337d6a7eb0a915b4/jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c440ea003ad10927a30521a9062ce10b5479592e8a70da27f21eeb457b4a9c5", size = 352174, upload-time = "2025-05-18T19:03:34.965Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c0/61eeec33b8c75b31cae42be14d44f9e6fe3ac15a4e58010256ac3abf3638/jiter-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc347c87944983481e138dea467c0551080c86b9d21de6ea9306efb12ca8f606", size = 391869, upload-time = "2025-05-18T19:03:36.436Z" }, - { url = "https://files.pythonhosted.org/packages/41/22/5beb5ee4ad4ef7d86f5ea5b4509f680a20706c4a7659e74344777efb7739/jiter-0.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:13252b58c1f4d8c5b63ab103c03d909e8e1e7842d302473f482915d95fefd605", size = 523741, upload-time = "2025-05-18T19:03:38.168Z" }, - { url = "https://files.pythonhosted.org/packages/ea/10/768e8818538e5817c637b0df52e54366ec4cebc3346108a4457ea7a98f32/jiter-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d1bbf3c465de4a24ab12fb7766a0003f6f9bce48b8b6a886158c4d569452dc5", size = 514527, upload-time = "2025-05-18T19:03:39.577Z" }, - { url = "https://files.pythonhosted.org/packages/73/6d/29b7c2dc76ce93cbedabfd842fc9096d01a0550c52692dfc33d3cc889815/jiter-0.10.0-cp311-cp311-win32.whl", hash = "sha256:db16e4848b7e826edca4ccdd5b145939758dadf0dc06e7007ad0e9cfb5928ae7", size = 210765, upload-time = "2025-05-18T19:03:41.271Z" }, - { url = "https://files.pythonhosted.org/packages/c2/c9/d394706deb4c660137caf13e33d05a031d734eb99c051142e039d8ceb794/jiter-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c9c1d5f10e18909e993f9641f12fe1c77b3e9b533ee94ffa970acc14ded3812", size = 209234, upload-time = "2025-05-18T19:03:42.918Z" }, - { url = "https://files.pythonhosted.org/packages/6d/b5/348b3313c58f5fbfb2194eb4d07e46a35748ba6e5b3b3046143f3040bafa/jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b", size = 312262, upload-time = "2025-05-18T19:03:44.637Z" }, - { url = "https://files.pythonhosted.org/packages/9c/4a/6a2397096162b21645162825f058d1709a02965606e537e3304b02742e9b/jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744", size = 320124, upload-time = "2025-05-18T19:03:46.341Z" }, - { url = "https://files.pythonhosted.org/packages/2a/85/1ce02cade7516b726dd88f59a4ee46914bf79d1676d1228ef2002ed2f1c9/jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2", size = 345330, upload-time = "2025-05-18T19:03:47.596Z" }, - { url = "https://files.pythonhosted.org/packages/75/d0/bb6b4f209a77190ce10ea8d7e50bf3725fc16d3372d0a9f11985a2b23eff/jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026", size = 369670, upload-time = "2025-05-18T19:03:49.334Z" }, - { url = "https://files.pythonhosted.org/packages/a0/f5/a61787da9b8847a601e6827fbc42ecb12be2c925ced3252c8ffcb56afcaf/jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c", size = 489057, upload-time = "2025-05-18T19:03:50.66Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/e4/6f906272810a7b21406c760a53aadbe52e99ee070fc5c0cb191e316de30b/jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959", size = 389372, upload-time = "2025-05-18T19:03:51.98Z" }, - { url = "https://files.pythonhosted.org/packages/e2/ba/77013b0b8ba904bf3762f11e0129b8928bff7f978a81838dfcc958ad5728/jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a", size = 352038, upload-time = "2025-05-18T19:03:53.703Z" }, - { url = "https://files.pythonhosted.org/packages/67/27/c62568e3ccb03368dbcc44a1ef3a423cb86778a4389e995125d3d1aaa0a4/jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95", size = 391538, upload-time = "2025-05-18T19:03:55.046Z" }, - { url = "https://files.pythonhosted.org/packages/c0/72/0d6b7e31fc17a8fdce76164884edef0698ba556b8eb0af9546ae1a06b91d/jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea", size = 523557, upload-time = "2025-05-18T19:03:56.386Z" }, - { url = "https://files.pythonhosted.org/packages/2f/09/bc1661fbbcbeb6244bd2904ff3a06f340aa77a2b94e5a7373fd165960ea3/jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b", size = 514202, upload-time = "2025-05-18T19:03:57.675Z" }, - { url = "https://files.pythonhosted.org/packages/1b/84/5a5d5400e9d4d54b8004c9673bbe4403928a00d28529ff35b19e9d176b19/jiter-0.10.0-cp312-cp312-win32.whl", hash = "sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01", size = 211781, upload-time = "2025-05-18T19:03:59.025Z" }, - { url = "https://files.pythonhosted.org/packages/9b/52/7ec47455e26f2d6e5f2ea4951a0652c06e5b995c291f723973ae9e724a65/jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49", size = 206176, upload-time = "2025-05-18T19:04:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/38/55/a69fefeef09c2eaabae44b935a1aa81517e49639c0a0c25d861cb18cd7ac/jiter-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cb5d9db02979c3f49071fce51a48f4b4e4cf574175fb2b11c7a535fa4867b222", size = 309503, upload-time = "2025-09-15T09:19:08.191Z" }, + { url = "https://files.pythonhosted.org/packages/bd/d5/a6aba9e6551f32f9c127184f398208e4eddb96c59ac065c8a92056089d28/jiter-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1dc6a123f3471c4730db7ca8ba75f1bb3dcb6faeb8d46dd781083e7dee88b32d", size = 317688, upload-time = "2025-09-15T09:19:09.918Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f3/5e86f57c1883971cdc8535d0429c2787bf734840a231da30a3be12850562/jiter-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09858f8d230f031c7b8e557429102bf050eea29c77ad9c34c8fe253c5329acb7", size = 337418, upload-time = "2025-09-15T09:19:11.078Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/a71d8a24c2a70664970574a8e0b766663f5ef788f7fe1cc20ee0c016d488/jiter-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbe2196c4a0ce760925a74ab4456bf644748ab0979762139626ad138f6dac72d", size = 361423, upload-time = "2025-09-15T09:19:13.286Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/e5/b09076f4e7fd9471b91e16f9f3dc7330b161b738f3b39b2c37054a36e26a/jiter-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5beb56d22b63647bafd0b74979216fdee80c580c0c63410be8c11053860ffd09", size = 486367, upload-time = "2025-09-15T09:19:14.546Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f1/98cb3a36f5e62f80cd860f0179f948d9eab5a316d55d3e1bab98d9767af5/jiter-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97025d09ef549795d8dc720a824312cee3253c890ac73c621721ddfc75066789", size = 376335, upload-time = "2025-09-15T09:19:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d8/ec74886497ea393c29dbd7651ddecc1899e86404a6b1f84a3ddab0ab59fd/jiter-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50880a6da65d8c23a2cf53c412847d9757e74cc9a3b95c5704a1d1a24667347", size = 348981, upload-time = "2025-09-15T09:19:17.568Z" }, + { url = "https://files.pythonhosted.org/packages/24/93/d22ad7fa3b86ade66c86153ceea73094fc2af8b20c59cb7fceab9fea4704/jiter-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:452d80a1c86c095a242007bd9fc5d21b8a8442307193378f891cb8727e469648", size = 385797, upload-time = "2025-09-15T09:19:19.121Z" }, + { url = "https://files.pythonhosted.org/packages/c8/bd/e25ff4a4df226e9b885f7cb01ee4b9dc74e3000e612d6f723860d71a1f34/jiter-0.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e84e58198d4894668eec2da660ffff60e0f3e60afa790ecc50cb12b0e02ca1d4", size = 516597, upload-time = "2025-09-15T09:19:20.301Z" }, + { url = "https://files.pythonhosted.org/packages/be/fb/beda613db7d93ffa2fdd2683f90f2f5dce8daf4bc2d0d2829e7de35308c6/jiter-0.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df64edcfc5dd5279a791eea52aa113d432c933119a025b0b5739f90d2e4e75f1", size = 508853, upload-time = "2025-09-15T09:19:22.075Z" }, + { url = "https://files.pythonhosted.org/packages/20/64/c5b0d93490634e41e38e2a15de5d54fdbd2c9f64a19abb0f95305b63373c/jiter-0.11.0-cp311-cp311-win32.whl", hash = "sha256:144fc21337d21b1d048f7f44bf70881e1586401d405ed3a98c95a114a9994982", size = 205140, upload-time = "2025-09-15T09:19:23.351Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e6/c347c0e6f5796e97d4356b7e5ff0ce336498b7f4ef848fae621a56f1ccf3/jiter-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:b0f32e644d241293b892b1a6dd8f0b9cc029bfd94c97376b2681c36548aabab7", size = 204311, upload-time = "2025-09-15T09:19:24.591Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b5/3009b112b8f673e568ef79af9863d8309a15f0a8cdcc06ed6092051f377e/jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada", size = 305510, upload-time = "2025-09-15T09:19:25.893Z" }, + { url = "https://files.pythonhosted.org/packages/fe/82/15514244e03b9e71e086bbe2a6de3e4616b48f07d5f834200c873956fb8c/jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99", size = 316521, upload-time = "2025-09-15T09:19:27.525Z" }, + { url = "https://files.pythonhosted.org/packages/92/94/7a2e905f40ad2d6d660e00b68d818f9e29fb87ffe82774f06191e93cbe4a/jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6", size = 338214, upload-time = "2025-09-15T09:19:28.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/9c/5791ed5bdc76f12110158d3316a7a3ec0b1413d018b41c5ed399549d3ad5/jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1", size = 361280, upload-time = "2025-09-15T09:19:30.013Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7f/b7d82d77ff0d2cb06424141000176b53a9e6b16a1125525bb51ea4990c2e/jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4", size = 487895, upload-time = "2025-09-15T09:19:31.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/44/10a1475d46f1fc1fd5cc2e82c58e7bca0ce5852208e0fa5df2f949353321/jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72", size = 378421, upload-time = "2025-09-15T09:19:32.746Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5f/0dc34563d8164d31d07bc09d141d3da08157a68dcd1f9b886fa4e917805b/jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591", size = 347932, upload-time = "2025-09-15T09:19:34.612Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/b68f32a4fcb7b4a682b37c73a0e5dae32180140cd1caf11aef6ad40ddbf2/jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09", size = 386959, upload-time = "2025-09-15T09:19:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/c08c92e713b6e28972a846a81ce374883dac2f78ec6f39a0dad9f2339c3a/jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5", size = 517187, upload-time = "2025-09-15T09:19:37.426Z" }, + { url = "https://files.pythonhosted.org/packages/89/b5/4a283bec43b15aad54fcae18d951f06a2ec3f78db5708d3b59a48e9c3fbd/jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206", size = 509461, upload-time = "2025-09-15T09:19:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/34/a5/f8bad793010534ea73c985caaeef8cc22dfb1fedb15220ecdf15c623c07a/jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b", size = 206664, upload-time = "2025-09-15T09:19:40.096Z" }, + { url = "https://files.pythonhosted.org/packages/ed/42/5823ec2b1469395a160b4bf5f14326b4a098f3b6898fbd327366789fa5d3/jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c", size = 203520, upload-time = "2025-09-15T09:19:41.798Z" }, + { url = "https://files.pythonhosted.org/packages/70/f3/ce100253c80063a7b8b406e1d1562657fd4b9b4e1b562db40e68645342fb/jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7", size = 336380, upload-time = "2025-09-15T09:20:36.867Z" }, ] [[package]] @@ -2997,11 +3010,11 @@ wheels = [ [[package]] name = "json-repair" -version = "0.50.1" +version = "0.51.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/91/71/6d57ed93e43e98cdd124e82ab6231c6817f06a10743e7ae4bc6f66d03a02/json_repair-0.50.1.tar.gz", hash = 
"sha256:4ee69bc4be7330fbb90a3f19e890852c5fe1ceacec5ed1d2c25cdeeebdfaec76", size = 34864, upload-time = "2025-09-06T05:43:34.331Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/3a/f30f3c92da3a285dcbe469c50b058f2d349dc9a20fc1b60c3219befda53f/json_repair-0.51.0.tar.gz", hash = "sha256:487e00042d5bc5cc4897ea9c3cccd4f6641e926b732cc09f98691a832485098a", size = 35289, upload-time = "2025-09-19T04:23:16.745Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/be/b1e05740d9c6f333dab67910f3894e2e2416c1ef00f9f7e20a327ab1f396/json_repair-0.50.1-py3-none-any.whl", hash = "sha256:9b78358bb7572a6e0b8effe7a8bd8cb959a3e311144842b1d2363fe39e2f13c5", size = 26020, upload-time = "2025-09-06T05:43:32.718Z" }, + { url = "https://files.pythonhosted.org/packages/d0/fc/eb15e39547b29dbf2b786bbbd1e79e7f1d87ec4e7c9ea61786f093181481/json_repair-0.51.0-py3-none-any.whl", hash = "sha256:871f7651ee82abf72efc50a80d3a9af0ade8abf5b4541b418eeeabe4e677e314", size = 26263, upload-time = "2025-09-19T04:23:15.064Z" }, ] [[package]] @@ -3144,25 +3157,25 @@ wheels = [ [[package]] name = "llvmlite" -version = "0.44.0" +version = "0.45.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/89/6a/95a3d3610d5c75293d5dbbb2a76480d5d4eeba641557b69fe90af6c5b84e/llvmlite-0.44.0.tar.gz", hash = "sha256:07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4", size = 171880, upload-time = "2025-01-20T11:14:41.342Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/73/4b29b502618766276816f2f2a7cf9017bd3889bc38a49319bee9ad492b75/llvmlite-0.45.0.tar.gz", hash = "sha256:ceb0bcd20da949178bd7ab78af8de73e9f3c483ac46b5bef39f06a4862aa8336", size = 185289, upload-time = "2025-09-18T17:47:14.293Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/e2/86b245397052386595ad726f9742e5223d7aea999b18c518a50e96c3aca4/llvmlite-0.44.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:eed7d5f29136bda63b6d7804c279e2b72e08c952b7c5df61f45db408e0ee52f3", size = 28132305, upload-time = "2025-01-20T11:12:53.936Z" }, - { url = "https://files.pythonhosted.org/packages/ff/ec/506902dc6870249fbe2466d9cf66d531265d0f3a1157213c8f986250c033/llvmlite-0.44.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ace564d9fa44bb91eb6e6d8e7754977783c68e90a471ea7ce913bff30bd62427", size = 26201090, upload-time = "2025-01-20T11:12:59.847Z" }, - { url = "https://files.pythonhosted.org/packages/99/fe/d030f1849ebb1f394bb3f7adad5e729b634fb100515594aca25c354ffc62/llvmlite-0.44.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5d22c3bfc842668168a786af4205ec8e3ad29fb1bc03fd11fd48460d0df64c1", size = 42361858, upload-time = "2025-01-20T11:13:07.623Z" }, - { url = "https://files.pythonhosted.org/packages/d7/7a/ce6174664b9077fc673d172e4c888cb0b128e707e306bc33fff8c2035f0d/llvmlite-0.44.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f01a394e9c9b7b1d4e63c327b096d10f6f0ed149ef53d38a09b3749dcf8c9610", size = 41184200, upload-time = "2025-01-20T11:13:20.058Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c6/258801143975a6d09a373f2641237992496e15567b907a4d401839d671b8/llvmlite-0.44.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8489634d43c20cd0ad71330dde1d5bc7b9966937a263ff1ec1cebb90dc50955", size = 30331193, upload-time = "2025-01-20T11:13:26.976Z" }, - { url = 
"https://files.pythonhosted.org/packages/15/86/e3c3195b92e6e492458f16d233e58a1a812aa2bfbef9bdd0fbafcec85c60/llvmlite-0.44.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:1d671a56acf725bf1b531d5ef76b86660a5ab8ef19bb6a46064a705c6ca80aad", size = 28132297, upload-time = "2025-01-20T11:13:32.57Z" }, - { url = "https://files.pythonhosted.org/packages/d6/53/373b6b8be67b9221d12b24125fd0ec56b1078b660eeae266ec388a6ac9a0/llvmlite-0.44.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f79a728e0435493611c9f405168682bb75ffd1fbe6fc360733b850c80a026db", size = 26201105, upload-time = "2025-01-20T11:13:38.744Z" }, - { url = "https://files.pythonhosted.org/packages/cb/da/8341fd3056419441286c8e26bf436923021005ece0bff5f41906476ae514/llvmlite-0.44.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0143a5ef336da14deaa8ec26c5449ad5b6a2b564df82fcef4be040b9cacfea9", size = 42361901, upload-time = "2025-01-20T11:13:46.711Z" }, - { url = "https://files.pythonhosted.org/packages/53/ad/d79349dc07b8a395a99153d7ce8b01d6fcdc9f8231355a5df55ded649b61/llvmlite-0.44.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d752f89e31b66db6f8da06df8b39f9b91e78c5feea1bf9e8c1fba1d1c24c065d", size = 41184247, upload-time = "2025-01-20T11:13:56.159Z" }, - { url = "https://files.pythonhosted.org/packages/e2/3b/a9a17366af80127bd09decbe2a54d8974b6d8b274b39bf47fbaedeec6307/llvmlite-0.44.0-cp312-cp312-win_amd64.whl", hash = "sha256:eae7e2d4ca8f88f89d315b48c6b741dcb925d6a1042da694aa16ab3dd4cbd3a1", size = 30332380, upload-time = "2025-01-20T11:14:02.442Z" }, + { url = "https://files.pythonhosted.org/packages/03/a4/6a9f9745c80639eee5a6e112de7811ba0a2e9d7f2a6cef226ce54d00d63a/llvmlite-0.45.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:9b1b37e00b553e9420d9a2e327e84c5ac65a5690dcacf7fc153014780d97532a", size = 43043438, upload-time = "2025-09-18T17:40:48.769Z" }, + { url = "https://files.pythonhosted.org/packages/9b/8b/1d7d8f5daaaff4eb8e1673f304fbae24ad4b02e15ce1f47602c163486ac0/llvmlite-0.45.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cd039b8da5514db2729b7c9ae7526cae8da748a540fa3ab721b50c54651d2362", size = 37253033, upload-time = "2025-09-18T17:42:33.206Z" }, + { url = "https://files.pythonhosted.org/packages/e6/95/a13362fe71d1e88bea9e3cc58a3337b3302a3e4af68391df10389f3b7f78/llvmlite-0.45.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c6815d0d3f96de34491d3dc192e11e933e3448ceff0b58572a53f39795996e01", size = 56288124, upload-time = "2025-09-18T17:35:45.017Z" }, + { url = "https://files.pythonhosted.org/packages/2d/cf/4ab3677e11aff8f32573d4bbc617b7707454d47125c86263e189ef576bb1/llvmlite-0.45.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba79cc2cbdd0f61632ca8e9235fef3657a8aacd636d5775cd13807ceb8265f63", size = 55140874, upload-time = "2025-09-18T17:38:40.018Z" }, + { url = "https://files.pythonhosted.org/packages/92/31/63bbf92c51f49ed2f50c6097ffa11b831246dacd30f9476b8516bde70771/llvmlite-0.45.0-cp311-cp311-win_amd64.whl", hash = "sha256:6188da8e9e3906b167fb64bc84a05e6bf98095d982f45f323bed5def2ba7db1c", size = 37946103, upload-time = "2025-09-18T17:44:08.348Z" }, + { url = "https://files.pythonhosted.org/packages/af/b0/81419371eb6154b7ad5c4ded693fa6c9bbfbc8920f9c3ebacc0747e8bf0b/llvmlite-0.45.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:3928119253849e7c9aad4f881feb3e886370bb7ac6eccbc728b35a1be89064cc", size = 43043441, upload-time = "2025-09-18T17:41:21.519Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/0a/0a2c2cedfbf4bbf61be2db83fe4d7416f234ba2f0e564375f9f45ff7ed7a/llvmlite-0.45.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3e9b5dad694edb9e43904ede037458ee73a18b4e2f227e44fc0f808aceab824", size = 37253035, upload-time = "2025-09-18T17:42:55.189Z" }, + { url = "https://files.pythonhosted.org/packages/d1/ee/6584480d0dcd101bc8800de4d3bfef93cea92161b43903719825f4497449/llvmlite-0.45.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4955635f316e3ffc0271ee7a3da586ae92cd3e70709b6cd59df641e980636d4c", size = 56288125, upload-time = "2025-09-18T17:36:32.038Z" }, + { url = "https://files.pythonhosted.org/packages/10/7b/81c72824f5197154236589cbd4fabd04ae59c57be80b0b401b168deef952/llvmlite-0.45.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e7497f1b75d741e568bf4a2dfccd5c702d6b5f3d232dd4a59ed851a82e587bd", size = 55140873, upload-time = "2025-09-18T17:39:07.152Z" }, + { url = "https://files.pythonhosted.org/packages/b4/b5/acc977fcd891c0fb155c9edcf3fa8c6cded1d5163625137ef696c5e725e3/llvmlite-0.45.0-cp312-cp312-win_amd64.whl", hash = "sha256:6404f5363986efbe1c7c1afd19da495534e46180466d593ace5a5c042b2f3f94", size = 37946104, upload-time = "2025-09-18T17:44:30.299Z" }, ] [[package]] name = "locust" -version = "2.40.4" +version = "2.40.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "configargparse" }, @@ -3184,14 +3197,14 @@ dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.12'" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c8/40/31ff56ab6f46c7c77e61bbbd23f87fdf6a4aaf674dc961a3c573320caedc/locust-2.40.4.tar.gz", hash = "sha256:3a3a470459edc4ba1349229bf1aca4c0cb651c4e2e3f85d3bc28fe8118f5a18f", size = 1412529, upload-time = "2025-09-11T09:26:13.713Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0e/7f/8e8a8a79b5bb6cedb840a41bf2b9e6156a8fb608b143498ee0206d4c8170/locust-2.40.5.tar.gz", hash = "sha256:4332f03ebfac83c115763e462f22f495783a88f1d237ccbd618d5b27863f5053", size = 1412591, upload-time = "2025-09-17T11:17:21.112Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/79/7e/db1d969caf45ce711e81cd4f3e7c4554c3925a02383a1dcadb442eae3802/locust-2.40.4-py3-none-any.whl", hash = "sha256:50e647a73c5a4e7a775c6e4311979472fce8b00ed783837a2ce9bb36786f7d1a", size = 1430961, upload-time = "2025-09-11T09:26:11.623Z" }, + { url = "https://files.pythonhosted.org/packages/04/0d/bc2f7bc286bd5737049327d0a3c6086dbe10b2bc3f4fe81115f023cf1240/locust-2.40.5-py3-none-any.whl", hash = "sha256:c44a6c415c5218824895bd652a182a958c27a2ceb8427c835d2f4b90d735579b", size = 1431034, upload-time = "2025-09-17T11:17:18.329Z" }, ] [[package]] name = "locust-cloud" -version = "1.26.3" +version = "1.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "configargparse" }, @@ -3200,55 +3213,57 @@ dependencies = [ { name = "python-engineio" }, { name = "python-socketio", extra = ["client"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/84/ad/10b299b134068a4250a9156e6832a717406abe1dfea2482a07ae7bdca8f3/locust_cloud-1.26.3.tar.gz", hash = "sha256:587acfd4d2dee715fb5f0c3c2d922770babf0b7cff7b2927afbb693a9cd193cc", size = 456042, upload-time = "2025-07-15T19:51:53.791Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/6a/32823a379d6f979e17e5c00ab3f68f4e52738907ccc198fb8e3f43ab6ee1/locust_cloud-1.27.0.tar.gz", hash = 
"sha256:b371a6940d978bb221ada9780e796e10e3032ff49ffeacf02c515aa876679b75", size = 454369, upload-time = "2025-09-17T09:52:39.525Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/50/6a/276fc50a9d170e7cbb6715735480cb037abb526639bca85491576e6eee4a/locust_cloud-1.26.3-py3-none-any.whl", hash = "sha256:8cb4b8bb9adcd5b99327bc8ed1d98cf67a29d9d29512651e6e94869de6f1faa8", size = 410023, upload-time = "2025-07-15T19:51:52.056Z" }, + { url = "https://files.pythonhosted.org/packages/4b/fe/59435370d9fd3b887dc71af5eb05e65e1ec294a26db10937cbe096c60fd0/locust_cloud-1.27.0-py3-none-any.whl", hash = "sha256:0ddf732c1702d1d29f8359e261e23147d1bba373ac96c9125f80c290c2dcd9c1", size = 409257, upload-time = "2025-09-17T09:52:37.746Z" }, ] [[package]] name = "lxml" -version = "6.0.1" +version = "6.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8f/bd/f9d01fd4132d81c6f43ab01983caea69ec9614b913c290a26738431a015d/lxml-6.0.1.tar.gz", hash = "sha256:2b3a882ebf27dd026df3801a87cf49ff791336e0f94b0fad195db77e01240690", size = 4070214, upload-time = "2025-08-22T10:37:53.525Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/c8/262c1d19339ef644cdc9eb5aad2e85bd2d1fa2d7c71cdef3ede1a3eed84d/lxml-6.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c6acde83f7a3d6399e6d83c1892a06ac9b14ea48332a5fbd55d60b9897b9570a", size = 8422719, upload-time = "2025-08-22T10:32:24.848Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d4/1b0afbeb801468a310642c3a6f6704e53c38a4a6eb1ca6faea013333e02f/lxml-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0d21c9cacb6a889cbb8eeb46c77ef2c1dd529cde10443fdeb1de847b3193c541", size = 4575763, upload-time = "2025-08-22T10:32:27.057Z" }, - { url = "https://files.pythonhosted.org/packages/5b/c1/8db9b5402bf52ceb758618313f7423cd54aea85679fcf607013707d854a8/lxml-6.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:847458b7cd0d04004895f1fb2cca8e7c0f8ec923c49c06b7a72ec2d48ea6aca2", size = 4943244, upload-time = "2025-08-22T10:32:28.847Z" }, - { url = "https://files.pythonhosted.org/packages/e7/78/838e115358dd2369c1c5186080dd874a50a691fb5cd80db6afe5e816e2c6/lxml-6.0.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1dc13405bf315d008fe02b1472d2a9d65ee1c73c0a06de5f5a45e6e404d9a1c0", size = 5081725, upload-time = "2025-08-22T10:32:30.666Z" }, - { url = "https://files.pythonhosted.org/packages/c7/b6/bdcb3a3ddd2438c5b1a1915161f34e8c85c96dc574b0ef3be3924f36315c/lxml-6.0.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f540c229a8c0a770dcaf6d5af56a5295e0fc314fc7ef4399d543328054bcea", size = 5021238, upload-time = "2025-08-22T10:32:32.49Z" }, - { url = "https://files.pythonhosted.org/packages/73/e5/1bfb96185dc1a64c7c6fbb7369192bda4461952daa2025207715f9968205/lxml-6.0.1-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:d2f73aef768c70e8deb8c4742fca4fd729b132fda68458518851c7735b55297e", size = 5343744, upload-time = "2025-08-22T10:32:34.385Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/ae/df3ea9ebc3c493b9c6bdc6bd8c554ac4e147f8d7839993388aab57ec606d/lxml-6.0.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e7f4066b85a4fa25ad31b75444bd578c3ebe6b8ed47237896341308e2ce923c3", size = 5223477, upload-time = "2025-08-22T10:32:36.256Z" }, - { url = "https://files.pythonhosted.org/packages/37/b3/65e1e33600542c08bc03a4c5c9c306c34696b0966a424a3be6ffec8038ed/lxml-6.0.1-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:0cce65db0cd8c750a378639900d56f89f7d6af11cd5eda72fde054d27c54b8ce", size = 4676626, upload-time = "2025-08-22T10:32:38.793Z" }, - { url = "https://files.pythonhosted.org/packages/7a/46/ee3ed8f3a60e9457d7aea46542d419917d81dbfd5700fe64b2a36fb5ef61/lxml-6.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c372d42f3eee5844b69dcab7b8d18b2f449efd54b46ac76970d6e06b8e8d9a66", size = 5066042, upload-time = "2025-08-22T10:32:41.134Z" }, - { url = "https://files.pythonhosted.org/packages/9c/b9/8394538e7cdbeb3bfa36bc74924be1a4383e0bb5af75f32713c2c4aa0479/lxml-6.0.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2e2b0e042e1408bbb1c5f3cfcb0f571ff4ac98d8e73f4bf37c5dd179276beedd", size = 4724714, upload-time = "2025-08-22T10:32:43.94Z" }, - { url = "https://files.pythonhosted.org/packages/b3/21/3ef7da1ea2a73976c1a5a311d7cde5d379234eec0968ee609517714940b4/lxml-6.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cc73bb8640eadd66d25c5a03175de6801f63c535f0f3cf50cac2f06a8211f420", size = 5247376, upload-time = "2025-08-22T10:32:46.263Z" }, - { url = "https://files.pythonhosted.org/packages/26/7d/0980016f124f00c572cba6f4243e13a8e80650843c66271ee692cddf25f3/lxml-6.0.1-cp311-cp311-win32.whl", hash = "sha256:7c23fd8c839708d368e406282d7953cee5134f4592ef4900026d84566d2b4c88", size = 3609499, upload-time = "2025-08-22T10:32:48.156Z" }, - { url = "https://files.pythonhosted.org/packages/b1/08/28440437521f265eff4413eb2a65efac269c4c7db5fd8449b586e75d8de2/lxml-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:2516acc6947ecd3c41a4a4564242a87c6786376989307284ddb115f6a99d927f", size = 4036003, upload-time = "2025-08-22T10:32:50.662Z" }, - { url = "https://files.pythonhosted.org/packages/7b/dc/617e67296d98099213a505d781f04804e7b12923ecd15a781a4ab9181992/lxml-6.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:cb46f8cfa1b0334b074f40c0ff94ce4d9a6755d492e6c116adb5f4a57fb6ad96", size = 3679662, upload-time = "2025-08-22T10:32:52.739Z" }, - { url = "https://files.pythonhosted.org/packages/b0/a9/82b244c8198fcdf709532e39a1751943a36b3e800b420adc739d751e0299/lxml-6.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c03ac546adaabbe0b8e4a15d9ad815a281afc8d36249c246aecf1aaad7d6f200", size = 8422788, upload-time = "2025-08-22T10:32:56.612Z" }, - { url = "https://files.pythonhosted.org/packages/c9/8d/1ed2bc20281b0e7ed3e6c12b0a16e64ae2065d99be075be119ba88486e6d/lxml-6.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33b862c7e3bbeb4ba2c96f3a039f925c640eeba9087a4dc7a572ec0f19d89392", size = 4593547, upload-time = "2025-08-22T10:32:59.016Z" }, - { url = "https://files.pythonhosted.org/packages/76/53/d7fd3af95b72a3493bf7fbe842a01e339d8f41567805cecfecd5c71aa5ee/lxml-6.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7a3ec1373f7d3f519de595032d4dcafae396c29407cfd5073f42d267ba32440d", size = 4948101, upload-time = "2025-08-22T10:33:00.765Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/51/4e57cba4d55273c400fb63aefa2f0d08d15eac021432571a7eeefee67bed/lxml-6.0.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:03b12214fb1608f4cffa181ec3d046c72f7e77c345d06222144744c122ded870", size = 5108090, upload-time = "2025-08-22T10:33:03.108Z" }, - { url = "https://files.pythonhosted.org/packages/f6/6e/5f290bc26fcc642bc32942e903e833472271614e24d64ad28aaec09d5dae/lxml-6.0.1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:207ae0d5f0f03b30f95e649a6fa22aa73f5825667fee9c7ec6854d30e19f2ed8", size = 5021791, upload-time = "2025-08-22T10:33:06.972Z" }, - { url = "https://files.pythonhosted.org/packages/13/d4/2e7551a86992ece4f9a0f6eebd4fb7e312d30f1e372760e2109e721d4ce6/lxml-6.0.1-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:32297b09ed4b17f7b3f448de87a92fb31bb8747496623483788e9f27c98c0f00", size = 5358861, upload-time = "2025-08-22T10:33:08.967Z" }, - { url = "https://files.pythonhosted.org/packages/8a/5f/cb49d727fc388bf5fd37247209bab0da11697ddc5e976ccac4826599939e/lxml-6.0.1-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7e18224ea241b657a157c85e9cac82c2b113ec90876e01e1f127312006233756", size = 5652569, upload-time = "2025-08-22T10:33:10.815Z" }, - { url = "https://files.pythonhosted.org/packages/ca/b8/66c1ef8c87ad0f958b0a23998851e610607c74849e75e83955d5641272e6/lxml-6.0.1-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a07a994d3c46cd4020c1ea566345cf6815af205b1e948213a4f0f1d392182072", size = 5252262, upload-time = "2025-08-22T10:33:12.673Z" }, - { url = "https://files.pythonhosted.org/packages/1a/ef/131d3d6b9590e64fdbb932fbc576b81fcc686289da19c7cb796257310e82/lxml-6.0.1-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:2287fadaa12418a813b05095485c286c47ea58155930cfbd98c590d25770e225", size = 4710309, upload-time = "2025-08-22T10:33:14.952Z" }, - { url = "https://files.pythonhosted.org/packages/bc/3f/07f48ae422dce44902309aa7ed386c35310929dc592439c403ec16ef9137/lxml-6.0.1-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b4e597efca032ed99f418bd21314745522ab9fa95af33370dcee5533f7f70136", size = 5265786, upload-time = "2025-08-22T10:33:16.721Z" }, - { url = "https://files.pythonhosted.org/packages/11/c7/125315d7b14ab20d9155e8316f7d287a4956098f787c22d47560b74886c4/lxml-6.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9696d491f156226decdd95d9651c6786d43701e49f32bf23715c975539aa2b3b", size = 5062272, upload-time = "2025-08-22T10:33:18.478Z" }, - { url = "https://files.pythonhosted.org/packages/8b/c3/51143c3a5fc5168a7c3ee626418468ff20d30f5a59597e7b156c1e61fba8/lxml-6.0.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e4e3cd3585f3c6f87cdea44cda68e692cc42a012f0131d25957ba4ce755241a7", size = 4786955, upload-time = "2025-08-22T10:33:20.34Z" }, - { url = "https://files.pythonhosted.org/packages/11/86/73102370a420ec4529647b31c4a8ce8c740c77af3a5fae7a7643212d6f6e/lxml-6.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:45cbc92f9d22c28cd3b97f8d07fcefa42e569fbd587dfdac76852b16a4924277", size = 5673557, upload-time = "2025-08-22T10:33:22.282Z" }, - { url = "https://files.pythonhosted.org/packages/d7/2d/aad90afaec51029aef26ef773b8fd74a9e8706e5e2f46a57acd11a421c02/lxml-6.0.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:f8c9bcfd2e12299a442fba94459adf0b0d001dbc68f1594439bfa10ad1ecb74b", size = 5254211, upload-time = "2025-08-22T10:33:24.15Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/01/c9e42c8c2d8b41f4bdefa42ab05448852e439045f112903dd901b8fbea4d/lxml-6.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1e9dc2b9f1586e7cd77753eae81f8d76220eed9b768f337dc83a3f675f2f0cf9", size = 5275817, upload-time = "2025-08-22T10:33:26.007Z" }, - { url = "https://files.pythonhosted.org/packages/bc/1f/962ea2696759abe331c3b0e838bb17e92224f39c638c2068bf0d8345e913/lxml-6.0.1-cp312-cp312-win32.whl", hash = "sha256:987ad5c3941c64031f59c226167f55a04d1272e76b241bfafc968bdb778e07fb", size = 3610889, upload-time = "2025-08-22T10:33:28.169Z" }, - { url = "https://files.pythonhosted.org/packages/41/e2/22c86a990b51b44442b75c43ecb2f77b8daba8c4ba63696921966eac7022/lxml-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:abb05a45394fd76bf4a60c1b7bec0e6d4e8dfc569fc0e0b1f634cd983a006ddc", size = 4010925, upload-time = "2025-08-22T10:33:29.874Z" }, - { url = "https://files.pythonhosted.org/packages/b2/21/dc0c73325e5eb94ef9c9d60dbb5dcdcb2e7114901ea9509735614a74e75a/lxml-6.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:c4be29bce35020d8579d60aa0a4e95effd66fcfce31c46ffddf7e5422f73a299", size = 3671922, upload-time = "2025-08-22T10:33:31.535Z" }, - { url = "https://files.pythonhosted.org/packages/41/37/41961f53f83ded57b37e65e4f47d1c6c6ef5fd02cb1d6ffe028ba0efa7d4/lxml-6.0.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b556aaa6ef393e989dac694b9c95761e32e058d5c4c11ddeef33f790518f7a5e", size = 3903412, upload-time = "2025-08-22T10:37:40.758Z" }, - { url = "https://files.pythonhosted.org/packages/3d/47/8631ea73f3dc776fb6517ccde4d5bd5072f35f9eacbba8c657caa4037a69/lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:64fac7a05ebb3737b79fd89fe5a5b6c5546aac35cfcfd9208eb6e5d13215771c", size = 4224810, upload-time = "2025-08-22T10:37:42.839Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b8/39ae30ca3b1516729faeef941ed84bf8f12321625f2644492ed8320cb254/lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:038d3c08babcfce9dc89aaf498e6da205efad5b7106c3b11830a488d4eadf56b", size = 4329221, upload-time = "2025-08-22T10:37:45.223Z" }, - { url = "https://files.pythonhosted.org/packages/9c/ea/048dea6cdfc7a72d40ae8ed7e7d23cf4a6b6a6547b51b492a3be50af0e80/lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:445f2cee71c404ab4259bc21e20339a859f75383ba2d7fb97dfe7c163994287b", size = 4270228, upload-time = "2025-08-22T10:37:47.276Z" }, - { url = "https://files.pythonhosted.org/packages/6b/d4/c2b46e432377c45d611ae2f669aa47971df1586c1a5240675801d0f02bac/lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e352d8578e83822d70bea88f3d08b9912528e4c338f04ab707207ab12f4b7aac", size = 4416077, upload-time = "2025-08-22T10:37:49.822Z" }, - { url = "https://files.pythonhosted.org/packages/b6/db/8f620f1ac62cf32554821b00b768dd5957ac8e3fd051593532be5b40b438/lxml-6.0.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:51bd5d1a9796ca253db6045ab45ca882c09c071deafffc22e06975b7ace36300", size = 3518127, upload-time = "2025-08-22T10:37:51.66Z" }, + { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" }, + { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" }, + { url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152, upload-time = "2025-09-22T04:00:51.709Z" }, + { url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539, upload-time = "2025-09-22T04:00:53.593Z" }, + { url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853, upload-time = "2025-09-22T04:00:55.524Z" }, + { url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133, upload-time = "2025-09-22T04:00:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944, upload-time = "2025-09-22T04:00:59.052Z" }, + { url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535, upload-time = "2025-09-22T04:01:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343, upload-time = "2025-09-22T04:01:03.13Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419, upload-time = "2025-09-22T04:01:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008, upload-time = "2025-09-22T04:01:07.327Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906, upload-time = "2025-09-22T04:01:09.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357, upload-time = "2025-09-22T04:01:11.102Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583, upload-time = "2025-09-22T04:01:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591, upload-time = "2025-09-22T04:01:14.874Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" }, + { url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179, upload-time = "2025-09-22T04:01:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044, upload-time = "2025-09-22T04:01:25.118Z" }, + { url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685, upload-time = "2025-09-22T04:01:27.398Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127, upload-time = "2025-09-22T04:01:29.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958, upload-time = "2025-09-22T04:01:31.535Z" }, + { url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541, upload-time = "2025-09-22T04:01:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426, upload-time = "2025-09-22T04:01:35.639Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917, upload-time = "2025-09-22T04:01:37.448Z" }, + { url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795, upload-time = "2025-09-22T04:01:39.165Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759, upload-time = "2025-09-22T04:01:41.506Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666, upload-time = "2025-09-22T04:01:43.363Z" }, + { url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989, upload-time = "2025-09-22T04:01:45.215Z" }, + { url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" }, + { url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" }, + { url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" }, + { url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" }, + { url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = "2025-09-22T04:04:49.907Z" }, + { url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119, upload-time = "2025-09-22T04:04:51.801Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314, upload-time = "2025-09-22T04:04:55.024Z" }, + { url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768, upload-time = "2025-09-22T04:04:57.097Z" }, ] [[package]] @@ -3671,24 +3686,24 @@ wheels = [ [[package]] name = "numba" -version = "0.61.2" +version = "0.62.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "llvmlite" }, { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/a0/e21f57604304aa03ebb8e098429222722ad99176a4f979d34af1d1ee80da/numba-0.61.2.tar.gz", hash = "sha256:8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d", size = 2820615, upload-time = "2025-04-09T02:58:07.659Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/96/66dae7911cb331e99bf9afe35703317d8da0fad81ff49fed77f4855e4b60/numba-0.62.0.tar.gz", hash = "sha256:2afcc7899dc93fefecbb274a19c592170bc2dbfae02b00f83e305332a9857a5a", size = 2749680, upload-time = "2025-09-18T17:58:11.394Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/97/c99d1056aed767503c228f7099dc11c402906b42a4757fec2819329abb98/numba-0.61.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:efd3db391df53aaa5cfbee189b6c910a5b471488749fd6606c3f33fc984c2ae2", size = 2775825, upload-time = "2025-04-09T02:57:43.442Z" }, - { url = "https://files.pythonhosted.org/packages/95/9e/63c549f37136e892f006260c3e2613d09d5120672378191f2dc387ba65a2/numba-0.61.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:49c980e4171948ffebf6b9a2520ea81feed113c1f4890747ba7f59e74be84b1b", size = 2778695, upload-time = "2025-04-09T02:57:44.968Z" }, - { url = 
"https://files.pythonhosted.org/packages/97/c8/8740616c8436c86c1b9a62e72cb891177d2c34c2d24ddcde4c390371bf4c/numba-0.61.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3945615cd73c2c7eba2a85ccc9c1730c21cd3958bfcf5a44302abae0fb07bb60", size = 3829227, upload-time = "2025-04-09T02:57:46.63Z" }, - { url = "https://files.pythonhosted.org/packages/fc/06/66e99ae06507c31d15ff3ecd1f108f2f59e18b6e08662cd5f8a5853fbd18/numba-0.61.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbfdf4eca202cebade0b7d43896978e146f39398909a42941c9303f82f403a18", size = 3523422, upload-time = "2025-04-09T02:57:48.222Z" }, - { url = "https://files.pythonhosted.org/packages/0f/a4/2b309a6a9f6d4d8cfba583401c7c2f9ff887adb5d54d8e2e130274c0973f/numba-0.61.2-cp311-cp311-win_amd64.whl", hash = "sha256:76bcec9f46259cedf888041b9886e257ae101c6268261b19fda8cfbc52bec9d1", size = 2831505, upload-time = "2025-04-09T02:57:50.108Z" }, - { url = "https://files.pythonhosted.org/packages/b4/a0/c6b7b9c615cfa3b98c4c63f4316e3f6b3bbe2387740277006551784218cd/numba-0.61.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:34fba9406078bac7ab052efbf0d13939426c753ad72946baaa5bf9ae0ebb8dd2", size = 2776626, upload-time = "2025-04-09T02:57:51.857Z" }, - { url = "https://files.pythonhosted.org/packages/92/4a/fe4e3c2ecad72d88f5f8cd04e7f7cff49e718398a2fac02d2947480a00ca/numba-0.61.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ddce10009bc097b080fc96876d14c051cc0c7679e99de3e0af59014dab7dfe8", size = 2779287, upload-time = "2025-04-09T02:57:53.658Z" }, - { url = "https://files.pythonhosted.org/packages/9a/2d/e518df036feab381c23a624dac47f8445ac55686ec7f11083655eb707da3/numba-0.61.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b1bb509d01f23d70325d3a5a0e237cbc9544dd50e50588bc581ba860c213546", size = 3885928, upload-time = "2025-04-09T02:57:55.206Z" }, - { url = "https://files.pythonhosted.org/packages/10/0f/23cced68ead67b75d77cfcca3df4991d1855c897ee0ff3fe25a56ed82108/numba-0.61.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48a53a3de8f8793526cbe330f2a39fe9a6638efcbf11bd63f3d2f9757ae345cd", size = 3577115, upload-time = "2025-04-09T02:57:56.818Z" }, - { url = "https://files.pythonhosted.org/packages/68/1d/ddb3e704c5a8fb90142bf9dc195c27db02a08a99f037395503bfbc1d14b3/numba-0.61.2-cp312-cp312-win_amd64.whl", hash = "sha256:97cf4f12c728cf77c9c1d7c23707e4d8fb4632b46275f8f3397de33e5877af18", size = 2831929, upload-time = "2025-04-09T02:57:58.45Z" }, + { url = "https://files.pythonhosted.org/packages/4d/ba/691508c81c3e8ff6c4a131755556a39a6f73f8aec3750ff8ba7bb9b23585/numba-0.62.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1370708a54281e1dd3e4b73f423f88d3b34b64cf3f5fa0e460a1fbe6bd4e0f3f", size = 2684281, upload-time = "2025-09-18T17:59:14.333Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f0/9c1b0a23e09297e292f1f2deea0b7bbe52b112fb6d9fb46beb1f7016f6d6/numba-0.62.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6bd7032d6c1e771967fc1d07a499bb10ce1639662451fc0a86089fa8efc420e7", size = 2687331, upload-time = "2025-09-18T17:59:28.232Z" }, + { url = "https://files.pythonhosted.org/packages/ee/77/b497d480abf9c3547b8374e58794532a7e3600a378408e0ff8fbf2532dc9/numba-0.62.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:87cdc476ea1b2feefb7f893a648be2f1e7a04f671f355ac9bbeb007eaf039f8c", size = 3450243, upload-time = "2025-09-18T17:58:41.724Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/42/68bcb890bc5e8c254145f4a5f2c7e90ec653b27271780e3eef36086522a4/numba-0.62.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:144a57e504a5423acfc91fcd3be4e6481cb0667ce0bcc6cd3e8bd43a735b58a4", size = 3445595, upload-time = "2025-09-18T17:58:58.989Z" }, + { url = "https://files.pythonhosted.org/packages/5e/8c/889b895f5daafc44cbd7b798f748fd9b9555cb0604fa03004dc535bd8b5c/numba-0.62.0-cp311-cp311-win_amd64.whl", hash = "sha256:499b00e0bd95c83fedf1cbf349b7132a432a90292cbe2014eeaf482ce7c3b9f8", size = 2745535, upload-time = "2025-09-18T17:59:42.001Z" }, + { url = "https://files.pythonhosted.org/packages/5f/cc/8c519b15d51647bd092a3b935e92681c0ec983647bb7ec1b48ca05094eb5/numba-0.62.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:82edb589c9607ec2dbe0b2d34793d8c5104daf766277acc49ad7e179f8634fd2", size = 2685349, upload-time = "2025-09-18T17:59:17.651Z" }, + { url = "https://files.pythonhosted.org/packages/b1/0f/992aa8b62b23ebc56db97ac29fa6c8e5b097e30d575745048de4e99364b8/numba-0.62.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:469e042750d5a6aa6847dc89d64de5f0bfaf2208b6d442e4634de3318b7043de", size = 2688140, upload-time = "2025-09-18T17:59:31.191Z" }, + { url = "https://files.pythonhosted.org/packages/0b/1f/a67f3a94f42a3bc90c052f446e4fa1089b513129b8dbf61df74b25ab24ea/numba-0.62.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2ad2dc2b3583f8f24f35c8ade7e215c44590c9aa757ccba640dd293297cb15bb", size = 3506358, upload-time = "2025-09-18T17:58:46.296Z" }, + { url = "https://files.pythonhosted.org/packages/e0/8a/0c451c2626cbaf6a1c3f3665bd5859671e9f065b9ee9a101fb08659a46e2/numba-0.62.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0266998a842074fc91bfc406dd91c8ee12c196ea834375af6174f62647ffd9b1", size = 3496571, upload-time = "2025-09-18T17:59:03.009Z" }, + { url = "https://files.pythonhosted.org/packages/16/9a/40e66e5992d5365f4f2f636148e3a333eb012e1690cbc0b5d7d296e5d11c/numba-0.62.0-cp312-cp312-win_amd64.whl", hash = "sha256:cbc84e030548a5aad74971eb1a579f69edc7da961d89ef09a5ee1fe01c207795", size = 2745542, upload-time = "2025-09-18T17:59:44.942Z" }, ] [[package]] @@ -4564,7 +4579,7 @@ wheels = [ [[package]] name = "posthog" -version = "6.7.4" +version = "6.7.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backoff" }, @@ -4574,9 +4589,9 @@ dependencies = [ { name = "six" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0f/40/d7f585e09e47f492ebaeb8048a8e2ce5d9f49a3896856a7a975cbc1484fa/posthog-6.7.4.tar.gz", hash = "sha256:2bfa74f321ac18efe4a48a256d62034a506ca95477af7efa32292ed488a742c5", size = 118209, upload-time = "2025-09-05T15:29:21.517Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/f3/d9055bcd190980730bdc600318b34290e85fcb0afb1e31f83cdc33f92615/posthog-6.7.5.tar.gz", hash = "sha256:f4f32b4a4b0df531ae8f80f255a33a49e8880c8c1b62712e6b640535e33a905f", size = 118558, upload-time = "2025-09-16T12:40:34.431Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/95/e795059ef73d480a7f11f1be201087f65207509525920897fb514a04914c/posthog-6.7.4-py3-none-any.whl", hash = "sha256:7f1872c53ec7e9a29b088a5a1ad03fa1be3b871d10d70c8bf6c2dafb91beaac5", size = 136409, upload-time = "2025-09-05T15:29:19.995Z" }, + { url = "https://files.pythonhosted.org/packages/42/b4/b40f8467252b4ff481e54a9767b211b4ff83114e6d0b6f481852d0ef3e46/posthog-6.7.5-py3-none-any.whl", hash = 
"sha256:95b00f915365939e63fa183635bad1caaf89cf4a24b63c8bb6983f2a22a56cb3", size = 136766, upload-time = "2025-09-16T12:40:32.741Z" }, ] [[package]] @@ -4660,17 +4675,18 @@ wheels = [ [[package]] name = "psutil" -version = "7.0.0" +version = "7.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003, upload-time = "2025-02-13T21:54:07.946Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/31/4723d756b59344b643542936e37a31d1d3204bcdc42a7daa8ee9eb06fb50/psutil-7.1.0.tar.gz", hash = "sha256:655708b3c069387c8b77b072fc429a57d0e214221d01c0a772df7dfedcb3bcd2", size = 497660, upload-time = "2025-09-17T20:14:52.902Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051, upload-time = "2025-02-13T21:54:12.36Z" }, - { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535, upload-time = "2025-02-13T21:54:16.07Z" }, - { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" }, - { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" }, - { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053, upload-time = "2025-02-13T21:54:34.31Z" }, - { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" }, + { url = "https://files.pythonhosted.org/packages/46/62/ce4051019ee20ce0ed74432dd73a5bb087a6704284a470bb8adff69a0932/psutil-7.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76168cef4397494250e9f4e73eb3752b146de1dd950040b29186d0cce1d5ca13", size = 245242, upload-time = "2025-09-17T20:14:56.126Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/61/f76959fba841bf5b61123fbf4b650886dc4094c6858008b5bf73d9057216/psutil-7.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:5d007560c8c372efdff9e4579c2846d71de737e4605f611437255e81efcca2c5", size = 246682, upload-time = "2025-09-17T20:14:58.25Z" }, + { url = "https://files.pythonhosted.org/packages/88/7a/37c99d2e77ec30d63398ffa6a660450b8a62517cabe44b3e9bae97696e8d/psutil-7.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22e4454970b32472ce7deaa45d045b34d3648ce478e26a04c7e858a0a6e75ff3", size = 287994, upload-time = "2025-09-17T20:14:59.901Z" }, + { url = "https://files.pythonhosted.org/packages/9d/de/04c8c61232f7244aa0a4b9a9fbd63a89d5aeaf94b2fc9d1d16e2faa5cbb0/psutil-7.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c70e113920d51e89f212dd7be06219a9b88014e63a4cec69b684c327bc474e3", size = 291163, upload-time = "2025-09-17T20:15:01.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/58/c4f976234bf6d4737bc8c02a81192f045c307b72cf39c9e5c5a2d78927f6/psutil-7.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d4a113425c037300de3ac8b331637293da9be9713855c4fc9d2d97436d7259d", size = 293625, upload-time = "2025-09-17T20:15:04.492Z" }, + { url = "https://files.pythonhosted.org/packages/79/87/157c8e7959ec39ced1b11cc93c730c4fb7f9d408569a6c59dbd92ceb35db/psutil-7.1.0-cp37-abi3-win32.whl", hash = "sha256:09ad740870c8d219ed8daae0ad3b726d3bf9a028a198e7f3080f6a1888b99bca", size = 244812, upload-time = "2025-09-17T20:15:07.462Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e9/b44c4f697276a7a95b8e94d0e320a7bf7f3318521b23de69035540b39838/psutil-7.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:57f5e987c36d3146c0dd2528cd42151cf96cd359b9d67cfff836995cc5df9a3d", size = 247965, upload-time = "2025-09-17T20:15:09.673Z" }, + { url = "https://files.pythonhosted.org/packages/26/65/1070a6e3c036f39142c2820c4b52e9243246fcfc3f96239ac84472ba361e/psutil-7.1.0-cp37-abi3-win_arm64.whl", hash = "sha256:6937cb68133e7c97b6cc9649a570c9a18ba0efebed46d8c5dae4c07fa1b67a07", size = 244971, upload-time = "2025-09-17T20:15:12.262Z" }, ] [[package]] @@ -4804,7 +4820,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.11.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -4812,9 +4828,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, ] [[package]] @@ -4917,7 +4933,7 @@ crypto = [ [[package]] name = "pymilvus" -version = "2.5.15" +version = "2.5.16" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "grpcio" }, @@ -4928,9 +4944,9 @@ dependencies = [ { name = "setuptools" }, { name = "ujson" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cd/f9/dee7f0d42979bf4cbe0bf23f8db9bf4c331b53c4c9f8692d2e027073c928/pymilvus-2.5.15.tar.gz", hash = "sha256:350396ef3bb40aa62c8a2ecaccb5c664bbb1569eef8593b74dd1d5125eb0deb2", size = 1278109, upload-time = "2025-08-21T11:57:58.416Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/e2/5613bc7b2af0ccd760177ca4255243c284cfc0f2cba3f10ff63325c4ca34/pymilvus-2.5.16.tar.gz", hash = "sha256:65f56b81806bc217cca3cf29b70a27d053dea4b1ffada910cf63a38f96381618", size = 1280614, upload-time = "2025-09-19T07:02:14.747Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/af/10a620686025e5b59889d7075f5d426e45e57a0180c4465051645a88ccb0/pymilvus-2.5.15-py3-none-any.whl", hash = "sha256:a155a3b436e2e3ca4b85aac80c92733afe0bd172c497c3bc0dfaca0b804b90c9", size = 241683, upload-time = "2025-08-21T11:57:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/c6/09/b67a55abee0a53ea50ba0de0cba6e1c0f7ca7ce2c15ffd6f40c059c25e88/pymilvus-2.5.16-py3-none-any.whl", hash = "sha256:76258a324f19c60fee247467e11cd7d6f35a64d2a9c753f5d7b1a5fa15dd6c8a", size = 243272, upload-time = "2025-09-19T07:02:12.443Z" }, ] [[package]] @@ -4984,20 +5000,20 @@ wheels = [ [[package]] name = "pyparsing" -version = "3.2.3" +version = "3.2.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274, upload-time = "2025-09-21T04:11:06.277Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" }, + { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890, upload-time = "2025-09-21T04:11:04.117Z" }, ] [[package]] name = "pypdf" -version = "6.0.0" +version = "6.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/ac/a300a03c3b34967c050677ccb16e7a4b65607ee5df9d51e8b6d713de4098/pypdf-6.0.0.tar.gz", hash = "sha256:282a99d2cc94a84a3a3159f0d9358c0af53f85b4d28d76ea38b96e9e5ac2a08d", size = 5033827, upload-time = "2025-08-11T14:22:02.352Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/0b/ac/44d86f16b8ad9b42ea1da4b9aa145be71c89927566d9be87fe74bda1dfef/pypdf-6.1.0.tar.gz", hash = "sha256:0cba440d024da5a2a9304f03cd645346052827b84c5a461c6123e24ed5a3b0b9", size = 5072609, upload-time = "2025-09-21T13:38:39.1Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/83/2cacc506eb322bb31b747bc06ccb82cc9aa03e19ee9c1245e538e49d52be/pypdf-6.0.0-py3-none-any.whl", hash = "sha256:56ea60100ce9f11fc3eec4f359da15e9aec3821b036c1f06d2b660d35683abb8", size = 310465, upload-time = "2025-08-11T14:22:00.481Z" }, + { url = "https://files.pythonhosted.org/packages/07/f3/4939b609cfd374e495450b22a0385ee3f531e9aa40e8812e5c405f030c54/pypdf-6.1.0-py3-none-any.whl", hash = "sha256:6b34e4147df20978bf270af19826692e0485431a9d3944617b9533bc77efb695", size = 322468, upload-time = "2025-09-21T13:38:37.467Z" }, ] [[package]] @@ -5535,38 +5551,38 @@ wheels = [ [[package]] name = "regex" -version = "2025.9.1" +version = "2025.9.18" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/5a/4c63457fbcaf19d138d72b2e9b39405954f98c0349b31c601bfcb151582c/regex-2025.9.1.tar.gz", hash = "sha256:88ac07b38d20b54d79e704e38aa3bd2c0f8027432164226bdee201a1c0c9c9ff", size = 400852, upload-time = "2025-09-01T22:10:10.479Z" } +sdist = { url = "https://files.pythonhosted.org/packages/49/d3/eaa0d28aba6ad1827ad1e716d9a93e1ba963ada61887498297d3da715133/regex-2025.9.18.tar.gz", hash = "sha256:c5ba23274c61c6fef447ba6a39333297d0c247f53059dba0bca415cac511edc4", size = 400917, upload-time = "2025-09-19T00:38:35.79Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/4d/f741543c0c59f96c6625bc6c11fea1da2e378b7d293ffff6f318edc0ce14/regex-2025.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e5bcf112b09bfd3646e4db6bf2e598534a17d502b0c01ea6550ba4eca780c5e6", size = 484811, upload-time = "2025-09-01T22:08:12.834Z" }, - { url = "https://files.pythonhosted.org/packages/c2/bd/27e73e92635b6fbd51afc26a414a3133243c662949cd1cda677fe7bb09bd/regex-2025.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:67a0295a3c31d675a9ee0238d20238ff10a9a2fdb7a1323c798fc7029578b15c", size = 288977, upload-time = "2025-09-01T22:08:14.499Z" }, - { url = "https://files.pythonhosted.org/packages/eb/7d/7dc0c6efc8bc93cd6e9b947581f5fde8a5dbaa0af7c4ec818c5729fdc807/regex-2025.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea8267fbadc7d4bd7c1301a50e85c2ff0de293ff9452a1a9f8d82c6cafe38179", size = 286606, upload-time = "2025-09-01T22:08:15.881Z" }, - { url = "https://files.pythonhosted.org/packages/d1/01/9b5c6dd394f97c8f2c12f6e8f96879c9ac27292a718903faf2e27a0c09f6/regex-2025.9.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6aeff21de7214d15e928fb5ce757f9495214367ba62875100d4c18d293750cc1", size = 792436, upload-time = "2025-09-01T22:08:17.38Z" }, - { url = "https://files.pythonhosted.org/packages/fc/24/b7430cfc6ee34bbb3db6ff933beb5e7692e5cc81e8f6f4da63d353566fb0/regex-2025.9.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d89f1bbbbbc0885e1c230f7770d5e98f4f00b0ee85688c871d10df8b184a6323", size = 858705, upload-time = "2025-09-01T22:08:19.037Z" }, - { url = "https://files.pythonhosted.org/packages/d6/98/155f914b4ea6ae012663188545c4f5216c11926d09b817127639d618b003/regex-2025.9.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:ca3affe8ddea498ba9d294ab05f5f2d3b5ad5d515bc0d4a9016dd592a03afe52", size = 905881, upload-time = "2025-09-01T22:08:20.377Z" }, - { url = "https://files.pythonhosted.org/packages/8a/a7/a470e7bc8259c40429afb6d6a517b40c03f2f3e455c44a01abc483a1c512/regex-2025.9.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91892a7a9f0a980e4c2c85dd19bc14de2b219a3a8867c4b5664b9f972dcc0c78", size = 798968, upload-time = "2025-09-01T22:08:22.081Z" }, - { url = "https://files.pythonhosted.org/packages/1d/fa/33f6fec4d41449fea5f62fdf5e46d668a1c046730a7f4ed9f478331a8e3a/regex-2025.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e1cb40406f4ae862710615f9f636c1e030fd6e6abe0e0f65f6a695a2721440c6", size = 781884, upload-time = "2025-09-01T22:08:23.832Z" }, - { url = "https://files.pythonhosted.org/packages/42/de/2b45f36ab20da14eedddf5009d370625bc5942d9953fa7e5037a32d66843/regex-2025.9.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94f6cff6f7e2149c7e6499a6ecd4695379eeda8ccbccb9726e8149f2fe382e92", size = 852935, upload-time = "2025-09-01T22:08:25.536Z" }, - { url = "https://files.pythonhosted.org/packages/1e/f9/878f4fc92c87e125e27aed0f8ee0d1eced9b541f404b048f66f79914475a/regex-2025.9.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6c0226fb322b82709e78c49cc33484206647f8a39954d7e9de1567f5399becd0", size = 844340, upload-time = "2025-09-01T22:08:27.141Z" }, - { url = "https://files.pythonhosted.org/packages/90/c2/5b6f2bce6ece5f8427c718c085eca0de4bbb4db59f54db77aa6557aef3e9/regex-2025.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a12f59c7c380b4fcf7516e9cbb126f95b7a9518902bcf4a852423ff1dcd03e6a", size = 787238, upload-time = "2025-09-01T22:08:28.75Z" }, - { url = "https://files.pythonhosted.org/packages/47/66/1ef1081c831c5b611f6f55f6302166cfa1bc9574017410ba5595353f846a/regex-2025.9.1-cp311-cp311-win32.whl", hash = "sha256:49865e78d147a7a4f143064488da5d549be6bfc3f2579e5044cac61f5c92edd4", size = 264118, upload-time = "2025-09-01T22:08:30.388Z" }, - { url = "https://files.pythonhosted.org/packages/ad/e0/8adc550d7169df1d6b9be8ff6019cda5291054a0107760c2f30788b6195f/regex-2025.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:d34b901f6f2f02ef60f4ad3855d3a02378c65b094efc4b80388a3aeb700a5de7", size = 276151, upload-time = "2025-09-01T22:08:32.073Z" }, - { url = "https://files.pythonhosted.org/packages/cb/bd/46fef29341396d955066e55384fb93b0be7d64693842bf4a9a398db6e555/regex-2025.9.1-cp311-cp311-win_arm64.whl", hash = "sha256:47d7c2dab7e0b95b95fd580087b6ae196039d62306a592fa4e162e49004b6299", size = 268460, upload-time = "2025-09-01T22:08:33.281Z" }, - { url = "https://files.pythonhosted.org/packages/39/ef/a0372febc5a1d44c1be75f35d7e5aff40c659ecde864d7fa10e138f75e74/regex-2025.9.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:84a25164bd8dcfa9f11c53f561ae9766e506e580b70279d05a7946510bdd6f6a", size = 486317, upload-time = "2025-09-01T22:08:34.529Z" }, - { url = "https://files.pythonhosted.org/packages/b5/25/d64543fb7eb41a1024786d518cc57faf1ce64aa6e9ddba097675a0c2f1d2/regex-2025.9.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:645e88a73861c64c1af558dd12294fb4e67b5c1eae0096a60d7d8a2143a611c7", size = 289698, upload-time = "2025-09-01T22:08:36.162Z" }, - { url = "https://files.pythonhosted.org/packages/d8/dc/fbf31fc60be317bd9f6f87daa40a8a9669b3b392aa8fe4313df0a39d0722/regex-2025.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10a450cba5cd5409526ee1d4449f42aad38dd83ac6948cbd6d7f71ca7018f7db", size = 287242, upload-time = 
"2025-09-01T22:08:37.794Z" }, - { url = "https://files.pythonhosted.org/packages/0f/74/f933a607a538f785da5021acf5323961b4620972e2c2f1f39b6af4b71db7/regex-2025.9.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9dc5991592933a4192c166eeb67b29d9234f9c86344481173d1bc52f73a7104", size = 797441, upload-time = "2025-09-01T22:08:39.108Z" }, - { url = "https://files.pythonhosted.org/packages/89/d0/71fc49b4f20e31e97f199348b8c4d6e613e7b6a54a90eb1b090c2b8496d7/regex-2025.9.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a32291add816961aab472f4fad344c92871a2ee33c6c219b6598e98c1f0108f2", size = 862654, upload-time = "2025-09-01T22:08:40.586Z" }, - { url = "https://files.pythonhosted.org/packages/59/05/984edce1411a5685ba9abbe10d42cdd9450aab4a022271f9585539788150/regex-2025.9.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:588c161a68a383478e27442a678e3b197b13c5ba51dbba40c1ccb8c4c7bee9e9", size = 910862, upload-time = "2025-09-01T22:08:42.416Z" }, - { url = "https://files.pythonhosted.org/packages/b2/02/5c891bb5fe0691cc1bad336e3a94b9097fbcf9707ec8ddc1dce9f0397289/regex-2025.9.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47829ffaf652f30d579534da9085fe30c171fa2a6744a93d52ef7195dc38218b", size = 801991, upload-time = "2025-09-01T22:08:44.072Z" }, - { url = "https://files.pythonhosted.org/packages/f1/ae/fd10d6ad179910f7a1b3e0a7fde1ef8bb65e738e8ac4fd6ecff3f52252e4/regex-2025.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e978e5a35b293ea43f140c92a3269b6ab13fe0a2bf8a881f7ac740f5a6ade85", size = 786651, upload-time = "2025-09-01T22:08:46.079Z" }, - { url = "https://files.pythonhosted.org/packages/30/cf/9d686b07bbc5bf94c879cc168db92542d6bc9fb67088d03479fef09ba9d3/regex-2025.9.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf09903e72411f4bf3ac1eddd624ecfd423f14b2e4bf1c8b547b72f248b7bf7", size = 856556, upload-time = "2025-09-01T22:08:48.376Z" }, - { url = "https://files.pythonhosted.org/packages/91/9d/302f8a29bb8a49528abbab2d357a793e2a59b645c54deae0050f8474785b/regex-2025.9.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d016b0f77be63e49613c9e26aaf4a242f196cd3d7a4f15898f5f0ab55c9b24d2", size = 849001, upload-time = "2025-09-01T22:08:50.067Z" }, - { url = "https://files.pythonhosted.org/packages/93/fa/b4c6dbdedc85ef4caec54c817cd5f4418dbfa2453214119f2538082bf666/regex-2025.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:656563e620de6908cd1c9d4f7b9e0777e3341ca7db9d4383bcaa44709c90281e", size = 788138, upload-time = "2025-09-01T22:08:51.933Z" }, - { url = "https://files.pythonhosted.org/packages/4a/1b/91ee17a3cbf87f81e8c110399279d0e57f33405468f6e70809100f2ff7d8/regex-2025.9.1-cp312-cp312-win32.whl", hash = "sha256:df33f4ef07b68f7ab637b1dbd70accbf42ef0021c201660656601e8a9835de45", size = 264524, upload-time = "2025-09-01T22:08:53.75Z" }, - { url = "https://files.pythonhosted.org/packages/92/28/6ba31cce05b0f1ec6b787921903f83bd0acf8efde55219435572af83c350/regex-2025.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:5aba22dfbc60cda7c0853516104724dc904caa2db55f2c3e6e984eb858d3edf3", size = 275489, upload-time = "2025-09-01T22:08:55.037Z" }, - { url = "https://files.pythonhosted.org/packages/bd/ed/ea49f324db00196e9ef7fe00dd13c6164d5173dd0f1bbe495e61bb1fb09d/regex-2025.9.1-cp312-cp312-win_arm64.whl", hash = 
"sha256:ec1efb4c25e1849c2685fa95da44bfde1b28c62d356f9c8d861d4dad89ed56e9", size = 268589, upload-time = "2025-09-01T22:08:56.369Z" }, + { url = "https://files.pythonhosted.org/packages/58/61/80eda662fc4eb32bfedc331f42390974c9e89c7eac1b79cd9eea4d7c458c/regex-2025.9.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:51076980cd08cd13c88eb7365427ae27f0d94e7cebe9ceb2bb9ffdae8fc4d82a", size = 484832, upload-time = "2025-09-19T00:35:30.011Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d9/33833d9abddf3f07ad48504ddb53fe3b22f353214bbb878a72eee1e3ddbf/regex-2025.9.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:828446870bd7dee4e0cbeed767f07961aa07f0ea3129f38b3ccecebc9742e0b8", size = 288994, upload-time = "2025-09-19T00:35:31.733Z" }, + { url = "https://files.pythonhosted.org/packages/2a/b3/526ee96b0d70ea81980cbc20c3496fa582f775a52e001e2743cc33b2fa75/regex-2025.9.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c28821d5637866479ec4cc23b8c990f5bc6dd24e5e4384ba4a11d38a526e1414", size = 286619, upload-time = "2025-09-19T00:35:33.221Z" }, + { url = "https://files.pythonhosted.org/packages/65/4f/c2c096b02a351b33442aed5895cdd8bf87d372498d2100927c5a053d7ba3/regex-2025.9.18-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:726177ade8e481db669e76bf99de0b278783be8acd11cef71165327abd1f170a", size = 792454, upload-time = "2025-09-19T00:35:35.361Z" }, + { url = "https://files.pythonhosted.org/packages/24/15/b562c9d6e47c403c4b5deb744f8b4bf6e40684cf866c7b077960a925bdff/regex-2025.9.18-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f5cca697da89b9f8ea44115ce3130f6c54c22f541943ac8e9900461edc2b8bd4", size = 858723, upload-time = "2025-09-19T00:35:36.949Z" }, + { url = "https://files.pythonhosted.org/packages/f2/01/dba305409849e85b8a1a681eac4c03ed327d8de37895ddf9dc137f59c140/regex-2025.9.18-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dfbde38f38004703c35666a1e1c088b778e35d55348da2b7b278914491698d6a", size = 905899, upload-time = "2025-09-19T00:35:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d0/c51d1e6a80eab11ef96a4cbad17fc0310cf68994fb01a7283276b7e5bbd6/regex-2025.9.18-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f2f422214a03fab16bfa495cfec72bee4aaa5731843b771860a471282f1bf74f", size = 798981, upload-time = "2025-09-19T00:35:40.416Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5e/72db90970887bbe02296612bd61b0fa31e6d88aa24f6a4853db3e96c575e/regex-2025.9.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a295916890f4df0902e4286bc7223ee7f9e925daa6dcdec4192364255b70561a", size = 781900, upload-time = "2025-09-19T00:35:42.077Z" }, + { url = "https://files.pythonhosted.org/packages/50/ff/596be45eea8e9bc31677fde243fa2904d00aad1b32c31bce26c3dbba0b9e/regex-2025.9.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5db95ff632dbabc8c38c4e82bf545ab78d902e81160e6e455598014f0abe66b9", size = 852952, upload-time = "2025-09-19T00:35:43.751Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1b/2dfa348fa551e900ed3f5f63f74185b6a08e8a76bc62bc9c106f4f92668b/regex-2025.9.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb967eb441b0f15ae610b7069bdb760b929f267efbf522e814bbbfffdf125ce2", size = 844355, upload-time = "2025-09-19T00:35:45.309Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/bf/aefb1def27fe33b8cbbb19c75c13aefccfbef1c6686f8e7f7095705969c7/regex-2025.9.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f04d2f20da4053d96c08f7fde6e1419b7ec9dbcee89c96e3d731fca77f411b95", size = 787254, upload-time = "2025-09-19T00:35:46.904Z" }, + { url = "https://files.pythonhosted.org/packages/e3/4e/8ef042e7cf0dbbb401e784e896acfc1b367b95dfbfc9ada94c2ed55a081f/regex-2025.9.18-cp311-cp311-win32.whl", hash = "sha256:895197241fccf18c0cea7550c80e75f185b8bd55b6924fcae269a1a92c614a07", size = 264129, upload-time = "2025-09-19T00:35:48.597Z" }, + { url = "https://files.pythonhosted.org/packages/b4/7d/c4fcabf80dcdd6821c0578ad9b451f8640b9110fb3dcb74793dd077069ff/regex-2025.9.18-cp311-cp311-win_amd64.whl", hash = "sha256:7e2b414deae99166e22c005e154a5513ac31493db178d8aec92b3269c9cce8c9", size = 276160, upload-time = "2025-09-19T00:36:00.45Z" }, + { url = "https://files.pythonhosted.org/packages/64/f8/0e13c8ae4d6df9d128afaba138342d532283d53a4c1e7a8c93d6756c8f4a/regex-2025.9.18-cp311-cp311-win_arm64.whl", hash = "sha256:fb137ec7c5c54f34a25ff9b31f6b7b0c2757be80176435bf367111e3f71d72df", size = 268471, upload-time = "2025-09-19T00:36:02.149Z" }, + { url = "https://files.pythonhosted.org/packages/b0/99/05859d87a66ae7098222d65748f11ef7f2dff51bfd7482a4e2256c90d72b/regex-2025.9.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:436e1b31d7efd4dcd52091d076482031c611dde58bf9c46ca6d0a26e33053a7e", size = 486335, upload-time = "2025-09-19T00:36:03.661Z" }, + { url = "https://files.pythonhosted.org/packages/97/7e/d43d4e8b978890932cf7b0957fce58c5b08c66f32698f695b0c2c24a48bf/regex-2025.9.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c190af81e5576b9c5fdc708f781a52ff20f8b96386c6e2e0557a78402b029f4a", size = 289720, upload-time = "2025-09-19T00:36:05.471Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3b/ff80886089eb5dcf7e0d2040d9aaed539e25a94300403814bb24cc775058/regex-2025.9.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e4121f1ce2b2b5eec4b397cc1b277686e577e658d8f5870b7eb2d726bd2300ab", size = 287257, upload-time = "2025-09-19T00:36:07.072Z" }, + { url = "https://files.pythonhosted.org/packages/ee/66/243edf49dd8720cba8d5245dd4d6adcb03a1defab7238598c0c97cf549b8/regex-2025.9.18-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:300e25dbbf8299d87205e821a201057f2ef9aa3deb29caa01cd2cac669e508d5", size = 797463, upload-time = "2025-09-19T00:36:08.399Z" }, + { url = "https://files.pythonhosted.org/packages/df/71/c9d25a1142c70432e68bb03211d4a82299cd1c1fbc41db9409a394374ef5/regex-2025.9.18-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7b47fcf9f5316c0bdaf449e879407e1b9937a23c3b369135ca94ebc8d74b1742", size = 862670, upload-time = "2025-09-19T00:36:10.101Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8f/329b1efc3a64375a294e3a92d43372bf1a351aa418e83c21f2f01cf6ec41/regex-2025.9.18-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:57a161bd3acaa4b513220b49949b07e252165e6b6dc910ee7617a37ff4f5b425", size = 910881, upload-time = "2025-09-19T00:36:12.223Z" }, + { url = "https://files.pythonhosted.org/packages/35/9e/a91b50332a9750519320ed30ec378b74c996f6befe282cfa6bb6cea7e9fd/regex-2025.9.18-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f130c3a7845ba42de42f380fff3c8aebe89a810747d91bcf56d40a069f15352", size = 802011, upload-time = 
"2025-09-19T00:36:13.901Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1d/6be3b8d7856b6e0d7ee7f942f437d0a76e0d5622983abbb6d21e21ab9a17/regex-2025.9.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f96fa342b6f54dcba928dd452e8d8cb9f0d63e711d1721cd765bb9f73bb048d", size = 786668, upload-time = "2025-09-19T00:36:15.391Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ce/4a60e53df58bd157c5156a1736d3636f9910bdcc271d067b32b7fcd0c3a8/regex-2025.9.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f0d676522d68c207828dcd01fb6f214f63f238c283d9f01d85fc664c7c85b56", size = 856578, upload-time = "2025-09-19T00:36:16.845Z" }, + { url = "https://files.pythonhosted.org/packages/86/e8/162c91bfe7217253afccde112868afb239f94703de6580fb235058d506a6/regex-2025.9.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:40532bff8a1a0621e7903ae57fce88feb2e8a9a9116d341701302c9302aef06e", size = 849017, upload-time = "2025-09-19T00:36:18.597Z" }, + { url = "https://files.pythonhosted.org/packages/35/34/42b165bc45289646ea0959a1bc7531733e90b47c56a72067adfe6b3251f6/regex-2025.9.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:039f11b618ce8d71a1c364fdee37da1012f5a3e79b1b2819a9f389cd82fd6282", size = 788150, upload-time = "2025-09-19T00:36:20.464Z" }, + { url = "https://files.pythonhosted.org/packages/79/5d/cdd13b1f3c53afa7191593a7ad2ee24092a5a46417725ffff7f64be8342d/regex-2025.9.18-cp312-cp312-win32.whl", hash = "sha256:e1dd06f981eb226edf87c55d523131ade7285137fbde837c34dc9d1bf309f459", size = 264536, upload-time = "2025-09-19T00:36:21.922Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f5/4a7770c9a522e7d2dc1fa3ffc83ab2ab33b0b22b447e62cffef186805302/regex-2025.9.18-cp312-cp312-win_amd64.whl", hash = "sha256:3d86b5247bf25fa3715e385aa9ff272c307e0636ce0c9595f64568b41f0a9c77", size = 275501, upload-time = "2025-09-19T00:36:23.4Z" }, + { url = "https://files.pythonhosted.org/packages/df/05/9ce3e110e70d225ecbed455b966003a3afda5e58e8aec2964042363a18f4/regex-2025.9.18-cp312-cp312-win_arm64.whl", hash = "sha256:032720248cbeeae6444c269b78cb15664458b7bb9ed02401d3da59fe4d68c3a5", size = 268601, upload-time = "2025-09-19T00:36:25.092Z" }, ] [[package]] @@ -5784,16 +5800,16 @@ wheels = [ [[package]] name = "sendgrid" -version = "6.12.4" +version = "6.12.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "ecdsa" }, + { name = "cryptography" }, { name = "python-http-client" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/11/31/62e00433878dccf33edf07f8efa417b9030a2464eb3b04bbd797a11b4447/sendgrid-6.12.4.tar.gz", hash = "sha256:9e88b849daf0fa4bdf256c3b5da9f5a3272402c0c2fd6b1928c9de440db0a03d", size = 50271, upload-time = "2025-06-12T10:29:37.213Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/fa/f718b2b953f99c1f0085811598ac7e31ccbd4229a81ec2a5290be868187a/sendgrid-6.12.5.tar.gz", hash = "sha256:ea9aae30cd55c332e266bccd11185159482edfc07c149b6cd15cf08869fabdb7", size = 50310, upload-time = "2025-09-19T06:23:09.229Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/9c/45d068fd831a65e6ed1e2ab3233de58784842afdc62fdcdd0a01bbb6b39d/sendgrid-6.12.4-py3-none-any.whl", hash = "sha256:9a211b96241e63bd5b9ed9afcc8608f4bcac426e4a319b3920ab877c8426e92c", size = 102122, upload-time = "2025-06-12T10:29:35.457Z" }, + { url = "https://files.pythonhosted.org/packages/bd/55/b3c3880a77082e8f7374954e0074aafafaa9bc78bdf9c8f5a92c2e7afc6a/sendgrid-6.12.5-py3-none-any.whl", hash = 
"sha256:96f92cc91634bf552fdb766b904bbb53968018da7ae41fdac4d1090dc0311ca8", size = 102173, upload-time = "2025-09-19T06:23:07.93Z" }, ] [[package]] @@ -6198,27 +6214,27 @@ wheels = [ [[package]] name = "tokenizers" -version = "0.21.4" +version = "0.22.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c2/2f/402986d0823f8d7ca139d969af2917fefaa9b947d1fb32f6168c509f2492/tokenizers-0.21.4.tar.gz", hash = "sha256:fa23f85fbc9a02ec5c6978da172cdcbac23498c3ca9f3645c5c68740ac007880", size = 351253, upload-time = "2025-07-28T15:48:54.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123, upload-time = "2025-09-19T09:49:23.424Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/c6/fdb6f72bf6454f52eb4a2510be7fb0f614e541a2554d6210e370d85efff4/tokenizers-0.21.4-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:2ccc10a7c3bcefe0f242867dc914fc1226ee44321eb618cfe3019b5df3400133", size = 2863987, upload-time = "2025-07-28T15:48:44.877Z" }, - { url = "https://files.pythonhosted.org/packages/8d/a6/28975479e35ddc751dc1ddc97b9b69bf7fcf074db31548aab37f8116674c/tokenizers-0.21.4-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:5e2f601a8e0cd5be5cc7506b20a79112370b9b3e9cb5f13f68ab11acd6ca7d60", size = 2732457, upload-time = "2025-07-28T15:48:43.265Z" }, - { url = "https://files.pythonhosted.org/packages/aa/8f/24f39d7b5c726b7b0be95dca04f344df278a3fe3a4deb15a975d194cbb32/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b376f5a1aee67b4d29032ee85511bbd1b99007ec735f7f35c8a2eb104eade5", size = 3012624, upload-time = "2025-07-28T13:22:43.895Z" }, - { url = "https://files.pythonhosted.org/packages/58/47/26358925717687a58cb74d7a508de96649544fad5778f0cd9827398dc499/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2107ad649e2cda4488d41dfd031469e9da3fcbfd6183e74e4958fa729ffbf9c6", size = 2939681, upload-time = "2025-07-28T13:22:47.499Z" }, - { url = "https://files.pythonhosted.org/packages/99/6f/cc300fea5db2ab5ddc2c8aea5757a27b89c84469899710c3aeddc1d39801/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c73012da95afafdf235ba80047699df4384fdc481527448a078ffd00e45a7d9", size = 3247445, upload-time = "2025-07-28T15:48:39.711Z" }, - { url = "https://files.pythonhosted.org/packages/be/bf/98cb4b9c3c4afd8be89cfa6423704337dc20b73eb4180397a6e0d456c334/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f23186c40395fc390d27f519679a58023f368a0aad234af145e0f39ad1212732", size = 3428014, upload-time = "2025-07-28T13:22:49.569Z" }, - { url = "https://files.pythonhosted.org/packages/75/c7/96c1cc780e6ca7f01a57c13235dd05b7bc1c0f3588512ebe9d1331b5f5ae/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc88bb34e23a54cc42713d6d98af5f1bf79c07653d24fe984d2d695ba2c922a2", size = 3193197, upload-time = "2025-07-28T13:22:51.471Z" }, - { url = "https://files.pythonhosted.org/packages/f2/90/273b6c7ec78af547694eddeea9e05de771278bd20476525ab930cecaf7d8/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51b7eabb104f46c1c50b486520555715457ae833d5aee9ff6ae853d1130506ff", size = 3115426, 
upload-time = "2025-07-28T15:48:41.439Z" }, - { url = "https://files.pythonhosted.org/packages/91/43/c640d5a07e95f1cf9d2c92501f20a25f179ac53a4f71e1489a3dcfcc67ee/tokenizers-0.21.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:714b05b2e1af1288bd1bc56ce496c4cebb64a20d158ee802887757791191e6e2", size = 9089127, upload-time = "2025-07-28T15:48:46.472Z" }, - { url = "https://files.pythonhosted.org/packages/44/a1/dd23edd6271d4dca788e5200a807b49ec3e6987815cd9d0a07ad9c96c7c2/tokenizers-0.21.4-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:1340ff877ceedfa937544b7d79f5b7becf33a4cfb58f89b3b49927004ef66f78", size = 9055243, upload-time = "2025-07-28T15:48:48.539Z" }, - { url = "https://files.pythonhosted.org/packages/21/2b/b410d6e9021c4b7ddb57248304dc817c4d4970b73b6ee343674914701197/tokenizers-0.21.4-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:3c1f4317576e465ac9ef0d165b247825a2a4078bcd01cba6b54b867bdf9fdd8b", size = 9298237, upload-time = "2025-07-28T15:48:50.443Z" }, - { url = "https://files.pythonhosted.org/packages/b7/0a/42348c995c67e2e6e5c89ffb9cfd68507cbaeb84ff39c49ee6e0a6dd0fd2/tokenizers-0.21.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:c212aa4e45ec0bb5274b16b6f31dd3f1c41944025c2358faaa5782c754e84c24", size = 9461980, upload-time = "2025-07-28T15:48:52.325Z" }, - { url = "https://files.pythonhosted.org/packages/3d/d3/dacccd834404cd71b5c334882f3ba40331ad2120e69ded32cf5fda9a7436/tokenizers-0.21.4-cp39-abi3-win32.whl", hash = "sha256:6c42a930bc5f4c47f4ea775c91de47d27910881902b0f20e4990ebe045a415d0", size = 2329871, upload-time = "2025-07-28T15:48:56.841Z" }, - { url = "https://files.pythonhosted.org/packages/41/f2/fd673d979185f5dcbac4be7d09461cbb99751554ffb6718d0013af8604cb/tokenizers-0.21.4-cp39-abi3-win_amd64.whl", hash = "sha256:475d807a5c3eb72c59ad9b5fcdb254f6e17f53dfcbb9903233b0dfa9c943b597", size = 2507568, upload-time = "2025-07-28T15:48:55.456Z" }, + { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318, upload-time = "2025-09-19T09:49:11.848Z" }, + { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478, upload-time = "2025-09-19T09:49:09.759Z" }, + { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994, upload-time = "2025-09-19T09:48:56.701Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141, upload-time = "2025-09-19T09:48:59.749Z" }, + { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049, upload-time = "2025-09-19T09:49:05.868Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730, upload-time = "2025-09-19T09:49:01.832Z" }, + { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560, upload-time = "2025-09-19T09:49:03.867Z" }, + { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221, upload-time = "2025-09-19T09:49:07.664Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569, upload-time = "2025-09-19T09:49:14.214Z" }, + { url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599, upload-time = "2025-09-19T09:49:16.639Z" }, + { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862, upload-time = "2025-09-19T09:49:19.146Z" }, + { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250, upload-time = "2025-09-19T09:49:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003, upload-time = "2025-09-19T09:49:27.089Z" }, + { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" }, ] [[package]] @@ -6286,7 +6302,7 @@ wheels = [ [[package]] name = "transformers" -version = "4.56.1" +version = "4.56.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -6300,39 +6316,39 @@ dependencies = [ { name = "tokenizers" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/89/21/dc88ef3da1e49af07ed69386a11047a31dcf1aaf4ded3bc4b173fbf94116/transformers-4.56.1.tar.gz", hash = "sha256:0d88b1089a563996fc5f2c34502f10516cad3ea1aa89f179f522b54c8311fe74", size = 9855473, upload-time = "2025-09-04T20:47:13.14Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e5/82/0bcfddd134cdf53440becb5e738257cc3cf34cf229d63b57bfd288e6579f/transformers-4.56.2.tar.gz", hash = "sha256:5e7c623e2d7494105c726dd10f6f90c2c99a55ebe86eef7233765abd0cb1c529", size = 9844296, upload-time = "2025-09-19T15:16:26.778Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/71/7c/283c3dd35e00e22a7803a0b2a65251347b745474a82399be058bde1c9f15/transformers-4.56.1-py3-none-any.whl", hash = "sha256:1697af6addfb6ddbce9618b763f4b52d5a756f6da4899ffd1b4febf58b779248", size = 11608197, upload-time = "2025-09-04T20:47:04.895Z" }, + { url = "https://files.pythonhosted.org/packages/70/26/2591b48412bde75e33bfd292034103ffe41743cacd03120e3242516cd143/transformers-4.56.2-py3-none-any.whl", hash = "sha256:79c03d0e85b26cb573c109ff9eafa96f3c8d4febfd8a0774e8bba32702dd6dde", size = 11608055, upload-time = "2025-09-19T15:16:23.736Z" }, ] [[package]] name = "ty" -version = "0.0.1a20" +version = "0.0.1a21" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7a/82/a5e3b4bc5280ec49c4b0b43d0ff727d58c7df128752c9c6f97ad0b5f575f/ty-0.0.1a20.tar.gz", hash = "sha256:933b65a152f277aa0e23ba9027e5df2c2cc09e18293e87f2a918658634db5f15", size = 4194773, upload-time = "2025-09-03T12:35:46.775Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/0f/65606ccee2da5a05a3c3362f5233f058e9d29d3c5521697c7ae79545d246/ty-0.0.1a21.tar.gz", hash = "sha256:e941e9a9d1e54b03eeaf9c3197c26a19cf76009fd5e41e16e5657c1c827bd6d3", size = 4263980, upload-time = "2025-09-19T06:54:06.412Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/c8/f7d39392043d5c04936f6cad90e50eb661965ed092ca4bfc01db917d7b8a/ty-0.0.1a20-py3-none-linux_armv6l.whl", hash = "sha256:f73a7aca1f0d38af4d6999b375eb00553f3bfcba102ae976756cc142e14f3450", size = 8443599, upload-time = "2025-09-03T12:35:04.289Z" }, - { url = "https://files.pythonhosted.org/packages/1e/57/5aec78f9b8a677b7439ccded7d66c3361e61247e0f6b14e659b00dd01008/ty-0.0.1a20-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cad12c857ea4b97bf61e02f6796e13061ccca5e41f054cbd657862d80aa43bae", size = 8618102, upload-time = "2025-09-03T12:35:07.448Z" }, - { url = "https://files.pythonhosted.org/packages/15/20/50c9107d93cdb55676473d9dc4e2339af6af606660c9428d3b86a1b2a476/ty-0.0.1a20-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f153b65c7fcb6b8b59547ddb6353761b3e8d8bb6f0edd15e3e3ac14405949f7a", size = 8192167, upload-time = "2025-09-03T12:35:09.706Z" }, - { url = "https://files.pythonhosted.org/packages/85/28/018b2f330109cee19e81c5ca9df3dc29f06c5778440eb9af05d4550c4302/ty-0.0.1a20-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c4336987a6a781d4392a9fd7b3a39edb7e4f3dd4f860e03f46c932b52aefa2", size = 8349256, upload-time = "2025-09-03T12:35:11.76Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c9/2f8797a05587158f52b142278796ffd72c893bc5ad41840fce5aeb65c6f2/ty-0.0.1a20-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ff75cd4c744d09914e8c9db8d99e02f82c9379ad56b0a3fc4c5c9c923cfa84e", size = 8271214, upload-time = "2025-09-03T12:35:13.741Z" }, - { url = "https://files.pythonhosted.org/packages/30/d4/2cac5e5eb9ee51941358cb3139aadadb59520cfaec94e4fcd2b166969748/ty-0.0.1a20-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e26437772be7f7808868701f2bf9e14e706a6ec4c7d02dbd377ff94d7ba60c11", size = 9264939, upload-time = "2025-09-03T12:35:16.896Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/96/a6f2b54e484b2c6a5488f217882237dbdf10f0fdbdb6cd31333d57afe494/ty-0.0.1a20-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:83a7ee12465841619b5eb3ca962ffc7d576bb1c1ac812638681aee241acbfbbe", size = 9743137, upload-time = "2025-09-03T12:35:19.799Z" }, - { url = "https://files.pythonhosted.org/packages/6e/67/95b40dcbec3d222f3af5fe5dd1ce066d42f8a25a2f70d5724490457048e7/ty-0.0.1a20-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:726d0738be4459ac7ffae312ba96c5f486d6cbc082723f322555d7cba9397871", size = 9368153, upload-time = "2025-09-03T12:35:22.569Z" }, - { url = "https://files.pythonhosted.org/packages/2c/24/689fa4c4270b9ef9a53dc2b1d6ffade259ba2c4127e451f0629e130ea46a/ty-0.0.1a20-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b481f26513f38543df514189fb16744690bcba8d23afee95a01927d93b46e36", size = 9099637, upload-time = "2025-09-03T12:35:24.94Z" }, - { url = "https://files.pythonhosted.org/packages/a1/5b/913011cbf3ea4030097fb3c4ce751856114c9e1a5e1075561a4c5242af9b/ty-0.0.1a20-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7abbe3c02218c12228b1d7c5f98c57240029cc3bcb15b6997b707c19be3908c1", size = 8952000, upload-time = "2025-09-03T12:35:27.288Z" }, - { url = "https://files.pythonhosted.org/packages/df/f9/f5ba2ae455b20c5bb003f9940ef8142a8c4ed9e27de16e8f7472013609db/ty-0.0.1a20-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:fff51c75ee3f7cc6d7722f2f15789ef8ffe6fd2af70e7269ac785763c906688e", size = 8217938, upload-time = "2025-09-03T12:35:29.54Z" }, - { url = "https://files.pythonhosted.org/packages/eb/62/17002cf9032f0981cdb8c898d02422c095c30eefd69ca62a8b705d15bd0f/ty-0.0.1a20-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b4124ab75e0e6f09fe7bc9df4a77ee43c5e0ef7e61b0c149d7c089d971437cbd", size = 8292369, upload-time = "2025-09-03T12:35:31.748Z" }, - { url = "https://files.pythonhosted.org/packages/28/d6/0879b1fb66afe1d01d45c7658f3849aa641ac4ea10679404094f3b40053e/ty-0.0.1a20-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8a138fa4f74e6ed34e9fd14652d132409700c7ff57682c2fed656109ebfba42f", size = 8811973, upload-time = "2025-09-03T12:35:33.997Z" }, - { url = "https://files.pythonhosted.org/packages/60/1e/70bf0348cfe8ba5f7532983f53c508c293ddf5fa9f942ed79a3c4d576df3/ty-0.0.1a20-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8eff8871d6b88d150e2a67beba2c57048f20c090c219f38ed02eebaada04c124", size = 9010990, upload-time = "2025-09-03T12:35:36.766Z" }, - { url = "https://files.pythonhosted.org/packages/b7/ca/03d85c7650359247b1ca3f38a0d869a608ef540450151920e7014ed58292/ty-0.0.1a20-py3-none-win32.whl", hash = "sha256:3c2ace3a22fab4bd79f84c74e3dab26e798bfba7006bea4008d6321c1bd6efc6", size = 8100746, upload-time = "2025-09-03T12:35:40.007Z" }, - { url = "https://files.pythonhosted.org/packages/94/53/7a1937b8c7a66d0c8ed7493de49ed454a850396fe137d2ae12ed247e0b2f/ty-0.0.1a20-py3-none-win_amd64.whl", hash = "sha256:f41e77ff118da3385915e13c3f366b3a2f823461de54abd2e0ca72b170ba0f19", size = 8748861, upload-time = "2025-09-03T12:35:42.175Z" }, - { url = "https://files.pythonhosted.org/packages/27/36/5a3a70c5d497d3332f9e63cabc9c6f13484783b832fecc393f4f1c0c4aa8/ty-0.0.1a20-py3-none-win_arm64.whl", hash = "sha256:d8ac1c5a14cda5fad1a8b53959d9a5d979fe16ce1cc2785ea8676fed143ac85f", size = 8269906, upload-time = "2025-09-03T12:35:45.045Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/7a/c87a42d0a45cfa2d5c06c8d66aa1b243db16dc31b25e545fb0263308523b/ty-0.0.1a21-py3-none-linux_armv6l.whl", hash = "sha256:1f276ceab23a1410aec09508248c76ae0989c67fb7a0c287e0d4564994295531", size = 8421116, upload-time = "2025-09-19T06:53:35.029Z" }, + { url = "https://files.pythonhosted.org/packages/99/c2/721bf4fa21c84d4cdae0e57a06a88e7e64fc2dca38820232bd6cbeef644f/ty-0.0.1a21-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3c3bc66fcae41eff133cfe326dd65d82567a2fb5d4efe2128773b10ec2766819", size = 8512556, upload-time = "2025-09-19T06:53:37.455Z" }, + { url = "https://files.pythonhosted.org/packages/6c/58/b0585d9d61673e864a87e95760dfa2a90ac15702e7612ab064d354f6752a/ty-0.0.1a21-py3-none-macosx_11_0_arm64.whl", hash = "sha256:cc0880ec344fbdf736b05d8d0da01f0caaaa02409bd9a24b68d18d0127a79b0e", size = 8109188, upload-time = "2025-09-19T06:53:39.469Z" }, + { url = "https://files.pythonhosted.org/packages/ea/08/edf7b59ba24bb1a1af341207fc5a0106eb1fe4264c1d7fb672c171dd2daf/ty-0.0.1a21-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:334d2a212ebf42a0e55d57561926af7679fe1e878175e11dcb81ad8df892844e", size = 8279000, upload-time = "2025-09-19T06:53:41.309Z" }, + { url = "https://files.pythonhosted.org/packages/05/8e/4b5e562623e0aa24a3972510287b4bc5d98251afb353388d14008ea99954/ty-0.0.1a21-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a8c769987d00fbc33054ff7e342633f475ea10dc43bc60fb9fb056159d48cb90", size = 8243261, upload-time = "2025-09-19T06:53:42.736Z" }, + { url = "https://files.pythonhosted.org/packages/c3/09/6476fa21f9962d5b9c8e8053fd0442ed8e3ceb7502e39700ab1935555199/ty-0.0.1a21-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:218d53e7919e885bd98e9196d9cb952d82178b299aa36da6f7f39333eb7400ed", size = 9150228, upload-time = "2025-09-19T06:53:44.242Z" }, + { url = "https://files.pythonhosted.org/packages/d2/96/49c158b6255fc1e22a5701c38f7d4c1b7f8be17a476ce9226fcae82a7b36/ty-0.0.1a21-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:84243455f295ed850bd53f7089819321807d4e6ee3b1cbff6086137ae0259466", size = 9628323, upload-time = "2025-09-19T06:53:45.998Z" }, + { url = "https://files.pythonhosted.org/packages/f4/65/37a8a5cb7b3254365c54b5e10f069e311c4252ed160b86fabd1203fbca5c/ty-0.0.1a21-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87a200c21e02962e8a27374d9d152582331d57d709672431be58f4f898bf6cad", size = 9251233, upload-time = "2025-09-19T06:53:48.042Z" }, + { url = "https://files.pythonhosted.org/packages/a3/30/5b06120747da4a0f0bc54a4b051b42172603033dbee0bcf51bce7c21ada9/ty-0.0.1a21-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:be8f457d7841b7ead2a3f6b65ba668abc172a1150a0f1f6c0958af3725dbb61a", size = 8996186, upload-time = "2025-09-19T06:53:49.753Z" }, + { url = "https://files.pythonhosted.org/packages/af/fc/5aa122536b1acb57389f404f6328c20342242b78513a60459fee9b7d6f27/ty-0.0.1a21-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1474d883129bb63da3b2380fc7ead824cd3baf6a9551e6aa476ffefc58057af3", size = 8722848, upload-time = "2025-09-19T06:53:51.566Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c1/456dcc65a149df8410b1d75f0197a31d4beef74b7bb44cce42b03bf074e8/ty-0.0.1a21-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0efba2e52b58f536f4198ba5c4a36cac2ba67d83ec6f429ebc7704233bcda4c3", size = 8220727, upload-time = "2025-09-19T06:53:53.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/86/b37505d942cd68235be5be407e43e15afa36669aaa2db9b6e5b43c1d9f91/ty-0.0.1a21-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5dfc73299d441cc6454e36ed0a976877415024143dfca6592dc36f7701424383", size = 8279114, upload-time = "2025-09-19T06:53:55.343Z" }, + { url = "https://files.pythonhosted.org/packages/55/fe/0d9816f36d258e6b2a3d7518421be17c68954ea9a66b638de49588cc2e27/ty-0.0.1a21-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ba13d03b9e095216ceb4e4d554a308517f28ab0a6e4dcd07cfe94563e4c2c489", size = 8701798, upload-time = "2025-09-19T06:53:57.17Z" }, + { url = "https://files.pythonhosted.org/packages/4e/7a/70539932e3e5a36c54bd5432ff44ed0c301c41a528365d8de5b8f79f4317/ty-0.0.1a21-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9463cac96b8f1bb5ba740fe1d42cd6bd152b43c5b159b2f07f8fd629bcdded34", size = 8872676, upload-time = "2025-09-19T06:53:59.357Z" }, + { url = "https://files.pythonhosted.org/packages/ea/94/809d85f6982841fe28526ace3b282b0458d0a96bbc6b1a982d9269a5e481/ty-0.0.1a21-py3-none-win32.whl", hash = "sha256:ecf41706b803827b0de8717f32a434dad1e67be9f4b8caf403e12013179ea06a", size = 8003866, upload-time = "2025-09-19T06:54:01.393Z" }, + { url = "https://files.pythonhosted.org/packages/50/16/b3e914cec2a6344d2c30d3780ca6ecd39667173611f8776cecfd1294eab9/ty-0.0.1a21-py3-none-win_amd64.whl", hash = "sha256:7505aeb8bf2a62f00f12cfa496f6c965074d75c8126268776565284c8a12d5dd", size = 8675300, upload-time = "2025-09-19T06:54:02.893Z" }, + { url = "https://files.pythonhosted.org/packages/16/0b/293be6bc19f6da5e9b15e615a7100504f307dd4294d2c61cee3de91198e5/ty-0.0.1a21-py3-none-win_arm64.whl", hash = "sha256:21f708d02b6588323ffdbfdba38830dd0ecfd626db50aa6006b296b5470e52f9", size = 8193800, upload-time = "2025-09-19T06:54:04.583Z" }, ] [[package]] name = "typer" -version = "0.17.4" +version = "0.19.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -6340,9 +6356,9 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/92/e8/2a73ccf9874ec4c7638f172efc8972ceab13a0e3480b389d6ed822f7a822/typer-0.17.4.tar.gz", hash = "sha256:b77dc07d849312fd2bb5e7f20a7af8985c7ec360c45b051ed5412f64d8dc1580", size = 103734, upload-time = "2025-09-05T18:14:40.746Z" } +sdist = { url = "https://files.pythonhosted.org/packages/03/ea/9cc57c3c627fd7a6a0907ea371019fe74c3ec00e3cf209a6864140a602ad/typer-0.19.1.tar.gz", hash = "sha256:cb881433a4b15dacc875bb0583d1a61e78497806741f9aba792abcab390c03e6", size = 104802, upload-time = "2025-09-20T08:59:22.692Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/72/6b3e70d32e89a5cbb6a4513726c1ae8762165b027af569289e19ec08edd8/typer-0.17.4-py3-none-any.whl", hash = "sha256:015534a6edaa450e7007eba705d5c18c3349dcea50a6ad79a5ed530967575824", size = 46643, upload-time = "2025-09-05T18:14:39.166Z" }, + { url = "https://files.pythonhosted.org/packages/1e/fa/6473c00b5eb26a2ba427813107699d3e6f4e1a4afad3f7494b17bdef3422/typer-0.19.1-py3-none-any.whl", hash = "sha256:914b2b39a1da4bafca5f30637ca26fa622a5bf9f515e5fdc772439f306d5682a", size = 46876, upload-time = "2025-09-20T08:59:21.153Z" }, ] [[package]] @@ -6386,14 +6402,14 @@ wheels = [ [[package]] name = "types-cffi" -version = "1.17.0.20250822" +version = "1.17.0.20250915" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-setuptools" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/da/0c/76a48cb6e742cac4d61a4ec632dd30635b6d302f5acdc2c0a27572ac7ae3/types_cffi-1.17.0.20250822.tar.gz", hash = "sha256:bf6f5a381ea49da7ff895fae69711271e6192c434470ce6139bf2b2e0d0fa08d", size = 17130, upload-time = "2025-08-22T03:04:02.445Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/98/ea454cea03e5f351323af6a482c65924f3c26c515efd9090dede58f2b4b6/types_cffi-1.17.0.20250915.tar.gz", hash = "sha256:4362e20368f78dabd5c56bca8004752cc890e07a71605d9e0d9e069dbaac8c06", size = 17229, upload-time = "2025-09-15T03:01:25.31Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/21/f7/68029931e7539e3246b33386a19c475f234c71d2a878411847b20bb31960/types_cffi-1.17.0.20250822-py3-none-any.whl", hash = "sha256:183dd76c1871a48936d7b931488e41f0f25a7463abe10b5816be275fc11506d5", size = 20083, upload-time = "2025-08-22T03:04:01.466Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ec/092f2b74b49ec4855cdb53050deb9699f7105b8fda6fe034c0781b8687f3/types_cffi-1.17.0.20250915-py3-none-any.whl", hash = "sha256:cef4af1116c83359c11bb4269283c50f0688e9fc1d7f0eeb390f3661546da52c", size = 20112, upload-time = "2025-09-15T03:01:24.187Z" }, ] [[package]] @@ -6481,11 +6497,11 @@ wheels = [ [[package]] name = "types-html5lib" -version = "1.1.11.20250809" +version = "1.1.11.20250917" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/ab/6aa4c487ae6f4f9da5153143bdc9e9b4fbc2b105df7ef8127fb920dc1f21/types_html5lib-1.1.11.20250809.tar.gz", hash = "sha256:7976ec7426bb009997dc5e072bca3ed988dd747d0cbfe093c7dfbd3d5ec8bf57", size = 16793, upload-time = "2025-08-09T03:14:20.819Z" } +sdist = { url = "https://files.pythonhosted.org/packages/68/4b/a970718e8bd9324ee8fb8eaf02ff069f6d03c20d4523bb4232892ecc3d06/types_html5lib-1.1.11.20250917.tar.gz", hash = "sha256:7b52743377f33f9b4fd7385afbd2d457b8864ee51f90ff2a795ad9e8c053373a", size = 16868, upload-time = "2025-09-17T02:47:41.18Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/05/328a2d6ecbd8aa3e16512600da78b1fe4605125896794a21824f3cac6f14/types_html5lib-1.1.11.20250809-py3-none-any.whl", hash = "sha256:e5f48ab670ae4cdeafd88bbc47113d8126dcf08318e0b8d70df26ecc13eca9b6", size = 22867, upload-time = "2025-08-09T03:14:20.048Z" }, + { url = "https://files.pythonhosted.org/packages/78/8a/da91a9c64dcb5e69beb567519857411996d8ecae9f6f128bcef8260e7a8d/types_html5lib-1.1.11.20250917-py3-none-any.whl", hash = "sha256:b294fd06d60da205daeb2f615485ca4d475088d2eff1009cf427f4a80fcd5346", size = 22908, upload-time = "2025-09-17T02:47:40.39Z" }, ] [[package]] @@ -6547,20 +6563,20 @@ wheels = [ [[package]] name = "types-openpyxl" -version = "3.1.5.20250822" +version = "3.1.5.20250919" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/7f/ea358482217448deafdb9232f198603511d2efa99e429822256f2b38975a/types_openpyxl-3.1.5.20250822.tar.gz", hash = "sha256:c8704a163e3798290d182c13c75da85f68cd97ff9b35f0ebfb94cf72f8b67bb3", size = 100858, upload-time = "2025-08-22T03:03:31.835Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/12/8bc4a25d49f1e4b7bbca868daa3ee80b1983d8137b4986867b5b65ab2ecd/types_openpyxl-3.1.5.20250919.tar.gz", hash = "sha256:232b5906773eebace1509b8994cdadda043f692cfdba9bfbb86ca921d54d32d7", size = 100880, upload-time = "2025-09-19T02:54:39.997Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/5e/e8/cac4728e8dcbeb69d6de7de26bb9edb508e9f5c82476ecda22b58b939e60/types_openpyxl-3.1.5.20250822-py3-none-any.whl", hash = "sha256:da7a430d99c48347acf2dc351695f9db6ff90ecb761fed577b4a98fef2d0f831", size = 166093, upload-time = "2025-08-22T03:03:30.686Z" }, + { url = "https://files.pythonhosted.org/packages/36/3c/d49cf3f4489a10e9ddefde18fd258f120754c5825d06d145d9a0aaac770b/types_openpyxl-3.1.5.20250919-py3-none-any.whl", hash = "sha256:bd06f18b12fd5e1c9f0b666ee6151d8140216afa7496f7ebb9fe9d33a1a3ce99", size = 166078, upload-time = "2025-09-19T02:54:38.657Z" }, ] [[package]] name = "types-pexpect" -version = "4.9.0.20250809" +version = "4.9.0.20250916" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/a2/29564e69dee62f0f887ba7bfffa82fa4975504952e6199b218d3b403becd/types_pexpect-4.9.0.20250809.tar.gz", hash = "sha256:17a53c785b847c90d0be9149b00b0254e6e92c21cd856e853dac810ddb20101f", size = 13240, upload-time = "2025-08-09T03:15:04.554Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/e6/cc43e306dc7de14ec7861c24ac4957f688741ae39ae685049695d796b587/types_pexpect-4.9.0.20250916.tar.gz", hash = "sha256:69e5fed6199687a730a572de780a5749248a4c5df2ff1521e194563475c9928d", size = 13322, upload-time = "2025-09-16T02:49:25.61Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/1b/4d557287e6672feb749cf0d8ef5eb19189aff043e73e509e3775febc1cf1/types_pexpect-4.9.0.20250809-py3-none-any.whl", hash = "sha256:d19d206b8a7c282dac9376f26f072e036d22e9cf3e7d8eba3f477500b1f39101", size = 17039, upload-time = "2025-08-09T03:15:03.528Z" }, + { url = "https://files.pythonhosted.org/packages/aa/6d/7740e235a9fb2570968da7d386d7feb511ce68cd23472402ff8cdf7fc78f/types_pexpect-4.9.0.20250916-py3-none-any.whl", hash = "sha256:7fa43cb96042ac58bc74f7c28e5d85782be0ee01344149886849e9d90936fe8a", size = 17057, upload-time = "2025-09-16T02:49:24.546Z" }, ] [[package]] @@ -6583,11 +6599,11 @@ wheels = [ [[package]] name = "types-psycopg2" -version = "2.9.21.20250809" +version = "2.9.21.20250915" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/17/d0/66f3f04bab48bfdb2c8b795b2b3e75eb20c7d1fb0516916db3be6aa4a683/types_psycopg2-2.9.21.20250809.tar.gz", hash = "sha256:b7c2cbdcf7c0bd16240f59ba694347329b0463e43398de69784ea4dee45f3c6d", size = 26539, upload-time = "2025-08-09T03:14:54.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/20/3dcb89df8d1661cf6c4c2d9f84d4ba94dde48559cdcf7b536a380a9c387f/types_psycopg2-2.9.21.20250915.tar.gz", hash = "sha256:bfeb8f54c32490e7b5edc46215ab4163693192bc90407b4a023822de9239f5c8", size = 26678, upload-time = "2025-09-15T03:01:08.863Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/98/182497602921c47fadc8470d51a32e5c75343c8931c0b572a5c4ae3b948b/types_psycopg2-2.9.21.20250809-py3-none-any.whl", hash = "sha256:59b7b0ed56dcae9efae62b8373497274fc1a0484bdc5135cdacbe5a8f44e1d7b", size = 24824, upload-time = "2025-08-09T03:14:53.908Z" }, + { url = "https://files.pythonhosted.org/packages/93/4d/ebf1c72809a30150ad142074e1ad5101304f7569c0df2fa872906d76d0af/types_psycopg2-2.9.21.20250915-py3-none-any.whl", hash = "sha256:eefe5ccdc693fc086146e84c9ba437bb278efe1ef330b299a0cb71169dc6c55f", size = 24868, upload-time = "2025-09-15T03:01:07.613Z" }, ] [[package]] @@ -6604,11 +6620,11 @@ wheels = [ [[package]] name = "types-pymysql" -version = "1.1.0.20250909" +version = "1.1.0.20250916" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ae/0f/bb4331221fd560379ec702d61a11d5a5eead9a2866bb39eae294bde29988/types_pymysql-1.1.0.20250909.tar.gz", hash = "sha256:5ba7230425635b8c59316353701b99a087b949e8002dfeff652be0b62cee445b", size = 22189, upload-time = "2025-09-09T02:55:31.039Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/12/bda1d977c07e0e47502bede1c44a986dd45946494d89e005e04cdeb0f8de/types_pymysql-1.1.0.20250916.tar.gz", hash = "sha256:98d75731795fcc06723a192786662bdfa760e1e00f22809c104fbb47bac5e29b", size = 22131, upload-time = "2025-09-16T02:49:22.039Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/35/5681d881506a31bbbd9f7d5f6edcbf65489835081965b539b0802a665036/types_pymysql-1.1.0.20250909-py3-none-any.whl", hash = "sha256:c9957d4c10a31748636da5c16b0a0eef6751354d05adcd1b86acb27e8df36fb6", size = 23179, upload-time = "2025-09-09T02:55:29.873Z" }, + { url = "https://files.pythonhosted.org/packages/21/eb/a225e32a6e7b196af67ab2f1b07363595f63255374cc3b88bfdab53b4ee8/types_pymysql-1.1.0.20250916-py3-none-any.whl", hash = "sha256:873eb9836bb5e3de4368cc7010ca72775f86e9692a5c7810f8c7f48da082e55b", size = 23063, upload-time = "2025-09-16T02:49:20.933Z" }, ] [[package]] @@ -6662,11 +6678,11 @@ wheels = [ [[package]] name = "types-pyyaml" -version = "6.0.12.20250822" +version = "6.0.12.20250915" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/49/85/90a442e538359ab5c9e30de415006fb22567aa4301c908c09f19e42975c2/types_pyyaml-6.0.12.20250822.tar.gz", hash = "sha256:259f1d93079d335730a9db7cff2bcaf65d7e04b4a56b5927d49a612199b59413", size = 17481, upload-time = "2025-08-22T03:02:16.209Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522, upload-time = "2025-09-15T03:01:00.728Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/8e/8f0aca667c97c0d76024b37cffa39e76e2ce39ca54a38f285a64e6ae33ba/types_pyyaml-6.0.12.20250822-py3-none-any.whl", hash = "sha256:1fe1a5e146aa315483592d292b72a172b65b946a6d98aa6ddd8e4aa838ab7098", size = 20314, upload-time = "2025-08-22T03:02:15.002Z" }, + { url = "https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338, upload-time = "2025-09-15T03:00:59.218Z" }, ] [[package]] @@ -6693,14 +6709,14 @@ wheels = [ [[package]] name = "types-requests" -version = "2.32.4.20250809" +version = "2.32.4.20250913" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ed/b0/9355adb86ec84d057fea765e4c49cce592aaf3d5117ce5609a95a7fc3dac/types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", size = 23027, upload-time = "2025-08-09T03:17:10.664Z" } +sdist = { url = "https://files.pythonhosted.org/packages/36/27/489922f4505975b11de2b5ad07b4fe1dca0bca9be81a703f26c5f3acfce5/types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d", size = 23113, upload-time = "2025-09-13T02:40:02.309Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2b/6f/ec0012be842b1d888d46884ac5558fd62aeae1f0ec4f7a581433d890d4b5/types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163", size = 20644, upload-time = "2025-08-09T03:17:09.716Z" }, + { url = "https://files.pythonhosted.org/packages/2a/20/9a227ea57c1285986c4cf78400d0a91615d25b24e257fd9e2969606bdfae/types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1", size = 20658, upload-time = "2025-09-13T02:40:01.115Z" }, ] [[package]] @@ -6990,15 +7006,15 @@ wheels = [ [[package]] name = "uvicorn" -version = "0.35.0" +version = "0.36.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/5e/f0cd46063a02fd8515f0e880c37d2657845b7306c16ce6c4ffc44afd9036/uvicorn-0.36.0.tar.gz", hash = "sha256:527dc68d77819919d90a6b267be55f0e76704dca829d34aea9480be831a9b9d9", size = 80032, upload-time = "2025-09-20T01:07:14.418Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, + { url = "https://files.pythonhosted.org/packages/96/06/5cc0542b47c0338c1cb676b348e24a1c29acabc81000bced518231dded6f/uvicorn-0.36.0-py3-none-any.whl", hash = "sha256:6bb4ba67f16024883af8adf13aba3a9919e415358604ce46780d3f9bdc36d731", size = 67675, upload-time = "2025-09-20T01:07:12.984Z" }, ] [package.optional-dependencies] @@ -7070,7 +7086,7 @@ wheels = [ [[package]] name = "wandb" -version = "0.21.4" +version = "0.22.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -7084,17 +7100,16 @@ dependencies = [ { name = "sentry-sdk" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/59/a8/aaa3f3f8e410f34442466aac10b1891b3084d35b98aef59ebcb4c0efb941/wandb-0.21.4.tar.gz", hash = "sha256:b350d50973409658deb455010fafcfa81e6be3470232e316286319e839ffb67b", size = 40175929, upload-time = "2025-09-11T21:14:29.161Z" } +sdist = { url = "https://files.pythonhosted.org/packages/93/37/0d4194707ceaa3168fa9ce54c1332bf15958bdbf67837f39cfac2e3b98bb/wandb-0.22.0.tar.gz", hash = "sha256:717e3d085f8f57dbde745c9ec6d605e51b2da51e47a7d2a7bfa82c9c6e3d3f5a", size = 40241826, upload-time = "2025-09-18T19:13:22.256Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/6b/3a8d9db18a4c4568599a8792c0c8b1f422d9864c7123e8301a9477fbf0ac/wandb-0.21.4-py3-none-macosx_10_14_x86_64.whl", hash = "sha256:c681ef7adb09925251d8d995c58aa76ae86a46dbf8de3b67353ad99fdef232d5", size = 18845369, upload-time = "2025-09-11T21:14:02.879Z" }, - { url = "https://files.pythonhosted.org/packages/60/e0/d7d6818938ec6958c93d979f9a90ea3d06bdc41e130b30f8cd89ae03c245/wandb-0.21.4-py3-none-macosx_12_0_arm64.whl", hash = "sha256:d35acc65c10bb7ac55d1331f7b1b8ab761f368f7b051131515f081a56ea5febc", size = 18339122, upload-time = "2025-09-11T21:14:06.455Z" }, - { url = 
"https://files.pythonhosted.org/packages/13/29/9bb8ed4adf32bed30e4d5df74d956dd1e93b6fd4bbc29dbe84167c84804b/wandb-0.21.4-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:765e66b57b7be5f393ecebd9a9d2c382c9f979d19cdee4a3f118eaafed43fca1", size = 19081975, upload-time = "2025-09-11T21:14:09.317Z" }, - { url = "https://files.pythonhosted.org/packages/30/6e/4aa33bc2c56b70c0116e73687c72c7a674f4072442633b3b23270d2215e3/wandb-0.21.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06127ec49245d12fdb3922c1eca1ab611cefc94adabeaaaba7b069707c516cba", size = 18161358, upload-time = "2025-09-11T21:14:12.092Z" }, - { url = "https://files.pythonhosted.org/packages/f7/56/d9f845ecfd5e078cf637cb29d8abe3350b8a174924c54086168783454a8f/wandb-0.21.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48d4f65f1be5f5a25b868695e09cdbfe481678220df349a8c2cbed3992fb497f", size = 19602680, upload-time = "2025-09-11T21:14:14.987Z" }, - { url = "https://files.pythonhosted.org/packages/68/ea/237a3c2b679a35e02e577c5bf844d6a221a7d32925ab8d5230529e9f2841/wandb-0.21.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ebd11f78351a3ca22caa1045146a6d2ad9e62fed6d0de2e67a0db5710d75103a", size = 18166392, upload-time = "2025-09-11T21:14:17.478Z" }, - { url = "https://files.pythonhosted.org/packages/12/e3/dbf2c575c79c99d94f16ce1a2cbbb2529d5029a76348c1ddac7e47f6873f/wandb-0.21.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:595b9e77591a805653e05db8b892805ee0a5317d147ef4976353e4f1cc16ebdc", size = 19678800, upload-time = "2025-09-11T21:14:20.264Z" }, - { url = "https://files.pythonhosted.org/packages/fa/eb/4ed04879d697772b8eb251c0e5af9a4ff7e2cc2b3fcd4b8eee91253ec2f1/wandb-0.21.4-py3-none-win32.whl", hash = "sha256:f9c86eb7eb7d40c6441533428188b1ae3205674e80c940792d850e2c1fe8d31e", size = 18738950, upload-time = "2025-09-11T21:14:23.08Z" }, - { url = "https://files.pythonhosted.org/packages/c3/4a/86c5e19600cb6a616a45f133c26826b46133499cd72d592772929d530ccd/wandb-0.21.4-py3-none-win_amd64.whl", hash = "sha256:2da3d5bb310a9f9fb7f680f4aef285348095a4cc6d1ce22b7343ba4e3fffcd84", size = 18738953, upload-time = "2025-09-11T21:14:25.539Z" }, + { url = "https://files.pythonhosted.org/packages/19/7d/8841e39e4f97a8777babad57b13856b5e24d6efe35ad75649c8da28472d9/wandb-0.22.0-py3-none-macosx_12_0_arm64.whl", hash = "sha256:8650a14615c23dcfc8cf393f88d41a879d6bfffb3c290a556aeb6ee62986c359", size = 18343096, upload-time = "2025-09-18T19:12:58.473Z" }, + { url = "https://files.pythonhosted.org/packages/c1/6e/0416fea679527b80109c083782ae2696a6c37ac45e7f8901c27b665ea94b/wandb-0.22.0-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:94ec449b3ed9516cad7008ab37c55b299d0036cdadfa83688b7245bd6ba04dd3", size = 19373158, upload-time = "2025-09-18T19:13:02.441Z" }, + { url = "https://files.pythonhosted.org/packages/db/58/48499272541eb21c3db2e28a0dc128270e8acb533a358944306210b1cb9e/wandb-0.22.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b2fe78b5f2d1ec7396f7925c7ac33f04ea0a62f07779cb654c45633d17dfc45", size = 18149252, upload-time = "2025-09-18T19:13:05.344Z" }, + { url = "https://files.pythonhosted.org/packages/06/c7/93a70c6f31ea127fd1c89800e6e733e172d9eaba6a33c9e08348503df78b/wandb-0.22.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44da9a83301d89c008f608832b74237f9e0a0758b2bb6d69ba51652818fffb5e", size = 19564075, upload-time = "2025-09-18T19:13:07.882Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/d8/910e4dee2dc2010d688087244d0502621105d5f314088af9265081c73079/wandb-0.22.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:21f05cc609c62c8ccba7c3338f9288d723c64d16ffd4fa70c02d6db60b42abae", size = 18188310, upload-time = "2025-09-18T19:13:10.321Z" }, + { url = "https://files.pythonhosted.org/packages/97/ac/2c09e536aca56d01b50207acc25aadbe0ee6ae8b825ec0f30c5ea7c1cd2f/wandb-0.22.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:884d37fb8d4daeb4d1f68ad8b5ea2817cabecc715efaff2f89bf006f2e977e37", size = 19658593, upload-time = "2025-09-18T19:13:13.812Z" }, + { url = "https://files.pythonhosted.org/packages/29/cb/d5f832adfd68f3a4700928e0cbdac78acb0f3182983a57a020cd1c5bab26/wandb-0.22.0-py3-none-win32.whl", hash = "sha256:60776fae528c3f64caf47a94dec08899c308f96fe974e0a82cefddb9a65e223c", size = 18742395, upload-time = "2025-09-18T19:13:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/3c/c9/d9f0c7b8a743af589e694ce8fec8e6cffa46873179912d4ed4f992d08381/wandb-0.22.0-py3-none-win_amd64.whl", hash = "sha256:53ba0fa048b766c1aa44592f1e530fb7eead7749089a66c3892b35f153a8d8bd", size = 18742399, upload-time = "2025-09-18T19:13:19.26Z" }, ] [[package]] @@ -7321,20 +7336,20 @@ wheels = [ [[package]] name = "xlsxwriter" -version = "3.2.5" +version = "3.2.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/47/7704bac42ac6fe1710ae099b70e6a1e68ed173ef14792b647808c357da43/xlsxwriter-3.2.5.tar.gz", hash = "sha256:7e88469d607cdc920151c0ab3ce9cf1a83992d4b7bc730c5ffdd1a12115a7dbe", size = 213306, upload-time = "2025-06-17T08:59:14.619Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/2c/c06ef49dc36e7954e55b802a8b231770d286a9758b3d936bd1e04ce5ba88/xlsxwriter-3.2.9.tar.gz", hash = "sha256:254b1c37a368c444eac6e2f867405cc9e461b0ed97a3233b2ac1e574efb4140c", size = 215940, upload-time = "2025-09-16T00:16:21.63Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/34/a22e6664211f0c8879521328000bdcae9bf6dbafa94a923e531f6d5b3f73/xlsxwriter-3.2.5-py3-none-any.whl", hash = "sha256:4f4824234e1eaf9d95df9a8fe974585ff91d0f5e3d3f12ace5b71e443c1c6abd", size = 172347, upload-time = "2025-06-17T08:59:13.453Z" }, + { url = "https://files.pythonhosted.org/packages/3a/0c/3662f4a66880196a590b202f0db82d919dd2f89e99a27fadef91c4a33d41/xlsxwriter-3.2.9-py3-none-any.whl", hash = "sha256:9a5db42bc5dff014806c58a20b9eae7322a134abb6fce3c92c181bfb275ec5b3", size = 175315, upload-time = "2025-09-16T00:16:20.108Z" }, ] [[package]] name = "xmltodict" -version = "0.15.1" +version = "1.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d7/7a/42f705c672e77dc3ce85a6823bb289055323aac30de7c4b9eca1e28b2c17/xmltodict-0.15.1.tar.gz", hash = "sha256:3d8d49127f3ce6979d40a36dbcad96f8bab106d232d24b49efdd4bd21716983c", size = 62984, upload-time = "2025-09-08T18:33:19.349Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/aa/917ceeed4dbb80d2f04dbd0c784b7ee7bba8ae5a54837ef0e5e062cd3cfb/xmltodict-1.0.2.tar.gz", hash = "sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649", size = 25725, upload-time = "2025-09-17T21:59:26.459Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/4e/001c53a22f6bd5f383f49915a53e40f0cab2d3f1884d968f3ae14be367b7/xmltodict-0.15.1-py2.py3-none-any.whl", hash = "sha256:dcd84b52f30a15be5ac4c9099a0cb234df8758624b035411e329c5c1e7a49089", size = 11260, upload-time = 
"2025-09-08T18:33:17.87Z" }, + { url = "https://files.pythonhosted.org/packages/c0/20/69a0e6058bc5ea74892d089d64dfc3a62ba78917ec5e2cfa70f7c92ba3a5/xmltodict-1.0.2-py3-none-any.whl", hash = "sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d", size = 13893, upload-time = "2025-09-17T21:59:24.859Z" }, ] [[package]] @@ -7429,42 +7444,42 @@ wheels = [ [[package]] name = "zstandard" -version = "0.24.0" +version = "0.25.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/09/1b/c20b2ef1d987627765dcd5bf1dadb8ef6564f00a87972635099bb76b7a05/zstandard-0.24.0.tar.gz", hash = "sha256:fe3198b81c00032326342d973e526803f183f97aa9e9a98e3f897ebafe21178f", size = 905681, upload-time = "2025-08-17T18:36:36.352Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/01/1f/5c72806f76043c0ef9191a2b65281dacdf3b65b0828eb13bb2c987c4fb90/zstandard-0.24.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:addfc23e3bd5f4b6787b9ca95b2d09a1a67ad5a3c318daaa783ff90b2d3a366e", size = 795228, upload-time = "2025-08-17T18:21:46.978Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ba/3059bd5cd834666a789251d14417621b5c61233bd46e7d9023ea8bc1043a/zstandard-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b005bcee4be9c3984b355336283afe77b2defa76ed6b89332eced7b6fa68b68", size = 640520, upload-time = "2025-08-17T18:21:48.162Z" }, - { url = "https://files.pythonhosted.org/packages/57/07/f0e632bf783f915c1fdd0bf68614c4764cae9dd46ba32cbae4dd659592c3/zstandard-0.24.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:3f96a9130171e01dbb6c3d4d9925d604e2131a97f540e223b88ba45daf56d6fb", size = 5347682, upload-time = "2025-08-17T18:21:50.266Z" }, - { url = "https://files.pythonhosted.org/packages/a6/4c/63523169fe84773a7462cd090b0989cb7c7a7f2a8b0a5fbf00009ba7d74d/zstandard-0.24.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd0d3d16e63873253bad22b413ec679cf6586e51b5772eb10733899832efec42", size = 5057650, upload-time = "2025-08-17T18:21:52.634Z" }, - { url = "https://files.pythonhosted.org/packages/c6/16/49013f7ef80293f5cebf4c4229535a9f4c9416bbfd238560edc579815dbe/zstandard-0.24.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:b7a8c30d9bf4bd5e4dcfe26900bef0fcd9749acde45cdf0b3c89e2052fda9a13", size = 5404893, upload-time = "2025-08-17T18:21:54.54Z" }, - { url = "https://files.pythonhosted.org/packages/4d/38/78e8bcb5fc32a63b055f2b99e0be49b506f2351d0180173674f516cf8a7a/zstandard-0.24.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:52cd7d9fa0a115c9446abb79b06a47171b7d916c35c10e0c3aa6f01d57561382", size = 5452389, upload-time = "2025-08-17T18:21:56.822Z" }, - { url = "https://files.pythonhosted.org/packages/55/8a/81671f05619edbacd49bd84ce6899a09fc8299be20c09ae92f6618ccb92d/zstandard-0.24.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a0f6fc2ea6e07e20df48752e7700e02e1892c61f9a6bfbacaf2c5b24d5ad504b", size = 5558888, upload-time = "2025-08-17T18:21:58.68Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/cc/e83feb2d7d22d1f88434defbaeb6e5e91f42a4f607b5d4d2d58912b69d67/zstandard-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e46eb6702691b24ddb3e31e88b4a499e31506991db3d3724a85bd1c5fc3cfe4e", size = 5048038, upload-time = "2025-08-17T18:22:00.642Z" }, - { url = "https://files.pythonhosted.org/packages/08/c3/7a5c57ff49ef8943877f85c23368c104c2aea510abb339a2dc31ad0a27c3/zstandard-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5e3b9310fd7f0d12edc75532cd9a56da6293840c84da90070d692e0bb15f186", size = 5573833, upload-time = "2025-08-17T18:22:02.402Z" }, - { url = "https://files.pythonhosted.org/packages/f9/00/64519983cd92535ba4bdd4ac26ac52db00040a52d6c4efb8d1764abcc343/zstandard-0.24.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76cdfe7f920738ea871f035568f82bad3328cbc8d98f1f6988264096b5264efd", size = 4961072, upload-time = "2025-08-17T18:22:04.384Z" }, - { url = "https://files.pythonhosted.org/packages/72/ab/3a08a43067387d22994fc87c3113636aa34ccd2914a4d2d188ce365c5d85/zstandard-0.24.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3f2fe35ec84908dddf0fbf66b35d7c2878dbe349552dd52e005c755d3493d61c", size = 5268462, upload-time = "2025-08-17T18:22:06.095Z" }, - { url = "https://files.pythonhosted.org/packages/49/cf/2abb3a1ad85aebe18c53e7eca73223f1546ddfa3bf4d2fb83fc5a064c5ca/zstandard-0.24.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:aa705beb74ab116563f4ce784fa94771f230c05d09ab5de9c397793e725bb1db", size = 5443319, upload-time = "2025-08-17T18:22:08.572Z" }, - { url = "https://files.pythonhosted.org/packages/40/42/0dd59fc2f68f1664cda11c3b26abdf987f4e57cb6b6b0f329520cd074552/zstandard-0.24.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:aadf32c389bb7f02b8ec5c243c38302b92c006da565e120dfcb7bf0378f4f848", size = 5822355, upload-time = "2025-08-17T18:22:10.537Z" }, - { url = "https://files.pythonhosted.org/packages/99/c0/ea4e640fd4f7d58d6f87a1e7aca11fb886ac24db277fbbb879336c912f63/zstandard-0.24.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e40cd0fc734aa1d4bd0e7ad102fd2a1aefa50ce9ef570005ffc2273c5442ddc3", size = 5365257, upload-time = "2025-08-17T18:22:13.159Z" }, - { url = "https://files.pythonhosted.org/packages/27/a9/92da42a5c4e7e4003271f2e1f0efd1f37cfd565d763ad3604e9597980a1c/zstandard-0.24.0-cp311-cp311-win32.whl", hash = "sha256:cda61c46343809ecda43dc620d1333dd7433a25d0a252f2dcc7667f6331c7b61", size = 435559, upload-time = "2025-08-17T18:22:17.29Z" }, - { url = "https://files.pythonhosted.org/packages/e2/8e/2c8e5c681ae4937c007938f954a060fa7c74f36273b289cabdb5ef0e9a7e/zstandard-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:3b95fc06489aa9388400d1aab01a83652bc040c9c087bd732eb214909d7fb0dd", size = 505070, upload-time = "2025-08-17T18:22:14.808Z" }, - { url = "https://files.pythonhosted.org/packages/52/10/a2f27a66bec75e236b575c9f7b0d7d37004a03aa2dcde8e2decbe9ed7b4d/zstandard-0.24.0-cp311-cp311-win_arm64.whl", hash = "sha256:ad9fd176ff6800a0cf52bcf59c71e5de4fa25bf3ba62b58800e0f84885344d34", size = 461507, upload-time = "2025-08-17T18:22:15.964Z" }, - { url = "https://files.pythonhosted.org/packages/26/e9/0bd281d9154bba7fc421a291e263911e1d69d6951aa80955b992a48289f6/zstandard-0.24.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a2bda8f2790add22773ee7a4e43c90ea05598bffc94c21c40ae0a9000b0133c3", size = 795710, upload-time = "2025-08-17T18:22:19.189Z" }, - { url = 
"https://files.pythonhosted.org/packages/36/26/b250a2eef515caf492e2d86732e75240cdac9d92b04383722b9753590c36/zstandard-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cc76de75300f65b8eb574d855c12518dc25a075dadb41dd18f6322bda3fe15d5", size = 640336, upload-time = "2025-08-17T18:22:20.466Z" }, - { url = "https://files.pythonhosted.org/packages/79/bf/3ba6b522306d9bf097aac8547556b98a4f753dc807a170becaf30dcd6f01/zstandard-0.24.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d2b3b4bda1a025b10fe0269369475f420177f2cb06e0f9d32c95b4873c9f80b8", size = 5342533, upload-time = "2025-08-17T18:22:22.326Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ec/22bc75bf054e25accdf8e928bc68ab36b4466809729c554ff3a1c1c8bce6/zstandard-0.24.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b84c6c210684286e504022d11ec294d2b7922d66c823e87575d8b23eba7c81f", size = 5062837, upload-time = "2025-08-17T18:22:24.416Z" }, - { url = "https://files.pythonhosted.org/packages/48/cc/33edfc9d286e517fb5b51d9c3210e5bcfce578d02a675f994308ca587ae1/zstandard-0.24.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c59740682a686bf835a1a4d8d0ed1eefe31ac07f1c5a7ed5f2e72cf577692b00", size = 5393855, upload-time = "2025-08-17T18:22:26.786Z" }, - { url = "https://files.pythonhosted.org/packages/73/36/59254e9b29da6215fb3a717812bf87192d89f190f23817d88cb8868c47ac/zstandard-0.24.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6324fde5cf5120fbf6541d5ff3c86011ec056e8d0f915d8e7822926a5377193a", size = 5451058, upload-time = "2025-08-17T18:22:28.885Z" }, - { url = "https://files.pythonhosted.org/packages/9a/c7/31674cb2168b741bbbe71ce37dd397c9c671e73349d88ad3bca9e9fae25b/zstandard-0.24.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:51a86bd963de3f36688553926a84e550d45d7f9745bd1947d79472eca27fcc75", size = 5546619, upload-time = "2025-08-17T18:22:31.115Z" }, - { url = "https://files.pythonhosted.org/packages/e6/01/1a9f22239f08c00c156f2266db857545ece66a6fc0303d45c298564bc20b/zstandard-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d82ac87017b734f2fb70ff93818c66f0ad2c3810f61040f077ed38d924e19980", size = 5046676, upload-time = "2025-08-17T18:22:33.077Z" }, - { url = "https://files.pythonhosted.org/packages/a7/91/6c0cf8fa143a4988a0361380ac2ef0d7cb98a374704b389fbc38b5891712/zstandard-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92ea7855d5bcfb386c34557516c73753435fb2d4a014e2c9343b5f5ba148b5d8", size = 5576381, upload-time = "2025-08-17T18:22:35.391Z" }, - { url = "https://files.pythonhosted.org/packages/e2/77/1526080e22e78871e786ccf3c84bf5cec9ed25110a9585507d3c551da3d6/zstandard-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3adb4b5414febf074800d264ddf69ecade8c658837a83a19e8ab820e924c9933", size = 4953403, upload-time = "2025-08-17T18:22:37.266Z" }, - { url = "https://files.pythonhosted.org/packages/6e/d0/a3a833930bff01eab697eb8abeafb0ab068438771fa066558d96d7dafbf9/zstandard-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6374feaf347e6b83ec13cc5dcfa70076f06d8f7ecd46cc71d58fac798ff08b76", size = 5267396, upload-time = "2025-08-17T18:22:39.757Z" }, - { url = "https://files.pythonhosted.org/packages/f3/5e/90a0db9a61cd4769c06374297ecfcbbf66654f74cec89392519deba64d76/zstandard-0.24.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:13fc548e214df08d896ee5f29e1f91ee35db14f733fef8eabea8dca6e451d1e2", size 
= 5433269, upload-time = "2025-08-17T18:22:42.131Z" }, - { url = "https://files.pythonhosted.org/packages/ce/58/fc6a71060dd67c26a9c5566e0d7c99248cbe5abfda6b3b65b8f1a28d59f7/zstandard-0.24.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0a416814608610abf5488889c74e43ffa0343ca6cf43957c6b6ec526212422da", size = 5814203, upload-time = "2025-08-17T18:22:44.017Z" }, - { url = "https://files.pythonhosted.org/packages/5c/6a/89573d4393e3ecbfa425d9a4e391027f58d7810dec5cdb13a26e4cdeef5c/zstandard-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0d66da2649bb0af4471699aeb7a83d6f59ae30236fb9f6b5d20fb618ef6c6777", size = 5359622, upload-time = "2025-08-17T18:22:45.802Z" }, - { url = "https://files.pythonhosted.org/packages/60/ff/2cbab815d6f02a53a9d8d8703bc727d8408a2e508143ca9af6c3cca2054b/zstandard-0.24.0-cp312-cp312-win32.whl", hash = "sha256:ff19efaa33e7f136fe95f9bbcc90ab7fb60648453b03f95d1de3ab6997de0f32", size = 435968, upload-time = "2025-08-17T18:22:49.493Z" }, - { url = "https://files.pythonhosted.org/packages/ce/a3/8f96b8ddb7ad12344218fbd0fd2805702dafd126ae9f8a1fb91eef7b33da/zstandard-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc05f8a875eb651d1cc62e12a4a0e6afa5cd0cc231381adb830d2e9c196ea895", size = 505195, upload-time = "2025-08-17T18:22:47.193Z" }, - { url = "https://files.pythonhosted.org/packages/a3/4a/bfca20679da63bfc236634ef2e4b1b4254203098b0170e3511fee781351f/zstandard-0.24.0-cp312-cp312-win_arm64.whl", hash = "sha256:b04c94718f7a8ed7cdd01b162b6caa1954b3c9d486f00ecbbd300f149d2b2606", size = 461605, upload-time = "2025-08-17T18:22:48.317Z" }, + { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" }, + { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" }, + { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126, upload-time = "2025-09-14T22:16:31.811Z" }, + { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390, upload-time = "2025-09-14T22:16:33.486Z" }, + { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 
5452914, upload-time = "2025-09-14T22:16:35.277Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635, upload-time = "2025-09-14T22:16:37.141Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277, upload-time = "2025-09-14T22:16:38.807Z" }, + { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377, upload-time = "2025-09-14T22:16:40.523Z" }, + { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493, upload-time = "2025-09-14T22:16:43.3Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018, upload-time = "2025-09-14T22:16:45.292Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672, upload-time = "2025-09-14T22:16:47.076Z" }, + { url = "https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753, upload-time = "2025-09-14T22:16:49.316Z" }, + { url = "https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047, upload-time = "2025-09-14T22:16:51.328Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484, upload-time = "2025-09-14T22:16:55.005Z" }, + { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183, upload-time = "2025-09-14T22:16:52.753Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533, upload-time = "2025-09-14T22:16:53.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" }, + { url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" }, + { url = "https://files.pythonhosted.org/packages/1e/15/efef5a2f204a64bdb5571e6161d49f7ef0fffdbca953a615efbec045f60f/zstandard-0.25.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea", size = 5063012, upload-time = "2025-09-14T22:17:01.156Z" }, + { url = "https://files.pythonhosted.org/packages/b7/37/a6ce629ffdb43959e92e87ebdaeebb5ac81c944b6a75c9c47e300f85abdf/zstandard-0.25.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb", size = 5394148, upload-time = "2025-09-14T22:17:03.091Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/2bf870b3abeb5c070fe2d670a5a8d1057a8270f125ef7676d29ea900f496/zstandard-0.25.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a", size = 5451652, upload-time = "2025-09-14T22:17:04.979Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/7be26e610767316c028a2cbedb9a3beabdbe33e2182c373f71a1c0b88f36/zstandard-0.25.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902", size = 5546993, upload-time = "2025-09-14T22:17:06.781Z" }, + { url = "https://files.pythonhosted.org/packages/85/c7/3483ad9ff0662623f3648479b0380d2de5510abf00990468c286c6b04017/zstandard-0.25.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f", size = 5046806, upload-time = "2025-09-14T22:17:08.415Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/206883dd25b8d1591a1caa44b54c2aad84badccf2f1de9e2d60a446f9a25/zstandard-0.25.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b", size = 5576659, upload-time = "2025-09-14T22:17:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/9d/31/76c0779101453e6c117b0ff22565865c54f48f8bd807df2b00c2c404b8e0/zstandard-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6", size = 4953933, upload-time = "2025-09-14T22:17:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/18/e1/97680c664a1bf9a247a280a053d98e251424af51f1b196c6d52f117c9720/zstandard-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91", size = 
5268008, upload-time = "2025-09-14T22:17:13.627Z" }, + { url = "https://files.pythonhosted.org/packages/1e/73/316e4010de585ac798e154e88fd81bb16afc5c5cb1a72eeb16dd37e8024a/zstandard-0.25.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708", size = 5433517, upload-time = "2025-09-14T22:17:16.103Z" }, + { url = "https://files.pythonhosted.org/packages/5b/60/dd0f8cfa8129c5a0ce3ea6b7f70be5b33d2618013a161e1ff26c2b39787c/zstandard-0.25.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512", size = 5814292, upload-time = "2025-09-14T22:17:17.827Z" }, + { url = "https://files.pythonhosted.org/packages/fc/5f/75aafd4b9d11b5407b641b8e41a57864097663699f23e9ad4dbb91dc6bfe/zstandard-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa", size = 5360237, upload-time = "2025-09-14T22:17:19.954Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8d/0309daffea4fcac7981021dbf21cdb2e3427a9e76bafbcdbdf5392ff99a4/zstandard-0.25.0-cp312-cp312-win32.whl", hash = "sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd", size = 436922, upload-time = "2025-09-14T22:17:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/79/3b/fa54d9015f945330510cb5d0b0501e8253c127cca7ebe8ba46a965df18c5/zstandard-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01", size = 506276, upload-time = "2025-09-14T22:17:21.429Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6b/8b51697e5319b1f9ac71087b0af9a40d8a6288ff8025c36486e0c12abcc4/zstandard-0.25.0-cp312-cp312-win_arm64.whl", hash = "sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9", size = 462679, upload-time = "2025-09-14T22:17:23.147Z" }, ] diff --git a/conflict-files.txt b/conflict-files.txt new file mode 100644 index 0000000000..547d3faa7b --- /dev/null +++ b/conflict-files.txt @@ -0,0 +1,25 @@ +web/app/components/base/form/components/base/base-field.tsx +web/app/components/plugins/types.ts +web/app/components/workflow-app/components/workflow-children.tsx +web/app/components/workflow-app/components/workflow-header/app-publisher-trigger.tsx +web/app/components/workflow-app/hooks/use-nodes-sync-draft.ts +web/app/components/workflow/block-selector/constants.tsx +web/app/components/workflow/block-selector/hooks.ts +web/app/components/workflow/block-selector/index.tsx +web/app/components/workflow/block-selector/tabs.tsx +web/app/components/workflow/block-selector/types.ts +web/app/components/workflow/constants.ts +web/app/components/workflow/header/header-in-normal.tsx +web/app/components/workflow/header/run-and-history.tsx +web/app/components/workflow/hooks/use-checklist.ts +web/app/components/workflow/hooks/use-nodes-interactions.ts +web/app/components/workflow/hooks/use-workflow.ts +web/app/components/workflow/nodes/_base/components/form-input-item.tsx +web/app/components/workflow/nodes/_base/components/variable/utils.ts +web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx +web/app/components/workflow/nodes/_base/components/workflow-panel/index.tsx +web/app/components/workflow/nodes/tool/components/tool-form/index.tsx +web/app/components/workflow/nodes/tool/components/tool-form/item.tsx +web/app/components/workflow/types.ts +web/app/components/workflow/utils/workflow.ts +web/service/use-tools.ts diff --git 
a/dev/start-beat b/dev/start-beat new file mode 100755 index 0000000000..e417874b25 --- /dev/null +++ b/dev/start-beat @@ -0,0 +1,60 @@ +#!/bin/bash + +set -x + +# Help function +show_help() { + echo "Usage: $0 [OPTIONS]" + echo "" + echo "Options:" + echo " --loglevel LEVEL Log level (default: INFO)" + echo " --scheduler SCHEDULER Scheduler class (default: celery.beat:PersistentScheduler)" + echo " -h, --help Show this help message" + echo "" + echo "Examples:" + echo " $0" + echo " $0 --loglevel DEBUG" + echo " $0 --scheduler django_celery_beat.schedulers:DatabaseScheduler" + echo "" + echo "Description:" + echo " Starts Celery Beat scheduler for periodic task execution." + echo " Beat sends scheduled tasks to worker queues at specified intervals." +} + +# Parse command line arguments +LOGLEVEL="INFO" +SCHEDULER="celery.beat:PersistentScheduler" + +while [[ $# -gt 0 ]]; do + case $1 in + --loglevel) + LOGLEVEL="$2" + shift 2 + ;; + --scheduler) + SCHEDULER="$2" + shift 2 + ;; + -h|--help) + show_help + exit 0 + ;; + *) + echo "Unknown option: $1" + show_help + exit 1 + ;; + esac +done + +SCRIPT_DIR="$(dirname "$(realpath "$0")")" +cd "$SCRIPT_DIR/.." + +echo "Starting Celery Beat with:" +echo " Log Level: ${LOGLEVEL}" +echo " Scheduler: ${SCHEDULER}" + +uv --directory api run \ + celery -A app.celery beat \ + --loglevel ${LOGLEVEL} \ + --scheduler ${SCHEDULER} \ No newline at end of file diff --git a/dev/start-worker b/dev/start-worker index a2af04c01c..37dd62ea0b 100755 --- a/dev/start-worker +++ b/dev/start-worker @@ -2,10 +2,102 @@ set -x +# Help function +show_help() { + echo "Usage: $0 [OPTIONS]" + echo "" + echo "Options:" + echo " -q, --queues QUEUES Comma-separated list of queues to process" + echo " -c, --concurrency NUM Number of worker processes (default: 1)" + echo " -P, --pool POOL Pool implementation (default: gevent)" + echo " --loglevel LEVEL Log level (default: INFO)" + echo " -h, --help Show this help message" + echo "" + echo "Examples:" + echo " $0 --queues dataset,workflow" + echo " $0 --queues workflow_professional,workflow_team --concurrency 4" + echo " $0 --queues dataset --concurrency 2 --pool prefork" + echo "" + echo "Available queues:" + echo " dataset - RAG indexing and document processing" + echo " workflow - Workflow triggers (community edition)" + echo " workflow_professional - Professional tier workflows (cloud edition)" + echo " workflow_team - Team tier workflows (cloud edition)" + echo " workflow_sandbox - Sandbox tier workflows (cloud edition)" + echo " schedule_poller - Schedule polling tasks" + echo " schedule_executor - Schedule execution tasks" + echo " generation - Content generation tasks" + echo " mail - Email notifications" + echo " ops_trace - Operations tracing" + echo " app_deletion - Application cleanup" + echo " plugin - Plugin operations" + echo " workflow_storage - Workflow storage tasks" +} + +# Parse command line arguments +QUEUES="" +CONCURRENCY=1 +POOL="gevent" +LOGLEVEL="INFO" + +while [[ $# -gt 0 ]]; do + case $1 in + -q|--queues) + QUEUES="$2" + shift 2 + ;; + -c|--concurrency) + CONCURRENCY="$2" + shift 2 + ;; + -P|--pool) + POOL="$2" + shift 2 + ;; + --loglevel) + LOGLEVEL="$2" + shift 2 + ;; + -h|--help) + show_help + exit 0 + ;; + *) + echo "Unknown option: $1" + show_help + exit 1 + ;; + esac +done + SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/.." 
+# If no queues specified, use edition-based defaults +if [[ -z "${QUEUES}" ]]; then + # Get EDITION from environment, default to SELF_HOSTED (community edition) + EDITION=${EDITION:-"SELF_HOSTED"} + + # Configure queues based on edition + if [[ "${EDITION}" == "CLOUD" ]]; then + # Cloud edition: separate queues for dataset and trigger tasks + QUEUES="dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor" + else + # Community edition (SELF_HOSTED): dataset and workflow have separate queues + QUEUES="dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor" + fi + + echo "No queues specified, using edition-based defaults: ${QUEUES}" +else + echo "Using specified queues: ${QUEUES}" +fi + +echo "Starting Celery worker with:" +echo " Queues: ${QUEUES}" +echo " Concurrency: ${CONCURRENCY}" +echo " Pool: ${POOL}" +echo " Log Level: ${LOGLEVEL}" uv --directory api run \ celery -A app.celery worker \ - -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation + -P ${POOL} -c ${CONCURRENCY} --loglevel ${LOGLEVEL} -Q ${QUEUES} diff --git a/docker/.env.example b/docker/.env.example index 4575e11b99..a90b6d7601 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -929,6 +929,9 @@ HTTP_REQUEST_NODE_SSL_VERIFY=True # Base64 encoded client private key data for mutual TLS authentication (PEM format, optional) # HTTP_REQUEST_NODE_SSL_CLIENT_KEY_DATA=LS0tLS1CRUdJTi... +# Webhook request configuration +WEBHOOK_REQUEST_BODY_MAX_SIZE=10485760 + # Respect X-* headers to redirect clients RESPECT_XFORWARD_HEADERS_ENABLED=false @@ -1294,3 +1297,7 @@ ENABLE_CLEAN_MESSAGES=false ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false ENABLE_DATASETS_QUEUE_MONITOR=false ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true +ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK=true +WORKFLOW_SCHEDULE_POLLER_INTERVAL=1 +WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE=100 +WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK=0 diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 096bddae0b..05d940b763 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -29,14 +29,14 @@ services: - default # worker service - # The Celery worker for processing the queue. + # The Celery worker for processing all queues (dataset, workflow, mail, etc.) worker: image: langgenius/dify-api:2.0.0-beta.2 restart: always environment: # Use the shared environment variables. <<: *shared-api-worker-env - # Startup mode, 'worker' starts the Celery worker for processing the queue. + # Startup mode, 'worker' starts the Celery worker for processing all queues. 
MODE: worker SENTRY_DSN: ${API_SENTRY_DSN:-} SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0} diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 1d412d714f..41af2daf2d 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -412,6 +412,7 @@ x-shared-env: &shared-api-worker-env HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760} HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576} HTTP_REQUEST_NODE_SSL_VERIFY: ${HTTP_REQUEST_NODE_SSL_VERIFY:-True} + WEBHOOK_REQUEST_BODY_MAX_SIZE: ${WEBHOOK_REQUEST_BODY_MAX_SIZE:-10485760} RESPECT_XFORWARD_HEADERS_ENABLED: ${RESPECT_XFORWARD_HEADERS_ENABLED:-false} SSRF_PROXY_HTTP_URL: ${SSRF_PROXY_HTTP_URL:-http://ssrf_proxy:3128} SSRF_PROXY_HTTPS_URL: ${SSRF_PROXY_HTTPS_URL:-http://ssrf_proxy:3128} @@ -586,6 +587,10 @@ x-shared-env: &shared-api-worker-env ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: ${ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK:-false} ENABLE_DATASETS_QUEUE_MONITOR: ${ENABLE_DATASETS_QUEUE_MONITOR:-false} ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: ${ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK:-true} + ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK: ${ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK:-true} + WORKFLOW_SCHEDULE_POLLER_INTERVAL: ${WORKFLOW_SCHEDULE_POLLER_INTERVAL:-1} + WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE: ${WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE:-100} + WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK: ${WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK:-0} services: # API service diff --git a/spec.http b/spec.http deleted file mode 100644 index dc3a37d08a..0000000000 --- a/spec.http +++ /dev/null @@ -1,4 +0,0 @@ -GET /console/api/spec/schema-definitions -Host: cloud-rag.dify.dev -authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoiNzExMDZhYTQtZWJlMC00NGMzLWI4NWYtMWQ4Mjc5ZTExOGZmIiwiZXhwIjoxNzU2MTkyNDE4LCJpc3MiOiJDTE9VRCIsInN1YiI6IkNvbnNvbGUgQVBJIFBhc3Nwb3J0In0.Yx_TMdWVXCp5YEoQ8WR90lRhHHKggxAQvEl5RUnkZuc -### \ No newline at end of file diff --git a/web/__tests__/workflow-onboarding-integration.test.tsx b/web/__tests__/workflow-onboarding-integration.test.tsx new file mode 100644 index 0000000000..577bc20db1 --- /dev/null +++ b/web/__tests__/workflow-onboarding-integration.test.tsx @@ -0,0 +1,572 @@ +import { BlockEnum } from '@/app/components/workflow/types' +import { useWorkflowStore } from '@/app/components/workflow/store' + +// Mock zustand store +jest.mock('@/app/components/workflow/store') + +// Mock ReactFlow store +const mockGetNodes = jest.fn() +jest.mock('reactflow', () => ({ + useStoreApi: () => ({ + getState: () => ({ + getNodes: mockGetNodes, + }), + }), +})) + +describe('Workflow Onboarding Integration Logic', () => { + const mockSetShowOnboarding = jest.fn() + const mockSetHasSelectedStartNode = jest.fn() + const mockSetHasShownOnboarding = jest.fn() + + beforeEach(() => { + jest.clearAllMocks() + + // Mock store implementation + ;(useWorkflowStore as jest.Mock).mockReturnValue({ + showOnboarding: false, + setShowOnboarding: mockSetShowOnboarding, + hasSelectedStartNode: false, + setHasSelectedStartNode: mockSetHasSelectedStartNode, + hasShownOnboarding: false, + setHasShownOnboarding: mockSetHasShownOnboarding, + notInitialWorkflow: false, + }) + }) + + describe('Onboarding State Management', () => { + it('should initialize onboarding state correctly', () => { + const store = useWorkflowStore() + + expect(store.showOnboarding).toBe(false) + expect(store.hasSelectedStartNode).toBe(false) + expect(store.hasShownOnboarding).toBe(false) + }) + + 
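+    // Assumed flag semantics (inferred from the checks in the tests below, not asserted
+    // by the store itself): showOnboarding toggles the onboarding panel's visibility,
+    // hasShownOnboarding prevents re-triggering it within the same session, and
+    // hasSelectedStartNode records that the user already picked a start/trigger node.
+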
it('should update onboarding visibility', () => { + const store = useWorkflowStore() + + store.setShowOnboarding(true) + expect(mockSetShowOnboarding).toHaveBeenCalledWith(true) + + store.setShowOnboarding(false) + expect(mockSetShowOnboarding).toHaveBeenCalledWith(false) + }) + + it('should track node selection state', () => { + const store = useWorkflowStore() + + store.setHasSelectedStartNode(true) + expect(mockSetHasSelectedStartNode).toHaveBeenCalledWith(true) + }) + + it('should track onboarding show state', () => { + const store = useWorkflowStore() + + store.setHasShownOnboarding(true) + expect(mockSetHasShownOnboarding).toHaveBeenCalledWith(true) + }) + }) + + describe('Node Validation Logic', () => { + /** + * Test the critical fix in use-nodes-sync-draft.ts + * This ensures trigger nodes are recognized as valid start nodes + */ + it('should validate Start node as valid start node', () => { + const mockNode = { + data: { type: BlockEnum.Start }, + id: 'start-1', + } + + // Simulate the validation logic from use-nodes-sync-draft.ts + const isValidStartNode = mockNode.data.type === BlockEnum.Start + || mockNode.data.type === BlockEnum.TriggerSchedule + || mockNode.data.type === BlockEnum.TriggerWebhook + || mockNode.data.type === BlockEnum.TriggerPlugin + + expect(isValidStartNode).toBe(true) + }) + + it('should validate TriggerSchedule as valid start node', () => { + const mockNode = { + data: { type: BlockEnum.TriggerSchedule }, + id: 'trigger-schedule-1', + } + + const isValidStartNode = mockNode.data.type === BlockEnum.Start + || mockNode.data.type === BlockEnum.TriggerSchedule + || mockNode.data.type === BlockEnum.TriggerWebhook + || mockNode.data.type === BlockEnum.TriggerPlugin + + expect(isValidStartNode).toBe(true) + }) + + it('should validate TriggerWebhook as valid start node', () => { + const mockNode = { + data: { type: BlockEnum.TriggerWebhook }, + id: 'trigger-webhook-1', + } + + const isValidStartNode = mockNode.data.type === BlockEnum.Start + || mockNode.data.type === BlockEnum.TriggerSchedule + || mockNode.data.type === BlockEnum.TriggerWebhook + || mockNode.data.type === BlockEnum.TriggerPlugin + + expect(isValidStartNode).toBe(true) + }) + + it('should validate TriggerPlugin as valid start node', () => { + const mockNode = { + data: { type: BlockEnum.TriggerPlugin }, + id: 'trigger-plugin-1', + } + + const isValidStartNode = mockNode.data.type === BlockEnum.Start + || mockNode.data.type === BlockEnum.TriggerSchedule + || mockNode.data.type === BlockEnum.TriggerWebhook + || mockNode.data.type === BlockEnum.TriggerPlugin + + expect(isValidStartNode).toBe(true) + }) + + it('should reject non-trigger nodes as invalid start nodes', () => { + const mockNode = { + data: { type: BlockEnum.LLM }, + id: 'llm-1', + } + + const isValidStartNode = mockNode.data.type === BlockEnum.Start + || mockNode.data.type === BlockEnum.TriggerSchedule + || mockNode.data.type === BlockEnum.TriggerWebhook + || mockNode.data.type === BlockEnum.TriggerPlugin + + expect(isValidStartNode).toBe(false) + }) + + it('should handle array of nodes with mixed types', () => { + const mockNodes = [ + { data: { type: BlockEnum.LLM }, id: 'llm-1' }, + { data: { type: BlockEnum.TriggerWebhook }, id: 'webhook-1' }, + { data: { type: BlockEnum.Answer }, id: 'answer-1' }, + ] + + // Simulate hasStartNode logic from use-nodes-sync-draft.ts + const hasStartNode = mockNodes.find(node => + node.data.type === BlockEnum.Start + || node.data.type === BlockEnum.TriggerSchedule + || node.data.type === 
BlockEnum.TriggerWebhook + || node.data.type === BlockEnum.TriggerPlugin, + ) + + expect(hasStartNode).toBeTruthy() + expect(hasStartNode?.id).toBe('webhook-1') + }) + + it('should return undefined when no valid start nodes exist', () => { + const mockNodes = [ + { data: { type: BlockEnum.LLM }, id: 'llm-1' }, + { data: { type: BlockEnum.Answer }, id: 'answer-1' }, + ] + + const hasStartNode = mockNodes.find(node => + node.data.type === BlockEnum.Start + || node.data.type === BlockEnum.TriggerSchedule + || node.data.type === BlockEnum.TriggerWebhook + || node.data.type === BlockEnum.TriggerPlugin, + ) + + expect(hasStartNode).toBeUndefined() + }) + }) + + describe('Auto-expand Logic for Node Handles', () => { + /** + * Test the auto-expand logic from node-handle.tsx + * This ensures all trigger types auto-expand the block selector + */ + it('should auto-expand for Start node in new workflow', () => { + const notInitialWorkflow = true + const nodeType = BlockEnum.Start + const isChatMode = false + + const shouldAutoExpand = notInitialWorkflow && ( + nodeType === BlockEnum.Start + || nodeType === BlockEnum.TriggerSchedule + || nodeType === BlockEnum.TriggerWebhook + || nodeType === BlockEnum.TriggerPlugin + ) && !isChatMode + + expect(shouldAutoExpand).toBe(true) + }) + + it('should auto-expand for TriggerSchedule in new workflow', () => { + const notInitialWorkflow = true + const nodeType = BlockEnum.TriggerSchedule + const isChatMode = false + + const shouldAutoExpand = notInitialWorkflow && ( + nodeType === BlockEnum.Start + || nodeType === BlockEnum.TriggerSchedule + || nodeType === BlockEnum.TriggerWebhook + || nodeType === BlockEnum.TriggerPlugin + ) && !isChatMode + + expect(shouldAutoExpand).toBe(true) + }) + + it('should auto-expand for TriggerWebhook in new workflow', () => { + const notInitialWorkflow = true + const nodeType = BlockEnum.TriggerWebhook + const isChatMode = false + + const shouldAutoExpand = notInitialWorkflow && ( + nodeType === BlockEnum.Start + || nodeType === BlockEnum.TriggerSchedule + || nodeType === BlockEnum.TriggerWebhook + || nodeType === BlockEnum.TriggerPlugin + ) && !isChatMode + + expect(shouldAutoExpand).toBe(true) + }) + + it('should auto-expand for TriggerPlugin in new workflow', () => { + const notInitialWorkflow = true + const nodeType = BlockEnum.TriggerPlugin + const isChatMode = false + + const shouldAutoExpand = notInitialWorkflow && ( + nodeType === BlockEnum.Start + || nodeType === BlockEnum.TriggerSchedule + || nodeType === BlockEnum.TriggerWebhook + || nodeType === BlockEnum.TriggerPlugin + ) && !isChatMode + + expect(shouldAutoExpand).toBe(true) + }) + + it('should not auto-expand for non-trigger nodes', () => { + const notInitialWorkflow = true + const nodeType = BlockEnum.LLM + const isChatMode = false + + const shouldAutoExpand = notInitialWorkflow && ( + nodeType === BlockEnum.Start + || nodeType === BlockEnum.TriggerSchedule + || nodeType === BlockEnum.TriggerWebhook + || nodeType === BlockEnum.TriggerPlugin + ) && !isChatMode + + expect(shouldAutoExpand).toBe(false) + }) + + it('should not auto-expand in chat mode', () => { + const notInitialWorkflow = true + const nodeType = BlockEnum.Start + const isChatMode = true + + const shouldAutoExpand = notInitialWorkflow && ( + nodeType === BlockEnum.Start + || nodeType === BlockEnum.TriggerSchedule + || nodeType === BlockEnum.TriggerWebhook + || nodeType === BlockEnum.TriggerPlugin + ) && !isChatMode + + expect(shouldAutoExpand).toBe(false) + }) + + it('should not auto-expand for existing 
workflows', () => { + const notInitialWorkflow = false + const nodeType = BlockEnum.Start + const isChatMode = false + + const shouldAutoExpand = notInitialWorkflow && ( + nodeType === BlockEnum.Start + || nodeType === BlockEnum.TriggerSchedule + || nodeType === BlockEnum.TriggerWebhook + || nodeType === BlockEnum.TriggerPlugin + ) && !isChatMode + + expect(shouldAutoExpand).toBe(false) + }) + }) + + describe('Node Creation Without Auto-selection', () => { + /** + * Test that nodes are created without the 'selected: true' property + * This prevents auto-opening the properties panel + */ + it('should create Start node without auto-selection', () => { + const nodeData = { type: BlockEnum.Start, title: 'Start' } + + // Simulate node creation logic from workflow-children.tsx + const createdNodeData = { + ...nodeData, + // Note: 'selected: true' should NOT be added + } + + expect(createdNodeData.selected).toBeUndefined() + expect(createdNodeData.type).toBe(BlockEnum.Start) + }) + + it('should create TriggerWebhook node without auto-selection', () => { + const nodeData = { type: BlockEnum.TriggerWebhook, title: 'Webhook Trigger' } + const toolConfig = { webhook_url: 'https://example.com/webhook' } + + const createdNodeData = { + ...nodeData, + ...toolConfig, + // Note: 'selected: true' should NOT be added + } + + expect(createdNodeData.selected).toBeUndefined() + expect(createdNodeData.type).toBe(BlockEnum.TriggerWebhook) + expect(createdNodeData.webhook_url).toBe('https://example.com/webhook') + }) + + it('should preserve other node properties while avoiding auto-selection', () => { + const nodeData = { + type: BlockEnum.TriggerSchedule, + title: 'Schedule Trigger', + config: { interval: '1h' }, + } + + const createdNodeData = { + ...nodeData, + } + + expect(createdNodeData.selected).toBeUndefined() + expect(createdNodeData.type).toBe(BlockEnum.TriggerSchedule) + expect(createdNodeData.title).toBe('Schedule Trigger') + expect(createdNodeData.config).toEqual({ interval: '1h' }) + }) + }) + + describe('Workflow Initialization Logic', () => { + /** + * Test the initialization logic from use-workflow-init.ts + * This ensures onboarding is triggered correctly for new workflows + */ + it('should trigger onboarding for new workflow when draft does not exist', () => { + // Simulate the error handling logic from use-workflow-init.ts + const error = { + json: jest.fn().mockResolvedValue({ code: 'draft_workflow_not_exist' }), + bodyUsed: false, + } + + const mockWorkflowStore = { + setState: jest.fn(), + } + + // Simulate error handling + if (error && error.json && !error.bodyUsed) { + error.json().then((err: any) => { + if (err.code === 'draft_workflow_not_exist') { + mockWorkflowStore.setState({ + notInitialWorkflow: true, + showOnboarding: true, + }) + } + }) + } + + return error.json().then(() => { + expect(mockWorkflowStore.setState).toHaveBeenCalledWith({ + notInitialWorkflow: true, + showOnboarding: true, + }) + }) + }) + + it('should not trigger onboarding for existing workflows', () => { + // Simulate successful draft fetch + const mockWorkflowStore = { + setState: jest.fn(), + } + + // Normal initialization path should not set showOnboarding: true + mockWorkflowStore.setState({ + environmentVariables: [], + conversationVariables: [], + }) + + expect(mockWorkflowStore.setState).not.toHaveBeenCalledWith( + expect.objectContaining({ showOnboarding: true }), + ) + }) + + it('should create empty draft with proper structure', () => { + const mockSyncWorkflowDraft = jest.fn() + const appId = 
'test-app-id' + + // Simulate the syncWorkflowDraft call from use-workflow-init.ts + const draftParams = { + url: `/apps/${appId}/workflows/draft`, + params: { + graph: { + nodes: [], // Empty nodes initially + edges: [], + }, + features: { + retriever_resource: { enabled: true }, + }, + environment_variables: [], + conversation_variables: [], + }, + } + + mockSyncWorkflowDraft(draftParams) + + expect(mockSyncWorkflowDraft).toHaveBeenCalledWith({ + url: `/apps/${appId}/workflows/draft`, + params: { + graph: { + nodes: [], + edges: [], + }, + features: { + retriever_resource: { enabled: true }, + }, + environment_variables: [], + conversation_variables: [], + }, + }) + }) + }) + + describe('Auto-Detection for Empty Canvas', () => { + beforeEach(() => { + mockGetNodes.mockClear() + }) + + it('should detect empty canvas and trigger onboarding', () => { + // Mock empty canvas + mockGetNodes.mockReturnValue([]) + + // Mock store with proper state for auto-detection + ;(useWorkflowStore as jest.Mock).mockReturnValue({ + showOnboarding: false, + hasShownOnboarding: false, + notInitialWorkflow: false, + setShowOnboarding: mockSetShowOnboarding, + setHasShownOnboarding: mockSetHasShownOnboarding, + getState: () => ({ + showOnboarding: false, + hasShownOnboarding: false, + notInitialWorkflow: false, + setShowOnboarding: mockSetShowOnboarding, + setHasShownOnboarding: mockSetHasShownOnboarding, + }), + }) + + // Simulate empty canvas check logic + const nodes = mockGetNodes() + const startNodeTypes = [ + BlockEnum.Start, + BlockEnum.TriggerSchedule, + BlockEnum.TriggerWebhook, + BlockEnum.TriggerPlugin, + ] + const hasStartNode = nodes.some(node => startNodeTypes.includes(node.data?.type)) + const isEmpty = nodes.length === 0 || !hasStartNode + + expect(isEmpty).toBe(true) + expect(nodes.length).toBe(0) + }) + + it('should detect canvas with non-start nodes as empty', () => { + // Mock canvas with non-start nodes + mockGetNodes.mockReturnValue([ + { id: '1', data: { type: BlockEnum.LLM } }, + { id: '2', data: { type: BlockEnum.Code } }, + ]) + + const nodes = mockGetNodes() + const startNodeTypes = [ + BlockEnum.Start, + BlockEnum.TriggerSchedule, + BlockEnum.TriggerWebhook, + BlockEnum.TriggerPlugin, + ] + const hasStartNode = nodes.some(node => startNodeTypes.includes(node.data.type)) + const isEmpty = nodes.length === 0 || !hasStartNode + + expect(isEmpty).toBe(true) + expect(hasStartNode).toBe(false) + }) + + it('should not detect canvas with start nodes as empty', () => { + // Mock canvas with start node + mockGetNodes.mockReturnValue([ + { id: '1', data: { type: BlockEnum.Start } }, + ]) + + const nodes = mockGetNodes() + const startNodeTypes = [ + BlockEnum.Start, + BlockEnum.TriggerSchedule, + BlockEnum.TriggerWebhook, + BlockEnum.TriggerPlugin, + ] + const hasStartNode = nodes.some(node => startNodeTypes.includes(node.data.type)) + const isEmpty = nodes.length === 0 || !hasStartNode + + expect(isEmpty).toBe(false) + expect(hasStartNode).toBe(true) + }) + + it('should not trigger onboarding if already shown in session', () => { + // Mock empty canvas + mockGetNodes.mockReturnValue([]) + + // Mock store with hasShownOnboarding = true + ;(useWorkflowStore as jest.Mock).mockReturnValue({ + showOnboarding: false, + hasShownOnboarding: true, // Already shown in this session + notInitialWorkflow: false, + setShowOnboarding: mockSetShowOnboarding, + setHasShownOnboarding: mockSetHasShownOnboarding, + getState: () => ({ + showOnboarding: false, + hasShownOnboarding: true, + notInitialWorkflow: false, 
+ setShowOnboarding: mockSetShowOnboarding, + setHasShownOnboarding: mockSetHasShownOnboarding, + }), + }) + + // Simulate the check logic with hasShownOnboarding = true + const store = useWorkflowStore() + const shouldTrigger = !store.hasShownOnboarding && !store.showOnboarding && !store.notInitialWorkflow + + expect(shouldTrigger).toBe(false) + }) + + it('should not trigger onboarding during initial workflow creation', () => { + // Mock empty canvas + mockGetNodes.mockReturnValue([]) + + // Mock store with notInitialWorkflow = true (initial creation) + ;(useWorkflowStore as jest.Mock).mockReturnValue({ + showOnboarding: false, + hasShownOnboarding: false, + notInitialWorkflow: true, // Initial workflow creation + setShowOnboarding: mockSetShowOnboarding, + setHasShownOnboarding: mockSetHasShownOnboarding, + getState: () => ({ + showOnboarding: false, + hasShownOnboarding: false, + notInitialWorkflow: true, + setShowOnboarding: mockSetShowOnboarding, + setHasShownOnboarding: mockSetHasShownOnboarding, + }), + }) + + // Simulate the check logic with notInitialWorkflow = true + const store = useWorkflowStore() + const shouldTrigger = !store.hasShownOnboarding && !store.showOnboarding && !store.notInitialWorkflow + + expect(shouldTrigger).toBe(false) + }) + }) +}) diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/card-view.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/card-view.tsx index e58e79918f..e66dde269b 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/card-view.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/card-view.tsx @@ -6,6 +6,7 @@ import { useContext } from 'use-context-selector' import AppCard from '@/app/components/app/overview/app-card' import Loading from '@/app/components/base/loading' import MCPServiceCard from '@/app/components/tools/mcp/mcp-service-card' +import TriggerCard from '@/app/components/app/overview/trigger-card' import { ToastContext } from '@/app/components/base/toast' import { fetchAppDetail, @@ -33,6 +34,7 @@ const CardView: FC = ({ appId, isInPanel, className }) => { const setAppDetail = useAppStore(state => state.setAppDetail) const showMCPCard = isInPanel + const showTriggerCard = isInPanel && appDetail?.mode === 'workflow' const updateAppDetail = async () => { try { @@ -125,6 +127,11 @@ const CardView: FC = ({ appId, isInPanel, className }) => { appInfo={appDetail} /> )} + {showTriggerCard && ( + + )} ) } diff --git a/web/app/components/app-sidebar/app-info.tsx b/web/app/components/app-sidebar/app-info.tsx index d22577c9ad..b35a81e29c 100644 --- a/web/app/components/app-sidebar/app-info.tsx +++ b/web/app/components/app-sidebar/app-info.tsx @@ -26,7 +26,6 @@ import { fetchWorkflowDraft } from '@/service/workflow' import ContentDialog from '@/app/components/base/content-dialog' import Button from '@/app/components/base/button' import CardView from '@/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/card-view' -import Divider from '../base/divider' import type { Operation } from './app-operations' import AppOperations from './app-operations' import dynamic from 'next/dynamic' @@ -208,7 +207,7 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx if (!appDetail) return null - const operations = [ + const primaryOperations = [ { id: 'edit', title: t('app.editApp'), @@ -235,7 +234,11 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx icon: , onClick: exportCheck, }, - (appDetail.mode !== 
'agent-chat' && (appDetail.mode === 'advanced-chat' || appDetail.mode === 'workflow')) ? { + ] + + const secondaryOperations: Operation[] = [ + // Import DSL (conditional) + ...(appDetail.mode !== 'agent-chat' && (appDetail.mode === 'advanced-chat' || appDetail.mode === 'workflow')) ? [{ id: 'import', title: t('workflow.common.importDSL'), icon: , @@ -244,18 +247,39 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx onDetailExpand?.(false) setShowImportDSLModal(true) }, - } : undefined, - (appDetail.mode !== 'agent-chat' && (appDetail.mode === 'completion' || appDetail.mode === 'chat')) ? { - id: 'switch', - title: t('app.switch'), - icon: , + }] : [], + // Divider + { + id: 'divider-1', + title: '', + icon: <>, + onClick: () => { /* divider has no action */ }, + type: 'divider' as const, + }, + // Delete operation + { + id: 'delete', + title: t('common.operation.delete'), + icon: , onClick: () => { setOpen(false) onDetailExpand?.(false) - setShowSwitchModal(true) + setShowConfirmDelete(true) }, - } : undefined, - ].filter((op): op is Operation => Boolean(op)) + }, + ] + + // Keep the switch operation separate as it's not part of the main operations + const switchOperation = (appDetail.mode !== 'agent-chat' && (appDetail.mode === 'completion' || appDetail.mode === 'chat')) ? { + id: 'switch', + title: t('app.switch'), + icon: , + onClick: () => { + setOpen(false) + onDetailExpand?.(false) + setShowSwitchModal(true) + }, + } : null return (
@@ -333,7 +357,8 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx {/* operations */}
- -
- -
+ {/* Switch operation (if available) */} + {switchOperation && ( +
+ +
+ )} {showSwitchModal && ( void + id: string + title: string + icon: JSX.Element + onClick: () => void + type?: 'action' | 'divider' + className?: string } -const AppOperations = ({ operations, gap }: { - operations: Operation[] +const AppOperations = ({ primaryOperations, secondaryOperations, gap }: { + primaryOperations: Operation[] + secondaryOperations: Operation[] gap: number }) => { const { t } = useTranslation() - const [visibleOpreations, setVisibleOperations] = useState([]) - const [moreOperations, setMoreOperations] = useState([]) const [showMore, setShowMore] = useState(false) - const navRef = useRef(null) const handleTriggerMore = useCallback(() => { - setShowMore(true) - }, [setShowMore]) + setShowMore(prev => !prev) + }, []) - useEffect(() => { - const moreElement = document.getElementById('more') - const navElement = document.getElementById('nav') - let width = 0 - const containerWidth = navElement?.clientWidth ?? 0 - const moreWidth = moreElement?.clientWidth ?? 0 - - if (containerWidth === 0 || moreWidth === 0) return - - const updatedEntries: Record = operations.reduce((pre, cur) => { - pre[cur.id] = false - return pre - }, {} as Record) - const childrens = Array.from(navRef.current!.children).slice(0, -1) - for (let i = 0; i < childrens.length; i++) { - const child: any = childrens[i] - const id = child.dataset.targetid - if (!id) break - const childWidth = child.clientWidth - - if (width + gap + childWidth + moreWidth <= containerWidth) { - updatedEntries[id] = true - width += gap + childWidth - } - else { - if (i === childrens.length - 1 && width + childWidth <= containerWidth) - updatedEntries[id] = true - else - updatedEntries[id] = false - break - } + const renderSecondaryOperation = (operation: Operation, index: number) => { + if (operation.type === 'divider') { + return ( + + ) } - setVisibleOperations(operations.filter(item => updatedEntries[item.id])) - setMoreOperations(operations.filter(item => !updatedEntries[item.id])) - }, [operations, gap]) + + return ( +
+ {cloneElement(operation.icon, { + className: 'h-4 w-4 text-text-tertiary', + })} + + {operation.title} + +
+ ) + } return ( - <> - {!visibleOpreations.length &&