Merge branch 'feat/trigger' of https://github.com/langgenius/dify into feat/trigger

lyzno1 2025-10-31 20:00:51 +08:00
commit c415e5b893
No known key found for this signature in database
17 changed files with 83 additions and 92 deletions

View File

@@ -6,7 +6,7 @@ from typing import cast
 from flask import abort, request
 from flask_restx import Resource, fields, inputs, marshal_with, reqparse
 from sqlalchemy.orm import Session
-from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
+from werkzeug.exceptions import BadRequest, Forbidden, InternalServerError, NotFound
 import services
 from controllers.console import api, console_ns
@@ -992,7 +992,7 @@ class DraftWorkflowTriggerRunApi(Resource):
         except InvokeRateLimitError as ex:
             raise InvokeRateLimitHttpError(ex.description)
         except PluginInvokeError as e:
-            raise ValueError(e.to_user_friendly_error())
+            raise BadRequest(e.to_user_friendly_error())
         except Exception as e:
             logger.exception("Error polling trigger debug event")
             raise e
@@ -1050,7 +1050,7 @@ class DraftWorkflowTriggerNodeApi(Resource):
             )
             event = poller.poll()
         except PluginInvokeError as e:
-            return jsonable_encoder({"status": "error", "error": e.to_user_friendly_error()}), 500
+            return jsonable_encoder({"status": "error", "error": e.to_user_friendly_error()}), 400
         except Exception as e:
             logger.exception("Error polling trigger debug event")
             raise e
@@ -1074,7 +1074,7 @@ class DraftWorkflowTriggerNodeApi(Resource):
             logger.exception("Error running draft workflow trigger node")
             return jsonable_encoder(
                 {"status": "error", "error": "An unexpected error occurred while running the node."}
-            ), 500
+            ), 400
 @console_ns.route("/apps/<uuid:app_id>/workflows/draft/trigger/run-all")
@@ -1151,4 +1151,4 @@ class DraftWorkflowTriggerRunAllApi(Resource):
                 {
                     "status": "error",
                 }
-            ), 500
+            ), 400

View File

@@ -13,14 +13,14 @@ import contexts
 from configs import dify_config
 from core.plugin.entities.plugin_daemon import CredentialType, PluginTriggerProviderEntity
 from core.plugin.entities.request import TriggerInvokeEventResponse
-from core.plugin.impl.exc import PluginDaemonError, PluginInvokeError, PluginNotFoundError
+from core.plugin.impl.exc import PluginDaemonError, PluginNotFoundError
 from core.plugin.impl.trigger import PluginTriggerClient
 from core.trigger.entities.entities import (
     EventEntity,
     Subscription,
     UnsubscribeResult,
 )
-from core.trigger.errors import EventIgnoreError, TriggerPluginInvokeError
+from core.trigger.errors import EventIgnoreError
 from core.trigger.provider import PluginTriggerProviderController
 from models.provider_ids import TriggerProviderID
@@ -189,13 +189,8 @@ class TriggerManager:
                 request=request,
                 payload=payload,
             )
-        except EventIgnoreError as e:
+        except EventIgnoreError:
             return TriggerInvokeEventResponse(variables={}, cancelled=True)
-        except PluginInvokeError as e:
-            logger.exception("Failed to invoke trigger event")
-            raise TriggerPluginInvokeError(
-                description=e.to_user_friendly_error(plugin_name=provider.entity.identity.name)
-            ) from e
     @classmethod
     def subscribe_trigger(
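
With the TriggerPluginInvokeError wrapper removed, PluginInvokeError propagates unchanged to callers such as the controller above, while ignored events are reported through a cancelled response instead of an exception. A sketch of how a caller can branch on that flag, using the constructor shown in the hunk:

from core.plugin.entities.request import TriggerInvokeEventResponse

resp = TriggerInvokeEventResponse(variables={}, cancelled=True)
if resp.cancelled:
    # The event was intentionally ignored; there are no variables to
    # feed into a workflow run.
    print("event ignored")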

View File

@@ -1,6 +1,6 @@
 [project]
 name = "dify-api"
-version = "1.9.2"
+version = "1.10.0-rc1"
 requires-python = ">=3.11,<3.13"
 dependencies = [

View File

@@ -62,7 +62,7 @@ def test_schedule_poller_handles_aware_next_run(monkeypatch: pytest.MonkeyPatch)
     redis_client = _DummyRedis()
     poller = _make_poller(monkeypatch, redis_client)
-    base_now = datetime(2025, 1, 1, 12, 0, 0)
+    base_now = datetime(2025, 1, 1, 12, 0, 10)
     aware_next_run = datetime(2025, 1, 1, 12, 0, 5, tzinfo=UTC)
     monkeypatch.setattr(event_selectors, "naive_utc_now", lambda: base_now)
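
The test moves base_now past the aware next-run instant so the schedule reads as due. The pitfall being exercised: Python raises TypeError when comparing naive and aware datetimes, so the poller must normalize one side first. A minimal illustration (naive_utc_now names the helper the test monkeypatches):

from datetime import UTC, datetime

naive_now = datetime(2025, 1, 1, 12, 0, 10)  # value returned by the patched naive_utc_now
aware_next_run = datetime(2025, 1, 1, 12, 0, 5, tzinfo=UTC)

# aware_next_run <= naive_now would raise TypeError; convert to UTC and
# drop tzinfo so the comparison is naive-to-naive.
next_run_naive = aware_next_run.astimezone(UTC).replace(tzinfo=None)
assert next_run_naive <= naive_now  # the schedule is due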

View File

@@ -4,7 +4,7 @@ from unittest.mock import patch
 import pytest
 from core.entities.provider_entities import BasicProviderConfig
-from core.tools.utils.encryption import ProviderConfigEncrypter
+from core.helper.provider_encryption import ProviderConfigEncrypter
 # ---------------------------
@@ -88,7 +88,7 @@ def test_encrypt_missing_secret_key_is_ok(encrypter_obj):
 # ============================================================
-# ProviderConfigEncrypter.mask_tool_credentials()
+# ProviderConfigEncrypter.mask_plugin_credentials()
 # ============================================================
@@ -107,7 +107,7 @@ def test_mask_tool_credentials_long_secret(encrypter_obj, raw, prefix, suffix):
     data_in = {"username": "alice", "password": raw}
     data_copy = copy.deepcopy(data_in)
-    out = encrypter_obj.mask_tool_credentials(data_in)
+    out = encrypter_obj.mask_plugin_credentials(data_in)
     masked = out["password"]
     assert masked.startswith(prefix)
@@ -122,7 +122,7 @@ def test_mask_tool_credentials_short_secret(encrypter_obj, raw):
     """
    For length <= 6: fully mask with '*' of same length.
     """
-    out = encrypter_obj.mask_tool_credentials({"password": raw})
+    out = encrypter_obj.mask_plugin_credentials({"password": raw})
     assert out["password"] == ("*" * len(raw))
@@ -131,7 +131,7 @@ def test_mask_tool_credentials_missing_key_noop(encrypter_obj):
     data_in = {"username": "alice"}
     data_copy = copy.deepcopy(data_in)
-    out = encrypter_obj.mask_tool_credentials(data_in)
+    out = encrypter_obj.mask_plugin_credentials(data_in)
     assert out["username"] == "alice"
     assert data_in == data_copy
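
The renamed mask_plugin_credentials keeps the rule these tests describe: secrets of six characters or fewer are fully masked, longer ones keep a readable prefix and suffix. A standalone sketch of that rule (the exact prefix and suffix lengths here are illustrative, not lifted from the implementation):

def mask_secret(value: str) -> str:
    # Short secrets are replaced entirely.
    if len(value) <= 6:
        return "*" * len(value)
    # Longer secrets keep a short prefix and suffix for recognizability.
    return value[:2] + "*" * (len(value) - 4) + value[-2:]

assert mask_secret("abc") == "***"
assert mask_secret("supersecret").startswith("su")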

View File

@@ -30,7 +30,7 @@ def test_content_type_enum():
     assert ContentType.FORM_DATA == "multipart/form-data"
     assert ContentType.FORM_URLENCODED == "application/x-www-form-urlencoded"
     assert ContentType.TEXT == "text/plain"
-    assert ContentType.FORM == "form"
+    assert ContentType.BINARY == "application/octet-stream"
     # Test all enum values are strings
     for content_type in ContentType:
@@ -79,7 +79,17 @@ def test_webhook_body_parameter_creation():
 def test_webhook_body_parameter_types():
     """Test WebhookBodyParameter type validation."""
-    valid_types = ["string", "number", "boolean", "object", "array", "file"]
+    valid_types = [
+        "string",
+        "number",
+        "boolean",
+        "object",
+        "array[string]",
+        "array[number]",
+        "array[boolean]",
+        "array[object]",
+        "file",
+    ]
     for param_type in valid_types:
         param = WebhookBodyParameter(name="test", type=param_type)
@@ -127,7 +137,7 @@ def test_webhook_data_creation_full():
         title="Full Webhook Test",
         desc="A comprehensive webhook test",
         method=Method.POST,
-        **{"content-type": ContentType.FORM_DATA},
+        content_type=ContentType.FORM_DATA,
         headers=headers,
         params=params,
         body=body,
@@ -151,19 +161,13 @@ def test_webhook_data_creation_full():
 def test_webhook_data_content_type_alias():
-    """Test WebhookData content_type field alias."""
-    # Test using the alias "content-type"
-    data1 = WebhookData(title="Test", **{"content-type": "application/json"})
+    """Test WebhookData content_type accepts both strings and enum values."""
+    data1 = WebhookData(title="Test", content_type="application/json")
     assert data1.content_type == ContentType.JSON
-    # Test using the alias with enum value
-    data2 = WebhookData(title="Test", **{"content-type": ContentType.FORM_DATA})
+    data2 = WebhookData(title="Test", content_type=ContentType.FORM_DATA)
     assert data2.content_type == ContentType.FORM_DATA
-    # Test both approaches result in same field
-    assert hasattr(data1, "content_type")
-    assert hasattr(data2, "content_type")
 def test_webhook_data_model_dump():
     """Test WebhookData model serialization."""
@@ -196,12 +200,12 @@ def test_webhook_data_model_dump_with_alias():
     """Test WebhookData model serialization includes alias."""
     data = WebhookData(
         title="Test Webhook",
-        **{"content-type": ContentType.FORM_DATA},
+        content_type=ContentType.FORM_DATA,
     )
     dumped = data.model_dump(by_alias=True)
-    assert "content-type" in dumped
-    assert dumped["content-type"] == "multipart/form-data"
+    assert "content_type" in dumped
+    assert dumped["content_type"] == "multipart/form-data"
 def test_webhook_data_validation_errors():
@@ -214,9 +218,9 @@ def test_webhook_data_validation_errors():
     with pytest.raises(ValidationError):
         WebhookData(title="Test", method="invalid_method")
-    # Invalid content_type via alias
+    # Invalid content_type
     with pytest.raises(ValidationError):
-        WebhookData(title="Test", **{"content-type": "invalid/type"})
+        WebhookData(title="Test", content_type="invalid/type")
     # Invalid status_code (should be int) - use non-numeric string
     with pytest.raises(ValidationError):
@@ -276,7 +280,17 @@ def test_webhook_body_parameter_edge_cases():
     assert file_param.required is True
     # Test all valid types
-    for param_type in ["string", "number", "boolean", "object", "array", "file"]:
+    for param_type in [
+        "string",
+        "number",
+        "boolean",
+        "object",
+        "array[string]",
+        "array[number]",
+        "array[boolean]",
+        "array[object]",
+        "file",
+    ]:
         param = WebhookBodyParameter(name=f"test_{param_type}", type=param_type)
         assert param.type == param_type
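
Two behavioral shifts run through this file: the "content-type" alias gives way to a plain content_type field, and array parameters become element-typed (array[string], array[number], and so on). A simplified pydantic sketch of the field change (a stand-in model, not the real WebhookData):

from enum import StrEnum
from pydantic import BaseModel

class ContentTypeSketch(StrEnum):
    JSON = "application/json"
    FORM_DATA = "multipart/form-data"

class WebhookDataSketch(BaseModel):
    title: str
    content_type: ContentTypeSketch = ContentTypeSketch.JSON

# A plain keyword argument accepts raw strings and enum values alike,
# and serialization emits the snake_case key.
data = WebhookDataSketch(title="Test", content_type="multipart/form-data")
assert data.model_dump()["content_type"] == "multipart/form-data"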

View File

@@ -149,7 +149,7 @@ def test_webhook_error_attributes():
     assert WebhookConfigError.__name__ == "WebhookConfigError"
     # Test that all error classes have proper __module__
-    expected_module = "core.workflow.nodes.webhook.exc"
+    expected_module = "core.workflow.nodes.trigger_webhook.exc"
     assert WebhookNodeError.__module__ == expected_module
     assert WebhookTimeoutError.__module__ == expected_module
     assert WebhookNotFoundError.__module__ == expected_module

View File

@@ -5,8 +5,6 @@ from core.file import File, FileTransferMethod, FileType
 from core.variables import StringVariable
 from core.workflow.entities.graph_init_params import GraphInitParams
 from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus
-from core.workflow.nodes.answer.entities import AnswerStreamGenerateRoute
-from core.workflow.nodes.end.entities import EndStreamParam
 from core.workflow.nodes.trigger_webhook.entities import (
     ContentType,
     Method,
@@ -43,17 +41,6 @@ def create_webhook_node(webhook_data: WebhookData, variable_pool: VariablePool)
             invoke_from=InvokeFrom.SERVICE_API,
             call_depth=0,
         ),
-        graph=Graph(
-            root_node_id="1",
-            answer_stream_generate_routes=AnswerStreamGenerateRoute(
-                answer_dependencies={},
-                answer_generate_route={},
-            ),
-            end_stream_param=EndStreamParam(
-                end_dependencies={},
-                end_stream_variable_selector_mapping={},
-            ),
-        ),
         graph_runtime_state=GraphRuntimeState(
             variable_pool=variable_pool,
             start_at=0,
@@ -85,7 +72,7 @@ def test_webhook_node_basic_initialization():
     node = create_webhook_node(data, variable_pool)
-    assert node.node_type.value == "webhook"
+    assert node.node_type.value == "trigger-webhook"
     assert node.version() == "1"
     assert node._get_title() == "Test Webhook"
     assert node._node_data.method == Method.POST
@@ -101,7 +88,7 @@ def test_webhook_node_default_config():
     assert config["type"] == "webhook"
     assert config["config"]["method"] == "get"
-    assert config["config"]["content-type"] == "application/json"
+    assert config["config"]["content_type"] == "application/json"
     assert config["config"]["headers"] == []
     assert config["config"]["params"] == []
     assert config["config"]["body"] == []
@@ -142,7 +129,7 @@ def test_webhook_node_run_with_headers():
     assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED
     assert result.outputs["Authorization"] == "Bearer token123"
-    assert result.outputs["Content-Type"] == "application/json"  # Case-insensitive match
+    assert result.outputs["Content_Type"] == "application/json"  # Case-insensitive match
     assert "_webhook_raw" in result.outputs
@@ -376,8 +363,8 @@ def test_webhook_node_run_case_insensitive_headers():
     result = node._run()
     assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED
-    assert result.outputs["Content-Type"] == "application/json"
-    assert result.outputs["X-API-KEY"] == "key123"
+    assert result.outputs["Content_Type"] == "application/json"
+    assert result.outputs["X_API_KEY"] == "key123"
     assert result.outputs["authorization"] == "Bearer token"
@@ -436,13 +423,12 @@ def test_webhook_node_different_methods(method):
     assert node._node_data.method == method
-def test_webhook_data_alias_content_type():
-    """Test that content-type field alias works correctly."""
-    # Test both ways of setting content_type
-    data1 = WebhookData(title="Test", **{"content-type": "application/json"})
+def test_webhook_data_content_type_field():
+    """Test that content_type accepts both raw strings and enum values."""
+    data1 = WebhookData(title="Test", content_type="application/json")
     assert data1.content_type == ContentType.JSON
-    data2 = WebhookData(title="Test", **{"content-type": ContentType.FORM_DATA})
+    data2 = WebhookData(title="Test", content_type=ContentType.FORM_DATA)
     assert data2.content_type == ContentType.FORM_DATA
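
The rewritten assertions reflect a new output-key convention: header names are exposed with underscores in place of hyphens, presumably so they stay usable as variable selector segments. A tiny sketch of the implied normalization (the helper name is hypothetical):

def normalize_header_key(key: str) -> str:
    # "Content-Type" -> "Content_Type", "X-API-KEY" -> "X_API_KEY"
    return key.replace("-", "_")

assert normalize_header_key("Content-Type") == "Content_Type"
assert normalize_header_key("X-API-KEY") == "X_API_KEY"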

View File

@@ -135,6 +135,8 @@ class TestCelerySSLConfiguration:
         mock_config.WORKFLOW_SCHEDULE_POLLER_INTERVAL = 1
         mock_config.WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE = 100
         mock_config.WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK = 0
+        mock_config.ENABLE_TRIGGER_PROVIDER_REFRESH_TASK = False
+        mock_config.TRIGGER_PROVIDER_REFRESH_INTERVAL = 15
         with patch("extensions.ext_celery.dify_config", mock_config):
             from dify_app import DifyApp
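
The two new attributes keep the mocked config in step with beat-schedule wiring that now reads them. A purely illustrative sketch of a flag-gated beat entry (attribute names mirror the mock; the task path is hypothetical):

from datetime import timedelta
from types import SimpleNamespace

config = SimpleNamespace(
    ENABLE_TRIGGER_PROVIDER_REFRESH_TASK=False,
    TRIGGER_PROVIDER_REFRESH_INTERVAL=15,  # minutes
)

beat_schedule = {}
if config.ENABLE_TRIGGER_PROVIDER_REFRESH_TASK:
    beat_schedule["refresh-trigger-providers"] = {
        "task": "tasks.refresh_trigger_providers",  # hypothetical task path
        "schedule": timedelta(minutes=config.TRIGGER_PROVIDER_REFRESH_INTERVAL),
    }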

View File

@@ -62,12 +62,10 @@ class TestScheduleService(unittest.TestCase):
     def test_calculate_next_run_at_invalid_cron(self):
         """Test calculating next run time with invalid cron expression."""
-        from croniter import CroniterBadCronError
         cron_expr = "invalid cron"
         timezone = "UTC"
-        with pytest.raises(CroniterBadCronError):
+        with pytest.raises(ValueError):
             calculate_next_run_at(cron_expr, timezone)
     def test_calculate_next_run_at_invalid_timezone(self):
@@ -109,7 +107,7 @@ class TestScheduleService(unittest.TestCase):
         mock_session.add.assert_called_once()
         mock_session.flush.assert_called_once()
-    @patch("services.schedule_service.calculate_next_run_at")
+    @patch("services.trigger.schedule_service.calculate_next_run_at")
     def test_update_schedule(self, mock_calculate_next_run):
         """Test updating an existing schedule."""
         mock_session = MagicMock(spec=Session)
@@ -189,7 +187,7 @@ class TestScheduleService(unittest.TestCase):
         assert "Schedule not found: non-existent-id" in str(context.value)
         mock_session.delete.assert_not_called()
-    @patch("services.schedule_service.select")
+    @patch("services.trigger.schedule_service.select")
     def test_get_tenant_owner(self, mock_select):
         """Test getting tenant owner account."""
         mock_session = MagicMock(spec=Session)
@@ -211,7 +209,7 @@ class TestScheduleService(unittest.TestCase):
         assert result is not None
         assert result.id == "owner-account-id"
-    @patch("services.schedule_service.select")
+    @patch("services.trigger.schedule_service.select")
     def test_get_tenant_owner_fallback_to_admin(self, mock_select):
         """Test getting tenant owner falls back to admin if no owner."""
         mock_session = MagicMock(spec=Session)
@@ -233,7 +231,7 @@ class TestScheduleService(unittest.TestCase):
         assert result is not None
         assert result.id == "admin-account-id"
-    @patch("services.schedule_service.calculate_next_run_at")
+    @patch("services.trigger.schedule_service.calculate_next_run_at")
     def test_update_next_run_at(self, mock_calculate_next_run):
         """Test updating next run time after schedule triggered."""
         mock_session = MagicMock(spec=Session)
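
Two changes run through this file: patch targets follow the module's move to services.trigger.schedule_service, and the invalid-cron test asserts ValueError instead of CroniterBadCronError. No wrapping is needed for the latter, because croniter's exceptions already derive from ValueError; the broader assertion simply decouples the test from croniter internals:

from croniter import CroniterBadCronError

# CroniterBadCronError -> CroniterError -> ValueError, so
# pytest.raises(ValueError) still catches the library's own error.
assert issubclass(CroniterBadCronError, ValueError)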

View File

@@ -183,8 +183,8 @@ class TestWebhookServiceUnit:
         assert response_data[0]["id"] == 1
         assert response_data[1]["id"] == 2
-    @patch("services.webhook_service.ToolFileManager")
-    @patch("services.webhook_service.file_factory")
+    @patch("services.trigger.webhook_service.ToolFileManager")
+    @patch("services.trigger.webhook_service.file_factory")
     def test_process_file_uploads_success(self, mock_file_factory, mock_tool_file_manager):
         """Test successful file upload processing."""
         # Mock ToolFileManager
@@ -223,8 +223,8 @@ class TestWebhookServiceUnit:
         assert mock_tool_file_manager.call_count == 2
         assert mock_file_factory.build_from_mapping.call_count == 2
-    @patch("services.webhook_service.ToolFileManager")
-    @patch("services.webhook_service.file_factory")
+    @patch("services.trigger.webhook_service.ToolFileManager")
+    @patch("services.trigger.webhook_service.file_factory")
     def test_process_file_uploads_with_errors(self, mock_file_factory, mock_tool_file_manager):
         """Test file upload processing with errors."""
         # Mock ToolFileManager
@@ -472,15 +472,11 @@ class TestWebhookServiceUnit:
         mock_get_trigger.return_value = (mock_trigger, mock_workflow, mock_config)
         mock_extract.return_value = mock_data
-        # Test normal mode (skip_status_check=False)
         result = _prepare_webhook_execution("test_webhook", is_debug=False)
         mock_get_trigger.assert_called_with("test_webhook", skip_status_check=False)
         assert result == (mock_trigger, mock_workflow, mock_config, mock_data, None)
-        # Reset mock
         mock_get_trigger.reset_mock()
-        # Test debug mode (skip_status_check=True)
         result = _prepare_webhook_execution("test_webhook", is_debug=True)
         mock_get_trigger.assert_called_with("test_webhook", skip_status_check=True)
         assert result == (mock_trigger, mock_workflow, mock_config, mock_data, None)
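
The retargeted @patch paths track the service's move into the services.trigger package. The underlying rule: patch the name in the namespace where the module under test looks it up, not where it was originally defined. A brief sketch under that assumption:

from unittest.mock import patch

# webhook_service imports ToolFileManager into its own namespace, so the
# patch must target services.trigger.webhook_service.ToolFileManager,
# not the module that defines the class.
with patch("services.trigger.webhook_service.ToolFileManager") as mock_manager:
    mock_manager.return_value = object()  # stub instance for the test body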

View File

@@ -1326,7 +1326,7 @@ wheels = [
 [[package]]
 name = "dify-api"
-version = "1.9.2"
+version = "1.10.0-rc1"
 source = { virtual = "." }
 dependencies = [
     { name = "apscheduler" },

View File

@@ -27,7 +27,7 @@ SERVICE_API_URL=
 # Trigger external URL
 # used to display trigger endpoint API Base URL to the front-end.
 # Example: https://api.dify.ai
-TRIGGER_URL=
+TRIGGER_URL=http://localhost
 # WebApp API backend Url,
 # used to declare the back-end URL for the front-end API.

View File

@@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
 services:
   # API service
   api:
-    image: langgenius/dify-api:1.9.2
+    image: langgenius/dify-api:1.10.0-rc1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -31,7 +31,7 @@ services:
   # worker service
   # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
   worker:
-    image: langgenius/dify-api:1.9.2
+    image: langgenius/dify-api:1.10.0-rc1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -58,7 +58,7 @@ services:
   # worker_beat service
   # Celery beat for scheduling periodic tasks.
   worker_beat:
-    image: langgenius/dify-api:1.9.2
+    image: langgenius/dify-api:1.10.0-rc1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -76,7 +76,7 @@ services:
   # Frontend web application.
   web:
-    image: langgenius/dify-web:1.9.2
+    image: langgenius/dify-web:1.10.0-rc1
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -180,7 +180,7 @@ services:
   # plugin daemon
   plugin_daemon:
-    image: langgenius/dify-plugin-daemon:0.3.3-local
+    image: langgenius/dify-plugin-daemon:0.4.0-local
     restart: always
     environment:
       # Use the shared environment variables.

View File

@@ -87,7 +87,7 @@ services:
   # plugin daemon
   plugin_daemon:
-    image: langgenius/dify-plugin-daemon:0.3.3-local
+    image: langgenius/dify-plugin-daemon:0.4.0-local
     restart: always
     env_file:
       - ./middleware.env

View File

@@ -8,7 +8,7 @@ x-shared-env: &shared-api-worker-env
   CONSOLE_API_URL: ${CONSOLE_API_URL:-}
   CONSOLE_WEB_URL: ${CONSOLE_WEB_URL:-}
   SERVICE_API_URL: ${SERVICE_API_URL:-}
-  TRIGGER_URL: ${TRIGGER_URL:-}
+  TRIGGER_URL: ${TRIGGER_URL:-http://localhost}
   APP_API_URL: ${APP_API_URL:-}
   APP_WEB_URL: ${APP_WEB_URL:-}
   FILES_URL: ${FILES_URL:-}
@@ -621,7 +621,7 @@ x-shared-env: &shared-api-worker-env
 services:
   # API service
   api:
-    image: langgenius/dify-api:1.9.2
+    image: langgenius/dify-api:1.10.0-rc1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -650,7 +650,7 @@ services:
   # worker service
   # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
   worker:
-    image: langgenius/dify-api:1.9.2
+    image: langgenius/dify-api:1.10.0-rc1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -677,7 +677,7 @@ services:
   # worker_beat service
   # Celery beat for scheduling periodic tasks.
   worker_beat:
-    image: langgenius/dify-api:1.9.2
+    image: langgenius/dify-api:1.10.0-rc1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -695,7 +695,7 @@ services:
   # Frontend web application.
   web:
-    image: langgenius/dify-web:1.9.2
+    image: langgenius/dify-web:1.10.0-rc1
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -799,7 +799,7 @@ services:
   # plugin daemon
   plugin_daemon:
-    image: langgenius/dify-plugin-daemon:0.3.3-local
+    image: langgenius/dify-plugin-daemon:0.4.0-local
     restart: always
     environment:
       # Use the shared environment variables.

View File

@@ -1,6 +1,6 @@
 {
   "name": "dify-web",
-  "version": "1.9.2",
+  "version": "1.10.0-rc1",
   "private": true,
   "packageManager": "pnpm@10.19.0+sha512.c9fc7236e92adf5c8af42fd5bf1612df99c2ceb62f27047032f4720b33f8eacdde311865e91c411f2774f618d82f320808ecb51718bfa82c060c4ba7c76a32b8",
   "engines": {