fix(workflow): unblock plugin model selector tools on 1.14.0 (#35794)

This commit is contained in:
sawyer-shi 2026-05-06 15:07:43 +08:00 committed by fatelei
parent 9d401442a3
commit c4892ab788
No known key found for this signature in database
GPG Key ID: 2F91DA05646F4EED
6 changed files with 253 additions and 1 deletion

View File

@ -1078,6 +1078,13 @@ class ToolManager:
if parameter.form == ToolParameter.ToolParameterForm.FORM:
if variable_pool:
config = tool_configurations.get(parameter.name, {})
selector_value = cls._extract_runtime_selector_value(parameter, config)
if selector_value is not None:
# Selector parameters carry structured dictionaries, not scalar ToolInput values.
runtime_parameters[parameter.name] = selector_value
continue
if not (config and isinstance(config, dict) and config.get("value") is not None):
continue
tool_input = ToolNodeData.ToolInput.model_validate(tool_configurations.get(parameter.name, {}))
@ -1105,5 +1112,39 @@ class ToolManager:
runtime_parameters[parameter.name] = value
return runtime_parameters
@classmethod
def _extract_runtime_selector_value(cls, parameter: ToolParameter, config: Any) -> dict[str, Any] | None:
    """Return the normalized selector dict for a model/app selector parameter.

    Supports two config shapes:
    * new-style: the selector dict is nested under the ``"value"`` key;
    * legacy: the selector fields sit at the top level next to the
      ``"type"``/``"value"`` envelope keys.

    Returns ``None`` when *parameter* is not a selector type or *config*
    does not carry a recognizable selector payload.
    """
    selector_types = (
        ToolParameter.ToolParameterType.MODEL_SELECTOR,
        ToolParameter.ToolParameterType.APP_SELECTOR,
    )
    if parameter.type not in selector_types or not isinstance(config, dict):
        return None

    nested = config.get("value")
    if isinstance(nested, dict) and cls._is_selector_value(parameter, nested):
        # New-style config: the selector dict travels under "value".
        return cast("dict[str, Any]", parameter.init_frontend_parameter(nested))

    if cls._is_selector_value(parameter, config):
        # Legacy config: strip the envelope keys; everything else is selector data.
        flattened = {key: item for key, item in config.items() if key not in ("type", "value")}
        return cast("dict[str, Any]", parameter.init_frontend_parameter(flattened))

    return None
@classmethod
def _is_selector_value(cls, parameter: ToolParameter, value: Mapping[str, Any]) -> bool:
    """Tell whether *value* looks like a selector payload for *parameter*.

    A model selector must name ``provider``, ``model`` and ``model_type``
    as strings; an app selector must carry a string ``app_id``. Any other
    parameter type never matches.
    """
    param_type = parameter.type
    if param_type == ToolParameter.ToolParameterType.MODEL_SELECTOR:
        return all(isinstance(value.get(field), str) for field in ("provider", "model", "model_type"))
    if param_type == ToolParameter.ToolParameterType.APP_SELECTOR:
        return isinstance(value.get("app_id"), str)
    return False
ToolManager.load_hardcoded_providers_cache()

View File

@ -272,6 +272,14 @@ def _adapt_tool_node_data_for_graph(node_data: Mapping[str, Any]) -> dict[str, A
normalized_tool_configurations[name] = value
continue
selector_value = _extract_selector_configuration(value)
if selector_value is not None:
# Model/app selectors are dictionaries even when they come through the legacy tool configuration path.
# Move them to tool_parameters so graph validation does not flatten them as primitive constants.
found_legacy_tool_inputs = True
normalized_tool_parameters.setdefault(name, {"type": "constant", "value": selector_value})
continue
input_type = value.get("type")
input_value = value.get("value")
if input_type not in {"mixed", "variable", "constant"}:
@ -310,6 +318,28 @@ def _flatten_legacy_tool_configuration_value(*, input_type: Any, input_value: An
return None
def _extract_selector_configuration(value: Mapping[str, Any]) -> dict[str, Any] | None:
    """Pull a model/app selector dict out of a legacy tool configuration entry.

    Two shapes are recognized:
    * the selector dict nested under ``"value"`` — returned as a shallow copy;
    * legacy flattened form, where the selector fields sit at the top level —
      returned with the ``"type"``/``"value"`` envelope keys removed.

    Returns ``None`` when *value* carries no recognizable selector payload.
    """
    nested = value.get("value")
    if isinstance(nested, Mapping) and _is_selector_configuration(nested):
        return dict(nested)
    if _is_selector_configuration(value):
        # Legacy shape: drop the envelope keys, keep every selector field.
        return {key: item for key, item in value.items() if key not in ("type", "value")}
    return None


def _is_selector_configuration(value: Mapping[str, Any]) -> bool:
    """Tell whether *value* looks like a model selector or an app selector.

    A model selector names ``provider``, ``model`` and ``model_type`` as
    strings; an app selector carries a string ``app_id``.
    """
    has_model_fields = all(isinstance(value.get(field), str) for field in ("provider", "model", "model_type"))
    return has_model_fields or isinstance(value.get("app_id"), str)
def _normalize_email_recipients(recipients: Mapping[str, Any]) -> dict[str, Any]:
normalized = dict(recipients)

View File

@ -501,11 +501,15 @@ class DifyToolNodeRuntime(ToolNodeRuntimeProtocol):
@staticmethod
def _build_tool_runtime_spec(node_data: ToolNodeData) -> _WorkflowToolRuntimeSpec:
    """Build the workflow tool runtime spec from *node_data*.

    Form-based ``tool_parameters`` (e.g. model/app selector inputs) must
    reach the tool runtime alongside plain ``tool_configurations``, so each
    ToolInput is serialized and merged in, overriding any same-named
    configuration entry.

    Note: the diff rendering kept both the pre- and post-change
    ``tool_configurations=`` keyword arguments, which is a SyntaxError
    (duplicate keyword argument); only the merged dict is passed here.
    """
    tool_configurations = dict(node_data.tool_configurations)
    tool_configurations.update(
        {name: tool_input.model_dump(mode="python") for name, tool_input in node_data.tool_parameters.items()}
    )
    return _WorkflowToolRuntimeSpec(
        provider_type=CoreToolProviderType(node_data.provider_type.value),
        provider_id=node_data.provider_id,
        tool_name=node_data.tool_name,
        tool_configurations=tool_configurations,
        credential_id=node_data.credential_id,
    )

View File

@ -925,3 +925,78 @@ def test_convert_tool_parameters_type_constant_branch():
)
assert constant == {"text": "fixed"}
def test_convert_tool_parameters_type_model_selector_from_legacy_top_level_config():
    """A legacy top-level model-selector config must surface as a runtime selector dict."""
    parameter = ToolParameter.get_simple_instance(
        name="vision_llm_model",
        llm_description="vision model",
        typ=ToolParameter.ToolParameterType.MODEL_SELECTOR,
        required=True,
    )
    parameter.form = ToolParameter.ToolParameterForm.FORM

    # Legacy shape: selector fields flattened next to the type/value envelope.
    legacy_config = {
        "type": "constant",
        "value": "",
        "provider": "langgenius/tongyi/tongyi",
        "model": "qwen3-vl-plus",
        "model_type": "llm",
        "mode": "chat",
    }
    runtime_parameters = ToolManager._convert_tool_parameters_type(
        parameters=[parameter],
        variable_pool=Mock(),
        tool_configurations={"vision_llm_model": legacy_config},
        typ="workflow",
    )

    # The envelope keys are stripped; only selector data survives.
    assert runtime_parameters == {
        "vision_llm_model": {
            "provider": "langgenius/tongyi/tongyi",
            "model": "qwen3-vl-plus",
            "model_type": "llm",
            "mode": "chat",
        }
    }
def test_convert_tool_parameters_type_model_selector_from_constant_value_config():
    """A selector wrapped in a constant ``value`` must be unwrapped for the runtime."""
    parameter = ToolParameter.get_simple_instance(
        name="tts_model",
        llm_description="tts model",
        typ=ToolParameter.ToolParameterType.MODEL_SELECTOR,
        required=True,
    )
    parameter.form = ToolParameter.ToolParameterForm.FORM

    # New-style shape: the selector dict travels under "value".
    constant_config = {
        "type": "constant",
        "value": {
            "provider": "langgenius/tongyi/tongyi",
            "model": "qwen3-tts-flash",
            "model_type": "tts",
            "language": "Chinese",
            "voice": "Cherry",
        },
    }
    runtime_parameters = ToolManager._convert_tool_parameters_type(
        parameters=[parameter],
        variable_pool=Mock(),
        tool_configurations={"tts_model": constant_config},
        typ="workflow",
    )

    assert runtime_parameters == {
        "tts_model": {
            "provider": "langgenius/tongyi/tongyi",
            "model": "qwen3-tts-flash",
            "model_type": "tts",
            "language": "Chinese",
            "voice": "Cherry",
        }
    }

View File

@ -166,6 +166,71 @@ def test_adapt_node_data_for_graph_migrates_legacy_tool_configurations() -> None
}
def test_adapt_node_data_for_graph_preserves_model_selector_top_level_configurations() -> None:
    """Legacy top-level selector configs migrate into tool_parameters as constants."""
    node_data = {
        "type": BuiltinNodeTypes.TOOL,
        "tool_configurations": {
            "vision_llm_model": {
                "type": "constant",
                "value": "",
                "provider": "langgenius/tongyi/tongyi",
                "model": "qwen3-vl-plus",
                "model_type": "llm",
                "mode": "chat",
            },
        },
    }

    normalized = adapt_node_data_for_graph(node_data)

    # The legacy entry is moved out of tool_configurations entirely…
    assert normalized["tool_configurations"] == {}
    # …and re-homed as a constant tool_parameter whose value is the selector dict.
    assert normalized["tool_parameters"] == {
        "vision_llm_model": {
            "type": "constant",
            "value": {
                "provider": "langgenius/tongyi/tongyi",
                "model": "qwen3-vl-plus",
                "model_type": "llm",
                "mode": "chat",
            },
        }
    }
def test_adapt_node_data_for_graph_flattens_constant_model_selector_value() -> None:
    """A constant-wrapped selector config is moved to tool_parameters unchanged."""
    selector = {
        "provider": "langgenius/tongyi/tongyi",
        "model": "qwen3-tts-flash",
        "model_type": "tts",
        "language": "Chinese",
        "voice": "Cherry",
    }
    node_data = {
        "type": BuiltinNodeTypes.TOOL,
        "tool_configurations": {
            "tts_model": {"type": "constant", "value": dict(selector)},
        },
    }

    normalized = adapt_node_data_for_graph(node_data)

    assert normalized["tool_configurations"] == {}
    # The selector keeps its constant envelope and full dictionary value.
    assert normalized["tool_parameters"] == {
        "tts_model": {
            "type": "constant",
            "value": {
                "provider": "langgenius/tongyi/tongyi",
                "model": "qwen3-tts-flash",
                "model_type": "tts",
                "language": "Chinese",
                "voice": "Cherry",
            },
        }
    }
def test_adapt_node_config_for_graph_rewrites_nested_node_data() -> None:
normalized = adapt_node_config_for_graph(
{

View File

@ -22,6 +22,7 @@ from core.workflow.node_runtime import (
DifyPromptMessageSerializer,
DifyRetrieverAttachmentLoader,
DifyToolFileManager,
DifyToolNodeRuntime,
apply_dify_debug_email_recipient,
build_dify_llm_file_saver,
resolve_dify_run_context,
@ -30,6 +31,7 @@ from graphon.file import FileTransferMethod, FileType
from graphon.model_runtime.entities.common_entities import I18nObject
from graphon.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType
from graphon.nodes.human_input.entities import HumanInputNodeData
from graphon.nodes.tool.entities import ToolNodeData, ToolProviderType
from tests.workflow_test_utils import build_test_run_context
@ -334,6 +336,41 @@ def test_dify_human_input_runtime_builds_debug_repository(monkeypatch: pytest.Mo
)
def test_dify_tool_runtime_spec_prefers_tool_parameters_for_runtime_form_values() -> None:
    """tool_parameters entries must be merged into the spec's tool_configurations."""
    selector_input = {
        "type": "constant",
        "value": {
            "provider": "langgenius/tongyi/tongyi",
            "model": "qwen3-vl-plus",
            "model_type": "llm",
        },
    }
    node_data = ToolNodeData(
        provider_id="video-mixcut-agent",
        provider_type=ToolProviderType.PLUGIN,
        provider_name="sawyer-shi/video-mixcut-agent",
        tool_name="mixcut",
        tool_label="MixCut",
        tool_configurations={"count": 2},
        tool_parameters={"vision_llm_model": selector_input},
    )

    spec = DifyToolNodeRuntime._build_tool_runtime_spec(node_data)

    # Plain configurations and serialized form parameters end up side by side.
    assert spec.tool_configurations == {
        "count": 2,
        "vision_llm_model": {
            "type": "constant",
            "value": {
                "provider": "langgenius/tongyi/tongyi",
                "model": "qwen3-vl-plus",
                "model_type": "llm",
            },
        },
    }
def test_dify_human_input_runtime_create_form_filters_debugger_delivery_methods() -> None:
repository = MagicMock()
repository.create_form.return_value = sentinel.form