diff --git a/api/core/app/workflow/node_factory.py b/api/core/app/workflow/node_factory.py
index e0a0059a38..a5773bbef8 100644
--- a/api/core/app/workflow/node_factory.py
+++ b/api/core/app/workflow/node_factory.py
@@ -4,13 +4,14 @@
 from typing import TYPE_CHECKING, final
 from typing_extensions import override
 from configs import dify_config
-from core.file import file_manager
-from core.helper import ssrf_proxy
+from core.file.file_manager import file_manager
 from core.helper.code_executor.code_executor import CodeExecutor
 from core.helper.code_executor.code_node_provider import CodeNodeProvider
+from core.helper.ssrf_proxy import ssrf_proxy
 from core.tools.tool_file_manager import ToolFileManager
+from core.workflow.entities.graph_config import NodeConfigDict
 from core.workflow.enums import NodeType
-from core.workflow.graph import NodeFactory
+from core.workflow.graph.graph import NodeFactory
 from core.workflow.nodes.base.node import Node
 from core.workflow.nodes.code.code_node import CodeNode
 from core.workflow.nodes.code.limits import CodeNodeLimits
@@ -22,7 +23,6 @@ from core.workflow.nodes.template_transform.template_renderer import (
     Jinja2TemplateRenderer,
 )
 from core.workflow.nodes.template_transform.template_transform_node import TemplateTransformNode
-from libs.typing import is_str, is_str_dict

 if TYPE_CHECKING:
     from core.workflow.entities import GraphInitParams
@@ -47,9 +47,9 @@ class DifyNodeFactory(NodeFactory):
         code_providers: Sequence[type[CodeNodeProvider]] | None = None,
         code_limits: CodeNodeLimits | None = None,
         template_renderer: Jinja2TemplateRenderer | None = None,
-        http_request_http_client: HttpClientProtocol = ssrf_proxy,
+        http_request_http_client: HttpClientProtocol | None = None,
         http_request_tool_file_manager_factory: Callable[[], ToolFileManager] = ToolFileManager,
-        http_request_file_manager: FileManagerProtocol = file_manager,
+        http_request_file_manager: FileManagerProtocol | None = None,
     ) -> None:
         self.graph_init_params = graph_init_params
         self.graph_runtime_state = graph_runtime_state
@@ -68,12 +68,12 @@ class DifyNodeFactory(NodeFactory):
             max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH,
         )
         self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
-        self._http_request_http_client = http_request_http_client
+        self._http_request_http_client = http_request_http_client or ssrf_proxy
         self._http_request_tool_file_manager_factory = http_request_tool_file_manager_factory
-        self._http_request_file_manager = http_request_file_manager
+        self._http_request_file_manager = http_request_file_manager or file_manager

     @override
-    def create_node(self, node_config: dict[str, object]) -> Node:
+    def create_node(self, node_config: NodeConfigDict) -> Node:
         """
         Create a Node instance from node configuration data using the traditional mapping.
@@ -82,23 +82,14 @@ class DifyNodeFactory(NodeFactory):
         :raises ValueError: if node type is unknown or configuration is invalid
         """
         # Get node_id from config
-        node_id = node_config.get("id")
-        if not is_str(node_id):
-            raise ValueError("Node config missing id")
+        node_id = node_config["id"]

         # Get node type from config
-        node_data = node_config.get("data", {})
-        if not is_str_dict(node_data):
-            raise ValueError(f"Node {node_id} missing data information")
-
-        node_type_str = node_data.get("type")
-        if not is_str(node_type_str):
-            raise ValueError(f"Node {node_id} missing or invalid type information")
-
+        node_data = node_config["data"]
         try:
-            node_type = NodeType(node_type_str)
+            node_type = NodeType(node_data["type"])
         except ValueError:
-            raise ValueError(f"Unknown node type: {node_type_str}")
+            raise ValueError(f"Unknown node type: {node_data['type']}")

         # Get node class
         node_mapping = NODE_TYPE_CLASSES_MAPPING.get(node_type)
diff --git a/api/core/file/file_manager.py b/api/core/file/file_manager.py
index c0fefef3d0..9945d7c1ab 100644
--- a/api/core/file/file_manager.py
+++ b/api/core/file/file_manager.py
@@ -168,3 +168,18 @@ def _to_url(f: File, /):
         return sign_tool_file(tool_file_id=f.related_id, extension=f.extension)
     else:
         raise ValueError(f"Unsupported transfer method: {f.transfer_method}")
+
+
+class FileManager:
+    """
+    Adapter exposing file manager helpers behind FileManagerProtocol.
+
+    This is intentionally a thin wrapper over the existing module-level functions so callers can inject it
+    where a protocol-typed file manager is expected.
+    """
+
+    def download(self, f: File, /) -> bytes:
+        return download(f)
+
+
+file_manager = FileManager()
diff --git a/api/core/helper/code_executor/code_node_provider.py b/api/core/helper/code_executor/code_node_provider.py
index e93e1e4414..f4cce0b332 100644
--- a/api/core/helper/code_executor/code_node_provider.py
+++ b/api/core/helper/code_executor/code_node_provider.py
@@ -47,15 +47,16 @@ class CodeNodeProvider(BaseModel, ABC):

     @classmethod
     def get_default_config(cls) -> DefaultConfig:
-        return {
-            "type": "code",
-            "config": {
-                "variables": [
-                    {"variable": "arg1", "value_selector": []},
-                    {"variable": "arg2", "value_selector": []},
-                ],
-                "code_language": cls.get_language(),
-                "code": cls.get_default_code(),
-                "outputs": {"result": {"type": "string", "children": None}},
-            },
+        variables: list[VariableConfig] = [
+            {"variable": "arg1", "value_selector": []},
+            {"variable": "arg2", "value_selector": []},
+        ]
+        outputs: dict[str, OutputConfig] = {"result": {"type": "string", "children": None}}
+
+        config: CodeConfig = {
+            "variables": variables,
+            "code_language": cls.get_language(),
+            "code": cls.get_default_code(),
+            "outputs": outputs,
         }
+        return {"type": "code", "config": config}
diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py
index ddccfbaf45..54068fc28d 100644
--- a/api/core/helper/ssrf_proxy.py
+++ b/api/core/helper/ssrf_proxy.py
@@ -230,3 +230,41 @@ def delete(url: str, max_retries: int = SSRF_DEFAULT_MAX_RETRIES, **kwargs: Any) -> httpx.Response:
     return make_request("DELETE", url, max_retries=max_retries, **kwargs)


 def head(url: str, max_retries: int = SSRF_DEFAULT_MAX_RETRIES, **kwargs: Any) -> httpx.Response:
     return make_request("HEAD", url, max_retries=max_retries, **kwargs)
+
+
+class SSRFProxy:
+    """
+    Adapter exposing SSRF-protected HTTP helpers behind HttpClientProtocol.
+
+    This is intentionally a thin wrapper over the existing module-level functions so callers can inject it
+    where a protocol-typed HTTP client is expected.
+    """
+
+    @property
+    def max_retries_exceeded_error(self) -> type[Exception]:
+        return max_retries_exceeded_error
+
+    @property
+    def request_error(self) -> type[Exception]:
+        return request_error
+
+    def get(self, url: str, max_retries: int = SSRF_DEFAULT_MAX_RETRIES, **kwargs: Any) -> httpx.Response:
+        return get(url=url, max_retries=max_retries, **kwargs)
+
+    def head(self, url: str, max_retries: int = SSRF_DEFAULT_MAX_RETRIES, **kwargs: Any) -> httpx.Response:
+        return head(url=url, max_retries=max_retries, **kwargs)
+
+    def post(self, url: str, max_retries: int = SSRF_DEFAULT_MAX_RETRIES, **kwargs: Any) -> httpx.Response:
+        return post(url=url, max_retries=max_retries, **kwargs)
+
+    def put(self, url: str, max_retries: int = SSRF_DEFAULT_MAX_RETRIES, **kwargs: Any) -> httpx.Response:
+        return put(url=url, max_retries=max_retries, **kwargs)
+
+    def delete(self, url: str, max_retries: int = SSRF_DEFAULT_MAX_RETRIES, **kwargs: Any) -> httpx.Response:
+        return delete(url=url, max_retries=max_retries, **kwargs)
+
+    def patch(self, url: str, max_retries: int = SSRF_DEFAULT_MAX_RETRIES, **kwargs: Any) -> httpx.Response:
+        return patch(url=url, max_retries=max_retries, **kwargs)
+
+
+ssrf_proxy = SSRFProxy()
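Note: the SSRFProxy object above, like the FileManager object added in api/core/file/file_manager.py, is purely delegating — every method calls the module-level helper of the same name, so runtime behaviour does not change. What the wrappers add is a concrete object that satisfies the HttpClientProtocol / FileManagerProtocol structural types, which is what makes the HTTP-request code paths further down injectable. As a rough sketch (not part of this patch; the RecordingHttpClient name and its canned 200 response are illustrative assumptions), a protocol-compatible test double only needs the same surface:

    import httpx

    class RecordingHttpClient:
        # Plain attributes are enough to satisfy the protocol's error-type properties.
        max_retries_exceeded_error: type[Exception] = RuntimeError
        request_error: type[Exception] = RuntimeError

        def __init__(self) -> None:
            self.calls: list[tuple[str, str]] = []

        def _respond(self, method: str, url: str) -> httpx.Response:
            # Record the call and return a canned response instead of touching the network.
            self.calls.append((method, url))
            return httpx.Response(200, request=httpx.Request(method, url))

        def get(self, url: str, max_retries: int = 3, **kwargs) -> httpx.Response:
            return self._respond("GET", url)

        def head(self, url: str, max_retries: int = 3, **kwargs) -> httpx.Response:
            return self._respond("HEAD", url)

        def post(self, url: str, max_retries: int = 3, **kwargs) -> httpx.Response:
            return self._respond("POST", url)

        def put(self, url: str, max_retries: int = 3, **kwargs) -> httpx.Response:
            return self._respond("PUT", url)

        def delete(self, url: str, max_retries: int = 3, **kwargs) -> httpx.Response:
            return self._respond("DELETE", url)

        def patch(self, url: str, max_retries: int = 3, **kwargs) -> httpx.Response:
            return self._respond("PATCH", url)

Anything with this shape can be passed wherever the rest of the patch accepts an HttpClientProtocol, while the real ssrf_proxy singleton stays the default.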
diff --git a/api/core/schemas/registry.py b/api/core/schemas/registry.py
index b4ecfe47ff..b87fba4eaa 100644
--- a/api/core/schemas/registry.py
+++ b/api/core/schemas/registry.py
@@ -35,6 +35,7 @@ class SchemaRegistry:
             registry.load_all_versions()
             cls._default_instance = registry
+            return cls._default_instance

         return cls._default_instance
diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py
index f8213d9fd7..d561d39923 100644
--- a/api/core/tools/tool_manager.py
+++ b/api/core/tools/tool_manager.py
@@ -189,16 +189,13 @@ class ToolManager:
             raise ToolProviderNotFoundError(f"builtin tool {tool_name} not found")

         if not provider_controller.need_credentials:
-            return cast(
-                BuiltinTool,
-                builtin_tool.fork_tool_runtime(
-                    runtime=ToolRuntime(
-                        tenant_id=tenant_id,
-                        credentials={},
-                        invoke_from=invoke_from,
-                        tool_invoke_from=tool_invoke_from,
-                    )
-                ),
+            return builtin_tool.fork_tool_runtime(
+                runtime=ToolRuntime(
+                    tenant_id=tenant_id,
+                    credentials={},
+                    invoke_from=invoke_from,
+                    tool_invoke_from=tool_invoke_from,
+                )
             )

         builtin_provider = None
         if isinstance(provider_controller, PluginToolProviderController):
@@ -300,18 +297,15 @@ class ToolManager:
             decrypted_credentials = refreshed_credentials.credentials
             cache.delete()

-            return cast(
-                BuiltinTool,
-                builtin_tool.fork_tool_runtime(
-                    runtime=ToolRuntime(
-                        tenant_id=tenant_id,
-                        credentials=dict(decrypted_credentials),
-                        credential_type=CredentialType.of(builtin_provider.credential_type),
-                        runtime_parameters={},
-                        invoke_from=invoke_from,
-                        tool_invoke_from=tool_invoke_from,
-                    )
-                ),
+            return builtin_tool.fork_tool_runtime(
+                runtime=ToolRuntime(
+                    tenant_id=tenant_id,
+                    credentials=dict(decrypted_credentials),
+                    credential_type=CredentialType.of(builtin_provider.credential_type),
+                    runtime_parameters={},
+                    invoke_from=invoke_from,
+                    tool_invoke_from=tool_invoke_from,
+                )
             )

         elif provider_type == ToolProviderType.API:
diff --git a/api/core/workflow/entities/graph_config.py b/api/core/workflow/entities/graph_config.py
new file mode 100644
index 0000000000..209dcfe6bc
--- /dev/null
+++ b/api/core/workflow/entities/graph_config.py
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+import sys
+
+from pydantic import TypeAdapter, with_config
+
+if sys.version_info >= (3, 12):
+    from typing import TypedDict
+else:
+    from typing_extensions import TypedDict
+
+
+@with_config(extra="allow")
+class NodeConfigData(TypedDict):
+    type: str
+
+
+@with_config(extra="allow")
+class NodeConfigDict(TypedDict):
+    id: str
+    data: NodeConfigData
+
+
+NodeConfigDictAdapter = TypeAdapter(NodeConfigDict)
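Since both TypedDicts are declared with extra="allow", NodeConfigDictAdapter keeps whatever additional node fields a stored graph carries (title, position, provider-specific settings, and so on) and only enforces the keys the engine actually reads: "id", "data", and "data.type". A quick sketch of the intended behaviour, under that reading (the sample payloads are made up for illustration):

    from pydantic import ValidationError

    from core.workflow.entities.graph_config import NodeConfigDictAdapter

    # Extra keys pass through untouched; the required ones are checked up front.
    node = NodeConfigDictAdapter.validate_python(
        {"id": "node_1", "data": {"type": "code", "title": "My Code Node"}, "position": {"x": 0, "y": 0}}
    )
    assert node["data"]["type"] == "code"

    # A node without "data.type" is rejected here, rather than surfacing later
    # as a KeyError inside DifyNodeFactory.create_node().
    try:
        NodeConfigDictAdapter.validate_python({"id": "node_2", "data": {}})
    except ValidationError as exc:
        print(exc.error_count())

The graph construction path in graph.py (below) applies the same validation to the whole "nodes" list via _ListNodeConfigDict, and Workflow.get_node_config_by_id() does the same for single-node runs.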
diff --git a/api/core/workflow/graph/graph.py b/api/core/workflow/graph/graph.py
index 31bf6f3b27..52bbbb20cc 100644
--- a/api/core/workflow/graph/graph.py
+++ b/api/core/workflow/graph/graph.py
@@ -5,15 +5,20 @@ from collections import defaultdict
 from collections.abc import Mapping, Sequence
 from typing import Protocol, cast, final

+from pydantic import TypeAdapter
+
+from core.workflow.entities.graph_config import NodeConfigDict
 from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeState, NodeType
 from core.workflow.nodes.base.node import Node
-from libs.typing import is_str, is_str_dict
+from libs.typing import is_str

 from .edge import Edge
 from .validation import get_graph_validator

 logger = logging.getLogger(__name__)

+_ListNodeConfigDict = TypeAdapter(list[NodeConfigDict])
+

 class NodeFactory(Protocol):
     """
@@ -23,7 +28,7 @@ class NodeFactory(Protocol):
     allowing for different node creation strategies while maintaining type safety.
     """

-    def create_node(self, node_config: dict[str, object]) -> Node:
+    def create_node(self, node_config: NodeConfigDict) -> Node:
        """
         Create a Node instance from node configuration data.

@@ -63,28 +68,24 @@ class Graph:
         self.root_node = root_node

     @classmethod
-    def _parse_node_configs(cls, node_configs: list[dict[str, object]]) -> dict[str, dict[str, object]]:
+    def _parse_node_configs(cls, node_configs: list[NodeConfigDict]) -> dict[str, NodeConfigDict]:
         """
         Parse node configurations and build a mapping of node IDs to configs.

         :param node_configs: list of node configuration dictionaries
         :return: mapping of node ID to node config
         """
-        node_configs_map: dict[str, dict[str, object]] = {}
+        node_configs_map: dict[str, NodeConfigDict] = {}
         for node_config in node_configs:
-            node_id = node_config.get("id")
-            if not node_id or not isinstance(node_id, str):
-                continue
-
-            node_configs_map[node_id] = node_config
+            node_configs_map[node_config["id"]] = node_config

         return node_configs_map

     @classmethod
     def _find_root_node_id(
         cls,
-        node_configs_map: Mapping[str, Mapping[str, object]],
+        node_configs_map: Mapping[str, NodeConfigDict],
         edge_configs: Sequence[Mapping[str, object]],
         root_node_id: str | None = None,
     ) -> str:
@@ -113,10 +114,8 @@ class Graph:
         # Prefer START node if available
         start_node_id = None
         for nid in root_candidates:
-            node_data = node_configs_map[nid].get("data")
-            if not is_str_dict(node_data):
-                continue
-            node_type = node_data.get("type")
+            node_data = node_configs_map[nid]["data"]
+            node_type = node_data["type"]
             if not isinstance(node_type, str):
                 continue
             if NodeType(node_type).is_start_node:
@@ -176,7 +175,7 @@ class Graph:
     @classmethod
     def _create_node_instances(
         cls,
-        node_configs_map: dict[str, dict[str, object]],
+        node_configs_map: dict[str, NodeConfigDict],
         node_factory: NodeFactory,
     ) -> dict[str, Node]:
         """
@@ -303,7 +302,7 @@ class Graph:
         node_configs = graph_config.get("nodes", [])

         edge_configs = cast(list[dict[str, object]], edge_configs)
-        node_configs = cast(list[dict[str, object]], node_configs)
+        node_configs = _ListNodeConfigDict.validate_python(node_configs)

         if not node_configs:
             raise ValueError("Graph must have at least one node")
diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py
index 0b359a2392..2b76b563ff 100644
--- a/api/core/workflow/graph_engine/graph_engine.py
+++ b/api/core/workflow/graph_engine/graph_engine.py
@@ -46,7 +46,6 @@ from .graph_traversal import EdgeProcessor, SkipPropagator
 from .layers.base import GraphEngineLayer
 from .orchestration import Dispatcher, ExecutionCoordinator
 from .protocols.command_channel import CommandChannel
-from .ready_queue import ReadyQueue
 from .worker_management import WorkerPool

 if TYPE_CHECKING:
@@ -90,7 +89,7 @@ class GraphEngine:
         self._graph_execution.workflow_id = workflow_id

         # === Execution Queues ===
-        self._ready_queue = cast(ReadyQueue, self._graph_runtime_state.ready_queue)
+        self._ready_queue = self._graph_runtime_state.ready_queue

         # Queue for events generated during execution
         self._event_queue: queue.Queue[GraphNodeEventBase] = queue.Queue()
diff --git a/api/core/workflow/graph_engine/response_coordinator/coordinator.py b/api/core/workflow/graph_engine/response_coordinator/coordinator.py
index 98e0ea91ef..e82ba29438 100644
--- a/api/core/workflow/graph_engine/response_coordinator/coordinator.py
+++ b/api/core/workflow/graph_engine/response_coordinator/coordinator.py
@@ -15,10 +15,10 @@ from uuid import uuid4
 from pydantic import BaseModel, Field

 from core.workflow.enums import NodeExecutionType, NodeState
-from core.workflow.graph import Graph
 from core.workflow.graph_events import NodeRunStreamChunkEvent, NodeRunSucceededEvent
 from core.workflow.nodes.base.template import TextSegment, VariableSegment
 from core.workflow.runtime import VariablePool
+from core.workflow.runtime.graph_runtime_state import GraphProtocol

 from .path import Path
 from .session import ResponseSession
@@ -75,7 +75,7 @@ class ResponseStreamCoordinator:
     Ensures ordered streaming of responses based on upstream node outputs and constants.
     """

-    def __init__(self, variable_pool: "VariablePool", graph: "Graph") -> None:
+    def __init__(self, variable_pool: "VariablePool", graph: GraphProtocol) -> None:
         """
         Initialize coordinator with variable pool.
diff --git a/api/core/workflow/nodes/base/entities.py b/api/core/workflow/nodes/base/entities.py
index e5a20c8e91..c5426e3fb7 100644
--- a/api/core/workflow/nodes/base/entities.py
+++ b/api/core/workflow/nodes/base/entities.py
@@ -115,7 +115,7 @@ class DefaultValue(BaseModel):
     @model_validator(mode="after")
     def validate_value_type(self) -> DefaultValue:
         # Type validation configuration
-        type_validators = {
+        type_validators: dict[DefaultValueType, dict[str, Any]] = {
             DefaultValueType.STRING: {
                 "type": str,
                 "converter": lambda x: x,
diff --git a/api/core/workflow/nodes/code/entities.py b/api/core/workflow/nodes/code/entities.py
index 10a1c897e9..8026011196 100644
--- a/api/core/workflow/nodes/code/entities.py
+++ b/api/core/workflow/nodes/code/entities.py
@@ -1,4 +1,4 @@
-from typing import Annotated, Literal, Self
+from typing import Annotated, Literal

 from pydantic import AfterValidator, BaseModel

@@ -34,7 +34,7 @@ class CodeNodeData(BaseNodeData):

     class Output(BaseModel):
         type: Annotated[SegmentType, AfterValidator(_validate_type)]
-        children: dict[str, Self] | None = None
+        children: dict[str, "CodeNodeData.Output"] | None = None

     class Dependency(BaseModel):
         name: str
diff --git a/api/core/workflow/nodes/datasource/datasource_node.py b/api/core/workflow/nodes/datasource/datasource_node.py
index 925561cf7c..fd71d610b4 100644
--- a/api/core/workflow/nodes/datasource/datasource_node.py
+++ b/api/core/workflow/nodes/datasource/datasource_node.py
@@ -69,11 +69,13 @@ class DatasourceNode(Node[DatasourceNodeData]):
             if datasource_type is None:
                 raise DatasourceNodeError("Datasource type is not set")

+            datasource_type = DatasourceProviderType.value_of(datasource_type)
+
             datasource_runtime = DatasourceManager.get_datasource_runtime(
                 provider_id=f"{node_data.plugin_id}/{node_data.provider_name}",
                 datasource_name=node_data.datasource_name or "",
                 tenant_id=self.tenant_id,
-                datasource_type=DatasourceProviderType.value_of(datasource_type),
+                datasource_type=datasource_type,
             )
             datasource_info["icon"] = datasource_runtime.get_icon_url(self.tenant_id)
diff --git a/api/core/workflow/nodes/http_request/executor.py b/api/core/workflow/nodes/http_request/executor.py
index 429f8411a6..7de8216562 100644
--- a/api/core/workflow/nodes/http_request/executor.py
+++ b/api/core/workflow/nodes/http_request/executor.py
@@ -2,7 +2,7 @@ import base64
 import json
 import secrets
 import string
-from collections.abc import Mapping
+from collections.abc import Callable, Mapping
 from copy import deepcopy
 from typing import Any, Literal
 from urllib.parse import urlencode, urlparse
@@ -11,9 +11,9 @@ import httpx
 from json_repair import repair_json

 from configs import dify_config
-from core.file import file_manager
 from core.file.enums import FileTransferMethod
-from core.helper import ssrf_proxy
+from core.file.file_manager import file_manager as default_file_manager
+from core.helper.ssrf_proxy import ssrf_proxy
 from core.variables.segments import ArrayFileSegment, FileSegment
 from core.workflow.runtime import VariablePool

@@ -79,8 +79,8 @@ class Executor:
         timeout: HttpRequestNodeTimeout,
         variable_pool: VariablePool,
         max_retries: int = dify_config.SSRF_DEFAULT_MAX_RETRIES,
-        http_client: HttpClientProtocol = ssrf_proxy,
-        file_manager: FileManagerProtocol = file_manager,
+        http_client: HttpClientProtocol | None = None,
+        file_manager: FileManagerProtocol | None = None,
     ):
         # If authorization API key is present, convert the API key using the variable pool
         if node_data.authorization.type == "api-key":
@@ -107,8 +107,8 @@ class Executor:
         self.data = None
         self.json = None
         self.max_retries = max_retries
-        self._http_client = http_client
-        self._file_manager = file_manager
+        self._http_client = http_client or ssrf_proxy
+        self._file_manager = file_manager or default_file_manager

         # init template
         self.variable_pool = variable_pool
@@ -336,7 +336,7 @@ class Executor:
         """
        do http request depending on api bundle
         """
-        _METHOD_MAP = {
+        _METHOD_MAP: dict[str, Callable[..., httpx.Response]] = {
             "get": self._http_client.get,
             "head": self._http_client.head,
             "post": self._http_client.post,
@@ -348,7 +348,7 @@ class Executor:
         if method_lc not in _METHOD_MAP:
             raise InvalidHttpMethodError(f"Invalid http method {self.method}")

-        request_args = {
+        request_args: dict[str, Any] = {
             "data": self.data,
             "files": self.files,
             "json": self.json,
@@ -361,14 +361,13 @@ class Executor:
         }
         # request_args = {k: v for k, v in request_args.items() if v is not None}
         try:
-            response: httpx.Response = _METHOD_MAP[method_lc](
+            response = _METHOD_MAP[method_lc](
                 url=self.url,
                 **request_args,
                 max_retries=self.max_retries,
             )
         except (self._http_client.max_retries_exceeded_error, self._http_client.request_error) as e:
             raise HttpRequestNodeError(str(e)) from e
-        # FIXME: fix type ignore, this maybe httpx type issue
         return response

     def invoke(self) -> Response:
diff --git a/api/core/workflow/nodes/http_request/node.py b/api/core/workflow/nodes/http_request/node.py
index 964e53e03c..480482375f 100644
--- a/api/core/workflow/nodes/http_request/node.py
+++ b/api/core/workflow/nodes/http_request/node.py
@@ -4,8 +4,9 @@ from collections.abc import Callable, Mapping, Sequence
 from typing import TYPE_CHECKING, Any

 from configs import dify_config
-from core.file import File, FileTransferMethod, file_manager
-from core.helper import ssrf_proxy
+from core.file import File, FileTransferMethod
+from core.file.file_manager import file_manager as default_file_manager
+from core.helper.ssrf_proxy import ssrf_proxy
 from core.tools.tool_file_manager import ToolFileManager
 from core.variables.segments import ArrayFileSegment
 from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
@@ -47,9 +48,9 @@ class HttpRequestNode(Node[HttpRequestNodeData]):
         graph_init_params: "GraphInitParams",
         graph_runtime_state: "GraphRuntimeState",
         *,
-        http_client: HttpClientProtocol = ssrf_proxy,
+        http_client: HttpClientProtocol | None = None,
         tool_file_manager_factory: Callable[[], ToolFileManager] = ToolFileManager,
-        file_manager: FileManagerProtocol = file_manager,
+        file_manager: FileManagerProtocol | None = None,
     ) -> None:
         super().__init__(
             id=id,
@@ -57,9 +58,9 @@ class HttpRequestNode(Node[HttpRequestNodeData]):
             graph_init_params=graph_init_params,
             graph_runtime_state=graph_runtime_state,
         )
-        self._http_client = http_client
+        self._http_client = http_client or ssrf_proxy
         self._tool_file_manager_factory = tool_file_manager_factory
-        self._file_manager = file_manager
+        self._file_manager = file_manager or default_file_manager

     @classmethod
     def get_default_config(cls, filters: Mapping[str, object] | None = None) -> Mapping[str, object]:
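The None defaults introduced here (and in DifyNodeFactory and Executor above) keep the behaviour of callers that pass nothing, because the `or` fallback re-installs the ssrf_proxy and file_manager singletons; the point is that tests can now hand in protocol-typed doubles explicitly. A rough usage sketch, assuming a double like the RecordingHttpClient outlined earlier plus a trivial file-manager stand-in (the names and the helper function are illustrative, not part of the patch):

    from core.workflow.nodes.http_request.node import HttpRequestNode

    class StaticFileManager:
        # Minimal FileManagerProtocol stand-in: every download yields fixed bytes.
        def download(self, f) -> bytes:
            return b"file-content-for-tests"

    def build_node_for_tests(node_id, node_config, graph_init_params, graph_runtime_state):
        # RecordingHttpClient: see the sketch after the ssrf_proxy.py hunk above.
        # Omitting http_client/file_manager would fall back to the production
        # singletons; passing doubles swaps the network and storage edges out.
        return HttpRequestNode(
            id=node_id,
            config=node_config,
            graph_init_params=graph_init_params,
            graph_runtime_state=graph_runtime_state,
            http_client=RecordingHttpClient(),
            file_manager=StaticFileManager(),
        )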
diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py
index c19182549f..25a881ea7d 100644
--- a/api/core/workflow/nodes/iteration/iteration_node.py
+++ b/api/core/workflow/nodes/iteration/iteration_node.py
@@ -397,7 +397,7 @@ class IterationNode(LLMUsageTrackingMixin, Node[IterationNodeData]):
             return outputs

         # Check if all non-None outputs are lists
-        non_none_outputs = [output for output in outputs if output is not None]
+        non_none_outputs: list[object] = [output for output in outputs if output is not None]
         if not non_none_outputs:
             return outputs
diff --git a/api/core/workflow/nodes/list_operator/node.py b/api/core/workflow/nodes/list_operator/node.py
index 813d898b9a..235f5b9c52 100644
--- a/api/core/workflow/nodes/list_operator/node.py
+++ b/api/core/workflow/nodes/list_operator/node.py
@@ -196,13 +196,13 @@ def _get_file_extract_string_func(*, key: str) -> Callable[[File], str]:
         case "name":
             return lambda x: x.filename or ""
         case "type":
-            return lambda x: x.type
+            return lambda x: str(x.type)
         case "extension":
             return lambda x: x.extension or ""
         case "mime_type":
             return lambda x: x.mime_type or ""
         case "transfer_method":
-            return lambda x: x.transfer_method
+            return lambda x: str(x.transfer_method)
         case "url":
             return lambda x: x.remote_url or ""
         case "related_id":
@@ -276,7 +276,6 @@ def _get_boolean_filter_func(*, condition: FilterOperator, value: bool) -> Calla


 def _get_file_filter_func(*, key: str, condition: str, value: str | Sequence[str]) -> Callable[[File], bool]:
-    extract_func: Callable[[File], Any]
     if key in {"name", "extension", "mime_type", "url", "related_id"} and isinstance(value, str):
         extract_func = _get_file_extract_string_func(key=key)
         return lambda x: _get_string_filter_func(condition=condition, value=value)(extract_func(x))
@@ -284,8 +283,8 @@ def _get_file_filter_func(*, key: str, condition: str, value: str | Sequence[str
         extract_func = _get_file_extract_string_func(key=key)
         return lambda x: _get_sequence_filter_func(condition=condition, value=value)(extract_func(x))
     elif key == "size" and isinstance(value, str):
-        extract_func = _get_file_extract_number_func(key=key)
-        return lambda x: _get_number_filter_func(condition=condition, value=float(value))(extract_func(x))
+        extract_number = _get_file_extract_number_func(key=key)
+        return lambda x: _get_number_filter_func(condition=condition, value=float(value))(extract_number(x))
     else:
         raise InvalidKeyError(f"Invalid key: {key}")
diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py
index 17d82c2118..beccf79344 100644
--- a/api/core/workflow/nodes/llm/node.py
+++ b/api/core/workflow/nodes/llm/node.py
@@ -852,18 +852,16 @@ class LLMNode(Node[LLMNodeData]):
         # Insert histories into the prompt
         prompt_content = prompt_messages[0].content
         # For issue #11247 - Check if prompt content is a string or a list
-        prompt_content_type = type(prompt_content)
-        if prompt_content_type == str:
+        if isinstance(prompt_content, str):
             prompt_content = str(prompt_content)
             if "#histories#" in prompt_content:
                 prompt_content = prompt_content.replace("#histories#", memory_text)
             else:
                 prompt_content = memory_text + "\n" + prompt_content
             prompt_messages[0].content = prompt_content
-        elif prompt_content_type == list:
-            prompt_content = prompt_content if isinstance(prompt_content, list) else []
+        elif isinstance(prompt_content, list):
             for content_item in prompt_content:
-                if content_item.type == PromptMessageContentType.TEXT:
+                if isinstance(content_item, TextPromptMessageContent):
                     if "#histories#" in content_item.data:
                         content_item.data = content_item.data.replace("#histories#", memory_text)
                     else:
@@ -873,13 +871,12 @@ class LLMNode(Node[LLMNodeData]):
         # Add current query to the prompt message
         if sys_query:
-            if prompt_content_type == str:
+            if isinstance(prompt_content, str):
                 prompt_content = str(prompt_messages[0].content).replace("#sys.query#", sys_query)
                 prompt_messages[0].content = prompt_content
-            elif prompt_content_type == list:
-                prompt_content = prompt_content if isinstance(prompt_content, list) else []
+            elif isinstance(prompt_content, list):
                 for content_item in prompt_content:
-                    if content_item.type == PromptMessageContentType.TEXT:
+                    if isinstance(content_item, TextPromptMessageContent):
                         content_item.data = sys_query + "\n" + content_item.data
             else:
                 raise ValueError("Invalid prompt content type")
@@ -1033,14 +1030,14 @@ class LLMNode(Node[LLMNodeData]):
         if typed_node_data.prompt_config:
             enable_jinja = False

-            if isinstance(prompt_template, list):
+            if isinstance(prompt_template, LLMNodeCompletionModelPromptTemplate):
+                if prompt_template.edition_type == "jinja2":
+                    enable_jinja = True
+            else:
                 for prompt in prompt_template:
                     if prompt.edition_type == "jinja2":
                         enable_jinja = True
                         break
-            else:
-                if prompt_template.edition_type == "jinja2":
-                    enable_jinja = True

             if enable_jinja:
                 for variable_selector in typed_node_data.prompt_config.jinja2_variables or []:
diff --git a/api/core/workflow/nodes/protocols.py b/api/core/workflow/nodes/protocols.py
index e7dcf62fcf..2ad39e0ab5 100644
--- a/api/core/workflow/nodes/protocols.py
+++ b/api/core/workflow/nodes/protocols.py
@@ -1,4 +1,4 @@
-from typing import Protocol
+from typing import Any, Protocol

 import httpx

@@ -12,17 +12,17 @@ class HttpClientProtocol(Protocol):
     @property
     def request_error(self) -> type[Exception]: ...

-    def get(self, url: str, max_retries: int = ..., **kwargs: object) -> httpx.Response: ...
+    def get(self, url: str, max_retries: int = ..., **kwargs: Any) -> httpx.Response: ...

-    def head(self, url: str, max_retries: int = ..., **kwargs: object) -> httpx.Response: ...
+    def head(self, url: str, max_retries: int = ..., **kwargs: Any) -> httpx.Response: ...

-    def post(self, url: str, max_retries: int = ..., **kwargs: object) -> httpx.Response: ...
+    def post(self, url: str, max_retries: int = ..., **kwargs: Any) -> httpx.Response: ...

-    def put(self, url: str, max_retries: int = ..., **kwargs: object) -> httpx.Response: ...
+    def put(self, url: str, max_retries: int = ..., **kwargs: Any) -> httpx.Response: ...

-    def delete(self, url: str, max_retries: int = ..., **kwargs: object) -> httpx.Response: ...
+    def delete(self, url: str, max_retries: int = ..., **kwargs: Any) -> httpx.Response: ...

-    def patch(self, url: str, max_retries: int = ..., **kwargs: object) -> httpx.Response: ...
+    def patch(self, url: str, max_retries: int = ..., **kwargs: Any) -> httpx.Response: ...

 class FileManagerProtocol(Protocol):
diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py
index 43f15f6fd0..4b1845cda2 100644
--- a/api/core/workflow/workflow_entry.py
+++ b/api/core/workflow/workflow_entry.py
@@ -144,11 +144,11 @@ class WorkflowEntry:
         :param user_inputs: user inputs
         :return:
         """
-        node_config = dict(workflow.get_node_config_by_id(node_id))
-        node_config_data = node_config.get("data", {})
+        node_config = workflow.get_node_config_by_id(node_id)
+        node_config_data = node_config["data"]

         # Get node type
-        node_type = NodeType(node_config_data.get("type"))
+        node_type = NodeType(node_config_data["type"])

         # init graph init params and runtime state
         graph_init_params = GraphInitParams(
diff --git a/api/models/workflow.py b/api/models/workflow.py
index df83228c2a..83956b1114 100644
--- a/api/models/workflow.py
+++ b/api/models/workflow.py
@@ -29,6 +29,7 @@ from core.workflow.constants import (
     CONVERSATION_VARIABLE_NODE_ID,
     SYSTEM_VARIABLE_NODE_ID,
 )
+from core.workflow.entities.graph_config import NodeConfigDict, NodeConfigDictAdapter
 from core.workflow.entities.pause_reason import HumanInputRequired, PauseReason, PauseReasonType, SchedulingPause
 from core.workflow.enums import NodeType
 from extensions.ext_storage import Storage
@@ -229,7 +230,7 @@ class Workflow(Base):
         # bug
         # - `_get_graph_and_variable_pool_for_single_node_run`.
         return json.loads(self.graph) if self.graph else {}

-    def get_node_config_by_id(self, node_id: str) -> Mapping[str, Any]:
+    def get_node_config_by_id(self, node_id: str) -> NodeConfigDict:
         """Extract a node configuration from the workflow graph by node ID.

         A node configuration is a dictionary containing the node's properties, including
         the node's id, title, and its data as a dict.
@@ -247,8 +248,7 @@ class Workflow(Base):
             node_config: dict[str, Any] = next(filter(lambda node: node["id"] == node_id, nodes))
         except StopIteration:
             raise NodeNotFoundError(node_id)
-        assert isinstance(node_config, dict)
-        return node_config
+        return NodeConfigDictAdapter.validate_python(node_config)

     @staticmethod
     def get_node_type_from_node_config(node_config: Mapping[str, Any]) -> NodeType:
diff --git a/api/pyproject.toml b/api/pyproject.toml
index 41e532047c..97e6c83ed6 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -116,7 +116,7 @@ dev = [
     "dotenv-linter~=0.5.0",
     "faker~=38.2.0",
     "lxml-stubs~=0.5.1",
-    "ty~=0.0.1a19",
+    "ty>=0.0.14",
     "basedpyright~=1.31.0",
     "ruff~=0.14.0",
     "pytest~=8.3.2",
diff --git a/api/tests/test_containers_integration_tests/services/test_webhook_service.py b/api/tests/test_containers_integration_tests/services/test_webhook_service.py
index e3431fd382..934d1bdd34 100644
--- a/api/tests/test_containers_integration_tests/services/test_webhook_service.py
+++ b/api/tests/test_containers_integration_tests/services/test_webhook_service.py
@@ -90,6 +90,7 @@ class TestWebhookService:
                 "id": "webhook_node",
                 "type": "webhook",
                 "data": {
+                    "type": "trigger-webhook",
                     "title": "Test Webhook",
                     "method": "post",
                     "content_type": "application/json",
diff --git a/api/ty.toml b/api/ty.toml
index afdd37897e..380e14dbef 100644
--- a/api/ty.toml
+++ b/api/ty.toml
@@ -1,16 +1,15 @@
 [src]
 exclude = [
     # deps groups (A1/A2/B/C/D/E)
-    # A2: workflow engine/nodes
-    "core/workflow",
-    "core/app/workflow",
-    "core/helper/code_executor",
     # B: app runner + prompt
     "core/prompt",
     "core/app/apps/base_app_runner.py",
     "core/app/apps/workflow_app_runner.py",
+    "core/agent",
+    "core/plugin",
     # C: services/controllers/fields/libs
     "services",
+    "controllers/inner_api",
     "controllers/console/app",
     "controllers/console/explore",
     "controllers/console/datasets",
@@ -28,3 +27,8 @@ exclude = [
     "tests",
 ]
+
+[rules]
+deprecated = "ignore"
+unused-ignore-comment = "ignore"
+# possibly-missing-attribute = "ignore"
\ No newline at end of file
diff --git a/api/uv.lock b/api/uv.lock
index 0677f5ad98..04d9a7c021 100644
--- a/api/uv.lock
+++ b/api/uv.lock
@@ -1684,7 +1684,7 @@ dev = [
     { name = "scipy-stubs", specifier = ">=1.15.3.0" },
     { name = "sseclient-py", specifier = ">=1.8.0" },
     { name = "testcontainers", specifier = "~=4.13.2" },
-    { name = "ty", specifier = "~=0.0.1a19" },
+    { name = "ty", specifier = ">=0.0.14" },
     { name = "types-aiofiles", specifier = "~=24.1.0" },
     { name = "types-beautifulsoup4", specifier = "~=4.12.0" },
     { name = "types-cachetools", specifier = "~=5.5.0" },
@@ -6239,27 +6239,26 @@ wheels = [

 [[package]]
 name = "ty"
-version = "0.0.1a27"
+version = "0.0.14"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/8f/65/3592d7c73d80664378fc90d0a00c33449a99cbf13b984433c883815245f3/ty-0.0.1a27.tar.gz", hash = "sha256:d34fe04979f2c912700cbf0919e8f9b4eeaa10c4a2aff7450e5e4c90f998bc28", size = 4516059, upload-time = "2025-11-18T21:55:18.381Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/af/57/22c3d6bf95c2229120c49ffc2f0da8d9e8823755a1c3194da56e51f1cc31/ty-0.0.14.tar.gz", hash = "sha256:a691010565f59dd7f15cf324cdcd1d9065e010c77a04f887e1ea070ba34a7de2", size = 5036573, upload-time = "2026-01-27T00:57:31.427Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/e6/05/7945aa97356446fd53ed3ddc7ee02a88d8ad394217acd9428f472d6b109d/ty-0.0.1a27-py3-none-linux_armv6l.whl", hash = "sha256:3cbb735f5ecb3a7a5f5b82fb24da17912788c109086df4e97d454c8fb236fbc5", size = 9375047, upload-time = "2025-11-18T21:54:31.577Z" },
-    { url = "https://files.pythonhosted.org/packages/69/4e/89b167a03de0e9ec329dc89bc02e8694768e4576337ef6c0699987681342/ty-0.0.1a27-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4a6367236dc456ba2416563301d498aef8c6f8959be88777ef7ba5ac1bf15f0b", size = 9169540, upload-time = "2025-11-18T21:54:34.036Z" },
-    { url = "https://files.pythonhosted.org/packages/38/07/e62009ab9cc242e1becb2bd992097c80a133fce0d4f055fba6576150d08a/ty-0.0.1a27-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8e93e231a1bcde964cdb062d2d5e549c24493fb1638eecae8fcc42b81e9463a4", size = 8711942, upload-time = "2025-11-18T21:54:36.3Z" },
-    { url = "https://files.pythonhosted.org/packages/b5/43/f35716ec15406f13085db52e762a3cc663c651531a8124481d0ba602eca0/ty-0.0.1a27-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5b6a8166b60117da1179851a3d719cc798bf7e61f91b35d76242f0059e9ae1d", size = 8984208, upload-time = "2025-11-18T21:54:39.453Z" },
-    { url = "https://files.pythonhosted.org/packages/2d/79/486a3374809523172379768de882c7a369861165802990177fe81489b85f/ty-0.0.1a27-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfbe8b0e831c072b79a078d6c126d7f4d48ca17f64a103de1b93aeda32265dc5", size = 9157209, upload-time = "2025-11-18T21:54:42.664Z" },
-    { url = "https://files.pythonhosted.org/packages/ff/08/9a7c8efcb327197d7d347c548850ef4b54de1c254981b65e8cd0672dc327/ty-0.0.1a27-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90e09678331552e7c25d7eb47868b0910dc5b9b212ae22c8ce71a52d6576ddbb", size = 9519207, upload-time = "2025-11-18T21:54:45.311Z" },
-    { url = "https://files.pythonhosted.org/packages/e0/9d/7b4680683e83204b9edec551bb91c21c789ebc586b949c5218157ee474b7/ty-0.0.1a27-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:88c03e4beeca79d85a5618921e44b3a6ea957e0453e08b1cdd418b51da645939", size = 10148794, upload-time = "2025-11-18T21:54:48.329Z" },
-    { url = "https://files.pythonhosted.org/packages/89/21/8b961b0ab00c28223f06b33222427a8e31aa04f39d1b236acc93021c626c/ty-0.0.1a27-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ece5811322789fefe22fc088ed36c5879489cd39e913f9c1ff2a7678f089c61", size = 9900563, upload-time = "2025-11-18T21:54:51.214Z" },
-    { url = "https://files.pythonhosted.org/packages/85/eb/95e1f0b426c2ea8d443aa923fcab509059c467bbe64a15baaf573fea1203/ty-0.0.1a27-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f2ccb4f0fddcd6e2017c268dfce2489e9a36cb82a5900afe6425835248b1086", size = 9926355, upload-time = "2025-11-18T21:54:53.927Z" },
-    { url = "https://files.pythonhosted.org/packages/f5/78/40e7f072049e63c414f2845df780be3a494d92198c87c2ffa65e63aecf3f/ty-0.0.1a27-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33450528312e41d003e96a1647780b2783ab7569bbc29c04fc76f2d1908061e3", size = 9480580, upload-time = "2025-11-18T21:54:56.617Z" },
-    { url = "https://files.pythonhosted.org/packages/18/da/f4a2dfedab39096808ddf7475f35ceb750d9a9da840bee4afd47b871742f/ty-0.0.1a27-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0a9ac635deaa2b15947701197ede40cdecd13f89f19351872d16f9ccd773fa1", size = 8957524, upload-time = "2025-11-18T21:54:59.085Z" },
-    { url = "https://files.pythonhosted.org/packages/21/ea/26fee9a20cf77a157316fd3ab9c6db8ad5a0b20b2d38a43f3452622587ac/ty-0.0.1a27-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:797fb2cd49b6b9b3ac9f2f0e401fb02d3aa155badc05a8591d048d38d28f1e0c", size = 9201098, upload-time = "2025-11-18T21:55:01.845Z" },
-    { url = "https://files.pythonhosted.org/packages/b0/53/e14591d1275108c9ae28f97ac5d4b93adcc2c8a4b1b9a880dfa9d07c15f8/ty-0.0.1a27-py3-none-musllinux_1_2_i686.whl", hash = "sha256:7fe81679a0941f85e98187d444604e24b15bde0a85874957c945751756314d03", size = 9275470, upload-time = "2025-11-18T21:55:04.23Z" },
-    { url = "https://files.pythonhosted.org/packages/37/44/e2c9acecac70bf06fb41de285e7be2433c2c9828f71e3bf0e886fc85c4fd/ty-0.0.1a27-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:355f651d0cdb85535a82bd9f0583f77b28e3fd7bba7b7da33dcee5a576eff28b", size = 9592394, upload-time = "2025-11-18T21:55:06.542Z" },
-    { url = "https://files.pythonhosted.org/packages/ee/a7/4636369731b24ed07c2b4c7805b8d990283d677180662c532d82e4ef1a36/ty-0.0.1a27-py3-none-win32.whl", hash = "sha256:61782e5f40e6df622093847b34c366634b75d53f839986f1bf4481672ad6cb55", size = 8783816, upload-time = "2025-11-18T21:55:09.648Z" },
-    { url = "https://files.pythonhosted.org/packages/a7/1d/b76487725628d9e81d9047dc0033a5e167e0d10f27893d04de67fe1a9763/ty-0.0.1a27-py3-none-win_amd64.whl", hash = "sha256:c682b238085d3191acddcf66ef22641562946b1bba2a7f316012d5b2a2f4de11", size = 9616833, upload-time = "2025-11-18T21:55:12.457Z" },
-    { url = "https://files.pythonhosted.org/packages/3a/db/c7cd5276c8f336a3cf87992b75ba9d486a7cf54e753fcd42495b3bc56fb7/ty-0.0.1a27-py3-none-win_arm64.whl", hash = "sha256:e146dfa32cbb0ac6afb0cb65659e87e4e313715e68d76fe5ae0a4b3d5b912ce8", size = 9137796, upload-time = "2025-11-18T21:55:15.897Z" },
+    { url = "https://files.pythonhosted.org/packages/99/cb/cc6d1d8de59beb17a41f9a614585f884ec2d95450306c173b3b7cc090d2e/ty-0.0.14-py3-none-linux_armv6l.whl", hash = "sha256:32cf2a7596e693094621d3ae568d7ee16707dce28c34d1762947874060fdddaa", size = 10034228, upload-time = "2026-01-27T00:57:53.133Z" },
+    { url = "https://files.pythonhosted.org/packages/f3/96/dd42816a2075a8f31542296ae687483a8d047f86a6538dfba573223eaf9a/ty-0.0.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f971bf9805f49ce8c0968ad53e29624d80b970b9eb597b7cbaba25d8a18ce9a2", size = 9939162, upload-time = "2026-01-27T00:57:43.857Z" },
+    { url = "https://files.pythonhosted.org/packages/ff/b4/73c4859004e0f0a9eead9ecb67021438b2e8e5fdd8d03e7f5aca77623992/ty-0.0.14-py3-none-macosx_11_0_arm64.whl", hash = "sha256:45448b9e4806423523268bc15e9208c4f3f2ead7c344f615549d2e2354d6e924", size = 9418661, upload-time = "2026-01-27T00:58:03.411Z" },
+    { url = "https://files.pythonhosted.org/packages/58/35/839c4551b94613db4afa20ee555dd4f33bfa7352d5da74c5fa416ffa0fd2/ty-0.0.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee94a9b747ff40114085206bdb3205a631ef19a4d3fb89e302a88754cbbae54c", size = 9837872, upload-time = "2026-01-27T00:57:23.718Z" },
+    { url = "https://files.pythonhosted.org/packages/41/2b/bbecf7e2faa20c04bebd35fc478668953ca50ee5847ce23e08acf20ea119/ty-0.0.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6756715a3c33182e9ab8ffca2bb314d3c99b9c410b171736e145773ee0ae41c3", size = 9848819, upload-time = "2026-01-27T00:57:58.501Z" },
+    { url = "https://files.pythonhosted.org/packages/be/60/3c0ba0f19c0f647ad9d2b5b5ac68c0f0b4dc899001bd53b3a7537fb247a2/ty-0.0.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89d0038a2f698ba8b6fec5cf216a4e44e2f95e4a5095a8c0f57fe549f87087c2", size = 10324371, upload-time = "2026-01-27T00:57:29.291Z" },
+    { url = "https://files.pythonhosted.org/packages/24/32/99d0a0b37d0397b0a989ffc2682493286aa3bc252b24004a6714368c2c3d/ty-0.0.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c64a83a2d669b77f50a4957039ca1450626fb474619f18f6f8a3eb885bf7544", size = 10865898, upload-time = "2026-01-27T00:57:33.542Z" },
+    { url = "https://files.pythonhosted.org/packages/1a/88/30b583a9e0311bb474269cfa91db53350557ebec09002bfc3fb3fc364e8c/ty-0.0.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:242488bfb547ef080199f6fd81369ab9cb638a778bb161511d091ffd49c12129", size = 10555777, upload-time = "2026-01-27T00:58:05.853Z" },
+    { url = "https://files.pythonhosted.org/packages/cd/a2/cb53fb6325dcf3d40f2b1d0457a25d55bfbae633c8e337bde8ec01a190eb/ty-0.0.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4790c3866f6c83a4f424fc7d09ebdb225c1f1131647ba8bdc6fcdc28f09ed0ff", size = 10412913, upload-time = "2026-01-27T00:57:38.834Z" },
+    { url = "https://files.pythonhosted.org/packages/42/8f/f2f5202d725ed1e6a4e5ffaa32b190a1fe70c0b1a2503d38515da4130b4c/ty-0.0.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:950f320437f96d4ea9a2332bbfb5b68f1c1acd269ebfa4c09b6970cc1565bd9d", size = 9837608, upload-time = "2026-01-27T00:57:55.898Z" },
+    { url = "https://files.pythonhosted.org/packages/f7/ba/59a2a0521640c489dafa2c546ae1f8465f92956fede18660653cce73b4c5/ty-0.0.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4a0ec3ee70d83887f86925bbc1c56f4628bd58a0f47f6f32ddfe04e1f05466df", size = 9884324, upload-time = "2026-01-27T00:57:46.786Z" },
+    { url = "https://files.pythonhosted.org/packages/03/95/8d2a49880f47b638743212f011088552ecc454dd7a665ddcbdabea25772a/ty-0.0.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a1a4e6b6da0c58b34415955279eff754d6206b35af56a18bb70eb519d8d139ef", size = 10033537, upload-time = "2026-01-27T00:58:01.149Z" },
+    { url = "https://files.pythonhosted.org/packages/e9/40/4523b36f2ce69f92ccf783855a9e0ebbbd0f0bb5cdce6211ee1737159ed3/ty-0.0.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:dc04384e874c5de4c5d743369c277c8aa73d1edea3c7fc646b2064b637db4db3", size = 10495910, upload-time = "2026-01-27T00:57:26.691Z" },
+    { url = "https://files.pythonhosted.org/packages/08/d5/655beb51224d1bfd4f9ddc0bb209659bfe71ff141bcf05c418ab670698f0/ty-0.0.14-py3-none-win32.whl", hash = "sha256:b20e22cf54c66b3e37e87377635da412d9a552c9bf4ad9fc449fed8b2e19dad2", size = 9507626, upload-time = "2026-01-27T00:57:41.43Z" },
+    { url = "https://files.pythonhosted.org/packages/b6/d9/c569c9961760e20e0a4bc008eeb1415754564304fd53997a371b7cf3f864/ty-0.0.14-py3-none-win_amd64.whl", hash = "sha256:e312ff9475522d1a33186657fe74d1ec98e4a13e016d66f5758a452c90ff6409", size = 10437980, upload-time = "2026-01-27T00:57:36.422Z" },
+    { url = "https://files.pythonhosted.org/packages/ad/0c/186829654f5bfd9a028f6648e9caeb11271960a61de97484627d24443f91/ty-0.0.14-py3-none-win_arm64.whl", hash = "sha256:b6facdbe9b740cb2c15293a1d178e22ffc600653646452632541d01c36d5e378", size = 9885831, upload-time = "2026-01-27T00:57:49.747Z" },
 ]

 [[package]]