From 8fc1c7d9947cbcb5fb330d17b71d6a54e7193803 Mon Sep 17 00:00:00 2001 From: Will Date: Thu, 20 Nov 2025 11:28:29 +0800 Subject: [PATCH] chore: remove redundant reimports (#28415) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Asuka Minato --- api/controllers/console/app/app.py | 4 +--- api/core/datasource/__base/datasource_runtime.py | 8 ++------ api/core/tools/tool_manager.py | 1 - api/core/workflow/runtime/graph_runtime_state.py | 5 ++--- api/extensions/ext_redis.py | 1 - .../clickzetta_volume/clickzetta_volume_storage.py | 1 - api/models/tools.py | 11 ----------- api/models/workflow.py | 2 -- api/services/trigger/trigger_service.py | 3 --- 9 files changed, 5 insertions(+), 31 deletions(-) diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index 0724a6355d..defe82b8ae 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -250,10 +250,8 @@ class AppApi(Resource): args = parser.parse_args() app_service = AppService() - # Construct ArgsDict from parsed arguments - from services.app_service import AppService as AppServiceType - args_dict: AppServiceType.ArgsDict = { + args_dict: AppService.ArgsDict = { "name": args["name"], "description": args.get("description", ""), "icon_type": args.get("icon_type", ""), diff --git a/api/core/datasource/__base/datasource_runtime.py b/api/core/datasource/__base/datasource_runtime.py index c5d6c1d771..e021ed74a7 100644 --- a/api/core/datasource/__base/datasource_runtime.py +++ b/api/core/datasource/__base/datasource_runtime.py @@ -1,14 +1,10 @@ -from typing import TYPE_CHECKING, Any, Optional +from typing import Any from pydantic import BaseModel, Field -# Import InvokeFrom locally to avoid circular import from core.app.entities.app_invoke_entities import InvokeFrom from core.datasource.entities.datasource_entities import DatasourceInvokeFrom -if TYPE_CHECKING: - from core.app.entities.app_invoke_entities import InvokeFrom - class DatasourceRuntime(BaseModel): """ @@ -17,7 +13,7 @@ class DatasourceRuntime(BaseModel): tenant_id: str datasource_id: str | None = None - invoke_from: Optional["InvokeFrom"] = None + invoke_from: InvokeFrom | None = None datasource_invoke_from: DatasourceInvokeFrom | None = None credentials: dict[str, Any] = Field(default_factory=dict) runtime_parameters: dict[str, Any] = Field(default_factory=dict) diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 0beb42479b..8f5fa7cab5 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -63,7 +63,6 @@ from services.tools.tools_transform_service import ToolTransformService if TYPE_CHECKING: from core.workflow.nodes.tool.entities import ToolEntity - from core.workflow.runtime import VariablePool logger = logging.getLogger(__name__) diff --git a/api/core/workflow/runtime/graph_runtime_state.py b/api/core/workflow/runtime/graph_runtime_state.py index 4c322c6aa6..0fbc8ab23e 100644 --- a/api/core/workflow/runtime/graph_runtime_state.py +++ b/api/core/workflow/runtime/graph_runtime_state.py @@ -3,7 +3,6 @@ from __future__ import annotations import importlib import json from collections.abc import Mapping, Sequence -from collections.abc import Mapping as TypingMapping from copy import deepcopy from dataclasses import dataclass from typing import Any, Protocol @@ -100,8 +99,8 @@ class ResponseStreamCoordinatorProtocol(Protocol): class GraphProtocol(Protocol): """Structural interface required from graph instances 
attached to the runtime state.""" - nodes: TypingMapping[str, object] - edges: TypingMapping[str, object] + nodes: Mapping[str, object] + edges: Mapping[str, object] root_node: object def get_outgoing_edges(self, node_id: str) -> Sequence[object]: ... diff --git a/api/extensions/ext_redis.py b/api/extensions/ext_redis.py index 487917b2a7..588fbae285 100644 --- a/api/extensions/ext_redis.py +++ b/api/extensions/ext_redis.py @@ -10,7 +10,6 @@ from redis import RedisError from redis.cache import CacheConfig from redis.cluster import ClusterNode, RedisCluster from redis.connection import Connection, SSLConnection -from redis.lock import Lock from redis.sentinel import Sentinel from configs import dify_config diff --git a/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py b/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py index 1cabc57e74..c1608f58a5 100644 --- a/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py +++ b/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py @@ -45,7 +45,6 @@ class ClickZettaVolumeConfig(BaseModel): This method will first try to use CLICKZETTA_VOLUME_* environment variables, then fall back to CLICKZETTA_* environment variables (for vector DB config). """ - import os # Helper function to get environment variable with fallback def get_env_with_fallback(volume_key: str, fallback_key: str, default: str | None = None) -> str: diff --git a/api/models/tools.py b/api/models/tools.py index 360c80c974..a4aeda93e5 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -20,9 +20,6 @@ from .types import LongText, StringUUID if TYPE_CHECKING: from core.entities.mcp_provider import MCPProviderEntity - from core.tools.entities.common_entities import I18nObject - from core.tools.entities.tool_bundle import ApiToolBundle - from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration # system level tool oauth client params (client_id, client_secret, etc.) 
@@ -163,14 +160,10 @@ class ApiToolProvider(TypeBase): @property def schema_type(self) -> "ApiProviderSchemaType": - from core.tools.entities.tool_entities import ApiProviderSchemaType - return ApiProviderSchemaType.value_of(self.schema_type_str) @property def tools(self) -> list["ApiToolBundle"]: - from core.tools.entities.tool_bundle import ApiToolBundle - return [ApiToolBundle.model_validate(tool) for tool in json.loads(self.tools_str)] @property @@ -263,8 +256,6 @@ class WorkflowToolProvider(TypeBase): @property def parameter_configurations(self) -> list["WorkflowToolParameterConfiguration"]: - from core.tools.entities.tool_entities import WorkflowToolParameterConfiguration - return [ WorkflowToolParameterConfiguration.model_validate(config) for config in json.loads(self.parameter_configuration) @@ -521,6 +512,4 @@ class DeprecatedPublishedAppTool(TypeBase): @property def description_i18n(self) -> "I18nObject": - from core.tools.entities.common_entities import I18nObject - return I18nObject.model_validate(json.loads(self.description)) diff --git a/api/models/workflow.py b/api/models/workflow.py index 53067744ed..fbdaf25da0 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -895,8 +895,6 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo extras: dict[str, Any] = {} if self.execution_metadata_dict: - from core.workflow.nodes import NodeType - if self.node_type == NodeType.TOOL and "tool_info" in self.execution_metadata_dict: tool_info: dict[str, Any] = self.execution_metadata_dict["tool_info"] extras["icon"] = ToolManager.get_tool_icon( diff --git a/api/services/trigger/trigger_service.py b/api/services/trigger/trigger_service.py index 0255e42546..c47c98c4de 100644 --- a/api/services/trigger/trigger_service.py +++ b/api/services/trigger/trigger_service.py @@ -298,9 +298,6 @@ class TriggerService: redis_client.delete(f"{cls.__PLUGIN_TRIGGER_NODE_CACHE_KEY__}:{node_id}") session.commit() except Exception: - import logging - - logger = logging.getLogger(__name__) logger.exception("Failed to sync plugin trigger relationships for app %s", app.id) raise finally:
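
Note: every hunk above removes an import of a name that is already available at the point of use, either from the module's top-level imports or from an existing import elsewhere in the file. As a generic illustration of the most common case in this patch, a function-local reimport that shadows a module-level name, here is a minimal before/after sketch; the do_work helper and the function names are made up for the example and are not code from the repository:

    import logging

    logger = logging.getLogger(__name__)


    def do_work() -> None:
        # Hypothetical placeholder for the real work that may raise.
        raise RuntimeError("boom")


    # Before: the except block re-imports logging and rebinds a local
    # `logger`, shadowing the module-level one for no benefit.
    def sync_before() -> None:
        try:
            do_work()
        except Exception:
            import logging

            logger = logging.getLogger(__name__)
            logger.exception("Failed to sync")
            raise


    # After: rely on the single module-level import and logger, as the
    # trigger_service.py hunk above now does.
    def sync_after() -> None:
        try:
            do_work()
        except Exception:
            logger.exception("Failed to sync")
            raise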
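A second pattern in the patch is a TYPE_CHECKING or method-local re-import that duplicates an import already performed at runtime; in datasource_runtime.py, dropping the duplicate also lets the string forward reference Optional["InvokeFrom"] be written as a plain InvokeFrom | None. A minimal sketch of that cleanup, using made-up Mode/Example names rather than the real Dify classes:

    from enum import Enum

    from pydantic import BaseModel


    class Mode(str, Enum):
        # Stand-in for an enum such as InvokeFrom.
        DEBUG = "debug"
        SERVICE = "service"


    # Before (simplified):
    #
    #     from typing import TYPE_CHECKING, Optional
    #     if TYPE_CHECKING:
    #         from .modes import Mode            # duplicates the runtime import
    #
    #     class Example(BaseModel):
    #         mode: Optional["Mode"] = None      # forward-ref string needed
    #
    # After: a single runtime import is enough, so the annotation can use the
    # modern union syntax directly (Python 3.10+).
    class Example(BaseModel):
        mode: Mode | None = None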