From 5fa2aca2c8d7f9edd00c32ad0240f9aae8ba818b Mon Sep 17 00:00:00 2001
From: Yeuoly
Date: Wed, 21 May 2025 20:29:59 +0800
Subject: [PATCH 1/2] feat: add oauth schema to datasource

---
 api/core/tools/entities/tool_entities.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py
index 37375f4a71..9884d93e9d 100644
--- a/api/core/tools/entities/tool_entities.py
+++ b/api/core/tools/entities/tool_entities.py
@@ -7,6 +7,7 @@ from typing import Any, Optional, Union
 from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_serializer, field_validator, model_validator
 
 from core.entities.provider_entities import ProviderConfig
+from core.plugin.entities.oauth import OAuthSchema
 from core.plugin.entities.parameters import (
     PluginParameter,
     PluginParameterOption,
@@ -349,6 +350,7 @@ class ToolProviderEntity(BaseModel):
     identity: ToolProviderIdentity
     plugin_id: Optional[str] = None
     credentials_schema: list[ProviderConfig] = Field(default_factory=list)
+    oauth_schema: Optional[OAuthSchema] = Field(default=None, description="The oauth schema of the tool provider")
 
 
 class ToolProviderEntityWithPlugin(ToolProviderEntity):

From 3bfc602561d8dc4cafbcc6e9a3799be0496be282 Mon Sep 17 00:00:00 2001
From: Yeuoly
Date: Wed, 21 May 2025 20:36:26 +0800
Subject: [PATCH 2/2] refactor: update datasource entity structure and parameter handling

- Split the DatasourceEntity parameters field into first_step_parameters and
  second_step_parameters, and output_schema into first_step_output_schema and
  second_step_output_schema.
- Updated the "before" validators to match the new parameter fields.
- Replaced the LOCAL_FILE datasource provider type with ONLINE_DRIVE.
- Adjusted datasource_node to reference first_step_parameters.
- Cleaned up unused imports and improved type hints in variable_factory.py and
  workflow.py.
---
 .../entities/datasource_entities.py          | 19 +++++++++++++------
 .../nodes/datasource/datasource_node.py      |  2 +-
 api/factories/variable_factory.py            |  2 +-
 api/models/workflow.py                       |  6 +++---
 4 files changed, 18 insertions(+), 11 deletions(-)

diff --git a/api/core/datasource/entities/datasource_entities.py b/api/core/datasource/entities/datasource_entities.py
index 25d7c1c352..04e6915f31 100644
--- a/api/core/datasource/entities/datasource_entities.py
+++ b/api/core/datasource/entities/datasource_entities.py
@@ -22,8 +22,8 @@ class DatasourceProviderType(enum.StrEnum):
     """
 
     ONLINE_DOCUMENT = "online_document"
-    LOCAL_FILE = "local_file"
     WEBSITE = "website"
+    ONLINE_DRIVE = "online_drive"
 
     @classmethod
     def value_of(cls, value: str) -> "DatasourceProviderType":
@@ -125,14 +125,21 @@ class DatasourceDescription(BaseModel):
 
 class DatasourceEntity(BaseModel):
     identity: DatasourceIdentity
-    parameters: list[DatasourceParameter] = Field(default_factory=list)
     description: Optional[DatasourceDescription] = None
-    output_schema: Optional[dict] = None
+    first_step_parameters: list[DatasourceParameter] = Field(default_factory=list)
+    second_step_parameters: list[DatasourceParameter] = Field(default_factory=list)
+    first_step_output_schema: Optional[dict] = None
+    second_step_output_schema: Optional[dict] = None
     has_runtime_parameters: bool = Field(default=False, description="Whether the tool has runtime parameters")
 
-    @field_validator("parameters", mode="before")
+    @field_validator("first_step_parameters", mode="before")
     @classmethod
-    def set_parameters(cls, v, validation_info: ValidationInfo) -> list[DatasourceParameter]:
+    def set_first_step_parameters(cls, v, validation_info: ValidationInfo) -> list[DatasourceParameter]:
+        return v or []
+
+    @field_validator("second_step_parameters", mode="before")
+    @classmethod
+    def set_second_step_parameters(cls, v, validation_info: ValidationInfo) -> list[DatasourceParameter]:
         return v or []
 
 
@@ -145,7 +152,7 @@ class DatasourceProviderEntity(ToolProviderEntity):
 
 
 class DatasourceProviderEntityWithPlugin(DatasourceProviderEntity):
-    datasources: list[DatasourceEntity] = Field(default_factory=list)
+    datasources: list[DatasourceEntity] = Field(default_factory=list)
 
 
 class DatasourceInvokeMeta(BaseModel):
diff --git a/api/core/workflow/nodes/datasource/datasource_node.py b/api/core/workflow/nodes/datasource/datasource_node.py
index e7d4da8426..4e64c024c8 100644
--- a/api/core/workflow/nodes/datasource/datasource_node.py
+++ b/api/core/workflow/nodes/datasource/datasource_node.py
@@ -64,7 +64,7 @@ class DatasourceNode(BaseNode[DatasourceNodeData]):
             return
 
         # get parameters
-        datasource_parameters = datasource_runtime.entity.parameters
+        datasource_parameters = datasource_runtime.entity.first_step_parameters
         parameters = self._generate_parameters(
             datasource_parameters=datasource_parameters,
             variable_pool=self.graph_runtime_state.variable_pool,
diff --git a/api/factories/variable_factory.py b/api/factories/variable_factory.py
index 69a786e2f5..d829d57812 100644
--- a/api/factories/variable_factory.py
+++ b/api/factories/variable_factory.py
@@ -39,7 +39,6 @@ from core.variables.variables import (
 from core.workflow.constants import (
     CONVERSATION_VARIABLE_NODE_ID,
     ENVIRONMENT_VARIABLE_NODE_ID,
-    PIPELINE_VARIABLE_NODE_ID,
 )
 
 
@@ -123,6 +122,7 @@ def _build_variable_from_mapping(*, mapping: Mapping[str, Any], selector: Sequen
         result = result.model_copy(update={"selector": selector})
     return cast(Variable, result)
 
+
 def build_segment(value: Any, /) -> Segment:
     if value is None:
         return NoneSegment()
diff --git a/api/models/workflow.py b/api/models/workflow.py
index d5cf71841e..5cdb769209 100644
--- a/api/models/workflow.py
+++ b/api/models/workflow.py
@@ -3,7 +3,7 @@ import logging
 from collections.abc import Mapping, Sequence
 from datetime import UTC, datetime
 from enum import Enum, StrEnum
-from typing import TYPE_CHECKING, Any, List, Optional, Self, Union
+from typing import TYPE_CHECKING, Any, Optional, Self, Union
 from uuid import uuid4
 
 from core.variables import utils as variable_utils
@@ -366,11 +366,11 @@ class Workflow(Base):
             self._rag_pipeline_variables = "{}"
 
         variables_dict: dict[str, Any] = json.loads(self._rag_pipeline_variables)
-        results = [v for v in variables_dict.values()]
+        results = list(variables_dict.values())
         return results
 
     @rag_pipeline_variables.setter
-    def rag_pipeline_variables(self, values: List[dict]) -> None:
+    def rag_pipeline_variables(self, values: list[dict]) -> None:
         self._rag_pipeline_variables = json.dumps(
             {item["variable"]: item for item in values},
             ensure_ascii=False,
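
Below is a minimal sketch, not part of the patches above, of how the split DatasourceEntity fields from [PATCH 2/2] behave, assuming pydantic v2. The *Sketch class names and the simplified parameter model are stand-ins invented for this example; the real classes live in api/core/datasource/entities/datasource_entities.py. The patch defines one "before" validator per field, while the sketch applies a single validator to both lists, which is equivalent for this None-to-empty-list coercion.

# Illustrative sketch only: standalone models mirroring the patched
# DatasourceEntity shape, not the actual Dify entities.
from typing import Any, Optional

from pydantic import BaseModel, Field, field_validator


class DatasourceParameterSketch(BaseModel):
    # Simplified stand-in for DatasourceParameter.
    name: str
    type: str = "string"


class DatasourceEntitySketch(BaseModel):
    # The former single parameters / output_schema pair is now split per step.
    first_step_parameters: list[DatasourceParameterSketch] = Field(default_factory=list)
    second_step_parameters: list[DatasourceParameterSketch] = Field(default_factory=list)
    first_step_output_schema: Optional[dict] = None
    second_step_output_schema: Optional[dict] = None

    # Same pattern as the patch: coerce None to an empty list before validation.
    @field_validator("first_step_parameters", "second_step_parameters", mode="before")
    @classmethod
    def _none_to_empty_list(cls, v: Any) -> Any:
        return v or []


if __name__ == "__main__":
    entity = DatasourceEntitySketch(first_step_parameters=None)
    print(entity.first_step_parameters)   # [] -- None coerced by the validator
    print(entity.second_step_parameters)  # [] -- default_factory applies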