[autofix.ci] apply automated fixes

This commit is contained in:
autofix-ci[bot] 2025-10-30 09:08:11 +00:00 committed by GitHub
parent 8cbd124b80
commit eb3b5f751a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
32 changed files with 151 additions and 142 deletions

View File

@ -2,57 +2,61 @@
Start with the section that best matches your need. Each entry lists the problems it solves plus key files/concepts so you know what to expect before opening it. Start with the section that best matches your need. Each entry lists the problems it solves plus key files/concepts so you know what to expect before opening it.
--- ______________________________________________________________________
## Platform Foundations ## Platform Foundations
- **[Infrastructure Overview](agent_skills/infra.md)** - **[Infrastructure Overview](agent_skills/infra.md)**\
When to read this: When to read this:
- You need to understand where a feature belongs in the architecture.
- You're wiring storage, Redis, vector stores, or OTEL.
- You're about to add CLI commands or async jobs.
What it covers: configuration stack (`configs/app_config.py`, remote settings), storage entry points (`extensions/ext_storage.py`, `core/file/file_manager.py`), Redis conventions (`extensions/ext_redis.py`), plugin runtime topology, vector-store factory (`core/rag/datasource/vdb/*`), observability hooks, SSRF proxy usage, and core CLI commands.
- **[Coding Style](agent_skills/coding_style.md)** - You need to understand where a feature belongs in the architecture.
When to read this: - You're wiring storage, Redis, vector stores, or OTEL.
- You're writing or reviewing backend code and need the authoritative checklist. - You're about to add CLI commands or async jobs.\
- You're unsure about Pydantic validators, SQLAlchemy session usage, or logging patterns. What it covers: configuration stack (`configs/app_config.py`, remote settings), storage entry points (`extensions/ext_storage.py`, `core/file/file_manager.py`), Redis conventions (`extensions/ext_redis.py`), plugin runtime topology, vector-store factory (`core/rag/datasource/vdb/*`), observability hooks, SSRF proxy usage, and core CLI commands.
- You want the exact lint/type/test commands used in PRs.
Includes: Ruff & BasedPyright commands, no-annotation policy, session examples (`with Session(db.engine, ...)`), `@field_validator` usage, logging expectations, and the rule set for file size, helpers, and package management.
--- - **[Coding Style](agent_skills/coding_style.md)**\
When to read this:
- You're writing or reviewing backend code and need the authoritative checklist.
- You're unsure about Pydantic validators, SQLAlchemy session usage, or logging patterns.
- You want the exact lint/type/test commands used in PRs.\
Includes: Ruff & BasedPyright commands, no-annotation policy, session examples (`with Session(db.engine, ...)`), `@field_validator` usage, logging expectations, and the rule set for file size, helpers, and package management.
______________________________________________________________________
## Plugin & Extension Development ## Plugin & Extension Development
- **[Plugin Systems](agent_skills/plugin.md)** - **[Plugin Systems](agent_skills/plugin.md)**\
When to read this: When to read this:
- You're building or debugging a marketplace plugin.
- You need to know how manifests, providers, daemons, and migrations fit together.
What it covers: plugin manifests (`core/plugin/entities/plugin.py`), installation/upgrade flows (`services/plugin/plugin_service.py`, CLI commands), runtime adapters (`core/plugin/impl/*` for tool/model/datasource/trigger/endpoint/agent), daemon coordination (`core/plugin/entities/plugin_daemon.py`), and how provider registries surface capabilities to the rest of the platform.
- **[Plugin OAuth](agent_skills/plugin_oauth.md)** - You're building or debugging a marketplace plugin.
When to read this: - You need to know how manifests, providers, daemons, and migrations fit together.\
- You must integrate OAuth for a plugin or datasource. What it covers: plugin manifests (`core/plugin/entities/plugin.py`), installation/upgrade flows (`services/plugin/plugin_service.py`, CLI commands), runtime adapters (`core/plugin/impl/*` for tool/model/datasource/trigger/endpoint/agent), daemon coordination (`core/plugin/entities/plugin_daemon.py`), and how provider registries surface capabilities to the rest of the platform.
- You're handling credential encryption or refresh flows.
Topics: credential storage, encryption helpers (`core/helper/provider_encryption.py`), OAuth client bootstrap (`services/plugin/oauth_service.py`, `services/plugin/plugin_parameter_service.py`), and how console/API layers expose the flows.
--- - **[Plugin OAuth](agent_skills/plugin_oauth.md)**\
When to read this:
- You must integrate OAuth for a plugin or datasource.
- You're handling credential encryption or refresh flows.\
Topics: credential storage, encryption helpers (`core/helper/provider_encryption.py`), OAuth client bootstrap (`services/plugin/oauth_service.py`, `services/plugin/plugin_parameter_service.py`), and how console/API layers expose the flows.
______________________________________________________________________
## Workflow Entry & Execution ## Workflow Entry & Execution
- **[Trigger Concepts](agent_skills/trigger.md)** - **[Trigger Concepts](agent_skills/trigger.md)**\
When to read this: When to read this:
- You're debugging why a workflow didn't start. - You're debugging why a workflow didn't start.
- You're adding a new trigger type or hook. - You're adding a new trigger type or hook.
- You need to trace async execution, draft debugging, or webhook/schedule pipelines. - You need to trace async execution, draft debugging, or webhook/schedule pipelines.\
Details: Start-node taxonomy, webhook & schedule internals (`core/workflow/nodes/trigger_*`, `services/trigger/*`), async orchestration (`services/async_workflow_service.py`, Celery queues), debug event bus, and storage/logging interactions. Details: Start-node taxonomy, webhook & schedule internals (`core/workflow/nodes/trigger_*`, `services/trigger/*`), async orchestration (`services/async_workflow_service.py`, Celery queues), debug event bus, and storage/logging interactions.
--- ______________________________________________________________________
## Additional Notes for Agents ## Additional Notes for Agents
- All skill docs assume you follow the coding style guide—run Ruff/BasedPyright/tests listed there before submitting changes. - All skill docs assume you follow the coding style guide—run Ruff/BasedPyright/tests listed there before submitting changes.
- When you cannot find an answer in these briefs, search the codebase using the paths referenced (e.g., `core/plugin/impl/tool.py`, `services/dataset_service.py`). - When you cannot find an answer in these briefs, search the codebase using the paths referenced (e.g., `core/plugin/impl/tool.py`, `services/dataset_service.py`).
- If you run into cross-cutting concerns (tenancy, configuration, storage), check the infrastructure guide first; it links to most supporting modules. - If you run into cross-cutting concerns (tenancy, configuration, storage), check the infrastructure guide first; it links to most supporting modules.
- Keep multi-tenancy and configuration central: everything flows through `configs.dify_config` and `tenant_id`. - Keep multi-tenancy and configuration central: everything flows through `configs.dify_config` and `tenant_id`.
- When touching plugins or triggers, consult both the system overview and the specialised doc to ensure you adjust lifecycle, storage, and observability consistently. - When touching plugins or triggers, consult both the system overview and the specialised doc to ensure you adjust lifecycle, storage, and observability consistently.

View File

@ -30,6 +30,7 @@
## SQLAlchemy Patterns ## SQLAlchemy Patterns
- Models inherit from `models.base.Base`; never create ad-hoc metadata or engines. - Models inherit from `models.base.Base`; never create ad-hoc metadata or engines.
- Open sessions with context managers: - Open sessions with context managers:
```python ```python
@ -44,7 +45,9 @@
``` ```
- Use SQLAlchemy expressions; avoid raw SQL unless necessary. - Use SQLAlchemy expressions; avoid raw SQL unless necessary.
- Introduce repository abstractions only for very large tables (e.g., workflow executions) to support alternative storage strategies. - Introduce repository abstractions only for very large tables (e.g., workflow executions) to support alternative storage strategies.
- Always scope queries by `tenant_id` and protect write paths with safeguards (`FOR UPDATE`, row counts, etc.). - Always scope queries by `tenant_id` and protect write paths with safeguards (`FOR UPDATE`, row counts, etc.).
## Storage & External IO ## Storage & External IO
@ -56,7 +59,9 @@
## Pydantic Usage ## Pydantic Usage
- Define DTOs with Pydantic v2 models and forbid extras by default. - Define DTOs with Pydantic v2 models and forbid extras by default.
- Use `@field_validator` / `@model_validator` for domain rules. - Use `@field_validator` / `@model_validator` for domain rules.
- Example: - Example:
```python ```python

View File

@ -14,8 +14,8 @@ Trigger is a collection of nodes that we called `Start` nodes, also, the concept
Before the `Trigger` concept was introduced, it was what we called the `Start` node; but now, to avoid confusion, it has been renamed to the `UserInput` node, which has a strong relation with `ServiceAPI` in `controllers/service_api/app` Before the `Trigger` concept was introduced, it was what we called the `Start` node; but now, to avoid confusion, it has been renamed to the `UserInput` node, which has a strong relation with `ServiceAPI` in `controllers/service_api/app`
1. `UserInput` node introduces a list of arguments that need to be provided by the user, finally it will be converted into variables in the workflow variable pool. 1. `UserInput` node introduces a list of arguments that need to be provided by the user, finally it will be converted into variables in the workflow variable pool.
2. `ServiceAPI` accept those arguments, and pass through them into `UserInput` node. 1. `ServiceAPI` accept those arguments, and pass through them into `UserInput` node.
3. For its detailed implementation, please refer to `core/workflow/nodes/start` 1. For its detailed implementation, please refer to `core/workflow/nodes/start`
### Trigger Webhook ### Trigger Webhook
@ -34,7 +34,7 @@ To Achieve this, a `WorkflowSchedulePlan` model was introduced in `models/trigge
`Trigger Plugin` node allows users to define their own distributed trigger plugin; whenever a request is received, Dify forwards it to the plugin and waits for parsed variables from it. `Trigger Plugin` node allows users to define their own distributed trigger plugin; whenever a request is received, Dify forwards it to the plugin and waits for parsed variables from it.
1. Requests were saved in storage by `services/trigger/trigger_request_service.py`, referenced by `services/trigger/trigger_service.py`.`TriggerService`.`process_endpoint` 1. Requests were saved in storage by `services/trigger/trigger_request_service.py`, referenced by `services/trigger/trigger_service.py`.`TriggerService`.`process_endpoint`
2. Plugins accept those requests and parse variables from it, see `core/plugin/impl/trigger.py` for details. 1. Plugins accept those requests and parse variables from it, see `core/plugin/impl/trigger.py` for details.
A `subscription` concept was introduced here by Dify; it means an endpoint address from Dify was bound to a third-party webhook service like `Github` `Slack` `Linear` `GoogleDrive` `Gmail` etc. Once a subscription is created, Dify continually receives requests from the platforms and handles them one by one. A `subscription` concept was introduced here by Dify; it means an endpoint address from Dify was bound to a third-party webhook service like `Github` `Slack` `Linear` `GoogleDrive` `Gmail` etc. Once a subscription is created, Dify continually receives requests from the platforms and handles them one by one.

View File

@ -54,8 +54,8 @@ class WorkflowAppGenerator(BaseAppGenerator):
invoke_from: InvokeFrom, invoke_from: InvokeFrom,
streaming: Literal[True], streaming: Literal[True],
call_depth: int, call_depth: int,
triggered_from: Optional[WorkflowRunTriggeredFrom] = None, triggered_from: WorkflowRunTriggeredFrom | None = None,
root_node_id: Optional[str] = None, root_node_id: str | None = None,
graph_engine_layers: Sequence[GraphEngineLayer] = (), graph_engine_layers: Sequence[GraphEngineLayer] = (),
) -> Generator[Mapping[str, Any] | str, None, None]: ... ) -> Generator[Mapping[str, Any] | str, None, None]: ...
@ -70,8 +70,8 @@ class WorkflowAppGenerator(BaseAppGenerator):
invoke_from: InvokeFrom, invoke_from: InvokeFrom,
streaming: Literal[False], streaming: Literal[False],
call_depth: int, call_depth: int,
triggered_from: Optional[WorkflowRunTriggeredFrom] = None, triggered_from: WorkflowRunTriggeredFrom | None = None,
root_node_id: Optional[str] = None, root_node_id: str | None = None,
graph_engine_layers: Sequence[GraphEngineLayer] = (), graph_engine_layers: Sequence[GraphEngineLayer] = (),
) -> Mapping[str, Any]: ... ) -> Mapping[str, Any]: ...
@ -86,8 +86,8 @@ class WorkflowAppGenerator(BaseAppGenerator):
invoke_from: InvokeFrom, invoke_from: InvokeFrom,
streaming: bool, streaming: bool,
call_depth: int, call_depth: int,
triggered_from: Optional[WorkflowRunTriggeredFrom] = None, triggered_from: WorkflowRunTriggeredFrom | None = None,
root_node_id: Optional[str] = None, root_node_id: str | None = None,
graph_engine_layers: Sequence[GraphEngineLayer] = (), graph_engine_layers: Sequence[GraphEngineLayer] = (),
) -> Union[Mapping[str, Any], Generator[Mapping[str, Any] | str, None, None]]: ... ) -> Union[Mapping[str, Any], Generator[Mapping[str, Any] | str, None, None]]: ...
@ -101,8 +101,8 @@ class WorkflowAppGenerator(BaseAppGenerator):
invoke_from: InvokeFrom, invoke_from: InvokeFrom,
streaming: bool = True, streaming: bool = True,
call_depth: int = 0, call_depth: int = 0,
triggered_from: Optional[WorkflowRunTriggeredFrom] = None, triggered_from: WorkflowRunTriggeredFrom | None = None,
root_node_id: Optional[str] = None, root_node_id: str | None = None,
graph_engine_layers: Sequence[GraphEngineLayer] = (), graph_engine_layers: Sequence[GraphEngineLayer] = (),
) -> Union[Mapping[str, Any], Generator[Mapping[str, Any] | str, None, None]]: ) -> Union[Mapping[str, Any], Generator[Mapping[str, Any] | str, None, None]]:
files: Sequence[Mapping[str, Any]] = args.get("files") or [] files: Sequence[Mapping[str, Any]] = args.get("files") or []
@ -223,7 +223,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
workflow_node_execution_repository: WorkflowNodeExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository,
streaming: bool = True, streaming: bool = True,
variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER, variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER,
root_node_id: Optional[str] = None, root_node_id: str | None = None,
graph_engine_layers: Sequence[GraphEngineLayer] = (), graph_engine_layers: Sequence[GraphEngineLayer] = (),
) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]: ) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]:
""" """
@ -457,7 +457,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
variable_loader: VariableLoader, variable_loader: VariableLoader,
workflow_execution_repository: WorkflowExecutionRepository, workflow_execution_repository: WorkflowExecutionRepository,
workflow_node_execution_repository: WorkflowNodeExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository,
root_node_id: Optional[str] = None, root_node_id: str | None = None,
graph_engine_layers: Sequence[GraphEngineLayer] = (), graph_engine_layers: Sequence[GraphEngineLayer] = (),
) -> None: ) -> None:
""" """

View File

@ -38,7 +38,7 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
variable_loader: VariableLoader, variable_loader: VariableLoader,
workflow: Workflow, workflow: Workflow,
system_user_id: str, system_user_id: str,
root_node_id: Optional[str] = None, root_node_id: str | None = None,
workflow_execution_repository: WorkflowExecutionRepository, workflow_execution_repository: WorkflowExecutionRepository,
workflow_node_execution_repository: WorkflowNodeExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository,
graph_engine_layers: Sequence[GraphEngineLayer] = (), graph_engine_layers: Sequence[GraphEngineLayer] = (),

View File

@ -84,7 +84,7 @@ class WorkflowBasedAppRunner:
workflow_id: str = "", workflow_id: str = "",
tenant_id: str = "", tenant_id: str = "",
user_id: str = "", user_id: str = "",
root_node_id: Optional[str] = None, root_node_id: str | None = None,
) -> Graph: ) -> Graph:
""" """
Init graph Init graph

View File

@ -84,9 +84,9 @@ class SystemConfiguration(BaseModel):
""" """
enabled: bool enabled: bool
current_quota_type: Optional[ProviderQuotaType] = None current_quota_type: ProviderQuotaType | None = None
quota_configurations: list[QuotaConfiguration] = [] quota_configurations: list[QuotaConfiguration] = []
credentials: Optional[dict] = None credentials: dict | None = None
class CustomProviderConfiguration(BaseModel): class CustomProviderConfiguration(BaseModel):
@ -95,8 +95,8 @@ class CustomProviderConfiguration(BaseModel):
""" """
credentials: dict credentials: dict
current_credential_id: Optional[str] = None current_credential_id: str | None = None
current_credential_name: Optional[str] = None current_credential_name: str | None = None
available_credentials: list[CredentialConfiguration] = [] available_credentials: list[CredentialConfiguration] = []
@ -108,8 +108,8 @@ class CustomModelConfiguration(BaseModel):
model: str model: str
model_type: ModelType model_type: ModelType
credentials: dict | None credentials: dict | None
current_credential_id: Optional[str] = None current_credential_id: str | None = None
current_credential_name: Optional[str] = None current_credential_name: str | None = None
available_model_credentials: list[CredentialConfiguration] = [] available_model_credentials: list[CredentialConfiguration] = []
unadded_to_model_list: bool | None = False unadded_to_model_list: bool | None = False
@ -131,7 +131,7 @@ class CustomConfiguration(BaseModel):
Model class for provider custom configuration. Model class for provider custom configuration.
""" """
provider: Optional[CustomProviderConfiguration] = None provider: CustomProviderConfiguration | None = None
models: list[CustomModelConfiguration] = [] models: list[CustomModelConfiguration] = []
can_added_models: list[UnaddedModelConfiguration] = [] can_added_models: list[UnaddedModelConfiguration] = []

View File

@ -12,7 +12,7 @@ class ProviderConfigCache(Protocol):
Interface for provider configuration cache operations Interface for provider configuration cache operations
""" """
def get(self) -> Optional[dict[str, Any]]: def get(self) -> dict[str, Any] | None:
"""Get cached provider configuration""" """Get cached provider configuration"""
... ...

View File

@ -73,7 +73,7 @@ class PluginDeclaration(BaseModel):
models: list[str] | None = Field(default_factory=list[str]) models: list[str] | None = Field(default_factory=list[str])
endpoints: list[str] | None = Field(default_factory=list[str]) endpoints: list[str] | None = Field(default_factory=list[str])
datasources: list[str] | None = Field(default_factory=list[str]) datasources: list[str] | None = Field(default_factory=list[str])
triggers: Optional[list[str]] = Field(default_factory=list[str]) triggers: list[str] | None = Field(default_factory=list[str])
class Meta(BaseModel): class Meta(BaseModel):
minimum_dify_version: str | None = Field(default=None) minimum_dify_version: str | None = Field(default=None)

View File

@ -246,7 +246,7 @@ class RequestFetchAppInfo(BaseModel):
class TriggerInvokeEventResponse(BaseModel): class TriggerInvokeEventResponse(BaseModel):
variables: Mapping[str, Any] = Field(default_factory=dict) variables: Mapping[str, Any] = Field(default_factory=dict)
cancelled: Optional[bool] = False cancelled: bool | None = False
model_config = ConfigDict(protected_namespaces=(), arbitrary_types_allowed=True) model_config = ConfigDict(protected_namespaces=(), arbitrary_types_allowed=True)

View File

@ -31,7 +31,7 @@ class EventApiEntity(BaseModel):
identity: EventIdentity = Field(description="The identity of the trigger") identity: EventIdentity = Field(description="The identity of the trigger")
description: I18nObject = Field(description="The description of the trigger") description: I18nObject = Field(description="The description of the trigger")
parameters: list[EventParameter] = Field(description="The parameters of the trigger") parameters: list[EventParameter] = Field(description="The parameters of the trigger")
output_schema: Optional[Mapping[str, Any]] = Field(description="The output schema of the trigger") output_schema: Mapping[str, Any] | None = Field(description="The output schema of the trigger")
class TriggerProviderApiEntity(BaseModel): class TriggerProviderApiEntity(BaseModel):
@ -39,19 +39,19 @@ class TriggerProviderApiEntity(BaseModel):
name: str = Field(..., description="The name of the trigger provider") name: str = Field(..., description="The name of the trigger provider")
label: I18nObject = Field(..., description="The label of the trigger provider") label: I18nObject = Field(..., description="The label of the trigger provider")
description: I18nObject = Field(..., description="The description of the trigger provider") description: I18nObject = Field(..., description="The description of the trigger provider")
icon: Optional[str] = Field(default=None, description="The icon of the trigger provider") icon: str | None = Field(default=None, description="The icon of the trigger provider")
icon_dark: Optional[str] = Field(default=None, description="The dark icon of the trigger provider") icon_dark: str | None = Field(default=None, description="The dark icon of the trigger provider")
tags: list[str] = Field(default_factory=list, description="The tags of the trigger provider") tags: list[str] = Field(default_factory=list, description="The tags of the trigger provider")
plugin_id: Optional[str] = Field(default="", description="The plugin id of the tool") plugin_id: str | None = Field(default="", description="The plugin id of the tool")
plugin_unique_identifier: Optional[str] = Field(default="", description="The unique identifier of the tool") plugin_unique_identifier: str | None = Field(default="", description="The unique identifier of the tool")
supported_creation_methods: list[TriggerCreationMethod] = Field( supported_creation_methods: list[TriggerCreationMethod] = Field(
default_factory=list, default_factory=list,
description="Supported creation methods for the trigger provider. like 'OAUTH', 'APIKEY', 'MANUAL'.", description="Supported creation methods for the trigger provider. like 'OAUTH', 'APIKEY', 'MANUAL'.",
) )
subscription_constructor: Optional[SubscriptionConstructor] = Field( subscription_constructor: SubscriptionConstructor | None = Field(
default=None, description="The subscription constructor of the trigger provider" default=None, description="The subscription constructor of the trigger provider"
) )

View File

@ -40,12 +40,12 @@ class EventParameter(BaseModel):
name: str = Field(..., description="The name of the parameter") name: str = Field(..., description="The name of the parameter")
label: I18nObject = Field(..., description="The label presented to the user") label: I18nObject = Field(..., description="The label presented to the user")
type: EventParameterType = Field(..., description="The type of the parameter") type: EventParameterType = Field(..., description="The type of the parameter")
auto_generate: Optional[PluginParameterAutoGenerate] = Field( auto_generate: PluginParameterAutoGenerate | None = Field(
default=None, description="The auto generate of the parameter" default=None, description="The auto generate of the parameter"
) )
template: Optional[PluginParameterTemplate] = Field(default=None, description="The template of the parameter") template: PluginParameterTemplate | None = Field(default=None, description="The template of the parameter")
scope: Optional[str] = None scope: str | None = None
required: Optional[bool] = False required: bool | None = False
multiple: bool | None = Field( multiple: bool | None = Field(
default=False, default=False,
description="Whether the parameter is multiple select, only valid for select or dynamic-select type", description="Whether the parameter is multiple select, only valid for select or dynamic-select type",
@ -53,9 +53,9 @@ class EventParameter(BaseModel):
default: Union[int, float, str, list[Any], None] = None default: Union[int, float, str, list[Any], None] = None
min: Union[float, int, None] = None min: Union[float, int, None] = None
max: Union[float, int, None] = None max: Union[float, int, None] = None
precision: Optional[int] = None precision: int | None = None
options: Optional[list[PluginParameterOption]] = None options: list[PluginParameterOption] | None = None
description: Optional[I18nObject] = None description: I18nObject | None = None
class TriggerProviderIdentity(BaseModel): class TriggerProviderIdentity(BaseModel):
@ -67,8 +67,8 @@ class TriggerProviderIdentity(BaseModel):
name: str = Field(..., description="The name of the trigger provider") name: str = Field(..., description="The name of the trigger provider")
label: I18nObject = Field(..., description="The label of the trigger provider") label: I18nObject = Field(..., description="The label of the trigger provider")
description: I18nObject = Field(..., description="The description of the trigger provider") description: I18nObject = Field(..., description="The description of the trigger provider")
icon: Optional[str] = Field(default=None, description="The icon of the trigger provider") icon: str | None = Field(default=None, description="The icon of the trigger provider")
icon_dark: Optional[str] = Field(default=None, description="The dark icon of the trigger provider") icon_dark: str | None = Field(default=None, description="The dark icon of the trigger provider")
tags: list[str] = Field(default_factory=list, description="The tags of the trigger provider") tags: list[str] = Field(default_factory=list, description="The tags of the trigger provider")
@ -80,7 +80,7 @@ class EventIdentity(BaseModel):
author: str = Field(..., description="The author of the event") author: str = Field(..., description="The author of the event")
name: str = Field(..., description="The name of the event") name: str = Field(..., description="The name of the event")
label: I18nObject = Field(..., description="The label of the event") label: I18nObject = Field(..., description="The label of the event")
provider: Optional[str] = Field(default=None, description="The provider of the event") provider: str | None = Field(default=None, description="The provider of the event")
class EventEntity(BaseModel): class EventEntity(BaseModel):
@ -93,7 +93,7 @@ class EventEntity(BaseModel):
default_factory=list[EventParameter], description="The parameters of the event" default_factory=list[EventParameter], description="The parameters of the event"
) )
description: I18nObject = Field(..., description="The description of the event") description: I18nObject = Field(..., description="The description of the event")
output_schema: Optional[Mapping[str, Any]] = Field( output_schema: Mapping[str, Any] | None = Field(
default=None, description="The output schema that this event produces" default=None, description="The output schema that this event produces"
) )
@ -124,7 +124,7 @@ class SubscriptionConstructor(BaseModel):
description="The credentials schema of the subscription constructor", description="The credentials schema of the subscription constructor",
) )
oauth_schema: Optional[OAuthSchema] = Field( oauth_schema: OAuthSchema | None = Field(
default=None, default=None,
description="The OAuth schema of the subscription constructor if OAuth is supported", description="The OAuth schema of the subscription constructor if OAuth is supported",
) )
@ -183,7 +183,7 @@ class UnsubscribeResult(BaseModel):
success: bool = Field(..., description="Whether the unsubscription was successful") success: bool = Field(..., description="Whether the unsubscription was successful")
message: Optional[str] = Field( message: str | None = Field(
None, None,
description="Human-readable message about the operation result. " description="Human-readable message about the operation result. "
"Success message for successful operations, " "Success message for successful operations, "

View File

@ -39,7 +39,7 @@ class TriggerEventNodeData(BaseNodeData):
return type return type
title: str title: str
desc: Optional[str] = None desc: str | None = None
plugin_id: str = Field(..., description="Plugin ID") plugin_id: str = Field(..., description="Plugin ID")
provider_id: str = Field(..., description="Provider ID") provider_id: str = Field(..., description="Provider ID")
event_name: str = Field(..., description="Event name") event_name: str = Field(..., description="Event name")

View File

@ -20,7 +20,7 @@ class TriggerEventNode(Node):
def init_node_data(self, data: Mapping[str, Any]) -> None: def init_node_data(self, data: Mapping[str, Any]) -> None:
self._node_data = TriggerEventNodeData.model_validate(data) self._node_data = TriggerEventNodeData.model_validate(data)
def _get_error_strategy(self) -> Optional[ErrorStrategy]: def _get_error_strategy(self) -> ErrorStrategy | None:
return self._node_data.error_strategy return self._node_data.error_strategy
def _get_retry_config(self) -> RetryConfig: def _get_retry_config(self) -> RetryConfig:
@ -29,7 +29,7 @@ class TriggerEventNode(Node):
def _get_title(self) -> str: def _get_title(self) -> str:
return self._node_data.title return self._node_data.title
def _get_description(self) -> Optional[str]: def _get_description(self) -> str | None:
return self._node_data.desc return self._node_data.desc
def _get_default_value_dict(self) -> dict[str, Any]: def _get_default_value_dict(self) -> dict[str, Any]:

View File

@ -11,11 +11,11 @@ class TriggerScheduleNodeData(BaseNodeData):
""" """
mode: str = Field(default="visual", description="Schedule mode: visual or cron") mode: str = Field(default="visual", description="Schedule mode: visual or cron")
frequency: Optional[str] = Field( frequency: str | None = Field(
default=None, description="Frequency for visual mode: hourly, daily, weekly, monthly" default=None, description="Frequency for visual mode: hourly, daily, weekly, monthly"
) )
cron_expression: Optional[str] = Field(default=None, description="Cron expression for cron mode") cron_expression: str | None = Field(default=None, description="Cron expression for cron mode")
visual_config: Optional[dict] = Field(default=None, description="Visual configuration details") visual_config: dict | None = Field(default=None, description="Visual configuration details")
timezone: str = Field(default="UTC", description="Timezone for schedule execution") timezone: str = Field(default="UTC", description="Timezone for schedule execution")
@ -26,26 +26,26 @@ class ScheduleConfig(BaseModel):
class SchedulePlanUpdate(BaseModel): class SchedulePlanUpdate(BaseModel):
node_id: Optional[str] = None node_id: str | None = None
cron_expression: Optional[str] = None cron_expression: str | None = None
timezone: Optional[str] = None timezone: str | None = None
class VisualConfig(BaseModel): class VisualConfig(BaseModel):
"""Visual configuration for schedule trigger""" """Visual configuration for schedule trigger"""
# For hourly frequency # For hourly frequency
on_minute: Optional[int] = Field(default=0, ge=0, le=59, description="Minute of the hour (0-59)") on_minute: int | None = Field(default=0, ge=0, le=59, description="Minute of the hour (0-59)")
# For daily, weekly, monthly frequencies # For daily, weekly, monthly frequencies
time: Optional[str] = Field(default="12:00 AM", description="Time in 12-hour format (e.g., '2:30 PM')") time: str | None = Field(default="12:00 AM", description="Time in 12-hour format (e.g., '2:30 PM')")
# For weekly frequency # For weekly frequency
weekdays: Optional[list[Literal["sun", "mon", "tue", "wed", "thu", "fri", "sat"]]] = Field( weekdays: list[Literal["sun", "mon", "tue", "wed", "thu", "fri", "sat"]] | None = Field(
default=None, description="List of weekdays to run on" default=None, description="List of weekdays to run on"
) )
# For monthly frequency # For monthly frequency
monthly_days: Optional[list[Union[int, Literal["last"]]]] = Field( monthly_days: list[Union[int, Literal["last"]]] | None = Field(
default=None, description="Days of month to run on (1-31 or 'last')" default=None, description="Days of month to run on (1-31 or 'last')"
) )

View File

@ -19,7 +19,7 @@ class TriggerScheduleNode(Node):
def init_node_data(self, data: Mapping[str, Any]) -> None: def init_node_data(self, data: Mapping[str, Any]) -> None:
self._node_data = TriggerScheduleNodeData(**data) self._node_data = TriggerScheduleNodeData(**data)
def _get_error_strategy(self) -> Optional[ErrorStrategy]: def _get_error_strategy(self) -> ErrorStrategy | None:
return self._node_data.error_strategy return self._node_data.error_strategy
def _get_retry_config(self) -> RetryConfig: def _get_retry_config(self) -> RetryConfig:
@ -28,7 +28,7 @@ class TriggerScheduleNode(Node):
def _get_title(self) -> str: def _get_title(self) -> str:
return self._node_data.title return self._node_data.title
def _get_description(self) -> Optional[str]: def _get_description(self) -> str | None:
return self._node_data.desc return self._node_data.desc
def _get_default_value_dict(self) -> dict[str, Any]: def _get_default_value_dict(self) -> dict[str, Any]:

View File

@ -75,5 +75,5 @@ class WebhookData(BaseNodeData):
response_body: str = "" # Template for response body response_body: str = "" # Template for response body
# Webhook specific fields (not from client data, set internally) # Webhook specific fields (not from client data, set internally)
webhook_id: Optional[str] = None # Set when webhook trigger is created webhook_id: str | None = None # Set when webhook trigger is created
timeout: int = 30 # Timeout in seconds to wait for webhook response timeout: int = 30 # Timeout in seconds to wait for webhook response

View File

@ -20,7 +20,7 @@ class TriggerWebhookNode(Node):
def init_node_data(self, data: Mapping[str, Any]) -> None: def init_node_data(self, data: Mapping[str, Any]) -> None:
self._node_data = WebhookData.model_validate(data) self._node_data = WebhookData.model_validate(data)
def _get_error_strategy(self) -> Optional[ErrorStrategy]: def _get_error_strategy(self) -> ErrorStrategy | None:
return self._node_data.error_strategy return self._node_data.error_strategy
def _get_retry_config(self) -> RetryConfig: def _get_retry_config(self) -> RetryConfig:
@ -29,7 +29,7 @@ class TriggerWebhookNode(Node):
def _get_title(self) -> str: def _get_title(self) -> str:
return self._node_data.title return self._node_data.title
def _get_description(self) -> Optional[str]: def _get_description(self) -> str | None:
return self._node_data.desc return self._node_data.desc
def _get_default_value_dict(self) -> dict[str, Any]: def _get_default_value_dict(self) -> dict[str, Any]:

View File

@ -33,7 +33,7 @@ def handle(sender, **kwargs):
sync_schedule_from_workflow(tenant_id=app.tenant_id, app_id=app.id, workflow=published_workflow) sync_schedule_from_workflow(tenant_id=app.tenant_id, app_id=app.id, workflow=published_workflow)
def sync_schedule_from_workflow(tenant_id: str, app_id: str, workflow: Workflow) -> Optional[WorkflowSchedulePlan]: def sync_schedule_from_workflow(tenant_id: str, app_id: str, workflow: Workflow) -> WorkflowSchedulePlan | None:
""" """
Sync schedule plan from workflow graph configuration. Sync schedule plan from workflow graph configuration.

View File

@ -8,7 +8,7 @@ from croniter import croniter
def calculate_next_run_at( def calculate_next_run_at(
cron_expression: str, cron_expression: str,
timezone: str, timezone: str,
base_time: Optional[datetime] = None, base_time: datetime | None = None,
) -> datetime: ) -> datetime:
""" """
Calculate the next run time for a cron expression in a specific timezone. Calculate the next run time for a cron expression in a specific timezone.

View File

@ -194,32 +194,32 @@ class WorkflowTriggerLog(Base):
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False) workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
workflow_run_id: Mapped[Optional[str]] = mapped_column(StringUUID, nullable=True) workflow_run_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
root_node_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) root_node_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
trigger_metadata: Mapped[str] = mapped_column(sa.Text, nullable=False) trigger_metadata: Mapped[str] = mapped_column(sa.Text, nullable=False)
trigger_type: Mapped[str] = mapped_column(EnumText(AppTriggerType, length=50), nullable=False) trigger_type: Mapped[str] = mapped_column(EnumText(AppTriggerType, length=50), nullable=False)
trigger_data: Mapped[str] = mapped_column(sa.Text, nullable=False) # Full TriggerData as JSON trigger_data: Mapped[str] = mapped_column(sa.Text, nullable=False) # Full TriggerData as JSON
inputs: Mapped[str] = mapped_column(sa.Text, nullable=False) # Just inputs for easy viewing inputs: Mapped[str] = mapped_column(sa.Text, nullable=False) # Just inputs for easy viewing
outputs: Mapped[Optional[str]] = mapped_column(sa.Text, nullable=True) outputs: Mapped[str | None] = mapped_column(sa.Text, nullable=True)
status: Mapped[str] = mapped_column( status: Mapped[str] = mapped_column(
EnumText(WorkflowTriggerStatus, length=50), nullable=False, default=WorkflowTriggerStatus.PENDING EnumText(WorkflowTriggerStatus, length=50), nullable=False, default=WorkflowTriggerStatus.PENDING
) )
error: Mapped[Optional[str]] = mapped_column(sa.Text, nullable=True) error: Mapped[str | None] = mapped_column(sa.Text, nullable=True)
queue_name: Mapped[str] = mapped_column(String(100), nullable=False) queue_name: Mapped[str] = mapped_column(String(100), nullable=False)
celery_task_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) celery_task_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
retry_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0) retry_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0)
elapsed_time: Mapped[Optional[float]] = mapped_column(sa.Float, nullable=True) elapsed_time: Mapped[float | None] = mapped_column(sa.Float, nullable=True)
total_tokens: Mapped[Optional[int]] = mapped_column(sa.Integer, nullable=True) total_tokens: Mapped[int | None] = mapped_column(sa.Integer, nullable=True)
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
created_by_role: Mapped[str] = mapped_column(String(255), nullable=False) created_by_role: Mapped[str] = mapped_column(String(255), nullable=False)
created_by: Mapped[str] = mapped_column(String(255), nullable=False) created_by: Mapped[str] = mapped_column(String(255), nullable=False)
triggered_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) triggered_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
finished_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) finished_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
@property @property
def created_by_account(self): def created_by_account(self):
@ -383,7 +383,7 @@ class AppTrigger(Base):
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuidv7()"))
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
node_id: Mapped[Optional[str]] = mapped_column(String(64), nullable=False) node_id: Mapped[str | None] = mapped_column(String(64), nullable=False)
trigger_type: Mapped[str] = mapped_column(EnumText(AppTriggerType, length=50), nullable=False) trigger_type: Mapped[str] = mapped_column(EnumText(AppTriggerType, length=50), nullable=False)
title: Mapped[str] = mapped_column(String(255), nullable=False) title: Mapped[str] = mapped_column(String(255), nullable=False)
provider_name: Mapped[str] = mapped_column(String(255), server_default="", nullable=True) provider_name: Mapped[str] = mapped_column(String(255), server_default="", nullable=True)
@ -435,7 +435,7 @@ class WorkflowSchedulePlan(Base):
timezone: Mapped[str] = mapped_column(String(64), nullable=False) timezone: Mapped[str] = mapped_column(String(64), nullable=False)
# Schedule control # Schedule control
next_run_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) next_run_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
updated_at: Mapped[datetime] = mapped_column( updated_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()

View File

@ -36,7 +36,7 @@ class SQLAlchemyWorkflowTriggerLogRepository(WorkflowTriggerLogRepository):
self.session.flush() self.session.flush()
return trigger_log return trigger_log
def get_by_id(self, trigger_log_id: str, tenant_id: Optional[str] = None) -> Optional[WorkflowTriggerLog]: def get_by_id(self, trigger_log_id: str, tenant_id: str | None = None) -> WorkflowTriggerLog | None:
"""Get a trigger log by its ID.""" """Get a trigger log by its ID."""
query = select(WorkflowTriggerLog).where(WorkflowTriggerLog.id == trigger_log_id) query = select(WorkflowTriggerLog).where(WorkflowTriggerLog.id == trigger_log_id)

View File

@ -63,7 +63,7 @@ class WorkflowTriggerLogRepository(Protocol):
""" """
... ...
def get_by_id(self, trigger_log_id: str, tenant_id: Optional[str] = None) -> Optional[WorkflowTriggerLog]: def get_by_id(self, trigger_log_id: str, tenant_id: str | None = None) -> WorkflowTriggerLog | None:
""" """
Get a trigger log by its ID. Get a trigger log by its ID.

View File

@ -30,7 +30,7 @@ class AppGenerateService:
args: Mapping[str, Any], args: Mapping[str, Any],
invoke_from: InvokeFrom, invoke_from: InvokeFrom,
streaming: bool = True, streaming: bool = True,
root_node_id: Optional[str] = None, root_node_id: str | None = None,
): ):
""" """
App Content Generate App Content Generate

View File

@ -224,7 +224,7 @@ class AsyncWorkflowService:
return cls.trigger_workflow_async(session, user, trigger_data) return cls.trigger_workflow_async(session, user, trigger_data)
@classmethod @classmethod
def get_trigger_log(cls, workflow_trigger_log_id: str, tenant_id: Optional[str] = None) -> Optional[dict[str, Any]]: def get_trigger_log(cls, workflow_trigger_log_id: str, tenant_id: str | None = None) -> dict[str, Any] | None:
""" """
Get trigger log by ID Get trigger log by ID
@ -293,7 +293,7 @@ class AsyncWorkflowService:
return [log.to_dict() for log in logs] return [log.to_dict() for log in logs]
@staticmethod @staticmethod
def _get_workflow(workflow_service: WorkflowService, app_model: App, workflow_id: Optional[str] = None) -> Workflow: def _get_workflow(workflow_service: WorkflowService, app_model: App, workflow_id: str | None = None) -> Workflow:
""" """
Get workflow for the app Get workflow for the app

View File

@ -204,7 +204,7 @@ class ScheduleService:
return ScheduleConfig(node_id=node_id, cron_expression=cron_expression, timezone=timezone) return ScheduleConfig(node_id=node_id, cron_expression=cron_expression, timezone=timezone)
@staticmethod @staticmethod
def extract_schedule_config(workflow: Workflow) -> Optional[ScheduleConfig]: def extract_schedule_config(workflow: Workflow) -> ScheduleConfig | None:
""" """
Extracts schedule configuration from workflow graph. Extracts schedule configuration from workflow graph.

View File

@ -117,7 +117,7 @@ class TriggerProviderService:
parameters: Mapping[str, Any], parameters: Mapping[str, Any],
properties: Mapping[str, Any], properties: Mapping[str, Any],
credentials: Mapping[str, str], credentials: Mapping[str, str],
subscription_id: Optional[str] = None, subscription_id: str | None = None,
credential_expires_at: int = -1, credential_expires_at: int = -1,
expires_at: int = -1, expires_at: int = -1,
) -> Mapping[str, Any]: ) -> Mapping[str, Any]:
@ -435,7 +435,7 @@ class TriggerProviderService:
return {"result": "success", "expires_at": int(refreshed.expires_at)} return {"result": "success", "expires_at": int(refreshed.expires_at)}
@classmethod @classmethod
def get_oauth_client(cls, tenant_id: str, provider_id: TriggerProviderID) -> Optional[Mapping[str, Any]]: def get_oauth_client(cls, tenant_id: str, provider_id: TriggerProviderID) -> Mapping[str, Any] | None:
""" """
Get OAuth client configuration for a provider. Get OAuth client configuration for a provider.
First tries tenant-level OAuth, then falls back to system OAuth. First tries tenant-level OAuth, then falls back to system OAuth.
@ -509,8 +509,8 @@ class TriggerProviderService:
cls, cls,
tenant_id: str, tenant_id: str,
provider_id: TriggerProviderID, provider_id: TriggerProviderID,
client_params: Optional[Mapping[str, Any]] = None, client_params: Mapping[str, Any] | None = None,
enabled: Optional[bool] = None, enabled: bool | None = None,
) -> Mapping[str, Any]: ) -> Mapping[str, Any]:
""" """
Save or update custom OAuth client parameters for a trigger provider. Save or update custom OAuth client parameters for a trigger provider.

View File

@ -67,7 +67,7 @@ class WebhookService:
with Session(db.engine) as session: with Session(db.engine) as session:
# Get webhook trigger # Get webhook trigger
webhook_trigger = ( webhook_trigger = (
session.query(WorkflowWebhookTrigger).filter(WorkflowWebhookTrigger.webhook_id == webhook_id).first() session.query(WorkflowWebhookTrigger).where(WorkflowWebhookTrigger.webhook_id == webhook_id).first()
) )
if not webhook_trigger: if not webhook_trigger:
raise ValueError(f"Webhook not found: {webhook_id}") raise ValueError(f"Webhook not found: {webhook_id}")

View File

@ -30,7 +30,7 @@ class TriggerData(BaseModel):
app_id: str app_id: str
tenant_id: str tenant_id: str
workflow_id: Optional[str] = None workflow_id: str | None = None
root_node_id: str root_node_id: str
inputs: Mapping[str, Any] inputs: Mapping[str, Any]
files: Sequence[Mapping[str, Any]] = Field(default_factory=list) files: Sequence[Mapping[str, Any]] = Field(default_factory=list)
@ -101,10 +101,10 @@ class AsyncTriggerExecutionResult(BaseModel):
execution_id: str execution_id: str
status: AsyncTriggerStatus status: AsyncTriggerStatus
result: Optional[Mapping[str, Any]] = None result: Mapping[str, Any] | None = None
error: Optional[str] = None error: str | None = None
elapsed_time: Optional[float] = None elapsed_time: float | None = None
total_tokens: Optional[int] = None total_tokens: int | None = None
model_config = ConfigDict(use_enum_values=True) model_config = ConfigDict(use_enum_values=True)
@ -131,15 +131,15 @@ class TriggerLogResponse(BaseModel):
status: str status: str
queue_name: str queue_name: str
retry_count: int retry_count: int
celery_task_id: Optional[str] = None celery_task_id: str | None = None
workflow_run_id: Optional[str] = None workflow_run_id: str | None = None
error: Optional[str] = None error: str | None = None
outputs: Optional[str] = None outputs: str | None = None
elapsed_time: Optional[float] = None elapsed_time: float | None = None
total_tokens: Optional[int] = None total_tokens: int | None = None
created_at: Optional[str] = None created_at: str | None = None
triggered_at: Optional[str] = None triggered_at: str | None = None
finished_at: Optional[str] = None finished_at: str | None = None
model_config = ConfigDict(use_enum_values=True) model_config = ConfigDict(use_enum_values=True)

View File

@ -56,9 +56,9 @@ const getTriggerPluginNodeData = (
event_description: triggerConfig.event_description, event_description: triggerConfig.event_description,
title: triggerConfig.event_label || triggerConfig.title || fallbackTitle, title: triggerConfig.event_label || triggerConfig.title || fallbackTitle,
desc: triggerConfig.event_description || fallbackDesc, desc: triggerConfig.event_description || fallbackDesc,
output_schema: { ...(triggerConfig.output_schema || {}) }, output_schema: { ...triggerConfig.output_schema },
parameters_schema: triggerConfig.paramSchemas ? [...triggerConfig.paramSchemas] : [], parameters_schema: triggerConfig.paramSchemas ? [...triggerConfig.paramSchemas] : [],
config: { ...(triggerConfig.params || {}) }, config: { ...triggerConfig.params },
subscription_id: triggerConfig.subscription_id, subscription_id: triggerConfig.subscription_id,
plugin_unique_identifier: triggerConfig.plugin_unique_identifier, plugin_unique_identifier: triggerConfig.plugin_unique_identifier,
is_team_authorization: triggerConfig.is_team_authorization, is_team_authorization: triggerConfig.is_team_authorization,
@ -124,8 +124,8 @@ const WorkflowChildren = () => {
...baseNodeData, ...baseNodeData,
...triggerNodeData, ...triggerNodeData,
config: { config: {
...(baseNodeData as { config?: Record<string, any> }).config || {}, ...(baseNodeData as { config?: Record<string, any> }).config,
...(triggerNodeData.config || {}), ...triggerNodeData.config,
}, },
} }
})() })()

View File

@ -315,7 +315,7 @@ const FormInputItem: FC<Props> = ({
onChange({ onChange({
...value, ...value,
[variable]: { [variable]: {
...(varInput || {}), ...varInput,
type: VarKindType.constant, type: VarKindType.constant,
value: selected, value: selected,
}, },

View File

@ -186,7 +186,7 @@ const useConfig = (id: string, payload: PluginTriggerNodeType) => {
(variable: InputVar, varDetail: InputVar) => { (variable: InputVar, varDetail: InputVar) => {
const newInputs = produce(inputs, (draft) => { const newInputs = produce(inputs, (draft) => {
const nextEventParameters = normalizeEventParameters({ const nextEventParameters = normalizeEventParameters({
...(draft.event_parameters || {}), ...draft.event_parameters,
[variable.variable]: { [variable.variable]: {
type: VarKindType.variable, type: VarKindType.variable,
value: varDetail.variable, value: varDetail.variable,