jyong 2025-09-17 23:04:03 +08:00
parent 6166c26ea6
commit 8cc6927fed
9 changed files with 27 additions and 15 deletions

View File

@@ -47,3 +47,9 @@ class DatasetInUseError(BaseHTTPException):
error_code = "dataset_in_use"
description = "The dataset is being used by some apps. Please remove the dataset from the apps before deleting it."
code = 409
+class PipelineRunError(BaseHTTPException):
+    error_code = "pipeline_run_error"
+    description = "An error occurred while running the pipeline."
+    code = 500
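
The new class follows the same shape as the existing dataset errors above. A minimal sketch of how it might be raised; this PipelineRunError is a local stand-in mirroring the class added here, and run_pipeline() is a hypothetical service call, not Dify's actual API:

class PipelineRunError(Exception):  # stand-in; the real class extends BaseHTTPException
    error_code = "pipeline_run_error"
    description = "An error occurred while running the pipeline."
    code = 500

def run_pipeline(pipeline_id: str) -> dict:
    # Placeholder for whatever service call can fail during a pipeline run.
    raise RuntimeError(f"pipeline {pipeline_id} failed at a datasource node")

def handle_run(pipeline_id: str) -> dict:
    try:
        return run_pipeline(pipeline_id)
    except RuntimeError as exc:
        # Surface internal failures as the new 500-level API error.
        raise PipelineRunError(str(exc)) from exc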

View File

@@ -45,9 +45,9 @@ class BaseAppGenerator:
mapping=v,
tenant_id=tenant_id,
config=FileUploadConfig(
-allowed_file_types=entity_dictionary[k].allowed_file_types, # pyright: ignore[reportArgumentType]
-allowed_file_extensions=entity_dictionary[k].allowed_file_extensions, # pyright: ignore[reportArgumentType]
-allowed_file_upload_methods=entity_dictionary[k].allowed_file_upload_methods, # pyright: ignore[reportArgumentType]
+allowed_file_types=entity_dictionary[k].allowed_file_types or [],
+allowed_file_extensions=entity_dictionary[k].allowed_file_extensions or [],
+allowed_file_upload_methods=entity_dictionary[k].allowed_file_upload_methods or [],
),
strict_type_validation=strict_type_validation,
)
@@ -60,9 +60,9 @@ class BaseAppGenerator:
mappings=v,
tenant_id=tenant_id,
config=FileUploadConfig(
-allowed_file_types=entity_dictionary[k].allowed_file_types, # pyright: ignore[reportArgumentType]
-allowed_file_extensions=entity_dictionary[k].allowed_file_extensions, # pyright: ignore[reportArgumentType]
-allowed_file_upload_methods=entity_dictionary[k].allowed_file_upload_methods, # pyright: ignore[reportArgumentType]
+allowed_file_types=entity_dictionary[k].allowed_file_types or [],
+allowed_file_extensions=entity_dictionary[k].allowed_file_extensions or [],
+allowed_file_upload_methods=entity_dictionary[k].allowed_file_upload_methods or [],
),
)
for k, v in user_inputs.items()
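
Both hunks replace pyright ignores with "or []", which coalesces the Optional sequence fields to empty lists before they reach FileUploadConfig. A standalone sketch of the pattern; the classes below are simplified stand-ins, not the real Dify models:

from dataclasses import dataclass, field
from typing import Optional, Sequence

@dataclass
class FileUploadConfig:  # simplified stand-in
    allowed_file_types: Sequence[str] = field(default_factory=list)
    allowed_file_extensions: Sequence[str] = field(default_factory=list)

@dataclass
class VariableEntity:  # simplified stand-in; fields may be None
    allowed_file_types: Optional[Sequence[str]] = None
    allowed_file_extensions: Optional[Sequence[str]] = None

entity = VariableEntity()
# "x or []" turns None into an empty list, so the checker sees Sequence[str]
# rather than Optional[Sequence[str]] and no ignore comment is needed.
config = FileUploadConfig(
    allowed_file_types=entity.allowed_file_types or [],
    allowed_file_extensions=entity.allowed_file_extensions or [],
)
print(config)

Note that "or []" also swaps an explicitly empty sequence for a fresh list, which is behaviorally harmless here.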

View File

@@ -60,7 +60,7 @@ class WorkflowAppGenerateResponseConverter(AppGenerateResponseConverter):
data = cls._error_to_stream_response(sub_stream_response.err)
response_chunk.update(cast(dict, data))
else:
-response_chunk.update(sub_stream_response.to_dict())
+response_chunk.update(sub_stream_response.model_dump())
yield response_chunk
@classmethod
@@ -91,5 +91,5 @@ class WorkflowAppGenerateResponseConverter(AppGenerateResponseConverter):
elif isinstance(sub_stream_response, NodeStartStreamResponse | NodeFinishStreamResponse):
response_chunk.update(cast(dict, sub_stream_response.to_ignore_detail_dict()))
else:
-response_chunk.update(sub_stream_response.to_dict())
+response_chunk.update(sub_stream_response.model_dump())
yield response_chunk
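
Switching to model_dump() suggests these stream responses are Pydantic v2 models, and model_dump() is that version's built-in serializer (the earlier to_dict() was presumably a legacy helper). A small illustration with a stand-in response model, assuming Pydantic v2:

from pydantic import BaseModel

class NodeFinishStreamResponse(BaseModel):  # illustrative stand-in
    event: str = "node_finished"
    node_id: str = "n1"
    elapsed_time: float = 0.42

resp = NodeFinishStreamResponse()
response_chunk: dict = {"task_id": "t-123"}
# model_dump() returns a plain dict that can be merged into the streaming chunk.
response_chunk.update(resp.model_dump())
print(response_chunk)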

View File

@@ -744,7 +744,7 @@ class PipelineGenerator(BaseAppGenerator):
Format datasource info list.
"""
if datasource_type == "online_drive":
-all_files = []
+all_files: list[Mapping[str, Any]] = []
datasource_node_data = None
datasource_nodes = workflow.graph_dict.get("nodes", [])
for datasource_node in datasource_nodes:
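
Annotating the empty list up front tells the type checker what it will hold. A tiny sketch of why the annotation matters:

from typing import Any, Mapping

# An unannotated empty literal is inferred as list[Unknown]/list[Any], so later
# appends go unchecked.  Declaring the element type keeps pyright honest.
all_files: list[Mapping[str, Any]] = []
all_files.append({"id": "file-1", "name": "report.pdf"})
# all_files.append("not-a-mapping")  # would now be flagged by the type checker
print(all_files)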

View File

@@ -46,7 +46,7 @@ class DatasourceManager:
provider_entity = manager.fetch_datasource_provider(tenant_id, provider_id)
if not provider_entity:
raise DatasourceProviderNotFoundError(f"plugin provider {provider_id} not found")
-controller = None
+controller: DatasourcePluginProviderController | None = None
match datasource_type:
case DatasourceProviderType.ONLINE_DOCUMENT:
controller = OnlineDocumentDatasourcePluginProviderController(
@@ -79,8 +79,12 @@ class DatasourceManager:
case _:
raise ValueError(f"Unsupported datasource type: {datasource_type}")
-datasource_plugin_providers[provider_id] = controller
+if controller:
+    datasource_plugin_providers[provider_id] = controller
+if controller is None:
+    raise DatasourceProviderNotFoundError(f"Datasource provider {provider_id} not found.")
return controller
@classmethod
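
Typing controller as Optional and raising when it is still None after the match gives pyright a definite non-None return type. The shape of the pattern in isolation; the provider classes below are stand-ins for the real controllers:

class DatasourceProviderNotFoundError(Exception):  # stand-in
    pass

class OnlineDocumentController:  # stand-in provider controller
    pass

def get_controller(datasource_type: str) -> OnlineDocumentController:
    controller: OnlineDocumentController | None = None
    match datasource_type:
        case "online_document":
            controller = OnlineDocumentController()
        case _:
            raise ValueError(f"Unsupported datasource type: {datasource_type}")
    if controller is None:
        # The explicit check narrows Optional[...] to the concrete type for the
        # return statement (and guards any future case that forgets to assign).
        raise DatasourceProviderNotFoundError("controller was never assigned")
    return controller

print(get_controller("online_document"))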

View File

@@ -119,6 +119,7 @@ class File(BaseModel):
assert self.related_id is not None
assert self.extension is not None
return sign_tool_file(tool_file_id=self.related_id, extension=self.extension)
+return None
def to_plugin_parameter(self) -> dict[str, Any]:
return {

View File

@@ -92,7 +92,7 @@ class LoopNode(Node):
if self._node_data.loop_variables:
value_processor: dict[Literal["constant", "variable"], Callable[[LoopVariableData], Segment | None]] = {
"constant": lambda var: self._get_segment_for_constant(var.var_type, var.value),
"variable": lambda var: self.graph_runtime_state.variable_pool.get(var.value),
"variable": lambda var: self.graph_runtime_state.variable_pool.get(var.value) if isinstance(var.value, list) else None,
}
for loop_variable in self._node_data.loop_variables:
if loop_variable.value_type not in value_processor:
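
variable_pool.get expects a selector (a list of path segments), while a constant loop variable carries a plain value, so the isinstance guard narrows the union before the lookup. A self-contained sketch of the same guard, with a stand-in pool:

from typing import Any

class VariablePool:  # stand-in for the workflow variable pool
    def __init__(self) -> None:
        self._data = {("node1", "count"): 3}

    def get(self, selector: list[str]) -> Any:
        return self._data.get(tuple(selector))

pool = VariablePool()

def resolve(value: Any) -> Any:
    # Only a list-shaped value is a valid selector; anything else yields None
    # instead of being passed to get() with the wrong type.
    return pool.get(value) if isinstance(value, list) else None

print(resolve(["node1", "count"]))  # 3
print(resolve("node1.count"))       # None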

View File

@@ -77,7 +77,7 @@ class DatasourceProviderService:
provider_id=f"{plugin_id}/{provider}",
credential_type=CredentialType.of(datasource_provider.auth_type),
)
-encrypted_credentials = raw_credentials.copy()
+encrypted_credentials = dict(raw_credentials)
for key, value in encrypted_credentials.items():
if key in provider_credential_secret_variables:
encrypted_credentials[key] = encrypter.encrypt_token(tenant_id, value)
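
raw_credentials is presumably typed as a read-only Mapping, which declares neither .copy() nor item assignment, so dict(raw_credentials) produces the mutable copy the loop can overwrite. The idea in isolation; encrypt_token below is a placeholder, not Dify's encrypter:

from typing import Mapping

def encrypt_token(tenant_id: str, value: str) -> str:
    return f"enc::{tenant_id}::{value}"  # placeholder for the real encrypter

def encrypt_secrets(tenant_id: str, raw_credentials: Mapping[str, str],
                    secret_keys: set[str]) -> dict[str, str]:
    # dict(...) copies the mapping into a plain mutable dict, satisfying the
    # type checker and avoiding mutation of the caller's object.
    encrypted = dict(raw_credentials)
    for key, value in encrypted.items():
        if key in secret_keys:
            encrypted[key] = encrypt_token(tenant_id, value)
    return encrypted

print(encrypt_secrets("t1", {"api_key": "abc", "endpoint": "https://x"}, {"api_key"}))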

View File

@@ -378,12 +378,12 @@ class RagPipelineService:
Get default block configs
"""
# return default block config
-default_block_configs = []
+default_block_configs: list[dict[str, Any]] = []
for node_class_mapping in NODE_TYPE_CLASSES_MAPPING.values():
node_class = node_class_mapping[LATEST_VERSION]
default_config = node_class.get_default_config()
if default_config:
-default_block_configs.append(default_config)
+default_block_configs.append(dict(default_config))
return default_block_configs
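
The annotation plus the dict(...) copy give the list a concrete element type even if get_default_config() returns a read-only Mapping. Schematically, with a stand-in for node_class.get_default_config():

from typing import Any, Mapping

def get_default_config() -> Mapping[str, Any]:  # stand-in
    return {"type": "loop", "config": {"loop_count": 1}}

default_block_configs: list[dict[str, Any]] = []
default_config = get_default_config()
if default_config:
    # dict(...) converts the Mapping into the dict[str, Any] the annotated
    # list expects, instead of appending the Mapping directly.
    default_block_configs.append(dict(default_config))
print(default_block_configs)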
@@ -631,6 +631,7 @@ class RagPipelineService:
try:
for website_crawl_message in website_crawl_result:
end_time = time.time()
+crawl_event: DatasourceCompletedEvent | DatasourceProcessingEvent
if website_crawl_message.result.status == "completed":
crawl_event = DatasourceCompletedEvent(
data=website_crawl_message.result.web_info_list or [],
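
Declaring crawl_event with its union type before the if/else lets the type checker treat both branches as assignments to one variable. A reduced version of the pattern; the event classes are stand-ins:

from dataclasses import dataclass

@dataclass
class DatasourceProcessingEvent:  # stand-in
    completed: int
    total: int

@dataclass
class DatasourceCompletedEvent:  # stand-in
    data: list[dict]

def to_event(status: str) -> DatasourceCompletedEvent | DatasourceProcessingEvent:
    # Pre-declaring the union means neither branch "redefines" the variable
    # with an incompatible type as far as pyright is concerned.
    crawl_event: DatasourceCompletedEvent | DatasourceProcessingEvent
    if status == "completed":
        crawl_event = DatasourceCompletedEvent(data=[])
    else:
        crawl_event = DatasourceProcessingEvent(completed=1, total=10)
    return crawl_event

print(to_event("completed"))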