jyong 2025-07-15 17:54:53 +08:00
parent 96484731a2
commit 1ad73ccdc8
2 changed files with 12 additions and 7 deletions


@@ -443,21 +443,23 @@ class RagPipelineService:
         repository.save(workflow_node_execution)
 
         # Convert node_execution to WorkflowNodeExecution after save
-        workflow_node_execution = repository.to_db_model(workflow_node_execution)
+        workflow_node_execution_db_model = repository.to_db_model(workflow_node_execution)
         with Session(bind=db.engine) as session, session.begin():
             draft_var_saver = DraftVariableSaver(
                 session=session,
                 app_id=pipeline.id,
-                node_id=workflow_node_execution.node_id,
-                node_type=NodeType(workflow_node_execution.node_type),
+                node_id=workflow_node_execution_db_model.node_id,
+                node_type=NodeType(workflow_node_execution_db_model.node_type),
                 enclosing_node_id=enclosing_node_id,
                 node_execution_id=workflow_node_execution.id,
             )
-            draft_var_saver.save(process_data=workflow_node_execution.process_data,
-                                 outputs=workflow_node_execution.outputs)
+            draft_var_saver.save(
+                process_data=workflow_node_execution.process_data,
+                outputs=workflow_node_execution.outputs,
+            )
             session.commit()
-        return workflow_node_execution
+        return workflow_node_execution_db_model
 
     def run_datasource_workflow_node(
         self,
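Note: this hunk fixes a rebinding bug. The name workflow_node_execution (the domain-level execution) was being overwritten by the database model returned from repository.to_db_model(), so later reads such as node_execution_id=workflow_node_execution.id pointed at the DB row instead of the domain execution. Binding the converted object to a separate name keeps both available. A minimal sketch of the pitfall, using hypothetical stand-in classes rather than Dify's actual models:

from dataclasses import dataclass, field
import uuid

@dataclass
class DomainExecution:
    """Stand-in for the domain-level node execution (hypothetical)."""
    node_id: str
    id: str = field(default_factory=lambda: str(uuid.uuid4()))

@dataclass
class DbExecution:
    """Stand-in for the persisted model; it has no domain-level `id`."""
    node_id: str
    pk: int = 0

def to_db_model(execution: DomainExecution) -> DbExecution:
    # Hypothetical converter mirroring repository.to_db_model()
    return DbExecution(node_id=execution.node_id)

execution = DomainExecution(node_id="node-1")

# Before the fix: rebinding the same name loses the domain object, so a
# later read of `execution.id` breaks (DbExecution has no such field).
# execution = to_db_model(execution)

# After the fix: separate names keep both objects addressable.
execution_db_model = to_db_model(execution)
print(execution.id, execution_db_model.node_id)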


@@ -231,6 +231,9 @@ class RagPipelineDslService:
                     status=ImportStatus.FAILED,
                     error="Pipeline not found",
                 )
+            dataset = pipeline.dataset
+            if dataset:
+                dataset_name = dataset.name
 
             # If major version mismatch, store import info in Redis
             if status == ImportStatus.PENDING:
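Note: the three added lines resolve the pipeline's dataset, and its name, up front, before the version check runs. A minimal sketch of the guarded lookup, assuming pipeline.dataset is a nullable relationship (all names here are illustrative, not Dify's):

from dataclasses import dataclass
from typing import Optional

@dataclass
class Dataset:
    name: str

@dataclass
class Pipeline:
    dataset: Optional[Dataset] = None  # nullable relationship in this sketch

def resolve_dataset_name(pipeline: Pipeline) -> Optional[str]:
    # Mirrors the added guard: only touch .name when a dataset exists.
    dataset = pipeline.dataset
    return dataset.name if dataset else None

assert resolve_dataset_name(Pipeline()) is None
assert resolve_dataset_name(Pipeline(Dataset("docs"))) == "docs"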
@@ -285,7 +288,7 @@ class RagPipelineDslService:
                     and dataset.chunk_structure != knowledge_configuration.chunk_structure
                 ):
                     raise ValueError("Chunk structure is not compatible with the published pipeline")
-            else:
+            if not dataset:
                 dataset = Dataset(
                     tenant_id=account.current_tenant_id,
                     name=name,
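Note: because dataset is now fetched earlier from pipeline.dataset, the Dataset(...) creation can no longer hang off an else: tied to the preceding compatibility branch; the explicit if not dataset: turns the flow into a plain get-or-create. A minimal sketch of that control flow, with dicts standing in for the ORM models (names are illustrative):

from typing import Optional

def get_or_create_dataset(existing: Optional[dict], chunk_structure: str) -> dict:
    if existing and existing["chunk_structure"] != chunk_structure:
        # An existing dataset must stay compatible with the published pipeline.
        raise ValueError("Chunk structure is not compatible with the published pipeline")
    if not existing:
        # Explicit guard instead of `else:` -- creation no longer depends on
        # which branch of the compatibility check was taken.
        existing = {"chunk_structure": chunk_structure}
    return existing

ds = get_or_create_dataset(None, "hierarchical")
assert ds["chunk_structure"] == "hierarchical"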