Merge branch 'feat/r2' into deploy/rag-dev

Author: jyong
Date: 2025-06-04 16:23:34 +08:00
Commit: e41699cbc8
4 changed files with 118 additions and 3 deletions


@@ -677,6 +677,53 @@ class PublishedRagPipelineSecondStepApi(Resource):
            "variables": variables,
        }


class PublishedRagPipelineFirstStepApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @get_rag_pipeline
    def get(self, pipeline: Pipeline):
        """
        Get first-step parameters of the published RAG pipeline.
        """
        # The current user's role in the tenant-account (ta) table must be admin, owner, or editor
        if not current_user.is_editor:
            raise Forbidden()

        parser = reqparse.RequestParser()
        parser.add_argument("node_id", type=str, required=True, location="args")
        args = parser.parse_args()

        node_id = args.get("node_id")
        if not node_id:
            raise ValueError("Node ID is required")

        rag_pipeline_service = RagPipelineService()
        variables = rag_pipeline_service.get_published_first_step_parameters(pipeline=pipeline, node_id=node_id)
        return {
            "variables": variables,
        }


class DraftRagPipelineFirstStepApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @get_rag_pipeline
    def get(self, pipeline: Pipeline):
        """
        Get first-step parameters of the draft RAG pipeline.
        """
        # The current user's role in the tenant-account (ta) table must be admin, owner, or editor
        if not current_user.is_editor:
            raise Forbidden()

        parser = reqparse.RequestParser()
        parser.add_argument("node_id", type=str, required=True, location="args")
        args = parser.parse_args()

        node_id = args.get("node_id")
        if not node_id:
            raise ValueError("Node ID is required")

        rag_pipeline_service = RagPipelineService()
        variables = rag_pipeline_service.get_draft_first_step_parameters(pipeline=pipeline, node_id=node_id)
        return {
            "variables": variables,
        }


class DraftRagPipelineSecondStepApi(Resource):
    @setup_required
@@ -862,7 +909,15 @@ api.add_resource(
    PublishedRagPipelineSecondStepApi,
    "/rag/pipelines/<uuid:pipeline_id>/workflows/published/processing/parameters",
)
api.add_resource(
    PublishedRagPipelineFirstStepApi,
    "/rag/pipelines/<uuid:pipeline_id>/workflows/published/pre-processing/parameters",
)
api.add_resource(
    DraftRagPipelineSecondStepApi,
    "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/processing/parameters",
)
api.add_resource(
    DraftRagPipelineFirstStepApi,
    "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/pre-processing/parameters",
)
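With these routes registered, a client can fetch the pre-processing (first-step) parameters the same way it already fetches the processing ones. A minimal sketch of such a request follows; the console base path, pipeline ID, and bearer token are hypothetical placeholders, and the only query parameter shown in the diff is node_id:

import requests

# Hypothetical values: adjust the base URL and credentials for your deployment.
BASE_URL = "https://cloud.example.com/console/api"
PIPELINE_ID = "9f8b2e1c-0000-0000-0000-000000000000"

resp = requests.get(
    f"{BASE_URL}/rag/pipelines/{PIPELINE_ID}/workflows/draft/pre-processing/parameters",
    params={"node_id": "datasource_node_1"},  # required by the reqparse parser above
    headers={"Authorization": "Bearer <console-session-token>"},  # placeholder auth
)
resp.raise_for_status()
print(resp.json()["variables"])  # first-step parameter definitions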


@@ -59,7 +59,7 @@ class DatasourceNode(BaseNode[DatasourceNodeData]):
            raise DatasourceNodeError("Datasource type is not set")

        datasource_runtime = DatasourceManager.get_datasource_runtime(
-            provider_id=node_data.provider_id,
+            provider_id=f"{node_data.plugin_id}/{node_data.provider_name}",
            datasource_name=node_data.datasource_name or "",
            tenant_id=self.tenant_id,
            datasource_type=DatasourceProviderType.value_of(datasource_type),
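The runtime lookup key is now composed from the plugin and provider fields instead of being read off the node directly. A minimal illustration of the identifier this produces; the plugin and provider names are hypothetical, and how DatasourceManager parses the key is not shown in this diff:

# Hypothetical values; in the node they come from DatasourceNodeData.
plugin_id = "langgenius/notion_datasource"
provider_name = "notion"

provider_id = f"{plugin_id}/{provider_name}"
print(provider_id)  # -> langgenius/notion_datasource/notion

# The parts can be recovered by splitting on the last "/".
recovered_plugin_id, recovered_provider = provider_id.rsplit("/", 1)
assert (recovered_plugin_id, recovered_provider) == (plugin_id, provider_name)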


@@ -7,7 +7,7 @@ from core.workflow.nodes.base.entities import BaseNodeData

class DatasourceEntity(BaseModel):
    provider_id: str
    plugin_id: str
    provider_name: str  # redundancy
    provider_type: str
    datasource_name: Optional[str] = "local_file"
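With the plugin_id and provider_name fields on the model, a node's datasource config can be validated as below. This is a self-contained sketch that re-declares the model as shown above; the payload values are illustrative, not taken from the codebase:

from typing import Optional

from pydantic import BaseModel


class DatasourceEntity(BaseModel):
    provider_id: str
    plugin_id: str
    provider_name: str  # redundancy
    provider_type: str
    datasource_name: Optional[str] = "local_file"


# Illustrative payload, e.g. parsed from a workflow node's data dict.
entity = DatasourceEntity(
    provider_id="langgenius/notion_datasource/notion",
    plugin_id="langgenius/notion_datasource",
    provider_name="notion",
    provider_type="online_document",
)
print(entity.datasource_name)  # falls back to the "local_file" default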


@@ -1,4 +1,5 @@
import json
+import re
import threading
import time
from collections.abc import Callable, Generator, Sequence
@@ -434,14 +435,19 @@ class RagPipelineService:
        datasource_node_data = published_workflow.graph_dict.get("nodes", {}).get(node_id, {}).get("data", {})
        if not datasource_node_data:
            raise ValueError("Datasource node data not found")

        # fill in any user inputs the caller omitted with the values configured on the node
        datasource_parameters = datasource_node_data.get("datasource_parameters", {})
        for key, value in datasource_parameters.items():
            if not user_inputs.get(key):
                user_inputs[key] = value["value"]

        from core.datasource.datasource_manager import DatasourceManager

        datasource_runtime = DatasourceManager.get_datasource_runtime(
-            provider_id=datasource_node_data.get("provider_id"),
+            provider_id=f"{datasource_node_data.get('plugin_id')}/{datasource_node_data.get('provider_name')}",
            datasource_name=datasource_node_data.get("datasource_name"),
            tenant_id=pipeline.tenant_id,
            datasource_type=DatasourceProviderType(datasource_type),
        )
        if datasource_runtime.datasource_provider_type() == DatasourceProviderType.ONLINE_DOCUMENT:
            datasource_runtime = cast(OnlineDocumentDatasourcePlugin, datasource_runtime)
            online_document_result: GetOnlineDocumentPagesResponse = datasource_runtime._get_online_document_pages(
@@ -648,6 +654,60 @@ class RagPipelineService:
                if item.get("belong_to_node_id") == node_id or item.get("belong_to_node_id") == "shared"
            ]
            return datasource_provider_variables

    def get_published_first_step_parameters(self, pipeline: Pipeline, node_id: str) -> list[dict]:
        """
        Get first-step parameters of the published RAG pipeline.
        """
        published_workflow = self.get_published_workflow(pipeline=pipeline)
        if not published_workflow:
            raise ValueError("Workflow not initialized")

        # get the first-step (datasource) node
        datasource_node_data = published_workflow.graph_dict.get("nodes", {}).get(node_id, {}).get("data", {})
        if not datasource_node_data:
            raise ValueError("Datasource node data not found")

        datasource_parameters = datasource_node_data.get("datasource_parameters", {})
        if datasource_parameters:
            datasource_parameters_map = {item["variable"]: item for item in datasource_parameters}
        else:
            datasource_parameters_map = {}

        variables = datasource_node_data.get("variables", {})
        user_input_variables = []
        for key, value in variables.items():
            # values that reference another node's output look like {{#node_id.field#}};
            # anything else must be provided by the user
            if not re.match(r"\{\{#([a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z_][a-zA-Z0-9_]{0,29}){1,10})#\}\}", value["value"]):
                user_input_variables.append(datasource_parameters_map.get(key, {}))
        return user_input_variables

    def get_draft_first_step_parameters(self, pipeline: Pipeline, node_id: str) -> list[dict]:
        """
        Get first-step parameters of the draft RAG pipeline.
        """
        draft_workflow = self.get_draft_workflow(pipeline=pipeline)
        if not draft_workflow:
            raise ValueError("Workflow not initialized")

        # get the first-step (datasource) node
        datasource_node_data = draft_workflow.graph_dict.get("nodes", {}).get(node_id, {}).get("data", {})
        if not datasource_node_data:
            raise ValueError("Datasource node data not found")

        datasource_parameters = datasource_node_data.get("datasource_parameters", {})
        if datasource_parameters:
            datasource_parameters_map = {item["variable"]: item for item in datasource_parameters}
        else:
            datasource_parameters_map = {}

        variables = datasource_node_data.get("variables", {})
        user_input_variables = []
        for key, value in variables.items():
            # values that reference another node's output look like {{#node_id.field#}};
            # anything else must be provided by the user
            if not re.match(r"\{\{#([a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z_][a-zA-Z0-9_]{0,29}){1,10})#\}\}", value["value"]):
                user_input_variables.append(datasource_parameters_map.get(key, {}))
        return user_input_variables
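Both helpers rely on the same pattern to decide which values count as user inputs: anything that is not a {{#...#}} reference to another node's output gets surfaced. A standalone sketch of the pattern's behavior; the sample strings are illustrative, not taken from the codebase:

import re

# Same pattern as above: {{#<node_id>.<field>[. <field> up to 10 segments]#}}
VARIABLE_REF = re.compile(r"\{\{#([a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z_][a-zA-Z0-9_]{0,29}){1,10})#\}\}")

samples = {
    "{{#start.file#}}": True,             # reference to another node's output
    "{{#1718268957.user_query#}}": True,  # numeric node ids also match
    "https://example.com/doc": False,     # literal value, so it is a user input
    "{{#start#}}": False,                 # at least one ".field" segment is required
}

for value, is_reference in samples.items():
    assert bool(VARIABLE_REF.match(value)) == is_reference, value
print("all samples behave as expected")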
    def get_draft_second_step_parameters(self, pipeline: Pipeline, node_id: str) -> list[dict]:
        """