diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
index f4031ec5a9..45b8c0624a 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
@@ -981,6 +981,14 @@ class RagPipelineDatasourceVariableApi(Resource):
         )
         return workflow_node_execution

+class RagPipelineRecommendedPluginApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def get(self):
+        rag_pipeline_service = RagPipelineService()
+        recommended_plugins = rag_pipeline_service.get_recommended_plugins()
+        return recommended_plugins

 api.add_resource(
     DraftRagPipelineApi,
@@ -1090,3 +1098,8 @@ api.add_resource(
     RagPipelineDatasourceVariableApi,
     "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/variables-inspect",
 )
+
+api.add_resource(
+    RagPipelineRecommendedPluginApi,
+    "/rag/pipelines/recommended-plugins",
+)
\ No newline at end of file
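For context, the new resource is exposed at /rag/pipelines/recommended-plugins and returns the list built by RagPipelineService.get_recommended_plugins() (see the service change further down). A minimal request sketch follows; the /console/api prefix, bearer-token auth, host and token are assumptions and placeholders, not something this diff defines:

# Hypothetical smoke test for the new endpoint; URL prefix, host and token are placeholders.
import requests

resp = requests.get(
    "http://localhost:5001/console/api/rag/pipelines/recommended-plugins",
    headers={"Authorization": "Bearer <console-access-token>"},  # assumed auth scheme
    timeout=10,
)
resp.raise_for_status()
for plugin in resp.json():
    print(plugin["plugin_id"], "installed:", plugin["installed"])
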
diff --git a/api/migrations/versions/2025_05_15_1558-b35c3db83d09_add_pipeline_info.py b/api/migrations/versions/2025_05_15_1558-b35c3db83d09_add_pipeline_info.py
index 961589a87e..5238c8e34f 100644
--- a/api/migrations/versions/2025_05_15_1558-b35c3db83d09_add_pipeline_info.py
+++ b/api/migrations/versions/2025_05_15_1558-b35c3db83d09_add_pipeline_info.py
@@ -12,7 +12,7 @@ from sqlalchemy.dialects import postgresql

 # revision identifiers, used by Alembic.
 revision = 'b35c3db83d09'
-down_revision = '0ab65e1cc7fa'
+down_revision = '0e154742a5fa'
 branch_labels = None
 depends_on = None

diff --git a/api/migrations/versions/2025_09_01_1443-8c5088481127_add_pipeline_info_17.py b/api/migrations/versions/2025_09_01_1443-8c5088481127_add_pipeline_info_17.py
new file mode 100644
index 0000000000..0269c6a32d
--- /dev/null
+++ b/api/migrations/versions/2025_09_01_1443-8c5088481127_add_pipeline_info_17.py
@@ -0,0 +1,38 @@
+"""add_pipeline_info_17
+
+Revision ID: 8c5088481127
+Revises: 17d4db47800c
+Create Date: 2025-09-01 14:43:48.417869
+
+"""
+from alembic import op
+import models as models
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '8c5088481127'
+down_revision = '17d4db47800c'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('pipeline_recommended_plugins',
+    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+    sa.Column('plugin_id', sa.Text(), nullable=False),
+    sa.Column('provider_name', sa.Text(), nullable=False),
+    sa.Column('position', sa.Integer(), nullable=False),
+    sa.Column('active', sa.Boolean(), nullable=False),
+    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
+    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
+    sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('pipeline_recommended_plugins')
+    # ### end Alembic commands ###
diff --git a/api/models/dataset.py b/api/models/dataset.py
index ff9559d7d8..931af91fa4 100644
--- a/api/models/dataset.py
+++ b/api/models/dataset.py
@@ -1307,3 +1307,15 @@ class DocumentPipelineExecutionLog(Base):
     input_data = db.Column(db.JSON, nullable=False)
     created_by = db.Column(StringUUID, nullable=True)
     created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
+
+class PipelineRecommendedPlugin(Base):
+    __tablename__ = "pipeline_recommended_plugins"
+    __table_args__ = (db.PrimaryKeyConstraint("id", name="pipeline_recommended_plugin_pkey"),)
+
+    id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
+    plugin_id = db.Column(db.Text, nullable=False)
+    provider_name = db.Column(db.Text, nullable=False)
+    position = db.Column(db.Integer, nullable=False, default=0)
+    active = db.Column(db.Boolean, nullable=False, default=True)
+    created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
+    updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
\ No newline at end of file
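The migration only creates the table; nothing in this diff seeds it. A rough sketch of how a row could be inserted through the PipelineRecommendedPlugin model added above, to be run inside the api app context (e.g. via `flask shell`); the plugin and provider values are made-up examples:

# Illustrative seeding sketch, not part of this diff; plugin_id/provider_name are hypothetical,
# and display order is controlled by `position`.
from extensions.ext_database import db
from models.dataset import PipelineRecommendedPlugin

def seed_recommended_plugin() -> None:
    row = PipelineRecommendedPlugin(
        plugin_id="example_org/example_datasource",  # hypothetical marketplace plugin id
        provider_name="example_datasource",
        position=1,
        active=True,
    )
    db.session.add(row)
    db.session.commit()
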
diff --git a/api/services/rag_pipeline/rag_pipeline.py b/api/services/rag_pipeline/rag_pipeline.py
index 550253429f..3e361cab10 100644
--- a/api/services/rag_pipeline/rag_pipeline.py
+++ b/api/services/rag_pipeline/rag_pipeline.py
@@ -27,6 +27,7 @@ from core.datasource.entities.datasource_entities import (
 from core.datasource.online_document.online_document_plugin import OnlineDocumentDatasourcePlugin
 from core.datasource.online_drive.online_drive_plugin import OnlineDriveDatasourcePlugin
 from core.datasource.website_crawl.website_crawl_plugin import WebsiteCrawlDatasourcePlugin
+from core.helper import marketplace
 from core.rag.entities.event import (
     DatasourceCompletedEvent,
     DatasourceErrorEvent,
@@ -52,7 +53,7 @@ from core.workflow.workflow_entry import WorkflowEntry
 from extensions.ext_database import db
 from libs.infinite_scroll_pagination import InfiniteScrollPagination
 from models.account import Account
-from models.dataset import Document, Pipeline, PipelineCustomizedTemplate  # type: ignore
+from models.dataset import Document, Pipeline, PipelineCustomizedTemplate, PipelineRecommendedPlugin  # type: ignore
 from models.enums import WorkflowRunTriggeredFrom
 from models.model import EndUser
 from models.workflow import (
@@ -70,6 +71,7 @@ from services.entities.knowledge_entities.rag_pipeline_entities import (
 )
 from services.errors.app import WorkflowHashNotEqualError
 from services.rag_pipeline.pipeline_template.pipeline_template_factory import PipelineTemplateRetrievalFactory
+from services.tools.builtin_tools_manage_service import BuiltinToolManageService
 from services.workflow_draft_variable_service import DraftVariableSaver, DraftVarLoader

 logger = logging.getLogger(__name__)
@@ -1226,3 +1228,37 @@ class RagPipelineService:
         )
         session.commit()
         return workflow_node_execution_db_model
+
+    def get_recommended_plugins(self) -> list[dict]:
+        # Query active recommended plugins
+        pipeline_recommended_plugins = (
+            db.session.query(PipelineRecommendedPlugin)
+            .filter(PipelineRecommendedPlugin.active == True)
+            .order_by(PipelineRecommendedPlugin.position.asc())
+            .all()
+        )
+
+        if not pipeline_recommended_plugins:
+            return []
+
+        # Batch fetch plugin manifests
+        plugin_ids = [plugin.plugin_id for plugin in pipeline_recommended_plugins]
+        plugin_manifests = marketplace.batch_fetch_plugin_manifests(plugin_ids)
+
+        builtin_tools = BuiltinToolManageService.list_builtin_tools(
+            user_id=current_user.id,
+            tenant_id=current_user.current_tenant_id,
+        )
+        installed_plugin_ids = {tool.plugin_id for tool in builtin_tools}
+
+        # Build recommended plugins list
+        return [
+            {
+                "plugin_id": manifest.plugin_id,
+                "name": manifest.name,
+                "icon": manifest.icon,
+                "plugin_unique_identifier": manifest.latest_package_identifier,
+                "installed": manifest.plugin_id in installed_plugin_ids,
+            }
+            for manifest in plugin_manifests
+        ]
\ No newline at end of file
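For reference, each entry returned by get_recommended_plugins() merges the marketplace manifest with the tenant's installed builtin tools. An illustrative payload; the values below are placeholders, not real marketplace data:

# Example shape only; real values come from the marketplace manifests.
example_recommended_plugins = [
    {
        "plugin_id": "example_org/example_datasource",
        "name": "Example Datasource",
        "icon": "https://marketplace.example.com/icons/example_datasource.svg",
        "plugin_unique_identifier": "example_org/example_datasource:0.0.1@abc123",
        "installed": False,
    },
]
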
diff --git a/web/app/components/app-sidebar/dataset-info/dropdown.tsx b/web/app/components/app-sidebar/dataset-info/dropdown.tsx
index 2bc64c8f56..c3d2f599b1 100644
--- a/web/app/components/app-sidebar/dataset-info/dropdown.tsx
+++ b/web/app/components/app-sidebar/dataset-info/dropdown.tsx
@@ -66,7 +66,7 @@ const DropDown = ({
       const a = document.createElement('a')
       const file = new Blob([data], { type: 'application/yaml' })
       a.href = URL.createObjectURL(file)
-      a.download = `${name}.yml`
+      a.download = `${name}.pipeline`
       a.click()
     }
     catch {
diff --git a/web/app/components/app/create-from-dsl-modal/uploader.tsx b/web/app/components/app/create-from-dsl-modal/uploader.tsx
index 3ab54733dc..654c7b5952 100644
--- a/web/app/components/app/create-from-dsl-modal/uploader.tsx
+++ b/web/app/components/app/create-from-dsl-modal/uploader.tsx
@@ -17,12 +17,16 @@ export type Props = {
   file: File | undefined
   updateFile: (file?: File) => void
   className?: string
+  accept?: string
+  displayName?: string
 }

 const Uploader: FC<Props> = ({
   file,
   updateFile,
   className,
+  accept = '.yaml,.yml',
+  displayName = 'YAML',
 }) => {
   const { t } = useTranslation()
   const { notify } = useContext(ToastContext)
@@ -95,9 +99,9 @@ const Uploader: FC<Props> = ({
           style={{ display: 'none' }}
           type='file'
           id='fileUploader'
-          accept='.yaml,.yml'
+          accept={accept}
           onChange={fileChangeHandle}
         />
@@ -116,12 +120,12 @@ const Uploader: FC<Props> = ({
       {file && (
             {file.name}
-              YAML
+              {displayName}
               · {formatFileSize(file.size)}
diff --git a/web/app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/uploader.tsx b/web/app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/uploader.tsx index a21d622fac..badc2034d7 100644 --- a/web/app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/uploader.tsx +++ b/web/app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/uploader.tsx @@ -98,7 +98,7 @@ const Uploader: FC = ({ style={{ display: 'none' }} type='file' id='fileUploader' - accept='.yaml,.yml' + accept='.pipeline' onChange={fileChangeHandle} />
diff --git a/web/app/components/datasets/create-from-pipeline/list/template-card/index.tsx b/web/app/components/datasets/create-from-pipeline/list/template-card/index.tsx
index 30bf3e2137..ef5f38a72b 100644
--- a/web/app/components/datasets/create-from-pipeline/list/template-card/index.tsx
+++ b/web/app/components/datasets/create-from-pipeline/list/template-card/index.tsx
@@ -103,7 +103,7 @@ const TemplateCard = ({
     const blob = new Blob([res.data], { type: 'application/yaml' })
     downloadFile({
       data: blob,
-      fileName: `${pipeline.name}.yml`,
+      fileName: `${pipeline.name}.pipeline`,
     })
     Toast.notify({
       type: 'success',
diff --git a/web/app/components/datasets/list/dataset-card/index.tsx b/web/app/components/datasets/list/dataset-card/index.tsx
index fe161ee459..253d397d95 100644
--- a/web/app/components/datasets/list/dataset-card/index.tsx
+++ b/web/app/components/datasets/list/dataset-card/index.tsx
@@ -115,7 +115,7 @@ const DatasetCard = ({
       const a = document.createElement('a')
       const file = new Blob([data], { type: 'application/yaml' })
       a.href = URL.createObjectURL(file)
-      a.download = `${name}.yml`
+      a.download = `${name}.pipeline`
       a.click()
     }
     catch {
diff --git a/web/app/components/rag-pipeline/components/update-dsl-modal.tsx b/web/app/components/rag-pipeline/components/update-dsl-modal.tsx
index 090fe1c42f..6afa367fff 100644
--- a/web/app/components/rag-pipeline/components/update-dsl-modal.tsx
+++ b/web/app/components/rag-pipeline/components/update-dsl-modal.tsx
@@ -233,6 +233,8 @@ const UpdateDSLModal = ({
             file={currentFile}
             updateFile={handleFile}
             className='!mt-0 w-full'
+            accept='.pipeline'
+            displayName='PIPELINE'
           />
diff --git a/web/app/components/rag-pipeline/hooks/use-DSL.ts b/web/app/components/rag-pipeline/hooks/use-DSL.ts
index 36ac6b9e0b..f0c6782c5e 100644
--- a/web/app/components/rag-pipeline/hooks/use-DSL.ts
+++ b/web/app/components/rag-pipeline/hooks/use-DSL.ts
@@ -40,7 +40,7 @@ export const useDSL = () => {
       const a = document.createElement('a')
       const file = new Blob([data], { type: 'application/yaml' })
       a.href = URL.createObjectURL(file)
-      a.download = `${knowledgeName}.yml`
+      a.download = `${knowledgeName}.pipeline`
       a.click()
     }
     catch {
diff --git a/web/app/components/rag-pipeline/hooks/use-pipeline-template.ts b/web/app/components/rag-pipeline/hooks/use-pipeline-template.ts
index bb92255670..2695fc74aa 100644
--- a/web/app/components/rag-pipeline/hooks/use-pipeline-template.ts
+++ b/web/app/components/rag-pipeline/hooks/use-pipeline-template.ts
@@ -15,6 +15,7 @@ export const usePipelineTemplate = () => {
       ...knowledgeBaseDefault.defaultValue as KnowledgeBaseNodeType,
       type: knowledgeBaseDefault.metaData.type,
       title: t(`workflow.blocks.${knowledgeBaseDefault.metaData.type}`),
+      selected: true,
     },
     position: {
       x: START_INITIAL_POSITION.x + 500,