mirror of https://github.com/langgenius/dify.git

refactor: update datasource handling and improve documentation properties in pipeline components

parent 350ea6be6e
commit 82e7c8a2f9

@@ -51,8 +51,8 @@ export const useDatasourceOptions = (pipelineNodes: Node<DataSourceNodeType>[])
       return {
         nodeId: node.id,
         type: node.data.provider_type as DatasourceType,
-        description: node.data.desc || '',
-        docTitle: '', // todo: Add docTitle and docLink if needed, or remove these properties if not used
+        description: node.data.datasource_label,
+        docTitle: 'How to use?',
         docLink: '',
         fileExtensions: node.data.fileExtensions || [],
       }

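The option entries built above are what the datasource picker consumes. A hedged sketch of the resulting shape, inferred from the object literal (the repository may type this differently):

// Illustrative only: the shape of one entry returned by useDatasourceOptions
// after this change, written out as a named type for reference.
type DatasourceOption = {
  nodeId: string
  type: DatasourceType
  description: string // now the node's datasource_label rather than its desc
  docTitle: string // the static string 'How to use?' for now
  docLink: string // still empty at this point
  fileExtensions: string[]
}
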
@@ -265,6 +265,11 @@ const CreateFormPipeline = () => {
         {datasource?.type === DatasourceType.onlineDocument && (
           <Notion
             nodeId={datasource?.nodeId || ''}
+            headerInfo={{
+              title: datasource.description,
+              docTitle: datasource.docTitle || '',
+              docLink: datasource.docLink || '',
+            }}
             notionPages={notionPages}
             updateNotionPages={updateNotionPages}
             canPreview

@@ -283,7 +288,6 @@ const CreateFormPipeline = () => {
             onCheckedCrawlResultChange={setWebsitePages}
             onJobIdChange={setWebsiteCrawlJobId}
             onPreview={updateCurrentWebsite}
-            usingPublished
           />
         )}
         {isShowVectorSpaceFull && (

@@ -3,6 +3,11 @@ import NotionPageSelector from './notion-page-selector'

 type NotionProps = {
   nodeId: string
+  headerInfo: {
+    title: string
+    docTitle: string
+    docLink: string
+  }
   notionPages: NotionPage[]
   updateNotionPages: (value: NotionPage[]) => void
   canPreview?: boolean

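The three string fields added to NotionProps form the header contract that later replaces the hardcoded title and Notion docs link (see the Header hunk further below). Written out as a standalone type purely for reference; the commit inlines it at each usage site:

// Hypothetical named form of the inlined headerInfo object type.
type HeaderInfo = {
  title: string // shown as the section title
  docTitle: string // label of the help link
  docLink: string // href of the help link
}
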
@@ -12,6 +17,7 @@ type NotionProps = {
 const Notion = ({
   nodeId,
+  headerInfo,
   notionPages,
   updateNotionPages,
   canPreview = false,

@@ -21,6 +27,7 @@ const Notion = ({
   return (
     <NotionPageSelector
       nodeId={nodeId}
+      headerInfo={headerInfo}
       value={notionPages.map(page => page.page_id)}
       onSelect={updateNotionPages}
       canPreview={canPreview}

@@ -1,12 +1,12 @@
-import { useCallback, useEffect, useMemo, useState } from 'react'
+import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
 import WorkspaceSelector from '@/app/components/base/notion-page-selector/workspace-selector'
 import SearchInput from '@/app/components/base/notion-page-selector/search-input'
 import PageSelector from '@/app/components/base/notion-page-selector/page-selector'
 import type { DataSourceNotionPageMap, DataSourceNotionWorkspace, NotionPage } from '@/models/common'
 import Header from '@/app/components/datasets/create/website/base/header'
 import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
-import { useDatasourceNodeRun } from '@/service/use-pipeline'
 import { useTranslation } from 'react-i18next'
+import { useDraftDatasourceNodeRun, usePublishedDatasourceNodeRun } from '@/service/use-pipeline'
 import { DatasourceType } from '@/models/pipeline'

 type NotionPageSelectorProps = {
   value?: string[]

@@ -16,6 +16,11 @@ type NotionPageSelectorProps = {
   onPreview?: (selectedPage: NotionPage) => void
   isInPipeline?: boolean
   nodeId: string
+  headerInfo: {
+    title: string
+    docTitle: string
+    docLink: string
+  }
 }

 const NotionPageSelector = ({

@@ -26,20 +31,23 @@ const NotionPageSelector = ({
   onPreview,
   isInPipeline = false,
   nodeId,
+  headerInfo,
 }: NotionPageSelectorProps) => {
   const { t } = useTranslation()
   const pipeline_id = useDatasetDetailContextWithSelector(s => s.dataset?.pipeline_id)
-  const { mutateAsync: getNotionPages } = useDatasourceNodeRun()
   const [notionData, setNotionData] = useState<DataSourceNotionWorkspace[]>([])
   const [searchValue, setSearchValue] = useState('')
   const [currentWorkspaceId, setCurrentWorkspaceId] = useState('')

+  const useDatasourceNodeRun = useRef(!isInPipeline ? usePublishedDatasourceNodeRun : useDraftDatasourceNodeRun)
+  const { mutateAsync: getNotionPages } = useDatasourceNodeRun.current()
+
   const getNotionData = useCallback(async () => {
     if (pipeline_id) {
       await getNotionPages({
         pipeline_id,
         node_id: nodeId,
         inputs: {},
         datasource_type: DatasourceType.onlineDocument,
       }, {
         onSuccess(notionData) {
           setNotionData(notionData as DataSourceNotionWorkspace[])

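The useRef construction above deserves a note. React's Rules of Hooks forbid calling a hook conditionally, so the component cannot branch between useDraftDatasourceNodeRun() and usePublishedDatasourceNodeRun() inline. Instead it selects one of the two hook functions once, parks the choice in a ref so it stays stable across renders, and calls ref.current() unconditionally. A minimal self-contained sketch of the pattern (component and prop names are illustrative, not from the commit):

import { useRef } from 'react'
import { useDraftDatasourceNodeRun, usePublishedDatasourceNodeRun } from '@/service/use-pipeline'

type ExampleProps = { isInPipeline?: boolean }

const Example = ({ isInPipeline = false }: ExampleProps) => {
  // The hook function is chosen once on mount; keeping it in a ref means the
  // same hook runs on every render, satisfying the Rules of Hooks.
  const useDatasourceNodeRun = useRef(!isInPipeline ? usePublishedDatasourceNodeRun : useDraftDatasourceNodeRun)
  const { mutateAsync: runNode } = useDatasourceNodeRun.current()
  // runNode(...) can now be invoked from event handlers as usual.
  return null
}

The trade-off is that a change to isInPipeline after mount is ignored; that is acceptable here because each call site passes a fixed value.
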
@@ -106,9 +114,7 @@ const NotionPageSelector = ({
     <div className='flex flex-col gap-y-2'>
       <Header
         isInPipeline={isInPipeline}
-        title={t('datasetPipeline.testRun.notion.title')}
-        docTitle={t('datasetPipeline.testRun.notion.docTitle')}
-        docLink={'https://www.notion.so/docs'}
+        {...headerInfo}
       />
       <div className='rounded-xl border border-components-panel-border bg-background-default-subtle'>
         <div className='flex h-12 items-center gap-x-2 rounded-t-xl border-b border-b-divider-regular bg-components-panel-bg p-2'>

@@ -8,11 +8,13 @@ import Crawling from './crawling'
 import ErrorMessage from './error-message'
 import CrawledResult from './crawled-result'
 import {
-  useDatasourceNodeRun,
+  useDraftDatasourceNodeRun,
   useDraftPipelinePreProcessingParams,
+  usePublishedDatasourceNodeRun,
   usePublishedPipelineProcessingParams,
 } from '@/service/use-pipeline'
 import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
+import { DatasourceType } from '@/models/pipeline'

 const I18N_PREFIX = 'datasetCreation.stepOne.website'

@@ -27,7 +29,7 @@ type CrawlerProps = {
     docLink: string
   }
   onPreview?: (payload: CrawlResultItem) => void
-  usingPublished?: boolean
+  isInPipeline?: boolean
 }

 enum Step {

@@ -43,14 +45,14 @@ const Crawler = ({
   onCheckedCrawlResultChange,
   onJobIdChange,
   onPreview,
-  usingPublished = false,
+  isInPipeline = false,
 }: CrawlerProps) => {
   const { t } = useTranslation()
   const [step, setStep] = useState<Step>(Step.init)
   const [controlFoldOptions, setControlFoldOptions] = useState<number>(0)
   const pipelineId = useDatasetDetailContextWithSelector(s => s.dataset?.pipeline_id)

-  const usePreProcessingParams = useRef(usingPublished ? usePublishedPipelineProcessingParams : useDraftPipelinePreProcessingParams)
+  const usePreProcessingParams = useRef(!isInPipeline ? usePublishedPipelineProcessingParams : useDraftPipelinePreProcessingParams)
   const { data: paramsConfig } = usePreProcessingParams.current({
     pipeline_id: pipelineId!,
     node_id: nodeId,

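Note the flipped predicate: the ref used to select the published params hook when usingPublished was set, and now selects it whenever the component is not rendered inside the pipeline test-run panel. That matches the call sites in this commit: CreateFormPipeline simply drops usingPublished (published remains the default), while TestRunPanel passes isInPipeline to get the draft variants.
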
@@ -71,7 +73,8 @@ const Crawler = ({
   const [crawlErrorMessage, setCrawlErrorMessage] = useState('')
   const showError = isCrawlFinished && crawlErrorMessage

-  const { mutateAsync: runDatasourceNode } = useDatasourceNodeRun()
+  const useDatasourceNodeRun = useRef(!isInPipeline ? usePublishedDatasourceNodeRun : useDraftDatasourceNodeRun)
+  const { mutateAsync: runDatasourceNode } = useDatasourceNodeRun.current()

   const handleRun = useCallback(async (value: Record<string, any>) => {
     setStep(Step.running)

@@ -79,6 +82,7 @@ const Crawler = ({
       node_id: nodeId,
       pipeline_id: pipelineId!,
       inputs: value,
+      datasource_type: DatasourceType.websiteCrawl,
     }, {
       onSuccess: (res: any) => {
         const jobId = res.job_id

@@ -14,7 +14,7 @@ type WebsiteCrawlProps = {
     docLink: string
   }
   onPreview?: (payload: CrawlResultItem) => void
-  usingPublished?: boolean
+  isInPipeline?: boolean
 }

 const WebsiteCrawl = ({

@@ -24,7 +24,7 @@ const WebsiteCrawl = ({
   onCheckedCrawlResultChange,
   onJobIdChange,
   onPreview,
-  usingPublished,
+  isInPipeline,
 }: WebsiteCrawlProps) => {
   return (
     <Crawler

@@ -34,7 +34,7 @@ const WebsiteCrawl = ({
       onCheckedCrawlResultChange={onCheckedCrawlResultChange}
       onJobIdChange={onJobIdChange}
       onPreview={onPreview}
-      usingPublished={usingPublished}
+      isInPipeline={isInPipeline}
     />
   )
 }

@@ -49,8 +49,8 @@ export const useDatasourceOptions = () => {
       return {
         nodeId: node.id,
         type: node.data.provider_type as DatasourceType,
-        description: '', // todo: Add description
-        docTitle: '', // todo: Add docTitle and docLink
+        description: node.data.datasource_label,
+        docTitle: 'How to use?',
         docLink: '',
         fileExtensions: node.data.fileExtensions || [],
       }

@@ -132,8 +132,14 @@ const TestRunPanel = () => {
         {datasource?.type === DatasourceType.onlineDocument && (
           <Notion
             nodeId={datasource?.nodeId || ''}
+            headerInfo={{
+              title: datasource.description,
+              docTitle: datasource.docTitle || '',
+              docLink: datasource.docLink || '',
+            }}
             notionPages={notionPages}
             updateNotionPages={updateNotionPages}
+            isInPipeline
           />
         )}
         {datasource?.type === DatasourceType.websiteCrawl && (

@@ -147,6 +153,7 @@ const TestRunPanel = () => {
             }}
             onCheckedCrawlResultChange={setWebsitePages}
             onJobIdChange={setWebsiteCrawlJobId}
+            isInPipeline
           />
         )}
         {isShowVectorSpaceFull && (

@@ -155,6 +155,7 @@ export type PipelineDatasourceNodeRunRequest = {
   pipeline_id: string
   node_id: string
   inputs: Record<string, any>
+  datasource_type: DatasourceType
 }

 export type PipelineDatasourceNodeRunResponse = Record<string, any>

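With the new field, a run request now declares which kind of datasource node it targets. An illustrative payload (the ids are invented):

// Hypothetical values, matching the PipelineDatasourceNodeRunRequest type.
const request: PipelineDatasourceNodeRunRequest = {
  pipeline_id: 'pl-123',
  node_id: 'node-456',
  inputs: {},
  datasource_type: DatasourceType.onlineDocument,
}
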
@@ -123,14 +123,29 @@ export const useCheckPipelineDependencies = (
   })
 }

-export const useDatasourceNodeRun = (
+export const useDraftDatasourceNodeRun = (
   mutationOptions: MutationOptions<PipelineDatasourceNodeRunResponse, Error, PipelineDatasourceNodeRunRequest> = {},
 ) => {
   return useMutation({
-    mutationKey: [NAME_SPACE, 'datasource-node-run'],
+    mutationKey: [NAME_SPACE, 'draft-datasource-node-run'],
     mutationFn: (request: PipelineDatasourceNodeRunRequest) => {
       const { pipeline_id, node_id, ...rest } = request
-      return post<PipelineDatasourceNodeRunResponse>(`/rag/pipelines/${pipeline_id}/workflows/published/nodes/${node_id}/run`, {
+      return post<PipelineDatasourceNodeRunResponse>(`/rag/pipelines/${pipeline_id}/workflows/draft/datasource/nodes/${node_id}/run`, {
         body: rest,
       })
     },
     ...mutationOptions,
   })
 }
+
+export const usePublishedDatasourceNodeRun = (
+  mutationOptions: MutationOptions<PipelineDatasourceNodeRunResponse, Error, PipelineDatasourceNodeRunRequest> = {},
+) => {
+  return useMutation({
+    mutationKey: [NAME_SPACE, 'published-datasource-node-run'],
+    mutationFn: (request: PipelineDatasourceNodeRunRequest) => {
+      const { pipeline_id, node_id, ...rest } = request
+      return post<PipelineDatasourceNodeRunResponse>(`/rag/pipelines/${pipeline_id}/workflows/published/datasource/nodes/${node_id}/run`, {
+        body: rest,
+      })
+    },

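The former useDatasourceNodeRun, which despite its generic name always posted to a published-workflow URL, is thus replaced by two hooks differing only in mutation key and endpoint; note the published path also gains a datasource segment (/workflows/published/datasource/nodes/.../run). A hedged usage sketch inside a component (variable values are illustrative):

// Pick the hook that matches the workflow variant you are running against.
const { mutateAsync: runDraftNode } = useDraftDatasourceNodeRun()

await runDraftNode({
  pipeline_id, // assumed in scope
  node_id: nodeId,
  inputs: {},
  datasource_type: DatasourceType.websiteCrawl,
})
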
@@ -141,7 +156,7 @@ export const useDatasourceNodeRun = (
 export const useDraftPipelineProcessingParams = (params: PipelineProcessingParamsRequest, enabled = true) => {
   const { pipeline_id, node_id } = params
   return useQuery<PipelineProcessingParamsResponse>({
-    queryKey: [NAME_SPACE, 'pipeline-processing-params', pipeline_id, node_id],
+    queryKey: [NAME_SPACE, 'draft-pipeline-processing-params', pipeline_id, node_id],
     queryFn: () => {
       return get<PipelineProcessingParamsResponse>(`/rag/pipelines/${pipeline_id}/workflows/draft/processing/parameters`, {
         params: {

@@ -157,7 +172,7 @@ export const useDraftPipelineProcessingParams = (params: PipelineProcessingParam
 export const usePublishedPipelineProcessingParams = (params: PipelineProcessingParamsRequest) => {
   const { pipeline_id, node_id } = params
   return useQuery<PipelineProcessingParamsResponse>({
-    queryKey: [NAME_SPACE, 'pipeline-processing-params', pipeline_id, node_id],
+    queryKey: [NAME_SPACE, 'published-pipeline-processing-params', pipeline_id, node_id],
     queryFn: () => {
       return get<PipelineProcessingParamsResponse>(`/rag/pipelines/${pipeline_id}/workflows/published/processing/parameters`, {
         params: {

@@ -255,7 +270,7 @@ export const useUpdateDataSourceCredentials = (
 export const useDraftPipelinePreProcessingParams = (params: PipelinePreProcessingParamsRequest, enabled = true) => {
   const { pipeline_id, node_id } = params
   return useQuery<PipelinePreProcessingParamsResponse>({
-    queryKey: [NAME_SPACE, 'pipeline-pre-processing-params', pipeline_id, node_id],
+    queryKey: [NAME_SPACE, 'draft-pipeline-pre-processing-params', pipeline_id, node_id],
     queryFn: () => {
       return get<PipelinePreProcessingParamsResponse>(`/rag/pipelines/${pipeline_id}/workflows/draft/pre-processing/parameters`, {
         params: {

@@ -271,7 +286,7 @@ export const useDraftPipelinePreProcessingParams = (params: PipelinePreProcessin
 export const usePublishedPipelinePreProcessingParams = (params: PipelinePreProcessingParamsRequest, enabled = true) => {
   const { pipeline_id, node_id } = params
   return useQuery<PipelinePreProcessingParamsResponse>({
-    queryKey: [NAME_SPACE, 'pipeline-pre-processing-params', pipeline_id, node_id],
+    queryKey: [NAME_SPACE, 'published-pipeline-pre-processing-params', pipeline_id, node_id],
     queryFn: () => {
       return get<PipelinePreProcessingParamsResponse>(`/rag/pipelines/${pipeline_id}/workflows/published/processing/parameters`, {
         params: {

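These four queryKey renames fix a latent cache collision: the draft and published variants of each params hook previously shared the same key for a given pipeline_id/node_id, so React Query could serve a cached draft response where a published one was expected, or vice versa. Distinct prefixes keep the cache entries separate and also let callers target one variant, for example (illustrative; assumes NAME_SPACE, pipeline_id and node_id are in scope):

import { useQueryClient } from '@tanstack/react-query'

// Invalidate only the draft processing params for one node, leaving the
// published entry cached.
const queryClient = useQueryClient()
queryClient.invalidateQueries({
  queryKey: [NAME_SPACE, 'draft-pipeline-processing-params', pipeline_id, node_id],
})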