Mirror of https://github.com/langgenius/dify.git (synced 2026-04-29 04:26:30 +08:00)

Commit 8aca70cd50: Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev
@@ -10,10 +10,10 @@ import { TopBar } from './top-bar'
 import { DataSourceType } from '@/models/datasets'
 import type { CrawlOptions, CrawlResultItem, DataSet, FileItem, createDocumentResponse } from '@/models/datasets'
 import { fetchDataSource } from '@/service/common'
-import { fetchDatasetDetail } from '@/service/datasets'
 import { DataSourceProvider, type NotionPage } from '@/models/common'
 import { useModalContext } from '@/context/modal-context'
 import { useDefaultModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
+import { useDatasetDetail } from '@/service/knowledge/use-dataset'
 
 type DatasetUpdateFormProps = {
   datasetId?: string
@@ -39,7 +39,6 @@ const DatasetUpdateForm = ({ datasetId }: DatasetUpdateFormProps) => {
   const [retrievalMethodCache, setRetrievalMethodCache] = useState('')
   const [fileList, setFiles] = useState<FileItem[]>([])
   const [result, setResult] = useState<createDocumentResponse | undefined>()
-  const [hasError, setHasError] = useState(false)
   const { data: embeddingsDefaultModel } = useDefaultModel(ModelTypeEnum.textEmbedding)
 
   const [notionPages, setNotionPages] = useState<NotionPage[]>([])
@@ -104,21 +103,14 @@ const DatasetUpdateForm = ({ datasetId }: DatasetUpdateFormProps) => {
   }, [])
 
   const [detail, setDetail] = useState<DataSet | null>(null)
-  useEffect(() => {
-    (async () => {
-      if (datasetId) {
-        try {
-          const detail = await fetchDatasetDetail(datasetId)
-          setDetail(detail)
-        }
-        catch {
-          setHasError(true)
-        }
-      }
-    })()
-  }, [datasetId])
-
-  if (hasError)
+  const { data: datasetDetail, error: fetchDatasetDetailError } = useDatasetDetail(datasetId || '')
+
+  useEffect(() => {
+    if (!datasetDetail) return
+    setDetail(datasetDetail)
+  }, [datasetDetail])
+
+  if (fetchDatasetDetailError)
     return <AppUnavailable code={500} unknownReason={t('datasetCreation.error.unavailable') as string} />
 
   return (
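This hunk swaps a hand-rolled fetch-in-an-effect (with its own hasError flag) for the shared TanStack Query hook. A minimal sketch of the resulting pattern, assuming useDatasetDetail wraps useQuery the way the use-dataset hunk near the end of this diff shows; the stand-in types and query key are illustrative:

import { useEffect, useState } from 'react'
import { useQuery } from '@tanstack/react-query'

type DataSet = { id: string; name: string } // trimmed stand-in for the real model

// Stand-in for useDatasetDetail from '@/service/knowledge/use-dataset':
// the real hook GETs /datasets/{id} and gates itself on a non-empty id.
const useDatasetDetail = (datasetId: string) =>
  useQuery<DataSet>({
    queryKey: ['dataset', 'detail', datasetId],
    queryFn: async () => (await fetch(`/datasets/${datasetId}`)).json(),
    enabled: !!datasetId, // stays idle for the empty-string id passed above
  })

export const useDatasetDetailState = (datasetId?: string) => {
  const [detail, setDetail] = useState<DataSet | null>(null)
  const { data: datasetDetail, error: fetchDatasetDetailError } = useDatasetDetail(datasetId || '')

  // Mirror the query result into local state, exactly as the new component code does.
  useEffect(() => {
    if (!datasetDetail)
      return
    setDetail(datasetDetail)
  }, [datasetDetail])

  return { detail, hasError: !!fetchDatasetDetailError }
}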
@@ -22,7 +22,7 @@ const Datasets = ({
   const { t } = useTranslation()
   const isCurrentWorkspaceEditor = useAppContextWithSelector(state => state.isCurrentWorkspaceEditor)
   const {
-    data,
+    data: datasetList,
     fetchNextPage,
     hasNextPage,
     isFetching,
@@ -54,13 +54,13 @@ const Datasets = ({
       observerRef.current.observe(anchorRef.current)
     }
     return () => observerRef.current?.disconnect()
-  }, [anchorRef, data, hasNextPage, fetchNextPage])
+  }, [anchorRef, datasetList, hasNextPage, fetchNextPage])
 
   return (
     <>
      <nav className='grid shrink-0 grow grid-cols-1 content-start gap-3 px-12 pt-2 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4'>
        {isCurrentWorkspaceEditor && <NewDatasetCard />}
-        {data?.pages.map(({ data: datasets }) => datasets.map(dataset => (
+        {datasetList?.pages.map(({ data: datasets }) => datasets.map(dataset => (
          <DatasetCard key={dataset.id} dataset={dataset} onSuccess={resetDatasetList} />),
        ))}
      </nav>
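The rename from data to datasetList tracks the shape that comes with an infinite query: results arrive as pages, each holding its own data array, hence the nested datasetList?.pages.map(({ data: datasets }) => ...) above. A sketch of a compatible hook, under the assumption that useDatasetList is built on TanStack's useInfiniteQuery; the response fields are modeled on the has_more convention visible elsewhere in this diff and the page field is a guess:

import { useInfiniteQuery } from '@tanstack/react-query'

type Dataset = { id: string; name: string }
type DataSetListResponse = { data: Dataset[]; has_more: boolean; page: number }

export const useDatasetListSketch = () =>
  useInfiniteQuery({
    queryKey: ['dataset', 'list'],
    queryFn: async ({ pageParam }): Promise<DataSetListResponse> => {
      const res = await fetch(`/datasets?page=${pageParam}&limit=30`)
      return res.json()
    },
    initialPageParam: 1,
    // Stop paging once the server reports no further results.
    getNextPageParam: lastPage => (lastPage.has_more ? lastPage.page + 1 : undefined),
  })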
@@ -2,8 +2,6 @@
 import { useCallback, useRef, useState } from 'react'
 import { useMount } from 'ahooks'
 import { useTranslation } from 'react-i18next'
-import { useSWRConfig } from 'swr'
-import { unstable_serialize } from 'swr/infinite'
 import PermissionSelector from '../permission-selector'
 import IndexMethod from '../index-method'
 import RetrievalSettings from '../../external-knowledge-base/create/RetrievalSettings'
@@ -16,7 +14,7 @@ import Textarea from '@/app/components/base/textarea'
 import { ApiConnectionMod } from '@/app/components/base/icons/src/vender/solid/development'
 import { updateDatasetSetting } from '@/service/datasets'
 import type { IconInfo } from '@/models/datasets'
-import { ChunkingMode, type DataSetListResponse, DatasetPermission } from '@/models/datasets'
+import { ChunkingMode, DatasetPermission } from '@/models/datasets'
 import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
 import type { AppIconType, RetrievalConfig } from '@/types/app'
 import { useSelector as useAppContextWithSelector } from '@/context/app-context'
@@ -38,16 +36,11 @@ import ChunkStructure from '../chunk-structure'
 import Toast from '@/app/components/base/toast'
 import { RiAlertFill } from '@remixicon/react'
 import { useDocLink } from '@/context/i18n'
+import { useResetDatasetList } from '@/service/knowledge/use-dataset'
 
 const rowClass = 'flex gap-x-1'
 const labelClass = 'flex items-center shrink-0 w-[180px] h-7 pt-1'
 
-const getKey = (pageIndex: number, previousPageData: DataSetListResponse) => {
-  if (!pageIndex || previousPageData.has_more)
-    return { url: 'datasets', params: { page: pageIndex + 1, limit: 30 } }
-  return null
-}
-
 const DEFAULT_APP_ICON: IconInfo = {
   icon_type: 'emoji',
   icon: '📙',
@@ -58,7 +51,6 @@ const DEFAULT_APP_ICON: IconInfo = {
 const Form = () => {
   const { t } = useTranslation()
   const docLink = useDocLink()
-  const { mutate } = useSWRConfig()
   const isCurrentWorkspaceDatasetOperator = useAppContextWithSelector(state => state.isCurrentWorkspaceDatasetOperator)
   const currentDataset = useDatasetDetailContextWithSelector(state => state.dataset)
   const mutateDatasets = useDatasetDetailContextWithSelector(state => state.mutateDatasetRes)
@@ -135,6 +127,7 @@ const Form = () => {
     getMembers()
   })
 
+  const resetDatasetList = useResetDatasetList()
   const handleSave = async () => {
     if (loading)
       return
@@ -197,7 +190,7 @@ const Form = () => {
       Toast.notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
       if (mutateDatasets) {
        await mutateDatasets()
-        mutate(unstable_serialize(getKey))
+        resetDatasetList()
      }
    }
    catch {
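The save path previously revalidated the SWR infinite list by serialized key (mutate(unstable_serialize(getKey))); with SWR gone, useResetDatasetList plays that role. One plausible implementation, assuming the hook invalidates the list query in the TanStack Query cache (the query key here is illustrative, not confirmed by the diff):

import { useCallback } from 'react'
import { useQueryClient } from '@tanstack/react-query'

export const useResetDatasetList = () => {
  const queryClient = useQueryClient()
  // Invalidation marks every cached page of the dataset list stale and
  // refetches the active ones, mirroring what the SWR mutate-by-key call did.
  return useCallback(
    () => queryClient.invalidateQueries({ queryKey: ['dataset', 'list'] }),
    [queryClient],
  )
}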
@@ -7,39 +7,31 @@
   RiBook2Fill,
   RiBook2Line,
 } from '@remixicon/react'
-import useSWR from 'swr'
-import useSWRInfinite from 'swr/infinite'
 import { flatten } from 'lodash-es'
 import Nav from '../nav'
 import type { NavItem } from '../nav/nav-selector'
-import { fetchDatasetDetail, fetchDatasets } from '@/service/datasets'
-import type { DataSetListResponse } from '@/models/datasets'
 import { basePath } from '@/utils/var'
-
-const getKey = (pageIndex: number, previousPageData: DataSetListResponse) => {
-  if (!pageIndex || previousPageData.has_more)
-    return { url: 'datasets', params: { page: pageIndex + 1, limit: 30 } }
-  return null
-}
+import { useDatasetDetail, useDatasetList } from '@/service/knowledge/use-dataset'
 
 const DatasetNav = () => {
   const { t } = useTranslation()
   const router = useRouter()
   const { datasetId } = useParams()
-  const { data: currentDataset } = useSWR(
-    datasetId
-      ? {
-        url: 'fetchDatasetDetail',
-        datasetId,
-      }
-      : null,
-    apiParams => fetchDatasetDetail(apiParams.datasetId as string))
-  const { data: datasetsData, setSize } = useSWRInfinite(datasetId ? getKey : () => null, fetchDatasets, { revalidateFirstPage: false, revalidateAll: true })
-  const datasetItems = flatten(datasetsData?.map(datasetData => datasetData.data))
+  const { data: currentDataset } = useDatasetDetail(datasetId as string)
+  const {
+    data: datasetList,
+    fetchNextPage,
+    hasNextPage,
+  } = useDatasetList({
+    initialPage: 1,
+    limit: 30,
+  })
+  const datasetItems = flatten(datasetList?.pages.map(datasetData => datasetData.data))
 
   const handleLoadMore = useCallback(() => {
-    setSize(size => size + 1)
-  }, [setSize])
+    if (hasNextPage)
+      fetchNextPage()
+  }, [hasNextPage, fetchNextPage])
 
   return (
     <Nav
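The nav migration maps one-to-one onto the new hook surface: useSWRInfinite's setSize(size => size + 1) becomes fetchNextPage(), the flat array of page responses becomes datasetList?.pages, and the load-more handler gains a hasNextPage guard the SWR version never had. The guarded handler in isolation (a sketch; parameter types are illustrative):

import { useCallback } from 'react'

export const useLoadMoreHandler = (hasNextPage: boolean | undefined, fetchNextPage: () => void) =>
  useCallback(() => {
    // No-op once every page is loaded, so repeated triggers near the end
    // of the list stop issuing redundant requests.
    if (hasNextPage)
      fetchNextPage()
  }, [hasNextPage, fetchNextPage])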
@@ -1,12 +1,14 @@
-import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
+import { useCallback, useEffect, useMemo, useState } from 'react'
 import WorkspaceSelector from '@/app/components/base/notion-page-selector/workspace-selector'
 import SearchInput from '@/app/components/base/notion-page-selector/search-input'
 import PageSelector from '@/app/components/base/notion-page-selector/page-selector'
 import type { DataSourceNotionPageMap, DataSourceNotionWorkspace, NotionPage } from '@/models/common'
 import Header from '@/app/components/datasets/create/website/base/header'
 import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
-import { useDraftDatasourceNodeRun, usePublishedDatasourceNodeRun } from '@/service/use-pipeline'
 import { DatasourceType } from '@/models/pipeline'
+import { ssePost } from '@/service/base'
+import Toast from '@/app/components/base/toast'
+import type { DataSourceNodeCompletedResponse } from '@/types/pipeline'
 
 type OnlineDocumentSelectorProps = {
   value?: string[]
@@ -33,28 +35,37 @@ const OnlineDocumentSelector = ({
   nodeId,
   headerInfo,
 }: OnlineDocumentSelectorProps) => {
-  const pipeline_id = useDatasetDetailContextWithSelector(s => s.dataset?.pipeline_id)
+  const pipelineId = useDatasetDetailContextWithSelector(s => s.dataset?.pipeline_id)
   const [documentsData, setDocumentsData] = useState<DataSourceNotionWorkspace[]>([])
   const [searchValue, setSearchValue] = useState('')
   const [currentWorkspaceId, setCurrentWorkspaceId] = useState('')
 
-  const useDatasourceNodeRun = useRef(!isInPipeline ? usePublishedDatasourceNodeRun : useDraftDatasourceNodeRun)
-  const { mutateAsync: crawlOnlineDocuments } = useDatasourceNodeRun.current()
+  const datasourceNodeRunURL = !isInPipeline
+    ? `/rag/pipelines/${pipelineId}/workflows/published/datasource/nodes/${nodeId}/run`
+    : `/rag/pipelines/${pipelineId}/workflows/draft/datasource/nodes/${nodeId}/run`
 
   const getOnlineDocuments = useCallback(async () => {
-    if (pipeline_id) {
-      await crawlOnlineDocuments({
-        pipeline_id,
-        node_id: nodeId,
-        inputs: {},
-        datasource_type: DatasourceType.onlineDocument,
-      }, {
-        onSuccess(documentsData) {
-          setDocumentsData(documentsData.result as DataSourceNotionWorkspace[])
-        },
-      })
-    }
-  }, [crawlOnlineDocuments, nodeId, pipeline_id])
+    ssePost(
+      datasourceNodeRunURL,
+      {
+        body: {
+          inputs: {},
+          datasource_type: DatasourceType.onlineDocument,
+        },
+      },
+      {
+        onDataSourceNodeCompleted: (documentsData: DataSourceNodeCompletedResponse) => {
+          setDocumentsData(documentsData.data as DataSourceNotionWorkspace[])
+        },
+        onError: (message: string) => {
+          Toast.notify({
+            type: 'error',
+            message,
+          })
+        },
+      },
+    )
  }, [datasourceNodeRunURL])
 
   useEffect(() => {
     getOnlineDocuments()
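Both selector variants now reach the same endpoint family and differ only in the published/draft path segment, which is why a computed URL plus ssePost replaces the pair of mutation hooks (and the useRef trick needed to pick between them without breaking the rules of hooks). The route construction in isolation (a sketch; the path prefix is taken verbatim from the hunk above):

// isInPipeline selects the draft workflow; the standalone knowledge page
// runs against the published one.
const datasourceNodeRunURL = (pipelineId: string, nodeId: string, isInPipeline: boolean) =>
  isInPipeline
    ? `/rag/pipelines/${pipelineId}/workflows/draft/datasource/nodes/${nodeId}/run`
    : `/rag/pipelines/${pipelineId}/workflows/published/datasource/nodes/${nodeId}/run`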
@@ -8,16 +8,16 @@ import Crawling from './crawling'
 import ErrorMessage from './error-message'
 import CrawledResult from './crawled-result'
 import {
-  useDraftDatasourceNodeRun,
-  useDraftDatasourceNodeRunStatus,
   useDraftPipelinePreProcessingParams,
-  usePublishedDatasourceNodeRun,
-  usePublishedDatasourceNodeRunStatus,
   usePublishedPipelinePreProcessingParams,
 } from '@/service/use-pipeline'
 import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
 import { DatasourceType } from '@/models/pipeline'
-import { sleep } from '@/utils'
+import { ssePost } from '@/service/base'
+import type {
+  DataSourceNodeCompletedResponse,
+  DataSourceNodeProcessingResponse,
+} from '@/types/pipeline'
 
 const I18N_PREFIX = 'datasetCreation.stepOne.website'
 
@@ -51,6 +51,8 @@ const Crawler = ({
   const { t } = useTranslation()
   const [step, setStep] = useState<Step>(Step.init)
   const [controlFoldOptions, setControlFoldOptions] = useState<number>(0)
+  const [totalNum, setTotalNum] = useState(0)
+  const [crawledNum, setCrawledNum] = useState(0)
   const pipelineId = useDatasetDetailContextWithSelector(s => s.dataset?.pipeline_id)
 
   const usePreProcessingParams = useRef(!isInPipeline ? usePublishedPipelinePreProcessingParams : useDraftPipelinePreProcessingParams)
@@ -68,66 +70,49 @@ const Crawler = ({
   const isCrawlFinished = step === Step.finished
   const isRunning = step === Step.running
   const [crawlResult, setCrawlResult] = useState<{
-    result: CrawlResultItem[]
+    data: CrawlResultItem[]
     time_consuming: number | string
   } | undefined>(undefined)
   const [crawlErrorMessage, setCrawlErrorMessage] = useState('')
   const showError = isCrawlFinished && crawlErrorMessage
 
-  const useDatasourceNodeRun = useRef(!isInPipeline ? usePublishedDatasourceNodeRun : useDraftDatasourceNodeRun)
-  const useDatasourceNodeRunStatus = useRef(!isInPipeline ? usePublishedDatasourceNodeRunStatus : useDraftDatasourceNodeRunStatus)
-  const { mutateAsync: runDatasourceNode } = useDatasourceNodeRun.current()
-  const { mutateAsync: getDatasourceNodeRunStatus } = useDatasourceNodeRunStatus.current()
-
-  const checkCrawlStatus = useCallback(async (jobId: string) => {
-    const res = await getDatasourceNodeRunStatus({
-      node_id: nodeId,
-      pipeline_id: pipelineId!,
-      job_id: jobId,
-      datasource_type: DatasourceType.websiteCrawl,
-    }, {
-      onError: async (error: any) => {
-        const message = await error.json()
-        setCrawlErrorMessage(message || t(`${I18N_PREFIX}.unknownError`))
-      },
-    }) as any
-    if (res.status === 'completed') {
-      setCrawlResult(res)
-      onCheckedCrawlResultChange(res.result || []) // default select the crawl result
-      setCrawlErrorMessage('')
-      setStep(Step.finished)
-    }
-    else if (res.status === 'processing') {
-      await sleep(2500)
-      await checkCrawlStatus(jobId)
-    }
-  }, [getDatasourceNodeRunStatus, nodeId, pipelineId, t, onCheckedCrawlResultChange])
-
+  const datasourceNodeRunURL = !isInPipeline
+    ? `/rag/pipelines/${pipelineId}/workflows/published/datasource/nodes/${nodeId}/run`
+    : `/rag/pipelines/${pipelineId}/workflows/draft/datasource/nodes/${nodeId}/run`
+
   const handleRun = useCallback(async (value: Record<string, any>) => {
     setStep(Step.running)
-    const res = await runDatasourceNode({
-      node_id: nodeId,
-      pipeline_id: pipelineId!,
-      inputs: value,
-      datasource_type: DatasourceType.websiteCrawl,
-    }, {
-      onError: async (error: any) => {
-        const message = await error.json()
-        setCrawlErrorMessage(message || t(`${I18N_PREFIX}.unknownError`))
-        setStep(Step.finished)
-      },
-    }) as any
-    const jobId = res.job_id
-    if (!jobId && res.status === 'completed') {
-      setCrawlResult(res)
-      onCheckedCrawlResultChange(res.result || []) // default select the crawl result
-      setStep(Step.finished)
-    }
-    else if (jobId) {
-      await checkCrawlStatus(jobId)
-    }
-    setCrawlErrorMessage('')
-  }, [runDatasourceNode, nodeId, pipelineId, onCheckedCrawlResultChange, checkCrawlStatus, t])
+    ssePost(
+      datasourceNodeRunURL,
+      {
+        body: {
+          inputs: value,
+          datasource_type: DatasourceType.websiteCrawl,
+          response_mode: 'streaming',
+        },
+      },
+      {
+        onDataSourceNodeProcessing: (data: DataSourceNodeProcessingResponse) => {
+          setTotalNum(data.total ?? 0)
+          setCrawledNum(data.completed ?? 0)
+        },
+        onDataSourceNodeCompleted: (data: DataSourceNodeCompletedResponse) => {
+          const { data: crawlData, time_consuming } = data
+          setCrawlResult({
+            data: crawlData as CrawlResultItem[],
+            time_consuming: time_consuming ?? 0,
+          })
+          onCheckedCrawlResultChange(crawlData || []) // default select the crawl result
+          setCrawlErrorMessage('')
+          setStep(Step.finished)
+        },
+        onError: (message: string) => {
+          setCrawlErrorMessage(message || t(`${I18N_PREFIX}.unknownError`))
+          setStep(Step.finished)
+        },
+      },
+    )
+  }, [datasourceNodeRunURL, onCheckedCrawlResultChange, t])
 
   const handleSubmit = useCallback((value: Record<string, any>) => {
     handleRun(value)
@@ -152,8 +137,8 @@ const Crawler = ({
      <div className='relative flex flex-col'>
        {isRunning && (
          <Crawling
-            crawledNum={0}
-            totalNum={0}
+            crawledNum={crawledNum}
+            totalNum={totalNum}
          />
        )}
        {showError && (
@@ -166,7 +151,7 @@ const Crawler = ({
        {isCrawlFinished && !showError && (
          <CrawledResult
            className='mt-2'
-            list={crawlResult?.result || []}
+            list={crawlResult?.data || []}
            checkedList={checkedCrawlResult}
            onSelectedChange={onCheckedCrawlResultChange}
            usedTime={Number.parseFloat(crawlResult?.time_consuming as string) || 0}
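The crawler's status loop (run, then poll every 2.5 s via sleep and recurse until completed) is gone; progress now arrives as push events on the same SSE connection, which is what lets the Crawling component show live crawledNum/totalNum instead of the previous hard-coded zeros. A trimmed sketch of the progress half of that wiring (the type restates the new '@/types/pipeline' file at the end of this diff):

import { useState } from 'react'

type DataSourceNodeProcessingResponse = {
  event: 'datasource_processing'
  total: number
  completed: number
}

export const useCrawlProgress = () => {
  const [totalNum, setTotalNum] = useState(0)
  const [crawledNum, setCrawledNum] = useState(0)

  // Handed to ssePost as onDataSourceNodeProcessing: each frame overwrites
  // the counters, so the UI always reflects the latest server-side state.
  const onDataSourceNodeProcessing = (data: DataSourceNodeProcessingResponse) => {
    setTotalNum(data.total ?? 0)
    setCrawledNum(data.completed ?? 0)
  }

  return { totalNum, crawledNum, onDataSourceNodeProcessing }
}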
@@ -1,11 +1,12 @@
 import { createContext, useContext, useContextSelector } from 'use-context-selector'
 import type { DataSet } from '@/models/datasets'
 import type { IndexingType } from '@/app/components/datasets/create/step-two'
+import type { QueryObserverResult, RefetchOptions } from '@tanstack/react-query'
 
 type DatasetDetailContextValue = {
   indexingTechnique?: IndexingType
   dataset?: DataSet
-  mutateDatasetRes?: () => void
+  mutateDatasetRes?: (options?: RefetchOptions | undefined) => Promise<QueryObserverResult<DataSet, Error>>
 }
 const DatasetDetailContext = createContext<DatasetDetailContextValue>({})
 
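Widening mutateDatasetRes from () => void to TanStack Query's refetch signature means a provider can hand a query hook's refetch function straight through, while existing `await mutateDatasets()` callers keep working. A sketch (the DataSet stand-in and provider usage are illustrative):

import type { QueryObserverResult, RefetchOptions } from '@tanstack/react-query'

type DataSet = { id: string; name: string } // trimmed stand-in

type MutateDatasetRes = (options?: RefetchOptions) => Promise<QueryObserverResult<DataSet, Error>>

// A useQuery result's `refetch` already has exactly this shape, so a provider
// can do (illustrative):
//   const { data, refetch } = useDatasetDetail(datasetId)
//   <DatasetDetailContext.Provider value={{ dataset: data, mutateDatasetRes: refetch }}>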
@@ -178,10 +178,10 @@ export type FetchDatasetsParams = {
 
 export type DatasetListRequest = {
   initialPage: number
-  tag_ids: string[]
+  tag_ids?: string[]
   limit: number
-  include_all: boolean
-  keyword: string
+  include_all?: boolean
+  keyword?: string
 }
 
 export type DataSetListResponse = {
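Making tag_ids, include_all and keyword optional matches how the new callers actually use the type; the nav hunk above passes only the paging fields. For example:

type DatasetListRequest = {
  initialPage: number
  tag_ids?: string[]
  limit: number
  include_all?: boolean
  keyword?: string
}

// Valid under the loosened type; previously all five fields were required.
const request: DatasetListRequest = { initialPage: 1, limit: 30 }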
@@ -168,34 +168,6 @@ export type PipelinePreProcessingParamsResponse = {
   variables: RAGPipelineVariables
 }
-
-export type PipelineDatasourceNodeRunRequest = {
-  pipeline_id: string
-  node_id: string
-  inputs: Record<string, any>
-  datasource_type: DatasourceType
-}
-
-export type PipelineDatasourceNodeRunResponse = {
-  job_id?: string
-  status: 'processing' | 'completed'
-  result: any
-  provider_type: DatasourceType
-}
-
-export type PipelineDatasourceNodeRunStatusRequest = {
-  pipeline_id: string
-  node_id: string
-  job_id: string
-  datasource_type: DatasourceType
-}
-
-export type PipelineDatasourceNodeRunStatusResponse = {
-  provider_type: DatasourceType
-  result: Record<string, any>
-  status: 'processing' | 'completed'
-  job_id: string
-}
 
 export type PublishedPipelineInfoResponse = {
   id: string
   graph: {
@@ -25,6 +25,10 @@ import { removeAccessToken } from '@/app/components/share/utils'
 import type { FetchOptionType, ResponseError } from './fetch'
 import { ContentType, base, baseOptions, getAccessToken } from './fetch'
 import { asyncRunSafe } from '@/utils'
+import type {
+  DataSourceNodeCompletedResponse,
+  DataSourceNodeProcessingResponse,
+} from '@/types/pipeline'
 const TIME_OUT = 100000
 
 export type IOnDataMoreInfo = {
@@ -63,6 +67,9 @@ export type IOnLoopNext = (workflowStarted: LoopNextResponse) => void
 export type IOnLoopFinished = (workflowFinished: LoopFinishedResponse) => void
 export type IOnAgentLog = (agentLog: AgentLogResponse) => void
 
+export type IOnDataSourceNodeProcessing = (dataSourceNodeProcessing: DataSourceNodeProcessingResponse) => void
+export type IOnDataSourceNodeCompleted = (dataSourceNodeCompleted: DataSourceNodeCompletedResponse) => void
+
 export type IOtherOptions = {
   isPublicAPI?: boolean
   isMarketplaceAPI?: boolean
@@ -97,6 +104,10 @@ export type IOtherOptions = {
   onLoopNext?: IOnLoopNext
   onLoopFinish?: IOnLoopFinished
   onAgentLog?: IOnAgentLog
+
+  // Pipeline data source node run
+  onDataSourceNodeProcessing?: IOnDataSourceNodeProcessing
+  onDataSourceNodeCompleted?: IOnDataSourceNodeCompleted
 }
 
 function unicodeToChar(text: string) {
@@ -152,6 +163,8 @@ const handleStream = (
   onTTSEnd?: IOnTTSEnd,
   onTextReplace?: IOnTextReplace,
   onAgentLog?: IOnAgentLog,
+  onDataSourceNodeProcessing?: IOnDataSourceNodeProcessing,
+  onDataSourceNodeCompleted?: IOnDataSourceNodeCompleted,
 ) => {
   if (!response.ok)
     throw new Error('Network response was not ok')
@@ -270,6 +283,15 @@
          else if (bufferObj.event === 'tts_message_end') {
            onTTSEnd?.(bufferObj.message_id, bufferObj.audio)
          }
+          else if (bufferObj.event === 'datasource_processing') {
+            onDataSourceNodeProcessing?.(bufferObj as DataSourceNodeProcessingResponse)
+          }
+          else if (bufferObj.event === 'datasource_completed') {
+            onDataSourceNodeCompleted?.(bufferObj as DataSourceNodeCompletedResponse)
+          }
+          else {
+            console.warn(`Unknown event: ${bufferObj.event}`, bufferObj)
+          }
        }
      })
      buffer = lines[lines.length - 1]
@@ -363,6 +385,8 @@ export const ssePost = async (
     onLoopStart,
     onLoopNext,
     onLoopFinish,
+    onDataSourceNodeProcessing,
+    onDataSourceNodeCompleted,
   } = otherOptions
   const abortController = new AbortController()
 
@@ -460,6 +484,8 @@
       onTTSEnd,
       onTextReplace,
      onAgentLog,
+      onDataSourceNodeProcessing,
+      onDataSourceNodeCompleted,
     )
   }).catch((e) => {
     if (e.toString() !== 'AbortError: The user aborted a request.' && !e.toString().errorMessage.includes('TypeError: Cannot assign to read only property'))
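The stream handler treats the parsed SSE payload's event field as a discriminator: the two new branches route datasource frames to their callbacks, and the new trailing else surfaces anything unrecognized instead of dropping it silently. That dispatch logic in isolation (types are trimmed stand-ins for '@/types/pipeline'):

type DataSourceNodeProcessingResponse = { event: 'datasource_processing'; total: number; completed: number }
type DataSourceNodeCompletedResponse = { event: 'datasource_completed'; data: any; time_consuming?: number }

type DataSourceCallbacks = {
  onDataSourceNodeProcessing?: (r: DataSourceNodeProcessingResponse) => void
  onDataSourceNodeCompleted?: (r: DataSourceNodeCompletedResponse) => void
}

// Mirrors the new branches in handleStream: `event` picks the callback,
// anything unrecognized is logged rather than ignored.
function dispatchDataSourceEvent(
  bufferObj: { event: string } & Record<string, any>,
  callbacks: DataSourceCallbacks,
) {
  if (bufferObj.event === 'datasource_processing')
    callbacks.onDataSourceNodeProcessing?.(bufferObj as DataSourceNodeProcessingResponse)
  else if (bufferObj.event === 'datasource_completed')
    callbacks.onDataSourceNodeCompleted?.(bufferObj as DataSourceNodeCompletedResponse)
  else
    console.warn(`Unknown event: ${bufferObj.event}`, bufferObj)
}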
@@ -46,6 +46,7 @@ export const useDatasetDetail = (datasetId: string) => {
   return useQuery({
     queryKey: [NAME_SPACE, 'detail', datasetId],
     queryFn: () => get<DataSet>(`/datasets/${datasetId}`),
+    enabled: !!datasetId,
   })
 }
 
@@ -8,10 +8,6 @@ import type {
   ImportPipelineDSLRequest,
   ImportPipelineDSLResponse,
   PipelineCheckDependenciesResponse,
-  PipelineDatasourceNodeRunRequest,
-  PipelineDatasourceNodeRunResponse,
-  PipelineDatasourceNodeRunStatusRequest,
-  PipelineDatasourceNodeRunStatusResponse,
   PipelinePreProcessingParamsRequest,
   PipelinePreProcessingParamsResponse,
   PipelineProcessingParamsRequest,
@@ -133,66 +129,6 @@ export const useCheckPipelineDependencies = (
   })
 }
-
-export const useDraftDatasourceNodeRun = (
-  mutationOptions: MutationOptions<PipelineDatasourceNodeRunResponse, Error, PipelineDatasourceNodeRunRequest> = {},
-) => {
-  return useMutation({
-    mutationKey: [NAME_SPACE, 'draft-datasource-node-run'],
-    mutationFn: (request: PipelineDatasourceNodeRunRequest) => {
-      const { pipeline_id, node_id, ...rest } = request
-      return post<PipelineDatasourceNodeRunResponse>(`/rag/pipelines/${pipeline_id}/workflows/draft/datasource/nodes/${node_id}/run`, {
-        body: rest,
-      })
-    },
-    ...mutationOptions,
-  })
-}
-
-export const usePublishedDatasourceNodeRun = (
-  mutationOptions: MutationOptions<PipelineDatasourceNodeRunResponse, Error, PipelineDatasourceNodeRunRequest> = {},
-) => {
-  return useMutation({
-    mutationKey: [NAME_SPACE, 'published-datasource-node-run'],
-    mutationFn: (request: PipelineDatasourceNodeRunRequest) => {
-      const { pipeline_id, node_id, ...rest } = request
-      return post<PipelineDatasourceNodeRunResponse>(`/rag/pipelines/${pipeline_id}/workflows/published/datasource/nodes/${node_id}/run`, {
-        body: rest,
-      })
-    },
-    ...mutationOptions,
-  })
-}
-
-export const useDraftDatasourceNodeRunStatus = (
-  mutationOptions: MutationOptions<PipelineDatasourceNodeRunStatusResponse, Error, PipelineDatasourceNodeRunStatusRequest> = {},
-) => {
-  return useMutation({
-    mutationKey: [NAME_SPACE, 'draft-datasource-node-run-status'],
-    mutationFn: (request: PipelineDatasourceNodeRunStatusRequest) => {
-      const { pipeline_id, node_id, ...rest } = request
-      return post<PipelineDatasourceNodeRunStatusResponse>(`/rag/pipelines/${pipeline_id}/workflows/draft/datasource/nodes/${node_id}/run`, {
-        body: rest,
-      })
-    },
-    ...mutationOptions,
-  })
-}
-
-export const usePublishedDatasourceNodeRunStatus = (
-  mutationOptions: MutationOptions<PipelineDatasourceNodeRunStatusResponse, Error, PipelineDatasourceNodeRunStatusRequest> = {},
-) => {
-  return useMutation({
-    mutationKey: [NAME_SPACE, 'published-datasource-node-run-status'],
-    mutationFn: (request: PipelineDatasourceNodeRunStatusRequest) => {
-      const { pipeline_id, node_id, ...rest } = request
-      return post<PipelineDatasourceNodeRunStatusResponse>(`/rag/pipelines/${pipeline_id}/workflows/published/datasource/nodes/${node_id}/run`, {
-        body: rest,
-      })
-    },
-    ...mutationOptions,
-  })
-}
 
 export const useDraftPipelineProcessingParams = (params: PipelineProcessingParamsRequest, enabled = true) => {
   const { pipeline_id, node_id } = params
   return useQuery<PipelineProcessingParamsResponse>({
web/types/pipeline.tsx (new file, +17)
@@ -0,0 +1,17 @@
+export type DataSourceNodeProcessingResponse = {
+  event: 'datasource_processing'
+  total: number
+  completed: number
+}
+
+export type DataSourceNodeError = {
+  event: 'datasource_error'
+  message: string
+  code?: string
+}
+
+export type DataSourceNodeCompletedResponse = {
+  event: 'datasource_completed'
+  data: any
+  time_consuming?: number
+}
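The three payloads form a discriminated union on their event literal, so stream consumers can narrow without casts. A small sketch using the types above (note that DataSourceNodeError is declared here but no matching branch appears in the handleStream hunk, so such frames would currently land in the Unknown-event warning):

type DataSourceNodeProcessingResponse = { event: 'datasource_processing'; total: number; completed: number }
type DataSourceNodeError = { event: 'datasource_error'; message: string; code?: string }
type DataSourceNodeCompletedResponse = { event: 'datasource_completed'; data: any; time_consuming?: number }

type DataSourceNodeEvent =
  | DataSourceNodeProcessingResponse
  | DataSourceNodeError
  | DataSourceNodeCompletedResponse

// TypeScript narrows each case from the `event` literal alone.
function describe(e: DataSourceNodeEvent): string {
  switch (e.event) {
    case 'datasource_processing':
      return `crawled ${e.completed}/${e.total}`
    case 'datasource_error':
      return `failed: ${e.message}`
    case 'datasource_completed':
      return `completed (time_consuming: ${e.time_consuming ?? 0})`
  }
}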