mirror of https://github.com/langgenius/dify.git
Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev
commit 8aca70cd50

@@ -10,10 +10,10 @@ import { TopBar } from './top-bar'
 import { DataSourceType } from '@/models/datasets'
 import type { CrawlOptions, CrawlResultItem, DataSet, FileItem, createDocumentResponse } from '@/models/datasets'
 import { fetchDataSource } from '@/service/common'
-import { fetchDatasetDetail } from '@/service/datasets'
 import { DataSourceProvider, type NotionPage } from '@/models/common'
 import { useModalContext } from '@/context/modal-context'
 import { useDefaultModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
+import { useDatasetDetail } from '@/service/knowledge/use-dataset'
 
 type DatasetUpdateFormProps = {
   datasetId?: string
@@ -39,7 +39,6 @@ const DatasetUpdateForm = ({ datasetId }: DatasetUpdateFormProps) => {
   const [retrievalMethodCache, setRetrievalMethodCache] = useState('')
   const [fileList, setFiles] = useState<FileItem[]>([])
   const [result, setResult] = useState<createDocumentResponse | undefined>()
-  const [hasError, setHasError] = useState(false)
   const { data: embeddingsDefaultModel } = useDefaultModel(ModelTypeEnum.textEmbedding)
 
   const [notionPages, setNotionPages] = useState<NotionPage[]>([])
@@ -104,21 +103,14 @@ const DatasetUpdateForm = ({ datasetId }: DatasetUpdateFormProps) => {
   }, [])
 
   const [detail, setDetail] = useState<DataSet | null>(null)
-  useEffect(() => {
-    (async () => {
-      if (datasetId) {
-        try {
-          const detail = await fetchDatasetDetail(datasetId)
-          setDetail(detail)
-        }
-        catch {
-          setHasError(true)
-        }
-      }
-    })()
-  }, [datasetId])
+  const { data: datasetDetail, error: fetchDatasetDetailError } = useDatasetDetail(datasetId || '')
+  useEffect(() => {
+    if (!datasetDetail) return
+    setDetail(datasetDetail)
+  }, [datasetDetail])
 
-  if (hasError)
+  if (fetchDatasetDetailError)
     return <AppUnavailable code={500} unknownReason={t('datasetCreation.error.unavailable') as string} />
 
   return (
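
Note: the hunk above replaces a hand-rolled fetch-in-useEffect (plus a local hasError flag) with the shared useDatasetDetail query hook. A minimal sketch of that hook's shape, assuming a typed `get` fetcher like the one in service/base; the query key is illustrative:

import { useQuery } from '@tanstack/react-query'

// Assumed JSON fetcher; the real one lives in service/base.
declare function get<T>(url: string): Promise<T>

type DataSet = { id: string; name: string }

// Errors and loading are tracked by the query cache instead of
// local useState flags such as `hasError`.
export const useDatasetDetail = (datasetId: string) =>
  useQuery({
    queryKey: ['knowledge', 'detail', datasetId],
    queryFn: () => get<DataSet>(`/datasets/${datasetId}`),
    enabled: !!datasetId, // skip the request until an id exists
  })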

@@ -22,7 +22,7 @@ const Datasets = ({
   const { t } = useTranslation()
   const isCurrentWorkspaceEditor = useAppContextWithSelector(state => state.isCurrentWorkspaceEditor)
   const {
-    data,
+    data: datasetList,
     fetchNextPage,
    hasNextPage,
    isFetching,
@@ -54,13 +54,13 @@ const Datasets = ({
       observerRef.current.observe(anchorRef.current)
     }
     return () => observerRef.current?.disconnect()
-  }, [anchorRef, data, hasNextPage, fetchNextPage])
+  }, [anchorRef, datasetList, hasNextPage, fetchNextPage])
 
   return (
     <>
       <nav className='grid shrink-0 grow grid-cols-1 content-start gap-3 px-12 pt-2 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4'>
         {isCurrentWorkspaceEditor && <NewDatasetCard />}
-        {data?.pages.map(({ data: datasets }) => datasets.map(dataset => (
+        {datasetList?.pages.map(({ data: datasets }) => datasets.map(dataset => (
           <DatasetCard key={dataset.id} dataset={dataset} onSuccess={resetDatasetList} />),
         ))}
       </nav>
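
Note: renaming `data` to `datasetList` keeps the IntersectionObserver effect's dependency list in sync with what the render actually reads. A self-contained sketch of the sentinel-based load-more pattern used here; the hook name and rootMargin are illustrative, not from the diff:

import { useEffect, useRef } from 'react'
import type { RefObject } from 'react'

// Sketch: observe a sentinel element at the end of the grid and pull
// the next page when it scrolls into view. All names are stand-ins.
export function useLoadMoreSentinel(
  anchorRef: RefObject<HTMLElement>,
  hasNextPage: boolean | undefined,
  fetchNextPage: () => void,
) {
  const observerRef = useRef<IntersectionObserver | null>(null)
  useEffect(() => {
    if (anchorRef.current) {
      observerRef.current = new IntersectionObserver((entries) => {
        if (entries[0].isIntersecting && hasNextPage)
          fetchNextPage()
      }, { rootMargin: '100px' })
      observerRef.current.observe(anchorRef.current)
    }
    // Disconnect on cleanup so a refetch re-attaches a fresh observer.
    return () => observerRef.current?.disconnect()
  }, [anchorRef, hasNextPage, fetchNextPage])
}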

@@ -2,8 +2,6 @@
 import { useCallback, useRef, useState } from 'react'
 import { useMount } from 'ahooks'
 import { useTranslation } from 'react-i18next'
-import { useSWRConfig } from 'swr'
-import { unstable_serialize } from 'swr/infinite'
 import PermissionSelector from '../permission-selector'
 import IndexMethod from '../index-method'
 import RetrievalSettings from '../../external-knowledge-base/create/RetrievalSettings'
@@ -16,7 +14,7 @@ import Textarea from '@/app/components/base/textarea'
 import { ApiConnectionMod } from '@/app/components/base/icons/src/vender/solid/development'
 import { updateDatasetSetting } from '@/service/datasets'
 import type { IconInfo } from '@/models/datasets'
-import { ChunkingMode, type DataSetListResponse, DatasetPermission } from '@/models/datasets'
+import { ChunkingMode, DatasetPermission } from '@/models/datasets'
 import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
 import type { AppIconType, RetrievalConfig } from '@/types/app'
 import { useSelector as useAppContextWithSelector } from '@/context/app-context'
@@ -38,16 +36,11 @@ import ChunkStructure from '../chunk-structure'
 import Toast from '@/app/components/base/toast'
 import { RiAlertFill } from '@remixicon/react'
 import { useDocLink } from '@/context/i18n'
+import { useResetDatasetList } from '@/service/knowledge/use-dataset'
 
 const rowClass = 'flex gap-x-1'
 const labelClass = 'flex items-center shrink-0 w-[180px] h-7 pt-1'
 
-const getKey = (pageIndex: number, previousPageData: DataSetListResponse) => {
-  if (!pageIndex || previousPageData.has_more)
-    return { url: 'datasets', params: { page: pageIndex + 1, limit: 30 } }
-  return null
-}
-
 const DEFAULT_APP_ICON: IconInfo = {
   icon_type: 'emoji',
   icon: '📙',
@@ -58,7 +51,6 @@ const DEFAULT_APP_ICON: IconInfo = {
 const Form = () => {
   const { t } = useTranslation()
   const docLink = useDocLink()
-  const { mutate } = useSWRConfig()
   const isCurrentWorkspaceDatasetOperator = useAppContextWithSelector(state => state.isCurrentWorkspaceDatasetOperator)
   const currentDataset = useDatasetDetailContextWithSelector(state => state.dataset)
   const mutateDatasets = useDatasetDetailContextWithSelector(state => state.mutateDatasetRes)
@@ -135,6 +127,7 @@ const Form = () => {
     getMembers()
   })
 
+  const resetDatasetList = useResetDatasetList()
   const handleSave = async () => {
     if (loading)
       return
@@ -197,7 +190,7 @@ const Form = () => {
       Toast.notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
       if (mutateDatasets) {
         await mutateDatasets()
-        mutate(unstable_serialize(getKey))
+        resetDatasetList()
       }
     }
     catch {
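
Note: `mutate(unstable_serialize(getKey))` invalidated the SWR infinite cache by re-deriving its serialized key at the call site; the replacement routes through one shared helper. A plausible sketch of useResetDatasetList under TanStack Query; the query-key prefix is an assumption, not shown in this diff:

import { useCallback } from 'react'
import { useQueryClient } from '@tanstack/react-query'

// Sketch: one helper owns list invalidation, so callers no longer
// reproduce the cache-key serialization everywhere.
export const useResetDatasetList = () => {
  const queryClient = useQueryClient()
  return useCallback(
    // Invalidates every cached page of the dataset list; the real
    // hook may scope the key differently.
    () => queryClient.invalidateQueries({ queryKey: ['dataset', 'list'] }),
    [queryClient],
  )
}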

@@ -7,39 +7,31 @@ import {
   RiBook2Fill,
   RiBook2Line,
 } from '@remixicon/react'
-import useSWR from 'swr'
-import useSWRInfinite from 'swr/infinite'
 import { flatten } from 'lodash-es'
 import Nav from '../nav'
 import type { NavItem } from '../nav/nav-selector'
-import { fetchDatasetDetail, fetchDatasets } from '@/service/datasets'
-import type { DataSetListResponse } from '@/models/datasets'
 import { basePath } from '@/utils/var'
-
-const getKey = (pageIndex: number, previousPageData: DataSetListResponse) => {
-  if (!pageIndex || previousPageData.has_more)
-    return { url: 'datasets', params: { page: pageIndex + 1, limit: 30 } }
-  return null
-}
+import { useDatasetDetail, useDatasetList } from '@/service/knowledge/use-dataset'
 
 const DatasetNav = () => {
   const { t } = useTranslation()
   const router = useRouter()
   const { datasetId } = useParams()
-  const { data: currentDataset } = useSWR(
-    datasetId
-      ? {
-        url: 'fetchDatasetDetail',
-        datasetId,
-      }
-      : null,
-    apiParams => fetchDatasetDetail(apiParams.datasetId as string))
-  const { data: datasetsData, setSize } = useSWRInfinite(datasetId ? getKey : () => null, fetchDatasets, { revalidateFirstPage: false, revalidateAll: true })
-  const datasetItems = flatten(datasetsData?.map(datasetData => datasetData.data))
+  const { data: currentDataset } = useDatasetDetail(datasetId as string)
+  const {
+    data: datasetList,
+    fetchNextPage,
+    hasNextPage,
+  } = useDatasetList({
+    initialPage: 1,
+    limit: 30,
+  })
+  const datasetItems = flatten(datasetList?.pages.map(datasetData => datasetData.data))
 
   const handleLoadMore = useCallback(() => {
-    setSize(size => size + 1)
-  }, [setSize])
+    if (hasNextPage)
+      fetchNextPage()
+  }, [hasNextPage, fetchNextPage])
 
   return (
     <Nav
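
Note: useDatasetList subsumes the getKey/useSWRInfinite pair. A sketch of how such a hook maps onto useInfiniteQuery, assuming a `get` fetcher and the `has_more`/`page` fields this diff shows elsewhere; everything else is illustrative:

import { useInfiniteQuery } from '@tanstack/react-query'

declare function get<T>(url: string, options?: { params?: Record<string, any> }): Promise<T>

type DataSetListResponse = {
  data: { id: string; name: string }[]
  has_more: boolean
  page: number
}

// Sketch: pageParam plays the role SWR's getKey index played;
// has_more decides whether fetchNextPage has anything left to do.
export const useDatasetList = ({ initialPage, limit }: { initialPage: number; limit: number }) =>
  useInfiniteQuery({
    queryKey: ['dataset', 'list', limit],
    queryFn: ({ pageParam }) =>
      get<DataSetListResponse>('/datasets', { params: { page: pageParam, limit } }),
    initialPageParam: initialPage,
    getNextPageParam: lastPage => (lastPage.has_more ? lastPage.page + 1 : undefined),
  })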

@@ -1,12 +1,14 @@
-import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
+import { useCallback, useEffect, useMemo, useState } from 'react'
 import WorkspaceSelector from '@/app/components/base/notion-page-selector/workspace-selector'
 import SearchInput from '@/app/components/base/notion-page-selector/search-input'
 import PageSelector from '@/app/components/base/notion-page-selector/page-selector'
 import type { DataSourceNotionPageMap, DataSourceNotionWorkspace, NotionPage } from '@/models/common'
 import Header from '@/app/components/datasets/create/website/base/header'
 import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
-import { useDraftDatasourceNodeRun, usePublishedDatasourceNodeRun } from '@/service/use-pipeline'
 import { DatasourceType } from '@/models/pipeline'
+import { ssePost } from '@/service/base'
+import Toast from '@/app/components/base/toast'
+import type { DataSourceNodeCompletedResponse } from '@/types/pipeline'
 
 type OnlineDocumentSelectorProps = {
   value?: string[]
@@ -33,28 +35,37 @@ const OnlineDocumentSelector = ({
   nodeId,
   headerInfo,
 }: OnlineDocumentSelectorProps) => {
-  const pipeline_id = useDatasetDetailContextWithSelector(s => s.dataset?.pipeline_id)
+  const pipelineId = useDatasetDetailContextWithSelector(s => s.dataset?.pipeline_id)
   const [documentsData, setDocumentsData] = useState<DataSourceNotionWorkspace[]>([])
   const [searchValue, setSearchValue] = useState('')
   const [currentWorkspaceId, setCurrentWorkspaceId] = useState('')
 
-  const useDatasourceNodeRun = useRef(!isInPipeline ? usePublishedDatasourceNodeRun : useDraftDatasourceNodeRun)
-  const { mutateAsync: crawlOnlineDocuments } = useDatasourceNodeRun.current()
+  const datasourceNodeRunURL = !isInPipeline
+    ? `/rag/pipelines/${pipelineId}/workflows/published/datasource/nodes/${nodeId}/run`
+    : `/rag/pipelines/${pipelineId}/workflows/draft/datasource/nodes/${nodeId}/run`
 
   const getOnlineDocuments = useCallback(async () => {
-    if (pipeline_id) {
-      await crawlOnlineDocuments({
-        pipeline_id,
-        node_id: nodeId,
-        inputs: {},
-        datasource_type: DatasourceType.onlineDocument,
-      }, {
-        onSuccess(documentsData) {
-          setDocumentsData(documentsData.result as DataSourceNotionWorkspace[])
-        },
-      })
-    }
-  }, [crawlOnlineDocuments, nodeId, pipeline_id])
+    ssePost(
+      datasourceNodeRunURL,
+      {
+        body: {
+          inputs: {},
+          datasource_type: DatasourceType.onlineDocument,
+        },
+      },
+      {
+        onDataSourceNodeCompleted: (documentsData: DataSourceNodeCompletedResponse) => {
+          setDocumentsData(documentsData.data as DataSourceNotionWorkspace[])
+        },
+        onError: (message: string) => {
+          Toast.notify({
+            type: 'error',
+            message,
+          })
+        },
+      },
+    )
+  }, [datasourceNodeRunURL])
 
   useEffect(() => {
     getOnlineDocuments()
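
Note: the old code picked which mutation hook to call via `useRef(!isInPipeline ? ... : ...)`, which sidesteps the rules of hooks; the new code branches on plain data (a URL string) and always calls the same streaming function. A one-line sketch of that choice, with stand-in parameter names:

// Sketch: branch on data (a URL), never on which hook gets called.
const buildDatasourceNodeRunURL = (isInPipeline: boolean, pipelineId: string, nodeId: string) =>
  isInPipeline
    ? `/rag/pipelines/${pipelineId}/workflows/draft/datasource/nodes/${nodeId}/run`
    : `/rag/pipelines/${pipelineId}/workflows/published/datasource/nodes/${nodeId}/run`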

@@ -8,16 +8,16 @@ import Crawling from './crawling'
 import ErrorMessage from './error-message'
 import CrawledResult from './crawled-result'
 import {
-  useDraftDatasourceNodeRun,
-  useDraftDatasourceNodeRunStatus,
   useDraftPipelinePreProcessingParams,
-  usePublishedDatasourceNodeRun,
-  usePublishedDatasourceNodeRunStatus,
   usePublishedPipelinePreProcessingParams,
 } from '@/service/use-pipeline'
 import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
 import { DatasourceType } from '@/models/pipeline'
-import { sleep } from '@/utils'
+import { ssePost } from '@/service/base'
+import type {
+  DataSourceNodeCompletedResponse,
+  DataSourceNodeProcessingResponse,
+} from '@/types/pipeline'
 
 const I18N_PREFIX = 'datasetCreation.stepOne.website'
 
@@ -51,6 +51,8 @@ const Crawler = ({
   const { t } = useTranslation()
   const [step, setStep] = useState<Step>(Step.init)
   const [controlFoldOptions, setControlFoldOptions] = useState<number>(0)
+  const [totalNum, setTotalNum] = useState(0)
+  const [crawledNum, setCrawledNum] = useState(0)
   const pipelineId = useDatasetDetailContextWithSelector(s => s.dataset?.pipeline_id)
 
   const usePreProcessingParams = useRef(!isInPipeline ? usePublishedPipelinePreProcessingParams : useDraftPipelinePreProcessingParams)
@@ -68,66 +70,49 @@ const Crawler = ({
   const isCrawlFinished = step === Step.finished
   const isRunning = step === Step.running
   const [crawlResult, setCrawlResult] = useState<{
-    result: CrawlResultItem[]
+    data: CrawlResultItem[]
     time_consuming: number | string
   } | undefined>(undefined)
   const [crawlErrorMessage, setCrawlErrorMessage] = useState('')
   const showError = isCrawlFinished && crawlErrorMessage
 
-  const useDatasourceNodeRun = useRef(!isInPipeline ? usePublishedDatasourceNodeRun : useDraftDatasourceNodeRun)
-  const useDatasourceNodeRunStatus = useRef(!isInPipeline ? usePublishedDatasourceNodeRunStatus : useDraftDatasourceNodeRunStatus)
-  const { mutateAsync: runDatasourceNode } = useDatasourceNodeRun.current()
-  const { mutateAsync: getDatasourceNodeRunStatus } = useDatasourceNodeRunStatus.current()
-
-  const checkCrawlStatus = useCallback(async (jobId: string) => {
-    const res = await getDatasourceNodeRunStatus({
-      node_id: nodeId,
-      pipeline_id: pipelineId!,
-      job_id: jobId,
-      datasource_type: DatasourceType.websiteCrawl,
-    }, {
-      onError: async (error: any) => {
-        const message = await error.json()
-        setCrawlErrorMessage(message || t(`${I18N_PREFIX}.unknownError`))
-      },
-    }) as any
-    if (res.status === 'completed') {
-      setCrawlResult(res)
-      onCheckedCrawlResultChange(res.result || []) // default select the crawl result
-      setCrawlErrorMessage('')
-      setStep(Step.finished)
-    }
-    else if (res.status === 'processing') {
-      await sleep(2500)
-      await checkCrawlStatus(jobId)
-    }
-  }, [getDatasourceNodeRunStatus, nodeId, pipelineId, t, onCheckedCrawlResultChange])
+  const datasourceNodeRunURL = !isInPipeline
+    ? `/rag/pipelines/${pipelineId}/workflows/published/datasource/nodes/${nodeId}/run`
+    : `/rag/pipelines/${pipelineId}/workflows/draft/datasource/nodes/${nodeId}/run`
 
   const handleRun = useCallback(async (value: Record<string, any>) => {
     setStep(Step.running)
-    const res = await runDatasourceNode({
-      node_id: nodeId,
-      pipeline_id: pipelineId!,
-      inputs: value,
-      datasource_type: DatasourceType.websiteCrawl,
-    }, {
-      onError: async (error: any) => {
-        const message = await error.json()
-        setCrawlErrorMessage(message || t(`${I18N_PREFIX}.unknownError`))
-        setStep(Step.finished)
-      },
-    }) as any
-    const jobId = res.job_id
-    if (!jobId && res.status === 'completed') {
-      setCrawlResult(res)
-      onCheckedCrawlResultChange(res.result || []) // default select the crawl result
-      setStep(Step.finished)
-    }
-    else if (jobId) {
-      await checkCrawlStatus(jobId)
-    }
-    setCrawlErrorMessage('')
-  }, [runDatasourceNode, nodeId, pipelineId, onCheckedCrawlResultChange, checkCrawlStatus, t])
+    ssePost(
+      datasourceNodeRunURL,
+      {
+        body: {
+          inputs: value,
+          datasource_type: DatasourceType.websiteCrawl,
+          response_mode: 'streaming',
+        },
+      },
+      {
+        onDataSourceNodeProcessing: (data: DataSourceNodeProcessingResponse) => {
+          setTotalNum(data.total ?? 0)
+          setCrawledNum(data.completed ?? 0)
+        },
+        onDataSourceNodeCompleted: (data: DataSourceNodeCompletedResponse) => {
+          const { data: crawlData, time_consuming } = data
+          setCrawlResult({
+            data: crawlData as CrawlResultItem[],
+            time_consuming: time_consuming ?? 0,
+          })
+          onCheckedCrawlResultChange(crawlData || []) // default select the crawl result
+          setCrawlErrorMessage('')
+          setStep(Step.finished)
+        },
+        onError: (message: string) => {
+          setCrawlErrorMessage(message || t(`${I18N_PREFIX}.unknownError`))
+          setStep(Step.finished)
+        },
+      },
+    )
+  }, [datasourceNodeRunURL, onCheckedCrawlResultChange, t])
 
   const handleSubmit = useCallback((value: Record<string, any>) => {
     handleRun(value)
@@ -152,8 +137,8 @@ const Crawler = ({
       <div className='relative flex flex-col'>
         {isRunning && (
           <Crawling
-            crawledNum={0}
-            totalNum={0}
+            crawledNum={crawledNum}
+            totalNum={totalNum}
           />
         )}
         {showError && (
@@ -166,7 +151,7 @@ const Crawler = ({
       {isCrawlFinished && !showError && (
         <CrawledResult
           className='mt-2'
-          list={crawlResult?.result || []}
+          list={crawlResult?.data || []}
           checkedList={checkedCrawlResult}
           onSelectedChange={onCheckedCrawlResultChange}
           usedTime={Number.parseFloat(crawlResult?.time_consuming as string) || 0}
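
Note: the crawler drops the poll-and-sleep loop (checkCrawlStatus with sleep(2500)) for a single streamed request whose datasource_processing events feed live progress into crawledNum/totalNum. A compact sketch of the new flow; `ssePostLike` is a hypothetical stand-in for the real ssePost:

// Sketch of the control-flow change, not the real implementation.
type ProcessingEvent = { event: 'datasource_processing'; total: number; completed: number }
type CompletedEvent = { event: 'datasource_completed'; data: unknown[]; time_consuming?: number }

// Hypothetical streaming helper mirroring ssePost's callback contract.
declare function ssePostLike(url: string, body: object, callbacks: {
  onProcessing?: (e: ProcessingEvent) => void
  onCompleted?: (e: CompletedEvent) => void
  onError?: (message: string) => void
}): void

export function runCrawl(url: string, inputs: Record<string, unknown>) {
  // Before: POST run -> receive job_id -> poll status every 2.5s until
  // 'completed'. After: one request; the server pushes progress events.
  ssePostLike(url, { inputs, datasource_type: 'website_crawl', response_mode: 'streaming' }, {
    onProcessing: e => console.log(`crawled ${e.completed}/${e.total}`),
    onCompleted: e => console.log('done,', e.data.length, 'pages'),
    onError: message => console.error(message),
  })
}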

@@ -1,11 +1,12 @@
 import { createContext, useContext, useContextSelector } from 'use-context-selector'
 import type { DataSet } from '@/models/datasets'
 import type { IndexingType } from '@/app/components/datasets/create/step-two'
+import type { QueryObserverResult, RefetchOptions } from '@tanstack/react-query'
 
 type DatasetDetailContextValue = {
   indexingTechnique?: IndexingType
   dataset?: DataSet
-  mutateDatasetRes?: () => void
+  mutateDatasetRes?: (options?: RefetchOptions | undefined) => Promise<QueryObserverResult<DataSet, Error>>
 }
 const DatasetDetailContext = createContext<DatasetDetailContextValue>({})
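
Note: `mutateDatasetRes` now carries exactly the signature of a TanStack Query `refetch`, so a provider can hand the query's refetch function straight into the context. A sketch of that wiring; hook and key names are illustrative:

import { useQuery } from '@tanstack/react-query'

declare function get<T>(url: string): Promise<T>
type DataSet = { id: string; name: string }

// Sketch: the context's mutateDatasetRes slot is just the query's refetch.
export function useDatasetDetailValue(datasetId: string) {
  const { data: dataset, refetch } = useQuery({
    queryKey: ['knowledge', 'detail', datasetId],
    queryFn: () => get<DataSet>(`/datasets/${datasetId}`),
  })
  // refetch: (options?: RefetchOptions) => Promise<QueryObserverResult<DataSet, Error>>
  return { dataset, mutateDatasetRes: refetch }
}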

@@ -178,10 +178,10 @@ export type FetchDatasetsParams = {
 
 export type DatasetListRequest = {
   initialPage: number
-  tag_ids: string[]
+  tag_ids?: string[]
   limit: number
-  include_all: boolean
-  keyword: string
+  include_all?: boolean
+  keyword?: string
 }
 
 export type DataSetListResponse = {

@@ -168,34 +168,6 @@ export type PipelinePreProcessingParamsResponse = {
   variables: RAGPipelineVariables
 }
 
-export type PipelineDatasourceNodeRunRequest = {
-  pipeline_id: string
-  node_id: string
-  inputs: Record<string, any>
-  datasource_type: DatasourceType
-}
-
-export type PipelineDatasourceNodeRunResponse = {
-  job_id?: string
-  status: 'processing' | 'completed'
-  result: any
-  provider_type: DatasourceType
-}
-
-export type PipelineDatasourceNodeRunStatusRequest = {
-  pipeline_id: string
-  node_id: string
-  job_id: string
-  datasource_type: DatasourceType
-}
-
-export type PipelineDatasourceNodeRunStatusResponse = {
-  provider_type: DatasourceType
-  result: Record<string, any>
-  status: 'processing' | 'completed'
-  job_id: string
-}
-
 export type PublishedPipelineInfoResponse = {
   id: string
   graph: {

@@ -25,6 +25,10 @@ import { removeAccessToken } from '@/app/components/share/utils'
 import type { FetchOptionType, ResponseError } from './fetch'
 import { ContentType, base, baseOptions, getAccessToken } from './fetch'
 import { asyncRunSafe } from '@/utils'
+import type {
+  DataSourceNodeCompletedResponse,
+  DataSourceNodeProcessingResponse,
+} from '@/types/pipeline'
 const TIME_OUT = 100000
 
 export type IOnDataMoreInfo = {
@@ -63,6 +67,9 @@ export type IOnLoopNext = (workflowStarted: LoopNextResponse) => void
 export type IOnLoopFinished = (workflowFinished: LoopFinishedResponse) => void
 export type IOnAgentLog = (agentLog: AgentLogResponse) => void
 
+export type IOnDataSourceNodeProcessing = (dataSourceNodeProcessing: DataSourceNodeProcessingResponse) => void
+export type IOnDataSourceNodeCompleted = (dataSourceNodeCompleted: DataSourceNodeCompletedResponse) => void
+
 export type IOtherOptions = {
   isPublicAPI?: boolean
   isMarketplaceAPI?: boolean
@@ -97,6 +104,10 @@ export type IOtherOptions = {
   onLoopNext?: IOnLoopNext
   onLoopFinish?: IOnLoopFinished
   onAgentLog?: IOnAgentLog
+
+  // Pipeline data source node run
+  onDataSourceNodeProcessing?: IOnDataSourceNodeProcessing
+  onDataSourceNodeCompleted?: IOnDataSourceNodeCompleted
 }
 
 function unicodeToChar(text: string) {
@@ -152,6 +163,8 @@ const handleStream = (
   onTTSEnd?: IOnTTSEnd,
   onTextReplace?: IOnTextReplace,
   onAgentLog?: IOnAgentLog,
+  onDataSourceNodeProcessing?: IOnDataSourceNodeProcessing,
+  onDataSourceNodeCompleted?: IOnDataSourceNodeCompleted,
 ) => {
   if (!response.ok)
     throw new Error('Network response was not ok')
@@ -270,6 +283,15 @@ const handleStream = (
         else if (bufferObj.event === 'tts_message_end') {
           onTTSEnd?.(bufferObj.message_id, bufferObj.audio)
         }
+        else if (bufferObj.event === 'datasource_processing') {
+          onDataSourceNodeProcessing?.(bufferObj as DataSourceNodeProcessingResponse)
+        }
+        else if (bufferObj.event === 'datasource_completed') {
+          onDataSourceNodeCompleted?.(bufferObj as DataSourceNodeCompletedResponse)
+        }
+        else {
+          console.warn(`Unknown event: ${bufferObj.event}`, bufferObj)
+        }
       }
     })
     buffer = lines[lines.length - 1]
@@ -363,6 +385,8 @@ export const ssePost = async (
     onLoopStart,
     onLoopNext,
     onLoopFinish,
+    onDataSourceNodeProcessing,
+    onDataSourceNodeCompleted,
   } = otherOptions
   const abortController = new AbortController()
 
@@ -460,6 +484,8 @@ export const ssePost = async (
         onTTSEnd,
         onTextReplace,
         onAgentLog,
+        onDataSourceNodeProcessing,
+        onDataSourceNodeCompleted,
       )
     }).catch((e) => {
       if (e.toString() !== 'AbortError: The user aborted a request.' && !e.toString().errorMessage.includes('TypeError: Cannot assign to read only property'))
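
Note: handleStream splits each chunk on newlines, JSON-parses the complete lines, dispatches on `event`, and keeps the trailing partial line in `buffer` for the next chunk. A self-contained sketch of that dispatch loop, simplified and with illustrative names:

// Minimal sketch of newline-delimited SSE handling, not the real handleStream.
type StreamEvent = { event: string; [key: string]: unknown }

export function makeLineDispatcher(onEvent: (e: StreamEvent) => void) {
  let buffer = ''
  return (chunk: string) => {
    buffer += chunk
    const lines = buffer.split('\n')
    lines.slice(0, -1).forEach((line) => {
      const payload = line.trim().replace(/^data:\s*/, '')
      if (!payload)
        return
      try {
        onEvent(JSON.parse(payload) as StreamEvent)
      }
      catch {
        // Ignore malformed fragments; a real client might surface them.
      }
    })
    // Keep the last (possibly incomplete) line for the next chunk.
    buffer = lines[lines.length - 1]
  }
}

// Usage: route pipeline events the way the diff does.
const dispatch = makeLineDispatcher((e) => {
  if (e.event === 'datasource_processing')
    console.log('progress', e.completed, '/', e.total)
  else if (e.event === 'datasource_completed')
    console.log('completed')
})
dispatch('data: {"event":"datasource_processing","total":10,"completed":3}\n')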

@@ -46,6 +46,7 @@ export const useDatasetDetail = (datasetId: string) => {
   return useQuery({
     queryKey: [NAME_SPACE, 'detail', datasetId],
     queryFn: () => get<DataSet>(`/datasets/${datasetId}`),
+    enabled: !!datasetId,
   })
 }
 
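
Note: `enabled: !!datasetId` keeps the hook from issuing GET /datasets/ with an empty id when callers pass `datasetId || ''`, as the update form above does. A tiny sketch of the consumer-side effect; names are illustrative:

// Sketch: with the enabled guard, passing '' leaves the query idle
// instead of sending a request with no id.
declare const useDatasetDetail: (datasetId: string) => { data?: unknown }

export function useMaybeDatasetDetail(datasetId?: string) {
  return useDatasetDetail(datasetId || '')
}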

@@ -8,10 +8,6 @@ import type {
   ImportPipelineDSLRequest,
   ImportPipelineDSLResponse,
   PipelineCheckDependenciesResponse,
-  PipelineDatasourceNodeRunRequest,
-  PipelineDatasourceNodeRunResponse,
-  PipelineDatasourceNodeRunStatusRequest,
-  PipelineDatasourceNodeRunStatusResponse,
   PipelinePreProcessingParamsRequest,
   PipelinePreProcessingParamsResponse,
   PipelineProcessingParamsRequest,
@@ -133,66 +129,6 @@ export const useCheckPipelineDependencies = (
   })
 }
 
-export const useDraftDatasourceNodeRun = (
-  mutationOptions: MutationOptions<PipelineDatasourceNodeRunResponse, Error, PipelineDatasourceNodeRunRequest> = {},
-) => {
-  return useMutation({
-    mutationKey: [NAME_SPACE, 'draft-datasource-node-run'],
-    mutationFn: (request: PipelineDatasourceNodeRunRequest) => {
-      const { pipeline_id, node_id, ...rest } = request
-      return post<PipelineDatasourceNodeRunResponse>(`/rag/pipelines/${pipeline_id}/workflows/draft/datasource/nodes/${node_id}/run`, {
-        body: rest,
-      })
-    },
-    ...mutationOptions,
-  })
-}
-
-export const usePublishedDatasourceNodeRun = (
-  mutationOptions: MutationOptions<PipelineDatasourceNodeRunResponse, Error, PipelineDatasourceNodeRunRequest> = {},
-) => {
-  return useMutation({
-    mutationKey: [NAME_SPACE, 'published-datasource-node-run'],
-    mutationFn: (request: PipelineDatasourceNodeRunRequest) => {
-      const { pipeline_id, node_id, ...rest } = request
-      return post<PipelineDatasourceNodeRunResponse>(`/rag/pipelines/${pipeline_id}/workflows/published/datasource/nodes/${node_id}/run`, {
-        body: rest,
-      })
-    },
-    ...mutationOptions,
-  })
-}
-
-export const useDraftDatasourceNodeRunStatus = (
-  mutationOptions: MutationOptions<PipelineDatasourceNodeRunStatusResponse, Error, PipelineDatasourceNodeRunStatusRequest> = {},
-) => {
-  return useMutation({
-    mutationKey: [NAME_SPACE, 'draft-datasource-node-run-status'],
-    mutationFn: (request: PipelineDatasourceNodeRunStatusRequest) => {
-      const { pipeline_id, node_id, ...rest } = request
-      return post<PipelineDatasourceNodeRunStatusResponse>(`/rag/pipelines/${pipeline_id}/workflows/draft/datasource/nodes/${node_id}/run`, {
-        body: rest,
-      })
-    },
-    ...mutationOptions,
-  })
-}
-
-export const usePublishedDatasourceNodeRunStatus = (
-  mutationOptions: MutationOptions<PipelineDatasourceNodeRunStatusResponse, Error, PipelineDatasourceNodeRunStatusRequest> = {},
-) => {
-  return useMutation({
-    mutationKey: [NAME_SPACE, 'published-datasource-node-run-status'],
-    mutationFn: (request: PipelineDatasourceNodeRunStatusRequest) => {
-      const { pipeline_id, node_id, ...rest } = request
-      return post<PipelineDatasourceNodeRunStatusResponse>(`/rag/pipelines/${pipeline_id}/workflows/published/datasource/nodes/${node_id}/run`, {
-        body: rest,
-      })
-    },
-    ...mutationOptions,
-  })
-}
-
 export const useDraftPipelineProcessingParams = (params: PipelineProcessingParamsRequest, enabled = true) => {
   const { pipeline_id, node_id } = params
   return useQuery<PipelineProcessingParamsResponse>({

@@ -0,0 +1,17 @@
+export type DataSourceNodeProcessingResponse = {
+  event: 'datasource_processing'
+  total: number
+  completed: number
+}
+
+export type DataSourceNodeError = {
+  event: 'datasource_error'
+  message: string
+  code?: string
+}
+
+export type DataSourceNodeCompletedResponse = {
+  event: 'datasource_completed'
+  data: any
+  time_consuming?: number
+}
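
Note: the three payloads share a literal `event` tag, so together they form a discriminated union; a handler can then narrow on `event` without the `bufferObj as ...` casts seen in service/base above. Sketch:

type DataSourceNodeProcessingResponse = { event: 'datasource_processing'; total: number; completed: number }
type DataSourceNodeError = { event: 'datasource_error'; message: string; code?: string }
type DataSourceNodeCompletedResponse = { event: 'datasource_completed'; data: any; time_consuming?: number }

// Sketch: narrowing on the literal `event` field gives typed access per branch.
type DataSourceNodeEvent =
  | DataSourceNodeProcessingResponse
  | DataSourceNodeError
  | DataSourceNodeCompletedResponse

function describe(e: DataSourceNodeEvent): string {
  switch (e.event) {
    case 'datasource_processing':
      return `${e.completed}/${e.total} crawled`
    case 'datasource_error':
      return `error: ${e.message}`
    case 'datasource_completed':
      return `completed in ${e.time_consuming ?? 0}s`
  }
}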