mirror of https://github.com/langgenius/dify.git
refactor: update variable naming for consistency and improve data source handling in pipeline components
parent 314a2f9be8
commit 14a9052d60
@@ -48,7 +48,7 @@ const InputFieldEditor = ({
   options,
   placeholder,
   unit,
-  default: defaultValue,
+  default_value: defaultValue,
   allowed_file_upload_methods: allowedFileUploadMethods,
   allowed_file_types: allowedTypesAndExtensions.allowedFileTypes,
   allowed_file_extensions: allowedTypesAndExtensions.allowedFileExtensions,
@@ -14,7 +14,7 @@ export const convertToInputFieldFormData = (data?: InputVar): FormData => {
    label,
    variable,
    max_length,
-   'default': defaultValue,
+   default_value,
    required,
    tooltips,
    options,
@@ -30,7 +30,7 @@ export const convertToInputFieldFormData = (data?: InputVar): FormData => {
    label,
    variable,
    maxLength: max_length,
-   default: defaultValue,
+   default: default_value,
    required,
    tooltips,
    options,
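Note on the hunks above: the API payload keeps the snake_case key default_value, while the form keeps its existing default key, so the converter only renames the field in each direction. A minimal sketch of that mapping, using simplified stand-in types rather than the app's real InputVar/FormData:

// Simplified stand-in types for illustration only (not the app's real types).
type ApiInput = { label: string; variable: string; default_value?: string }
type FormValues = { label: string; variable: string; default?: string }

// API -> form: default_value becomes the form's `default` key.
const toForm = ({ label, variable, default_value }: ApiInput): FormValues =>
  ({ label, variable, default: default_value })

// form -> API: the form's `default` goes back out as default_value.
const toApi = ({ label, variable, default: defaultValue }: FormValues): ApiInput =>
  ({ label, variable, default_value: defaultValue })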
@@ -1,4 +1,4 @@
-import { useCallback } from 'react'
+import { useCallback, useEffect } from 'react'
 import { useDataSourceOptions } from '../hooks'
 import OptionCard from './option-card'
 import { File, Watercrawl } from '@/app/components/base/icons/src/public/knowledge'
@@ -9,9 +9,8 @@ import { DataSourceProvider } from '@/models/common'
 import type { Datasource } from '../types'

 type DataSourceOptionsProps = {
-  dataSources: Datasource[]
   dataSourceNodeId: string
-  onSelect: (option: string) => void
+  onSelect: (option: Datasource) => void
 }

 const DATA_SOURCE_ICONS = {
@@ -23,15 +22,23 @@
 }

 const DataSourceOptions = ({
-  dataSources,
   dataSourceNodeId,
   onSelect,
 }: DataSourceOptionsProps) => {
-  const options = useDataSourceOptions(dataSources)
+  const { dataSources, options } = useDataSourceOptions()

   const handelSelect = useCallback((value: string) => {
-    onSelect(value)
-  }, [onSelect])
+    const selectedOption = dataSources.find(option => option.nodeId === value)
+    if (!selectedOption)
+      return
+    onSelect(selectedOption)
+  }, [dataSources, onSelect])
+
+  useEffect(() => {
+    if (options.length > 0)
+      handelSelect(options[0].value)
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [])

   return (
     <div className='grid w-full grid-cols-4 gap-1'>
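Note: DataSourceOptions now pulls its data sources from the workflow graph via useDataSourceOptions(), hands the full Datasource object to onSelect, and auto-selects the first option on mount. A usage sketch from the caller's side; the surrounding component is hypothetical, but the props match the diff:

// Hypothetical parent: stores the selected Datasource directly.
const [datasource, setDatasource] = useState<Datasource>()

// The parent no longer needs to look the node up by id.
<DataSourceOptions
  dataSourceNodeId={datasource?.nodeId || ''}
  onSelect={setDatasource}
/>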
@@ -1,21 +1,24 @@
 import { useMemo } from 'react'
 import { BaseFieldType } from '@/app/components/base/form/form-scenarios/base/types'
 import { useStore } from '@/app/components/workflow/store'
-import { InputVarType } from '@/app/components/workflow/types'
 import { usePipelineProcessingParams } from '@/service/use-pipeline'
+import { PipelineInputVarType } from '@/models/pipeline'

-type PartialInputVarType = InputVarType.textInput | InputVarType.number | InputVarType.select | InputVarType.checkbox
+type PartialInputVarType = PipelineInputVarType.textInput | PipelineInputVarType.number | PipelineInputVarType.select | PipelineInputVarType.checkbox

 const VAR_TYPE_MAP: Record<PartialInputVarType, BaseFieldType> = {
-  [InputVarType.textInput]: BaseFieldType.textInput,
-  [InputVarType.number]: BaseFieldType.numberInput,
-  [InputVarType.select]: BaseFieldType.select,
-  [InputVarType.checkbox]: BaseFieldType.checkbox,
+  [PipelineInputVarType.textInput]: BaseFieldType.textInput,
+  [PipelineInputVarType.number]: BaseFieldType.numberInput,
+  [PipelineInputVarType.select]: BaseFieldType.select,
+  [PipelineInputVarType.checkbox]: BaseFieldType.checkbox,
 }

-export const useConfigurations = () => {
+export const useConfigurations = (datasourceNodeId: string) => {
   const pipelineId = useStore(state => state.pipelineId)
-  const { data: paramsConfig } = usePipelineProcessingParams(pipelineId!)
+  const { data: paramsConfig } = usePipelineProcessingParams({
+    pipeline_id: pipelineId!,
+    node_id: datasourceNodeId,
+  })

   const initialData = useMemo(() => {
     const variables = paramsConfig?.variables || []
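Note: processing parameters are now fetched per data-source node (pipeline_id plus node_id), and the map is keyed by the pipeline-specific PipelineInputVarType instead of the generic workflow InputVarType. A rough sketch of how such a map is typically consumed to build form fields; the field shape here is an assumption, not the app's base-field config:

// Assumed minimal variable shape for illustration only.
type PipelineVariable = { type: PipelineInputVarType; variable: string; label: string }

const toBaseField = (v: PipelineVariable) => ({
  type: VAR_TYPE_MAP[v.type as PartialInputVarType], // e.g. textInput -> BaseFieldType.textInput
  name: v.variable,
  label: v.label,
})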
@@ -6,15 +6,17 @@ import type { FormType } from '@/app/components/base/form'
 import { useCallback } from 'react'

 type DocumentProcessingProps = {
+  dataSourceNodeId: string
   onProcess: (data: Record<string, any>) => void
   onBack: () => void
 }

 const DocumentProcessing = ({
+  dataSourceNodeId,
   onProcess,
   onBack,
 }: DocumentProcessingProps) => {
-  const { initialData, configurations } = useConfigurations()
+  const { initialData, configurations } = useConfigurations(dataSourceNodeId)
   const schema = generateZodSchema(configurations)

   const renderCustomActions = useCallback((form: FormType) => (
@@ -3,6 +3,10 @@ import type { DataSourceOption, Datasource } from './types'
 import { TestRunStep } from './types'
 import { DataSourceType } from '@/models/datasets'
 import { DataSourceProvider } from '@/models/common'
+import { useNodes } from 'reactflow'
+import { BlockEnum } from '@/app/components/workflow/types'
+import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types'
+import { useMemo } from 'react'

 export const useTestRunSteps = () => {
   const { t } = useTranslation()
@@ -19,45 +23,78 @@ export const useTestRunSteps = () => {
   return steps
 }

-export const useDataSourceOptions = (dataSources: Datasource[]) => {
+export const useDataSourceOptions = () => {
   const { t } = useTranslation()
-  const options: DataSourceOption[] = []
-  dataSources.forEach((source) => {
-    if (source.type === DataSourceType.FILE) {
-      options.push({
-        label: t('datasetPipeline.testRun.dataSource.localFiles'),
-        value: source.nodeId,
-        type: DataSourceType.FILE,
-      })
-    }
-    if (source.type === DataSourceType.NOTION) {
-      options.push({
-        label: 'Notion',
-        value: source.nodeId,
-        type: DataSourceType.NOTION,
-      })
-    }
-    if (source.type === DataSourceProvider.fireCrawl) {
-      options.push({
-        label: 'Firecrawl',
-        value: source.nodeId,
-        type: DataSourceProvider.fireCrawl,
-      })
-    }
-    if (source.type === DataSourceProvider.jinaReader) {
-      options.push({
-        label: 'Jina Reader',
-        value: source.nodeId,
-        type: DataSourceProvider.jinaReader,
-      })
-    }
-    if (source.type === DataSourceProvider.waterCrawl) {
-      options.push({
-        label: 'Water Crawl',
-        value: source.nodeId,
-        type: DataSourceProvider.waterCrawl,
-      })
-    }
-  })
-  return options
+  const nodes = useNodes<DataSourceNodeType>()
+  const dataSources: Datasource[] = useMemo(() => {
+    const dataSourceNodes = nodes.filter(node => node.data.type === BlockEnum.DataSource)
+    return dataSourceNodes.map((node) => {
+      let type: DataSourceType | DataSourceProvider = DataSourceType.FILE
+      switch (node.data.tool_name) {
+        case 'file_upload':
+          type = DataSourceType.FILE
+          break
+        case 'search_notion':
+          type = DataSourceType.NOTION
+          break
+        case 'firecrawl':
+          type = DataSourceProvider.fireCrawl
+          break
+        case 'jina_reader':
+          type = DataSourceProvider.jinaReader
+          break
+        case 'water_crawl':
+          type = DataSourceProvider.waterCrawl
+          break
+      }
+      return {
+        nodeId: node.id,
+        type,
+        config: {},
+      }
+    })
+  }, [nodes])
+
+  const options = useMemo(() => {
+    const options: DataSourceOption[] = []
+    dataSources.forEach((source) => {
+      if (source.type === DataSourceType.FILE) {
+        options.push({
+          label: t('datasetPipeline.testRun.dataSource.localFiles'),
+          value: source.nodeId,
+          type: DataSourceType.FILE,
+        })
+      }
+      if (source.type === DataSourceType.NOTION) {
+        options.push({
+          label: 'Notion',
+          value: source.nodeId,
+          type: DataSourceType.NOTION,
+        })
+      }
+      if (source.type === DataSourceProvider.fireCrawl) {
+        options.push({
+          label: 'Firecrawl',
+          value: source.nodeId,
+          type: DataSourceProvider.fireCrawl,
+        })
+      }
+      if (source.type === DataSourceProvider.jinaReader) {
+        options.push({
+          label: 'Jina Reader',
+          value: source.nodeId,
+          type: DataSourceProvider.jinaReader,
+        })
+      }
+      if (source.type === DataSourceProvider.waterCrawl) {
+        options.push({
+          label: 'Water Crawl',
+          value: source.nodeId,
+          type: DataSourceProvider.waterCrawl,
+        })
+      }
+    })
+    return options
+  }, [dataSources, t])
+  return { dataSources, options }
 }
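Note: the hook now derives its Datasource list directly from the workflow nodes (useNodes) and maps each node's tool_name to a data-source type with a switch. The same mapping could be expressed as a lookup table; shown here only as an equivalent sketch, not what the commit ships:

const TOOL_NAME_TO_TYPE: Record<string, DataSourceType | DataSourceProvider> = {
  file_upload: DataSourceType.FILE,
  search_notion: DataSourceType.NOTION,
  firecrawl: DataSourceProvider.fireCrawl,
  jina_reader: DataSourceProvider.jinaReader,
  water_crawl: DataSourceProvider.waterCrawl,
}

// The fallback mirrors the switch's initial value.
const type = TOOL_NAME_TO_TYPE[node.data.tool_name] ?? DataSourceType.FILE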
@@ -26,11 +26,7 @@ const TestRunPanel = () => {
   const { t } = useTranslation()
   const setShowDebugAndPreviewPanel = useWorkflowStoreWithSelector(state => state.setShowDebugAndPreviewPanel)
   const [currentStep, setCurrentStep] = useState(1)
-  const [datasource, setDatasource] = useState<Datasource>({
-    nodeId: '1',
-    type: DataSourceType.FILE,
-    config: {},
-  })
+  const [datasource, setDatasource] = useState<Datasource>()
   const [fileList, setFiles] = useState<FileItem[]>([])
   const [notionPages, setNotionPages] = useState<NotionPage[]>([])
   const [websitePages, setWebsitePages] = useState<CrawlResultItem[]>([])
@@ -41,28 +37,6 @@ const TestRunPanel = () => {
   const enableBilling = useProviderContextSelector(state => state.enableBilling)

   const steps = useTestRunSteps()
-  // TODO: replace with real data sources from API
-  const dataSources = useMemo(() => [{
-    nodeId: '1',
-    type: DataSourceType.FILE,
-    config: {},
-  }, {
-    nodeId: '2',
-    type: DataSourceType.NOTION,
-    config: {},
-  }, {
-    nodeId: '3',
-    type: DataSourceProvider.fireCrawl,
-    config: {},
-  }, {
-    nodeId: '4',
-    type: DataSourceProvider.jinaReader,
-    config: {},
-  }, {
-    nodeId: '5',
-    type: DataSourceProvider.waterCrawl,
-    config: {},
-  }], [])

   const allFileLoaded = (fileList.length > 0 && fileList.every(file => file.file.id))
   const isVectorSpaceFull = plan.usage.vectorSpace >= plan.total.vectorSpace
@@ -77,6 +51,7 @@ const TestRunPanel = () => {
   }, [fileList, isShowVectorSpaceFull])

   const nextBtnDisabled = useMemo(() => {
+    if (!datasource) return false
     if (datasource.type === DataSourceType.FILE)
       return nextDisabled
     if (datasource.type === DataSourceType.NOTION)
@@ -92,13 +67,6 @@ const TestRunPanel = () => {
     setShowDebugAndPreviewPanel(false)
   }

-  const handleDataSourceSelect = useCallback((option: string) => {
-    const dataSource = dataSources.find(dataSource => dataSource.nodeId === option)
-    if (!dataSource)
-      return
-    setDatasource(dataSource)
-  }, [dataSources])
-
   const updateFile = (fileItem: FileItem, progress: number, list: FileItem[]) => {
     const newList = produce(list, (draft) => {
       const targetIndex = draft.findIndex(file => file.fileID === fileItem.fileID)
@@ -129,6 +97,8 @@ const TestRunPanel = () => {
   const { handleRun } = usePipelineRun()

   const handleProcess = useCallback((data: Record<string, any>) => {
+    if (!datasource)
+      return
     const datasourceInfo: Record<string, any> = {}
     if (datasource.type === DataSourceType.FILE)
       datasourceInfo.fileId = fileList.map(file => file.fileID)
@@ -176,11 +146,10 @@ const TestRunPanel = () => {
        <>
          <div className='flex flex-col gap-y-4 px-4 py-2'>
            <DataSourceOptions
-             dataSources={dataSources}
-             dataSourceNodeId={datasource.nodeId}
-             onSelect={handleDataSourceSelect}
+             dataSourceNodeId={datasource?.nodeId || ''}
+             onSelect={setDatasource}
            />
-           {datasource.type === DataSourceType.FILE && (
+           {datasource?.type === DataSourceType.FILE && (
              <LocalFile
                files={fileList}
                updateFile={updateFile}
@@ -188,13 +157,13 @@ const TestRunPanel = () => {
                notSupportBatchUpload={notSupportBatchUpload}
              />
            )}
-           {datasource.type === DataSourceType.NOTION && (
+           {datasource?.type === DataSourceType.NOTION && (
              <Notion
                notionPages={notionPages}
                updateNotionPages={updateNotionPages}
              />
            )}
-           {datasource.type === DataSourceProvider.fireCrawl && (
+           {datasource?.type === DataSourceProvider.fireCrawl && (
              <Firecrawl
                checkedCrawlResult={websitePages}
                onCheckedCrawlResultChange={setWebsitePages}
@@ -203,7 +172,7 @@ const TestRunPanel = () => {
                onCrawlOptionsChange={setCrawlOptions}
              />
            )}
-           {datasource.type === DataSourceProvider.jinaReader && (
+           {datasource?.type === DataSourceProvider.jinaReader && (
              <JinaReader
                checkedCrawlResult={websitePages}
                onCheckedCrawlResultChange={setWebsitePages}
@@ -212,7 +181,7 @@ const TestRunPanel = () => {
                onCrawlOptionsChange={setCrawlOptions}
              />
            )}
-           {datasource.type === DataSourceProvider.waterCrawl && (
+           {datasource?.type === DataSourceProvider.waterCrawl && (
              <WaterCrawl
                checkedCrawlResult={websitePages}
                onCheckedCrawlResultChange={setWebsitePages}
@@ -232,6 +201,7 @@ const TestRunPanel = () => {
        {
          currentStep === 2 && (
            <DocumentProcessing
+             dataSourceNodeId={datasource?.nodeId || ''}
              onProcess={handleProcess}
              onBack={handleBackStep}
            />
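Note on the TestRunPanel hunks: the hard-coded mock list is gone, datasource starts out undefined, and every consumer now guards on it (optional chaining in the JSX, early returns in callbacks). A condensed sketch of the guard pattern in handleProcess; the non-file branches are assumptions for illustration:

const handleProcess = useCallback((data: Record<string, any>) => {
  if (!datasource)
    return // nothing selected yet; DataSourceOptions normally auto-selects the first node
  const datasourceInfo: Record<string, any> = {}
  if (datasource.type === DataSourceType.FILE)
    datasourceInfo.fileId = fileList.map(file => file.fileID)
  // other branches (Notion pages, crawled pages) would populate datasourceInfo similarly
}, [datasource, fileList])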
@@ -81,7 +81,7 @@ export const usePipelineInit = () => {

   useEffect(() => {
     handleGetInitialWorkflowData()
-    // eslint-disable-next-line react-hooks/exhaustive-deps
+    // eslint-disable-next-line react-hooks/exhaustive-deps
   }, [])

   return {
@@ -1,3 +1,6 @@
 import type { CommonNodeType } from '@/app/components/workflow/types'
+import type { RAGPipelineVariables } from '@/models/pipeline'

-export type DataSourceNodeType = CommonNodeType
+export type DataSourceNodeType = CommonNodeType & {
+  variables: RAGPipelineVariables
+}
@@ -113,7 +113,7 @@ export type RAGPipelineVariable = {
   label: string
   variable: string
   max_length?: number
-  default?: string
+  default_value?: string
   placeholder?: string
   unit?: string
   required: boolean
@@ -125,9 +125,40 @@
 }

 export type InputVar = Omit<RAGPipelineVariable, 'belong_to_node_id'>

-export type RAGPipelineVariables = RAGPipelineVariable[]
-
-export type PipelineProcessingParamsResponse = {
-  variables: RAGPipelineVariable[]
+export type PipelineProcessingParamsRequest = {
+  pipeline_id: string
+  node_id: string
 }
+
+export type RAGPipelineVariables = RAGPipelineVariable[]
+export type PipelineProcessingParamsResponse = {
+  variables: RAGPipelineVariables
+}
+
+export type PipelineDatasourceNodeRunRequest = {
+  pipeline_id: string
+  node_id: string
+  inputs: Record<string, any>
+}
+
+export type PipelineDatasourceNodeRunResponse = {
+  id: string
+  inputs: Record<string, any>
+  process_data: Record<string, any>
+  outputs: Record<string, any>
+  status: string
+  error?: string
+  elapsed_time: number
+  execution_metadata: {
+    total_tokens: number
+    total_price: number
+    currency?: string
+  }
+  extras: {
+    icon: string | object
+  }
+  created_at: string
+  created_by: string
+  finished_at: string
+}
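Note: the new request/response types give the datasource-node test run a typed contract. A sketch of how they line up; the concrete values are invented for illustration and the status semantics are not defined in this diff:

const request: PipelineDatasourceNodeRunRequest = {
  pipeline_id: 'pipeline-id',        // illustrative value
  node_id: 'datasource-node-id',     // illustrative value
  inputs: { url: 'https://example.com' },
}

const handleResult = (res: PipelineDatasourceNodeRunResponse) => {
  if (res.error)
    console.error(res.status, res.error)
  else
    console.log(res.outputs, res.elapsed_time, res.execution_metadata.total_tokens)
}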
@@ -8,6 +8,8 @@ import type {
   ImportPipelineDSLRequest,
   ImportPipelineDSLResponse,
   PipelineCheckDependenciesResponse,
+  PipelineDatasourceNodeRunRequest,
+  PipelineProcessingParamsRequest,
   PipelineProcessingParamsResponse,
   PipelineTemplateByIdResponse,
   PipelineTemplateListParams,
@@ -95,7 +97,7 @@ export const useImportPipelineDSLConfirm = (
   return useMutation({
     mutationKey: [NAME_SPACE, 'dsl-import-confirm'],
     mutationFn: (importId: string) => {
-      return post<ImportPipelineDSLConfirmResponse>(`/rag/pipeline/imports/${importId}/confirm`)
+      return post<ImportPipelineDSLConfirmResponse>(`/rag/pipelines/imports/${importId}/confirm`)
     },
     ...mutationOptions,
   })
@@ -113,12 +115,29 @@ export const useCheckPipelineDependencies = (
   })
 }

+export const useDatasourceNodeRun = () => {
+  return useMutation({
+    mutationKey: [NAME_SPACE, 'datasource-node-run'],
+    mutationFn: (request: PipelineDatasourceNodeRunRequest) => {
+      const { pipeline_id, node_id, ...rest } = request
+      return post(`/rag/pipelines/${pipeline_id}/workflows/published/nodes/${node_id}/run`, {
+        body: rest,
+      })
+    },
+  })
+}
+
 // Get the config of shared input fields
-export const usePipelineProcessingParams = (pipelineId: string) => {
+export const usePipelineProcessingParams = (params: PipelineProcessingParamsRequest) => {
+  const { pipeline_id, node_id } = params
   return useQuery<PipelineProcessingParamsResponse>({
-    queryKey: [NAME_SPACE, 'pipeline-processing-params', pipelineId],
+    queryKey: [NAME_SPACE, 'pipeline-processing-params', pipeline_id],
     queryFn: () => {
-      return get<PipelineProcessingParamsResponse>(`/rag/pipeline/${pipelineId}/workflows/processing/parameters`)
+      return get<PipelineProcessingParamsResponse>(`/rag/pipelines/${pipeline_id}/workflows/processing/parameters`, {
+        params: {
+          node_id,
+        },
+      })
     },
   })
}
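Note: usePipelineProcessingParams now takes { pipeline_id, node_id } and forwards node_id as a query parameter, while useDatasourceNodeRun posts to the published-node run endpoint. A minimal usage sketch inside a hypothetical component:

const NodeTestRun = ({ pipelineId, nodeId }: { pipelineId: string; nodeId: string }) => {
  // Shared input-field config for this data-source node.
  const { data: paramsConfig } = usePipelineProcessingParams({ pipeline_id: pipelineId, node_id: nodeId })
  // Single-node run against the published pipeline workflow.
  const { mutate: runNode } = useDatasourceNodeRun()

  const handleRun = (inputs: Record<string, any>) =>
    runNode({ pipeline_id: pipelineId, node_id: nodeId, inputs })

  return null // rendering omitted; sketch only
}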