mirror of https://github.com/langgenius/dify.git
refactor(datasets): rename 'markdown' to 'content' for consistency across components
This commit is contained in:
parent c8d60f372d
commit 1214942eb7
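
The rename is mechanical: the CrawlResultItem model and every component that reads it switch from 'markdown' to 'content'. A before/after sketch of the model, reconstructed from the type hunk near the end of this diff (all other fields unchanged):

    // before
    export type CrawlResultItem = {
      title: string
      markdown: string
      description: string
      source_url: string
    }

    // after
    export type CrawlResultItem = {
      title: string
      content: string
      description: string
      source_url: string
    }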
@@ -3,19 +3,19 @@ import type { CrawlResultItem } from '@/models/datasets'
 const result: CrawlResultItem[] = [
   {
     title: 'Start the frontend Docker container separately',
-    markdown: 'Markdown 1',
+    content: 'Markdown 1',
     description: 'Description 1',
     source_url: 'https://example.com/1',
   },
   {
     title: 'Advanced Tool Integration',
-    markdown: 'Markdown 2',
+    content: 'Markdown 2',
     description: 'Description 2',
     source_url: 'https://example.com/2',
   },
   {
     title: 'Local Source Code Start | English | Dify',
-    markdown: 'Markdown 3',
+    content: 'Markdown 3',
     description: 'Description 3',
     source_url: 'https://example.com/3',
   },

@@ -150,14 +150,15 @@ const JinaReader: FC<Props> = ({
     }) as any

     if (res.data) {
+      const { title, content, description, url } = res.data
       const data = {
         current: 1,
         total: 1,
         data: [{
-          title: res.data.title,
-          markdown: res.data.content,
-          description: res.data.description,
-          source_url: res.data.url,
+          title,
+          content,
+          description,
+          source_url: url,
         }],
         time_consuming: (Date.now() - startTime) / 1000,
       }
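
Besides the rename, the hunk above swaps repeated res.data.* access for a single destructuring, and the API's 'content' field now flows into the result without renaming. A minimal standalone sketch of the reshaping, assuming res.data carries { title, content, description, url } as shown:

    type JinaPage = { title: string; content: string; description: string; url: string }

    // source_url is the only field still renamed; the rest pass through
    const toCrawlResult = ({ title, content, description, url }: JinaPage) => ({
      title,
      content,
      description,
      source_url: url,
    })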

@@ -32,7 +32,7 @@ const WebsitePreview = ({
       <div className='system-xs-medium truncate text-text-tertiary' title={payload.source_url}>{payload.source_url}</div>
     </div>
     <div className={cn(s.previewContent, 'body-md-regular')}>
-      <div className={cn(s.fileContent)}>{payload.markdown}</div>
+      <div className={cn(s.fileContent)}>{payload.content}</div>
     </div>
   </div>
 )

@@ -115,13 +115,7 @@ const WebsiteCrawl = ({
     onDataSourceNodeCompleted: (data: DataSourceNodeCompletedResponse) => {
       const { data: crawlData, time_consuming } = data
       const crawlResultData = {
-        data: crawlData.map((item: any) => {
-          const { content, ...rest } = item
-          return {
-            markdown: content || '',
-            ...rest,
-          } as CrawlResultItem
-        }),
+        data: crawlData as CrawlResultItem[],
         time_consuming: time_consuming ?? 0,
       }
       setCrawlResult(crawlResultData)
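
With the model field matching the API field, the per-item remap above collapses into a cast. Roughly, assuming crawl items arrive as { content, ...rest }:

    // old: copy each item, renaming 'content' to 'markdown' and defaulting to ''
    const before = crawlData.map(({ content, ...rest }: any) => ({ markdown: content || '', ...rest }))
    // new: field names already agree, so the array is used directly
    const after = crawlData as CrawlResultItem[]

One behavioral nuance: the old map coerced a missing 'content' to an empty string, while the plain cast keeps whatever the API sent.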

@@ -196,7 +196,7 @@ export const useOnlineDrive = () => {
   const dataSourceStore = useDataSourceStore()

   const selectedOnlineDriveFileList = useMemo(() => {
-    return selectedFileIds.map(key => onlineDriveFileList.find(item => item.id === key)!)
+    return selectedFileIds.map(id => onlineDriveFileList.find(item => item.id === id)!)
   }, [onlineDriveFileList, selectedFileIds])

   const clearOnlineDriveData = useCallback(() => {

@@ -27,7 +27,7 @@ const WebsitePreview = ({
         <span className='uppercase' title={currentWebsite.source_url}>{currentWebsite.source_url}</span>
         <span>·</span>
         <span>·</span>
-        <span>{`${formatNumberAbbreviated(currentWebsite.markdown.length)} ${t('datasetPipeline.addDocuments.characters')}`}</span>
+        <span>{`${formatNumberAbbreviated(currentWebsite.content.length)} ${t('datasetPipeline.addDocuments.characters')}`}</span>
       </div>
     </div>
     <button

@@ -39,7 +39,7 @@ const WebsitePreview = ({
       </button>
     </div>
     <div className='body-md-regular grow overflow-hidden px-6 py-5 text-text-secondary'>
-      {currentWebsite.markdown}
+      {currentWebsite.content}
     </div>
   </div>
 )

@@ -70,7 +70,7 @@ const DocumentSettings = ({ datasetId, documentId }: DocumentSettingsProps) => {
         {
           title: documentDetail.name,
           source_url: documentDetail.data_source_info?.url,
-          markdown: '',
+          content: '',
           description: '',
         },
       ]}

@@ -9,12 +9,11 @@ import ProcessDocuments from './process-documents'
 import LeftHeader from './left-header'
 import { usePipelineExecutionLog, useRunPublishedPipeline } from '@/service/use-pipeline'
 import type { OnlineDriveFile, PublishedPipelineRunPreviewResponse } from '@/models/pipeline'
-import { DatasourceType, OnlineDriveFileType } from '@/models/pipeline'
+import { DatasourceType } from '@/models/pipeline'
 import { noop } from 'lodash-es'
 import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
 import { useRouter } from 'next/navigation'
 import { useInvalidDocumentDetail, useInvalidDocumentList } from '@/service/knowledge/use-document'
-import { isFile } from '../../../create-from-pipeline/data-source/online-drive/utils'

 type PipelineSettingsProps = {
   datasetId: string

@@ -56,7 +55,7 @@ const PipelineSettings = ({
     if (lastRunData?.datasource_type === DatasourceType.websiteCrawl) {
       const { content, description, source_url, title } = lastRunData.datasource_info
       websitePages.push({
-        markdown: content,
+        content,
         description,
         source_url,
         title,
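
With the destructured name now matching the model field, the pushed object uses property shorthand throughout:

    websitePages.push({ content, description, source_url, title })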

@@ -80,13 +79,12 @@ const PipelineSettings = ({
   const onlineDriveFiles = useMemo(() => {
     const onlineDriveFiles: OnlineDriveFile[] = []
     if (lastRunData?.datasource_type === DatasourceType.onlineDrive) {
-      const { key } = lastRunData.datasource_info
-      const isFileType = isFile(key)
-      const filePathList = key.split('/')
+      const { id, type, name, size } = lastRunData.datasource_info
       onlineDriveFiles.push({
-        key,
-        displayName: `${isFileType ? filePathList.pop() : filePathList[filePathList.length - 2]}${isFileType ? '' : '/'}`,
-        type: isFileType ? OnlineDriveFileType.file : OnlineDriveFileType.folder,
+        id,
+        name,
+        type,
+        size,
       })
     }
     return onlineDriveFiles
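
This hunk changes the pushed shape entirely: instead of deriving a display name and file/folder type from the path-like 'key', the code takes 'id', 'type', 'name', and 'size' straight from datasource_info. For reference, the removed derivation worked roughly like this (a sketch; isFile presumably tests whether the key names a file rather than ending in '/'):

    const parts = key.split('/')        // e.g. 'folder/sub/file.txt' or 'folder/sub/'
    const isFileType = isFile(key)
    const displayName = isFileType
      ? parts.pop()                     // files: last path segment
      : `${parts[parts.length - 2]}/`   // folders: key ends in '/', so the name is second to last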

@@ -137,7 +137,7 @@ const Panel: FC<NodePanelProps<ToolNodeType>> = ({
           )}
         </div>
       )
-      })}
+      })}
     </>
   </OutputVars>
 </div>

@@ -153,7 +153,7 @@ export type CrawlOptions = {

 export type CrawlResultItem = {
   title: string
-  markdown: string
+  content: string
   description: string
   source_url: string
 }
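
Because CrawlResultItem is checked structurally, any call site still reading 'markdown' fails to compile once this hunk lands, which is what makes a rename like this safe to apply mechanically:

    declare const item: CrawlResultItem
    item.markdown   // error TS2339: Property 'markdown' does not exist on type 'CrawlResultItem'
    item.content    // ok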

@@ -324,6 +324,7 @@ export type DataSourceInfo = {
   provider?: DataSourceProvider
   job_id: string
   url: string
+  credential_id?: string
 }

 export type InitialDocumentDetail = {