diff --git a/web/app/components/base/notion-page-selector/base.tsx b/web/app/components/base/notion-page-selector/base.tsx index 1f9ddeaebd..9315605cdf 100644 --- a/web/app/components/base/notion-page-selector/base.tsx +++ b/web/app/components/base/notion-page-selector/base.tsx @@ -21,6 +21,7 @@ type NotionPageSelectorProps = { datasetId?: string credentialList: DataSourceCredential[] onSelectCredential?: (credentialId: string) => void + supportBatchUpload?: boolean } const NotionPageSelector = ({ @@ -32,6 +33,7 @@ const NotionPageSelector = ({ datasetId = '', credentialList, onSelectCredential, + supportBatchUpload = false, }: NotionPageSelectorProps) => { const [searchValue, setSearchValue] = useState('') const setShowAccountSettingModal = useModalContextSelector(s => s.setShowAccountSettingModal) @@ -110,7 +112,7 @@ const NotionPageSelector = ({ setCurrentCredential(credential) onSelect([]) // Clear selected pages when changing credential onSelectCredential?.(credential.credentialId) - }, [invalidPreImportNotionPages, onSelect, onSelectCredential]) + }, [datasetId, invalidPreImportNotionPages, notionCredentials, onSelect, onSelectCredential]) const handleSelectPages = useCallback((newSelectedPagesId: Set) => { const selectedPages = Array.from(newSelectedPagesId).map(pageId => pagesMapAndSelectedPagesId[0][pageId]) @@ -175,6 +177,7 @@ const NotionPageSelector = ({ canPreview={canPreview} previewPageId={previewPageId} onPreview={handlePreviewPage} + isMultipleChoice={supportBatchUpload} /> )} diff --git a/web/app/components/base/notion-page-selector/page-selector/index.tsx b/web/app/components/base/notion-page-selector/page-selector/index.tsx index c293555582..9c89b601fb 100644 --- a/web/app/components/base/notion-page-selector/page-selector/index.tsx +++ b/web/app/components/base/notion-page-selector/page-selector/index.tsx @@ -7,6 +7,7 @@ import Checkbox from '../../checkbox' import NotionIcon from '../../notion-icon' import cn from '@/utils/classnames' import type { DataSourceNotionPage, DataSourceNotionPageMap } from '@/models/common' +import Radio from '@/app/components/base/radio/ui' type PageSelectorProps = { value: Set @@ -18,6 +19,7 @@ type PageSelectorProps = { canPreview?: boolean previewPageId?: string onPreview?: (selectedPageId: string) => void + isMultipleChoice?: boolean } type NotionPageTreeItem = { children: Set @@ -80,6 +82,7 @@ const ItemComponent = ({ index, style, data }: ListChildComponentProps<{ searchValue: string previewPageId: string pagesMap: DataSourceNotionPageMap + isMultipleChoice?: boolean }>) => { const { t } = useTranslation() const { @@ -94,6 +97,7 @@ const ItemComponent = ({ index, style, data }: ListChildComponentProps<{ searchValue, previewPageId, pagesMap, + isMultipleChoice, } = data const current = dataList[index] const currentWithChildrenAndDescendants = listMapWithChildrenAndDescendants[current.page_id] @@ -134,16 +138,24 @@ const ItemComponent = ({ index, style, data }: ListChildComponentProps<{ previewPageId === current.page_id && 'bg-state-base-hover')} style={{ ...style, top: style.top as number + 8, left: 8, right: 8, width: 'calc(100% - 16px)' }} > - { - if (disabled) - return - handleCheck(index) - }} - /> + {isMultipleChoice ? 
( + { + handleCheck(index) + }} + />) : ( + { + handleCheck(index) + }} + /> + )} {!searchValue && renderArrow()} { const { t } = useTranslation() const [dataList, setDataList] = useState([]) @@ -265,7 +278,7 @@ const PageSelector = ({ const currentWithChildrenAndDescendants = listMapWithChildrenAndDescendants[pageId] if (copyValue.has(pageId)) { - if (!searchValue) { + if (!searchValue && isMultipleChoice) { for (const item of currentWithChildrenAndDescendants.descendants) copyValue.delete(item) } @@ -273,12 +286,18 @@ const PageSelector = ({ copyValue.delete(pageId) } else { - if (!searchValue) { + if (!searchValue && isMultipleChoice) { for (const item of currentWithChildrenAndDescendants.descendants) copyValue.add(item) } - - copyValue.add(pageId) + // Single choice mode, clear previous selection + if (!isMultipleChoice && copyValue.size > 0) { + copyValue.clear() + copyValue.add(pageId) + } + else { + copyValue.add(pageId) + } } onSelect(new Set(copyValue)) @@ -322,6 +341,7 @@ const PageSelector = ({ searchValue, previewPageId: currentPreviewPageId, pagesMap, + isMultipleChoice, }} > {Item} diff --git a/web/app/components/datasets/create/file-uploader/index.tsx b/web/app/components/datasets/create/file-uploader/index.tsx index abe2564ad2..700a5f7680 100644 --- a/web/app/components/datasets/create/file-uploader/index.tsx +++ b/web/app/components/datasets/create/file-uploader/index.tsx @@ -25,7 +25,7 @@ type IFileUploaderProps = { onFileUpdate: (fileItem: FileItem, progress: number, list: FileItem[]) => void onFileListUpdate?: (files: FileItem[]) => void onPreview: (file: File) => void - notSupportBatchUpload?: boolean + supportBatchUpload?: boolean } const FileUploader = ({ @@ -35,7 +35,7 @@ const FileUploader = ({ onFileUpdate, onFileListUpdate, onPreview, - notSupportBatchUpload, + supportBatchUpload = false, }: IFileUploaderProps) => { const { t } = useTranslation() const { notify } = useContext(ToastContext) @@ -44,7 +44,7 @@ const FileUploader = ({ const dropRef = useRef(null) const dragRef = useRef(null) const fileUploader = useRef(null) - const hideUpload = notSupportBatchUpload && fileList.length > 0 + const hideUpload = !supportBatchUpload && fileList.length > 0 const { data: fileUploadConfigResponse } = useFileUploadConfig() const { data: supportFileTypesResponse } = useFileSupportTypes() @@ -68,9 +68,9 @@ const FileUploader = ({ const ACCEPTS = supportTypes.map((ext: string) => `.${ext}`) const fileUploadConfig = useMemo(() => ({ file_size_limit: fileUploadConfigResponse?.file_size_limit ?? 15, - batch_count_limit: fileUploadConfigResponse?.batch_count_limit ?? 5, - file_upload_limit: fileUploadConfigResponse?.file_upload_limit ?? 5, - }), [fileUploadConfigResponse]) + batch_count_limit: supportBatchUpload ? (fileUploadConfigResponse?.batch_count_limit ?? 5) : 1, + file_upload_limit: supportBatchUpload ? (fileUploadConfigResponse?.file_upload_limit ?? 
5) : 1, + }), [fileUploadConfigResponse, supportBatchUpload]) const fileListRef = useRef([]) @@ -254,12 +254,12 @@ const FileUploader = ({ }), ) let files = nested.flat() - if (notSupportBatchUpload) files = files.slice(0, 1) + if (!supportBatchUpload) files = files.slice(0, 1) files = files.slice(0, fileUploadConfig.batch_count_limit) const valid = files.filter(isValid) initialUpload(valid) }, - [initialUpload, isValid, notSupportBatchUpload, traverseFileEntry, fileUploadConfig], + [initialUpload, isValid, supportBatchUpload, traverseFileEntry, fileUploadConfig], ) const selectHandle = () => { if (fileUploader.current) @@ -303,7 +303,7 @@ const FileUploader = ({ id="fileUploader" className="hidden" type="file" - multiple={!notSupportBatchUpload} + multiple={supportBatchUpload} accept={ACCEPTS.join(',')} onChange={fileChangeHandle} /> @@ -317,7 +317,7 @@ const FileUploader = ({ - {notSupportBatchUpload ? t('datasetCreation.stepOne.uploader.buttonSingleFile') : t('datasetCreation.stepOne.uploader.button')} + {supportBatchUpload ? t('datasetCreation.stepOne.uploader.button') : t('datasetCreation.stepOne.uploader.buttonSingleFile')} {supportTypes.length > 0 && ( )} @@ -326,7 +326,7 @@ const FileUploader = ({
{t('datasetCreation.stepOne.uploader.tip', { size: fileUploadConfig.file_size_limit, supportTypes: supportTypesShowNames, - batchCount: notSupportBatchUpload ? 1 : fileUploadConfig.batch_count_limit, + batchCount: fileUploadConfig.batch_count_limit, totalCount: fileUploadConfig.file_upload_limit, })}
{dragging &&
} diff --git a/web/app/components/datasets/create/step-one/index.tsx b/web/app/components/datasets/create/step-one/index.tsx index cab1637661..f2768be470 100644 --- a/web/app/components/datasets/create/step-one/index.tsx +++ b/web/app/components/datasets/create/step-one/index.tsx @@ -110,7 +110,7 @@ const StepOne = ({ const hasNotin = notionPages.length > 0 const isVectorSpaceFull = plan.usage.vectorSpace >= plan.total.vectorSpace const isShowVectorSpaceFull = (allFileLoaded || hasNotin) && isVectorSpaceFull && enableBilling - const notSupportBatchUpload = enableBilling && plan.type === 'sandbox' + const supportBatchUpload = !enableBilling || plan.type !== 'sandbox' const nextDisabled = useMemo(() => { if (!files.length) return true @@ -229,7 +229,7 @@ const StepOne = ({ onFileListUpdate={updateFileList} onFileUpdate={updateFile} onPreview={updateCurrentFile} - notSupportBatchUpload={notSupportBatchUpload} + supportBatchUpload={supportBatchUpload} /> {isShowVectorSpaceFull && (
@@ -259,6 +259,7 @@ const StepOne = ({ credentialList={notionCredentialList} onSelectCredential={updateNotionCredentialId} datasetId={datasetId} + supportBatchUpload={supportBatchUpload} />
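Note on the step-one and uploader hunks above: the old `notSupportBatchUpload` flag is inverted into `supportBatchUpload` (false only on billed sandbox plans), and both upload limits are clamped to 1 when it is off. A minimal standalone sketch of that gating, assuming only the config fields the component actually reads:

```ts
// Sketch of the plan gating and limit clamping, assuming only the fields the
// component reads from the upload-config response.
type FileUploadConfigResponse = {
  file_size_limit?: number
  batch_count_limit?: number
  file_upload_limit?: number
}

const deriveSupportBatchUpload = (enableBilling: boolean, planType: string): boolean =>
  !enableBilling || planType !== 'sandbox'

const deriveUploadLimits = (
  response: FileUploadConfigResponse | undefined,
  supportBatchUpload: boolean,
) => ({
  file_size_limit: response?.file_size_limit ?? 15,
  // Single-file mode collapses both counts to 1, so the tip text, the drop
  // handler and the hidden input's `multiple` flag all agree without extra ternaries.
  batch_count_limit: supportBatchUpload ? (response?.batch_count_limit ?? 5) : 1,
  file_upload_limit: supportBatchUpload ? (response?.file_upload_limit ?? 5) : 1,
})

// A billed sandbox workspace only ever uploads one file at a time.
console.log(deriveUploadLimits(undefined, deriveSupportBatchUpload(true, 'sandbox')))
// -> { file_size_limit: 15, batch_count_limit: 1, file_upload_limit: 1 }
```

Deriving the clamp inside the memoized config keeps every call site (tip text, drop handler, file input) reading from a single source of truth.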
{isShowVectorSpaceFull && ( diff --git a/web/app/components/datasets/create/website/base/crawled-result-item.tsx b/web/app/components/datasets/create/website/base/crawled-result-item.tsx index 8ea316f62a..51e043c35a 100644 --- a/web/app/components/datasets/create/website/base/crawled-result-item.tsx +++ b/web/app/components/datasets/create/website/base/crawled-result-item.tsx @@ -6,6 +6,7 @@ import cn from '@/utils/classnames' import type { CrawlResultItem as CrawlResultItemType } from '@/models/datasets' import Checkbox from '@/app/components/base/checkbox' import Button from '@/app/components/base/button' +import Radio from '@/app/components/base/radio/ui' type Props = { payload: CrawlResultItemType @@ -13,6 +14,7 @@ type Props = { isPreview: boolean onCheckChange: (checked: boolean) => void onPreview: () => void + isMultipleChoice: boolean } const CrawledResultItem: FC = ({ @@ -21,6 +23,7 @@ const CrawledResultItem: FC = ({ isChecked, onCheckChange, onPreview, + isMultipleChoice, }) => { const { t } = useTranslation() @@ -31,7 +34,21 @@ const CrawledResultItem: FC = ({
- + { + isMultipleChoice ? ( + + ) : ( + + ) + }
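The JSX bodies in the hunk above did not survive extraction, so only the ternary skeleton remains. Judging by the imports added at the top of the file, the intent is to render the existing Checkbox in multiple-choice mode and the new Radio control otherwise. A hypothetical sketch of that branch; the prop names and handlers here are assumptions, not the original attributes:

```tsx
// Hypothetical reconstruction: the original attributes were lost above, so the
// prop names on Checkbox and Radio are assumptions, not the patch content.
import Checkbox from '@/app/components/base/checkbox'
import Radio from '@/app/components/base/radio/ui'

type ItemSelectorProps = {
  isMultipleChoice: boolean
  isChecked: boolean
  onCheckChange: (checked: boolean) => void
}

const ItemSelector = ({ isMultipleChoice, isChecked, onCheckChange }: ItemSelectorProps) => (
  isMultipleChoice
    ? <Checkbox checked={isChecked} onCheck={() => onCheckChange(!isChecked)} />
    : <Radio isChecked={isChecked} onCheck={() => onCheckChange(true)} />
)

export default ItemSelector
```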
void onPreview: (payload: CrawlResultItem) => void usedTime: number + isMultipleChoice: boolean } const CrawledResult: FC = ({ @@ -25,6 +26,7 @@ const CrawledResult: FC = ({ onSelectedChange, onPreview, usedTime, + isMultipleChoice, }) => { const { t } = useTranslation() @@ -40,13 +42,17 @@ const CrawledResult: FC = ({ const handleItemCheckChange = useCallback((item: CrawlResultItem) => { return (checked: boolean) => { - if (checked) - onSelectedChange([...checkedList, item]) - - else + if (checked) { + if (isMultipleChoice) + onSelectedChange([...checkedList, item]) + else + onSelectedChange([item]) + } + else { onSelectedChange(checkedList.filter(checkedItem => checkedItem.source_url !== item.source_url)) + } } - }, [checkedList, onSelectedChange]) + }, [checkedList, isMultipleChoice, onSelectedChange]) const [previewIndex, setPreviewIndex] = React.useState(-1) const handlePreview = useCallback((index: number) => { @@ -59,11 +65,13 @@ const CrawledResult: FC = ({ return (
- + {isMultipleChoice && ( + + )}
{t(`${I18N_PREFIX}.scrapTimeInfo`, { total: list.length, @@ -80,6 +88,7 @@ const CrawledResult: FC = ({ payload={item} isChecked={checkedList.some(checkedItem => checkedItem.source_url === item.source_url)} onCheckChange={handleItemCheckChange(item)} + isMultipleChoice={isMultipleChoice} /> ))}
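The `handleItemCheckChange` change above is the core of single-choice mode for crawled results: checking an item appends in multiple-choice mode but replaces the selection otherwise, unchecking always filters by `source_url`, and the select-all checkbox is only rendered when multiple choice is allowed. The same rule as a pure-function sketch, reduced to the one field the comparison uses:

```ts
// Pure-function sketch of the selection rule in handleItemCheckChange above;
// CrawlResultItem is reduced to the one field the comparison actually uses.
type Selectable = { source_url: string }

const nextSelection = <T extends Selectable>(
  current: T[],
  item: T,
  checked: boolean,
  isMultipleChoice: boolean,
): T[] => {
  if (!checked)
    return current.filter(i => i.source_url !== item.source_url)
  // Multiple choice appends; single choice keeps at most one item.
  return isMultipleChoice ? [...current, item] : [item]
}

const a = { source_url: 'https://example.com/a' }
const b = { source_url: 'https://example.com/b' }
console.log(nextSelection([a], b, true, false)) // -> [b]    (second pick replaces the first)
console.log(nextSelection([a], b, true, true))  // -> [a, b]
```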
diff --git a/web/app/components/datasets/create/website/firecrawl/index.tsx b/web/app/components/datasets/create/website/firecrawl/index.tsx index 51c2c7d505..1ef934308a 100644 --- a/web/app/components/datasets/create/website/firecrawl/index.tsx +++ b/web/app/components/datasets/create/website/firecrawl/index.tsx @@ -26,6 +26,7 @@ type Props = { onJobIdChange: (jobId: string) => void crawlOptions: CrawlOptions onCrawlOptionsChange: (payload: CrawlOptions) => void + supportBatchUpload: boolean } enum Step { @@ -41,6 +42,7 @@ const FireCrawl: FC = ({ onJobIdChange, crawlOptions, onCrawlOptionsChange, + supportBatchUpload, }) => { const { t } = useTranslation() const [step, setStep] = useState(Step.init) @@ -171,7 +173,7 @@ const FireCrawl: FC = ({ content: item.markdown, })) setCrawlResult(data) - onCheckedCrawlResultChange(data.data || []) // default select the crawl result + onCheckedCrawlResultChange(supportBatchUpload ? (data.data || []) : (data.data?.slice(0, 1) || [])) // default select the crawl result setCrawlErrorMessage('') } } @@ -182,7 +184,7 @@ const FireCrawl: FC = ({ finally { setStep(Step.finished) } - }, [checkValid, crawlOptions, onJobIdChange, t, waitForCrawlFinished, onCheckedCrawlResultChange]) + }, [checkValid, crawlOptions, onJobIdChange, waitForCrawlFinished, t, onCheckedCrawlResultChange, supportBatchUpload]) return (
@@ -221,6 +223,7 @@ const FireCrawl: FC = ({ onSelectedChange={onCheckedCrawlResultChange} onPreview={onPreview} usedTime={Number.parseFloat(crawlResult?.time_consuming as string) || 0} + isMultipleChoice={supportBatchUpload} /> }
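After a Firecrawl job finishes, the results are still selected by default, but the default is capped to the first item when batch upload is off; the Jina Reader and WaterCrawl hunks below apply the identical expression. A sketch of that cap:

```ts
// Sketch of the post-crawl default selection; the same expression appears in
// the Firecrawl, Jina Reader and WaterCrawl handlers.
const defaultSelection = <T>(data: T[] | undefined, supportBatchUpload: boolean): T[] =>
  supportBatchUpload ? (data || []) : (data?.slice(0, 1) || [])

console.log(defaultSelection([1, 2, 3], false)) // -> [1]
console.log(defaultSelection(undefined, true))  // -> []
```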
diff --git a/web/app/components/datasets/create/website/index.tsx b/web/app/components/datasets/create/website/index.tsx index ee7ace6815..15324f642e 100644 --- a/web/app/components/datasets/create/website/index.tsx +++ b/web/app/components/datasets/create/website/index.tsx @@ -24,6 +24,7 @@ type Props = { crawlOptions: CrawlOptions onCrawlOptionsChange: (payload: CrawlOptions) => void authedDataSourceList: DataSourceAuth[] + supportBatchUpload?: boolean } const Website: FC = ({ @@ -35,6 +36,7 @@ const Website: FC = ({ crawlOptions, onCrawlOptionsChange, authedDataSourceList, + supportBatchUpload = false, }) => { const { t } = useTranslation() const { setShowAccountSettingModal } = useModalContext() @@ -116,6 +118,7 @@ const Website: FC = ({ onJobIdChange={onJobIdChange} crawlOptions={crawlOptions} onCrawlOptionsChange={onCrawlOptionsChange} + supportBatchUpload={supportBatchUpload} /> )} {source && selectedProvider === DataSourceProvider.waterCrawl && ( @@ -126,6 +129,7 @@ const Website: FC = ({ onJobIdChange={onJobIdChange} crawlOptions={crawlOptions} onCrawlOptionsChange={onCrawlOptionsChange} + supportBatchUpload={supportBatchUpload} /> )} {source && selectedProvider === DataSourceProvider.jinaReader && ( @@ -136,6 +140,7 @@ const Website: FC = ({ onJobIdChange={onJobIdChange} crawlOptions={crawlOptions} onCrawlOptionsChange={onCrawlOptionsChange} + supportBatchUpload={supportBatchUpload} /> )} {!source && ( diff --git a/web/app/components/datasets/create/website/jina-reader/index.tsx b/web/app/components/datasets/create/website/jina-reader/index.tsx index b6e6177af2..b2189b3e5c 100644 --- a/web/app/components/datasets/create/website/jina-reader/index.tsx +++ b/web/app/components/datasets/create/website/jina-reader/index.tsx @@ -26,6 +26,7 @@ type Props = { onJobIdChange: (jobId: string) => void crawlOptions: CrawlOptions onCrawlOptionsChange: (payload: CrawlOptions) => void + supportBatchUpload: boolean } enum Step { @@ -41,6 +42,7 @@ const JinaReader: FC = ({ onJobIdChange, crawlOptions, onCrawlOptionsChange, + supportBatchUpload, }) => { const { t } = useTranslation() const [step, setStep] = useState(Step.init) @@ -157,7 +159,7 @@ const JinaReader: FC = ({ total: 1, data: [{ title, - content, + markdown: content, description, source_url: url, }], @@ -176,7 +178,7 @@ const JinaReader: FC = ({ } else { setCrawlResult(data) - onCheckedCrawlResultChange(data.data || []) // default select the crawl result + onCheckedCrawlResultChange(supportBatchUpload ? (data.data || []) : (data.data?.slice(0, 1) || [])) // default select the crawl result setCrawlErrorMessage('') } } @@ -188,7 +190,7 @@ const JinaReader: FC = ({ finally { setStep(Step.finished) } - }, [checkValid, crawlOptions, onCheckedCrawlResultChange, onJobIdChange, t, waitForCrawlFinished]) + }, [checkValid, crawlOptions, onCheckedCrawlResultChange, onJobIdChange, supportBatchUpload, t, waitForCrawlFinished]) return (
@@ -227,6 +229,7 @@ const JinaReader: FC = ({ onSelectedChange={onCheckedCrawlResultChange} onPreview={onPreview} usedTime={Number.parseFloat(crawlResult?.time_consuming as string) || 0} + isMultipleChoice={supportBatchUpload} /> }
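The Jina Reader handler also builds a one-item result when a single URL is scraped directly, and that body now goes into the renamed `markdown` field (the type change lives in `web/models/datasets.ts` at the end of this patch). A trimmed sketch of that object, keeping only the fields visible in the hunk:

```ts
// Trimmed sketch of the one-item result built for a directly scraped URL; only
// the fields visible in the hunk are kept, and the scraped body now lands in
// the renamed `markdown` field.
import type { CrawlResultItem } from '@/models/datasets'

const buildSingleUrlResult = (
  url: string,
  title: string,
  content: string,
  description: string,
): { total: number, data: CrawlResultItem[] } => ({
  total: 1,
  data: [{
    title,
    markdown: content, // previously stored as `content`
    description,
    source_url: url,
  }],
})
```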
diff --git a/web/app/components/datasets/create/website/preview.tsx b/web/app/components/datasets/create/website/preview.tsx index d148c87196..f43dc83589 100644 --- a/web/app/components/datasets/create/website/preview.tsx +++ b/web/app/components/datasets/create/website/preview.tsx @@ -32,7 +32,7 @@ const WebsitePreview = ({
{payload.source_url}
-          {payload.content}
+          {payload.markdown}
) diff --git a/web/app/components/datasets/create/website/watercrawl/index.tsx b/web/app/components/datasets/create/website/watercrawl/index.tsx index 67a3e53feb..bf0048b788 100644 --- a/web/app/components/datasets/create/website/watercrawl/index.tsx +++ b/web/app/components/datasets/create/website/watercrawl/index.tsx @@ -26,6 +26,7 @@ type Props = { onJobIdChange: (jobId: string) => void crawlOptions: CrawlOptions onCrawlOptionsChange: (payload: CrawlOptions) => void + supportBatchUpload: boolean } enum Step { @@ -41,6 +42,7 @@ const WaterCrawl: FC = ({ onJobIdChange, crawlOptions, onCrawlOptionsChange, + supportBatchUpload, }) => { const { t } = useTranslation() const [step, setStep] = useState(Step.init) @@ -132,7 +134,7 @@ const WaterCrawl: FC = ({ }, } } - }, [crawlOptions.limit]) + }, [crawlOptions.limit, onCheckedCrawlResultChange]) const handleRun = useCallback(async (url: string) => { const { isValid, errorMsg } = checkValid(url) @@ -163,7 +165,7 @@ const WaterCrawl: FC = ({ } else { setCrawlResult(data) - onCheckedCrawlResultChange(data.data || []) // default select the crawl result + onCheckedCrawlResultChange(supportBatchUpload ? (data.data || []) : (data.data?.slice(0, 1) || [])) // default select the crawl result setCrawlErrorMessage('') } } @@ -174,7 +176,7 @@ const WaterCrawl: FC = ({ finally { setStep(Step.finished) } - }, [checkValid, crawlOptions, onJobIdChange, t, waitForCrawlFinished]) + }, [checkValid, crawlOptions, onCheckedCrawlResultChange, onJobIdChange, supportBatchUpload, t, waitForCrawlFinished]) return (
@@ -213,6 +215,7 @@ const WaterCrawl: FC = ({ onSelectedChange={onCheckedCrawlResultChange} onPreview={onPreview} usedTime={Number.parseFloat(crawlResult?.time_consuming as string) || 0} + isMultipleChoice={supportBatchUpload} /> }
diff --git a/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx b/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx index 555f2497ef..eb94d073b7 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx @@ -23,12 +23,12 @@ const SimplePieChart = dynamic(() => import('@/app/components/base/simple-pie-ch export type LocalFileProps = { allowedExtensions: string[] - notSupportBatchUpload?: boolean + supportBatchUpload?: boolean } const LocalFile = ({ allowedExtensions, - notSupportBatchUpload, + supportBatchUpload = false, }: LocalFileProps) => { const { t } = useTranslation() const { notify } = useContext(ToastContext) @@ -42,7 +42,7 @@ const LocalFile = ({ const fileUploader = useRef(null) const fileListRef = useRef([]) - const hideUpload = notSupportBatchUpload && localFileList.length > 0 + const hideUpload = !supportBatchUpload && localFileList.length > 0 const { data: fileUploadConfigResponse } = useFileUploadConfig() const supportTypesShowNames = useMemo(() => { @@ -64,9 +64,9 @@ const LocalFile = ({ const ACCEPTS = allowedExtensions.map((ext: string) => `.${ext}`) const fileUploadConfig = useMemo(() => ({ file_size_limit: fileUploadConfigResponse?.file_size_limit ?? 15, - batch_count_limit: fileUploadConfigResponse?.batch_count_limit ?? 5, - file_upload_limit: fileUploadConfigResponse?.file_upload_limit ?? 5, - }), [fileUploadConfigResponse]) + batch_count_limit: supportBatchUpload ? (fileUploadConfigResponse?.batch_count_limit ?? 5) : 1, + file_upload_limit: supportBatchUpload ? (fileUploadConfigResponse?.file_upload_limit ?? 5) : 1, + }), [fileUploadConfigResponse, supportBatchUpload]) const updateFile = useCallback((fileItem: FileItem, progress: number, list: FileItem[]) => { const { setLocalFileList } = dataSourceStore.getState() @@ -119,7 +119,7 @@ const LocalFile = ({ notify({ type: 'error', message: t('datasetCreation.stepOne.uploader.validation.size', { size: fileUploadConfig.file_size_limit }) }) return isValidType && isValidSize - }, [fileUploadConfig, notify, t, ACCEPTS]) + }, [notify, t, ACCEPTS, fileUploadConfig.file_size_limit]) type UploadResult = Awaited> @@ -230,12 +230,12 @@ const LocalFile = ({ return let files = [...e.dataTransfer.files] as File[] - if (notSupportBatchUpload) + if (!supportBatchUpload) files = files.slice(0, 1) const validFiles = files.filter(isValid) initialUpload(validFiles) - }, [initialUpload, isValid, notSupportBatchUpload]) + }, [initialUpload, isValid, supportBatchUpload]) const selectHandle = useCallback(() => { if (fileUploader.current) @@ -280,7 +280,7 @@ const LocalFile = ({ id='fileUploader' className='hidden' type='file' - multiple={!notSupportBatchUpload} + multiple={supportBatchUpload} accept={ACCEPTS.join(',')} onChange={fileChangeHandle} /> @@ -296,7 +296,7 @@ const LocalFile = ({ - {notSupportBatchUpload ? t('datasetCreation.stepOne.uploader.buttonSingleFile') : t('datasetCreation.stepOne.uploader.button')} + {supportBatchUpload ? t('datasetCreation.stepOne.uploader.button') : t('datasetCreation.stepOne.uploader.buttonSingleFile')} {allowedExtensions.length > 0 && ( )} @@ -305,7 +305,7 @@ const LocalFile = ({
{t('datasetCreation.stepOne.uploader.tip', { size: fileUploadConfig.file_size_limit, supportTypes: supportTypesShowNames, - batchCount: notSupportBatchUpload ? 1 : fileUploadConfig.batch_count_limit, + batchCount: fileUploadConfig.batch_count_limit, totalCount: fileUploadConfig.file_upload_limit, })}
{dragging &&
} diff --git a/web/app/components/datasets/documents/create-from-pipeline/data-source/online-documents/index.tsx b/web/app/components/datasets/documents/create-from-pipeline/data-source/online-documents/index.tsx index 97d6721e00..72ceb4a21e 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/data-source/online-documents/index.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/data-source/online-documents/index.tsx @@ -19,16 +19,18 @@ import { useDocLink } from '@/context/i18n' import { ACCOUNT_SETTING_TAB } from '@/app/components/header/account-setting/constants' type OnlineDocumentsProps = { - isInPipeline?: boolean nodeId: string nodeData: DataSourceNodeType onCredentialChange: (credentialId: string) => void + isInPipeline?: boolean + supportBatchUpload?: boolean } const OnlineDocuments = ({ nodeId, nodeData, isInPipeline = false, + supportBatchUpload = false, onCredentialChange, }: OnlineDocumentsProps) => { const docLink = useDocLink() @@ -157,7 +159,7 @@ const OnlineDocuments = ({ onSelect={handleSelectPages} canPreview={!isInPipeline} onPreview={handlePreviewPage} - isMultipleChoice={!isInPipeline} + isMultipleChoice={supportBatchUpload} currentCredentialId={currentCredentialId} /> ) : ( diff --git a/web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/file-list/index.tsx b/web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/file-list/index.tsx index 213415928b..ef63460ef3 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/file-list/index.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/file-list/index.tsx @@ -17,6 +17,7 @@ type FileListProps = { handleSelectFile: (file: OnlineDriveFile) => void handleOpenFolder: (file: OnlineDriveFile) => void isLoading: boolean + supportBatchUpload: boolean } const FileList = ({ @@ -32,6 +33,7 @@ const FileList = ({ handleOpenFolder, isInPipeline, isLoading, + supportBatchUpload, }: FileListProps) => { const [inputValue, setInputValue] = useState(keywords) @@ -72,8 +74,8 @@ const FileList = ({ handleResetKeywords={handleResetKeywords} handleOpenFolder={handleOpenFolder} handleSelectFile={handleSelectFile} - isInPipeline={isInPipeline} isLoading={isLoading} + supportBatchUpload={supportBatchUpload} />
) diff --git a/web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/file-list/list/index.tsx b/web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/file-list/list/index.tsx index f21f65904b..b313cadbc8 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/file-list/list/index.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/file-list/list/index.tsx @@ -11,8 +11,8 @@ type FileListProps = { fileList: OnlineDriveFile[] selectedFileIds: string[] keywords: string - isInPipeline: boolean isLoading: boolean + supportBatchUpload: boolean handleResetKeywords: () => void handleSelectFile: (file: OnlineDriveFile) => void handleOpenFolder: (file: OnlineDriveFile) => void @@ -25,8 +25,8 @@ const List = ({ handleResetKeywords, handleSelectFile, handleOpenFolder, - isInPipeline, isLoading, + supportBatchUpload, }: FileListProps) => { const anchorRef = useRef(null) const observerRef = useRef(null) @@ -80,7 +80,7 @@ const List = ({ isSelected={isSelected} onSelect={handleSelectFile} onOpen={handleOpenFolder} - isMultipleChoice={!isInPipeline} + isMultipleChoice={supportBatchUpload} /> ) }) diff --git a/web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/index.tsx b/web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/index.tsx index da8fd5dcc0..8bd1d7421b 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/index.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/data-source/online-drive/index.tsx @@ -20,14 +20,16 @@ import { ACCOUNT_SETTING_TAB } from '@/app/components/header/account-setting/con type OnlineDriveProps = { nodeId: string nodeData: DataSourceNodeType - isInPipeline?: boolean onCredentialChange: (credentialId: string) => void + isInPipeline?: boolean + supportBatchUpload?: boolean } const OnlineDrive = ({ nodeId, nodeData, isInPipeline = false, + supportBatchUpload = false, onCredentialChange, }: OnlineDriveProps) => { const docLink = useDocLink() @@ -111,7 +113,7 @@ const OnlineDrive = ({ }, }, ) - }, [datasourceNodeRunURL, dataSourceStore]) + }, [dataSourceStore, datasourceNodeRunURL, breadcrumbs]) useEffect(() => { if (!currentCredentialId) return @@ -152,12 +154,12 @@ const OnlineDrive = ({ draft.splice(index, 1) } else { - if (isInPipeline && draft.length >= 1) return + if (!supportBatchUpload && draft.length >= 1) return draft.push(file.id) } }) setSelectedFileIds(newSelectedFileList) - }, [dataSourceStore, isInPipeline]) + }, [dataSourceStore, supportBatchUpload]) const handleOpenFolder = useCallback((file: OnlineDriveFile) => { const { breadcrumbs, prefix, setBreadcrumbs, setPrefix, setBucket, setOnlineDriveFileList, setSelectedFileIds } = dataSourceStore.getState() @@ -177,7 +179,7 @@ const OnlineDrive = ({ setBreadcrumbs(newBreadcrumbs) setPrefix(newPrefix) } - }, [dataSourceStore, getOnlineDriveFiles]) + }, [dataSourceStore]) const handleSetting = useCallback(() => { setShowAccountSettingModal({ @@ -209,6 +211,7 @@ const OnlineDrive = ({ handleOpenFolder={handleOpenFolder} isInPipeline={isInPipeline} isLoading={isLoading} + supportBatchUpload={supportBatchUpload} />
) diff --git a/web/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl/base/crawled-result-item.tsx b/web/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl/base/crawled-result-item.tsx index 753b32c396..bdfcddfd77 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl/base/crawled-result-item.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl/base/crawled-result-item.tsx @@ -46,6 +46,7 @@ const CrawledResultItem = ({ /> ) : ( diff --git a/web/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl/index.tsx b/web/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl/index.tsx index 648f6a5d93..513ac8edd9 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl/index.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl/index.tsx @@ -33,14 +33,16 @@ const I18N_PREFIX = 'datasetCreation.stepOne.website' export type WebsiteCrawlProps = { nodeId: string nodeData: DataSourceNodeType - isInPipeline?: boolean onCredentialChange: (credentialId: string) => void + isInPipeline?: boolean + supportBatchUpload?: boolean } const WebsiteCrawl = ({ nodeId, nodeData, isInPipeline = false, + supportBatchUpload = false, onCredentialChange, }: WebsiteCrawlProps) => { const { t } = useTranslation() @@ -122,7 +124,7 @@ const WebsiteCrawl = ({ time_consuming: time_consuming ?? 0, } setCrawlResult(crawlResultData) - handleCheckedCrawlResultChange(isInPipeline ? [crawlData[0]] : crawlData) // default select the crawl result + handleCheckedCrawlResultChange(supportBatchUpload ? crawlData : crawlData.slice(0, 1)) // default select the crawl result setCrawlErrorMessage('') setStep(CrawlStep.finished) }, @@ -132,7 +134,7 @@ const WebsiteCrawl = ({ }, }, ) - }, [dataSourceStore, datasourceNodeRunURL, handleCheckedCrawlResultChange, isInPipeline, t]) + }, [dataSourceStore, datasourceNodeRunURL, handleCheckedCrawlResultChange, supportBatchUpload, t]) const handleSubmit = useCallback((value: Record) => { handleRun(value) @@ -149,7 +151,7 @@ const WebsiteCrawl = ({ setTotalNum(0) setCrawlErrorMessage('') onCredentialChange(credentialId) - }, [dataSourceStore, onCredentialChange]) + }, [onCredentialChange]) return (
@@ -195,7 +197,7 @@ const WebsiteCrawl = ({ previewIndex={previewIndex} onPreview={handlePreview} showPreview={!isInPipeline} - isMultipleChoice={!isInPipeline} // only support single choice in test run + isMultipleChoice={supportBatchUpload} // only support single choice in test run /> )}
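In the online-drive `handleSelectFile` hunk a few files above, single-file mode is enforced at selection time: once one file is selected, further picks are ignored rather than replacing the selection. A standalone sketch of that guard, using immer's `produce` to mirror the component's draft-based update (reduced here to plain file ids):

```ts
// Standalone sketch of the handleSelectFile guard, using immer's produce to
// mirror the component's draft-based update (reduced to plain file ids).
import { produce } from 'immer'

const toggleFileSelection = (
  selectedIds: string[],
  fileId: string,
  supportBatchUpload: boolean,
): string[] =>
  produce(selectedIds, (draft) => {
    const index = draft.indexOf(fileId)
    if (index > -1) {
      draft.splice(index, 1) // clicking a selected file always deselects it
      return
    }
    // Single-file mode: ignore further picks once something is selected.
    if (!supportBatchUpload && draft.length >= 1)
      return
    draft.push(fileId)
  })

console.log(toggleFileSelection(['a'], 'b', false)) // -> ['a']      (second pick ignored)
console.log(toggleFileSelection(['a'], 'b', true))  // -> ['a', 'b']
```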
diff --git a/web/app/components/datasets/documents/create-from-pipeline/index.tsx b/web/app/components/datasets/documents/create-from-pipeline/index.tsx index 77b77700ca..1d9232403a 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/index.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/index.tsx @@ -102,7 +102,7 @@ const CreateFormPipeline = () => { return onlineDriveFileList.length > 0 && isVectorSpaceFull && enableBilling return false }, [allFileLoaded, datasource, datasourceType, enableBilling, isVectorSpaceFull, onlineDocuments.length, onlineDriveFileList.length, websitePages.length]) - const notSupportBatchUpload = enableBilling && plan.type === 'sandbox' + const supportBatchUpload = !enableBilling || plan.type !== 'sandbox' const nextBtnDisabled = useMemo(() => { if (!datasource) return true @@ -125,15 +125,16 @@ const CreateFormPipeline = () => { const showSelect = useMemo(() => { if (datasourceType === DatasourceType.onlineDocument) { const pagesCount = currentWorkspace?.pages.length ?? 0 - return pagesCount > 0 + return supportBatchUpload && pagesCount > 0 } if (datasourceType === DatasourceType.onlineDrive) { const isBucketList = onlineDriveFileList.some(file => file.type === 'bucket') - return !isBucketList && onlineDriveFileList.filter((item) => { + return supportBatchUpload && !isBucketList && onlineDriveFileList.filter((item) => { return item.type !== 'bucket' }).length > 0 } - }, [currentWorkspace?.pages.length, datasourceType, onlineDriveFileList]) + return false + }, [currentWorkspace?.pages.length, datasourceType, supportBatchUpload, onlineDriveFileList]) const totalOptions = useMemo(() => { if (datasourceType === DatasourceType.onlineDocument) @@ -395,7 +396,7 @@ const CreateFormPipeline = () => { clearWebsiteCrawlData() else if (dataSource.nodeData.provider_type === DatasourceType.onlineDrive) clearOnlineDriveData() - }, []) + }, [clearOnlineDocumentData, clearOnlineDriveData, clearWebsiteCrawlData]) const handleSwitchDataSource = useCallback((dataSource: Datasource) => { const { @@ -406,13 +407,13 @@ const CreateFormPipeline = () => { setCurrentCredentialId('') currentNodeIdRef.current = dataSource.nodeId setDatasource(dataSource) - }, [dataSourceStore]) + }, [clearDataSourceData, dataSourceStore]) const handleCredentialChange = useCallback((credentialId: string) => { const { setCurrentCredentialId } = dataSourceStore.getState() clearDataSourceData(datasource!) 
setCurrentCredentialId(credentialId) - }, [dataSourceStore, datasource]) + }, [clearDataSourceData, dataSourceStore, datasource]) if (isFetchingPipelineInfo) { return ( @@ -443,7 +444,7 @@ const CreateFormPipeline = () => { {datasourceType === DatasourceType.localFile && ( )} {datasourceType === DatasourceType.onlineDocument && ( @@ -451,6 +452,7 @@ const CreateFormPipeline = () => { nodeId={datasource!.nodeId} nodeData={datasource!.nodeData} onCredentialChange={handleCredentialChange} + supportBatchUpload={supportBatchUpload} /> )} {datasourceType === DatasourceType.websiteCrawl && ( @@ -458,6 +460,7 @@ const CreateFormPipeline = () => { nodeId={datasource!.nodeId} nodeData={datasource!.nodeData} onCredentialChange={handleCredentialChange} + supportBatchUpload={supportBatchUpload} /> )} {datasourceType === DatasourceType.onlineDrive && ( @@ -465,6 +468,7 @@ const CreateFormPipeline = () => { nodeId={datasource!.nodeId} nodeData={datasource!.nodeData} onCredentialChange={handleCredentialChange} + supportBatchUpload={supportBatchUpload} /> )} {isShowVectorSpaceFull && ( diff --git a/web/app/components/datasets/documents/create-from-pipeline/preview/web-preview.tsx b/web/app/components/datasets/documents/create-from-pipeline/preview/web-preview.tsx index bae4deb86e..ce7a5da24c 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/preview/web-preview.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/preview/web-preview.tsx @@ -27,7 +27,7 @@ const WebsitePreview = ({ {currentWebsite.source_url} · · - {`${formatNumberAbbreviated(currentWebsite.content.length)} ${t('datasetPipeline.addDocuments.characters')}`} + {`${formatNumberAbbreviated(currentWebsite.markdown.length)} ${t('datasetPipeline.addDocuments.characters')}`}
-          {currentWebsite.content}
+          {currentWebsite.markdown}
) diff --git a/web/app/components/datasets/documents/detail/settings/document-settings.tsx b/web/app/components/datasets/documents/detail/settings/document-settings.tsx index 3bcb8ef3aa..16c90c925f 100644 --- a/web/app/components/datasets/documents/detail/settings/document-settings.tsx +++ b/web/app/components/datasets/documents/detail/settings/document-settings.tsx @@ -113,7 +113,7 @@ const DocumentSettings = ({ datasetId, documentId }: DocumentSettingsProps) => { return [{ title: websiteInfo.title, source_url: websiteInfo.source_url, - content: websiteInfo.content, + markdown: websiteInfo.content, description: websiteInfo.description, }] }, [websiteInfo]) diff --git a/web/app/components/datasets/documents/detail/settings/pipeline-settings/index.tsx b/web/app/components/datasets/documents/detail/settings/pipeline-settings/index.tsx index 1ab47be445..0381222415 100644 --- a/web/app/components/datasets/documents/detail/settings/pipeline-settings/index.tsx +++ b/web/app/components/datasets/documents/detail/settings/pipeline-settings/index.tsx @@ -55,7 +55,7 @@ const PipelineSettings = ({ if (lastRunData?.datasource_type === DatasourceType.websiteCrawl) { const { content, description, source_url, title } = lastRunData.datasource_info websitePages.push({ - content, + markdown: content, description, source_url, title, @@ -135,7 +135,7 @@ const PipelineSettings = ({ push(`/datasets/${datasetId}/documents`) }, }) - }, [datasetId, invalidDocumentDetail, invalidDocumentList, lastRunData, pipelineId, push, runPublishedPipeline]) + }, [datasetId, documentId, invalidDocumentDetail, invalidDocumentList, lastRunData, pipelineId, push, runPublishedPipeline]) const onClickProcess = useCallback(() => { isPreview.current = false diff --git a/web/app/components/rag-pipeline/components/panel/test-run/preparation/index.tsx b/web/app/components/rag-pipeline/components/panel/test-run/preparation/index.tsx index eb73599314..c659d8669a 100644 --- a/web/app/components/rag-pipeline/components/panel/test-run/preparation/index.tsx +++ b/web/app/components/rag-pipeline/components/panel/test-run/preparation/index.tsx @@ -131,7 +131,7 @@ const Preparation = () => { clearWebsiteCrawlData() else if (dataSource.nodeData.provider_type === DatasourceType.onlineDrive) clearOnlineDriveData() - }, []) + }, [clearOnlineDocumentData, clearOnlineDriveData, clearWebsiteCrawlData]) const handleSwitchDataSource = useCallback((dataSource: Datasource) => { const { @@ -142,13 +142,13 @@ const Preparation = () => { setCurrentCredentialId('') currentNodeIdRef.current = dataSource.nodeId setDatasource(dataSource) - }, [dataSourceStore]) + }, [clearDataSourceData, dataSourceStore]) const handleCredentialChange = useCallback((credentialId: string) => { const { setCurrentCredentialId } = dataSourceStore.getState() clearDataSourceData(datasource!) 
setCurrentCredentialId(credentialId) - }, [dataSourceStore, datasource]) + }, [clearDataSourceData, dataSourceStore, datasource]) return ( <> @@ -164,7 +164,7 @@ const Preparation = () => { {datasourceType === DatasourceType.localFile && ( )} {datasourceType === DatasourceType.onlineDocument && ( @@ -173,6 +173,7 @@ const Preparation = () => { nodeData={datasource!.nodeData} isInPipeline onCredentialChange={handleCredentialChange} + supportBatchUpload={false} /> )} {datasourceType === DatasourceType.websiteCrawl && ( @@ -181,6 +182,7 @@ const Preparation = () => { nodeData={datasource!.nodeData} isInPipeline onCredentialChange={handleCredentialChange} + supportBatchUpload={false} /> )} {datasourceType === DatasourceType.onlineDrive && ( @@ -189,6 +191,7 @@ const Preparation = () => { nodeData={datasource!.nodeData} isInPipeline onCredentialChange={handleCredentialChange} + supportBatchUpload={false} /> )} diff --git a/web/app/components/workflow/nodes/data-source/before-run-form.tsx b/web/app/components/workflow/nodes/data-source/before-run-form.tsx index 764599b4cb..521fdfb087 100644 --- a/web/app/components/workflow/nodes/data-source/before-run-form.tsx +++ b/web/app/components/workflow/nodes/data-source/before-run-form.tsx @@ -43,13 +43,13 @@ const BeforeRunForm: FC = (props) => { clearWebsiteCrawlData() else if (datasourceType === DatasourceType.onlineDrive) clearOnlineDriveData() - }, [datasourceType]) + }, [clearOnlineDocumentData, clearOnlineDriveData, clearWebsiteCrawlData, datasourceType]) const handleCredentialChange = useCallback((credentialId: string) => { const { setCurrentCredentialId } = dataSourceStore.getState() clearDataSourceData() setCurrentCredentialId(credentialId) - }, [dataSourceStore]) + }, [clearDataSourceData, dataSourceStore]) return ( = (props) => { {datasourceType === DatasourceType.localFile && ( )} {datasourceType === DatasourceType.onlineDocument && ( @@ -69,6 +69,7 @@ const BeforeRunForm: FC = (props) => { nodeData={datasourceNodeData} isInPipeline onCredentialChange={handleCredentialChange} + supportBatchUpload={false} /> )} {datasourceType === DatasourceType.websiteCrawl && ( @@ -77,6 +78,7 @@ const BeforeRunForm: FC = (props) => { nodeData={datasourceNodeData} isInPipeline onCredentialChange={handleCredentialChange} + supportBatchUpload={false} /> )} {datasourceType === DatasourceType.onlineDrive && ( @@ -85,6 +87,7 @@ const BeforeRunForm: FC = (props) => { nodeData={datasourceNodeData} isInPipeline onCredentialChange={handleCredentialChange} + supportBatchUpload={false} /> )}
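Besides the feature work, the patch fills in a number of previously missing `useCallback`/`useMemo` dependencies (`clearDataSourceData`, `documentId`, `onCheckedCrawlResultChange`, `supportBatchUpload`, and others), so the memoized handlers no longer close over stale references. A minimal illustration of the corrected pattern; this hook is not part of the patch, it only mirrors the `handleCredentialChange` shape above:

```ts
// Not part of the patch; a minimal illustration of the corrected dependency
// pattern. Without `clearDataSourceData` in the array, the memoized callback
// would keep calling the version captured on the first render.
import { useCallback } from 'react'

export const useCredentialSwitch = (
  clearDataSourceData: () => void,
  setCurrentCredentialId: (id: string) => void,
) =>
  useCallback((credentialId: string) => {
    clearDataSourceData()
    setCurrentCredentialId(credentialId)
  }, [clearDataSourceData, setCurrentCredentialId])
```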
diff --git a/web/models/datasets.ts b/web/models/datasets.ts
index 574897a9b4..fe4c568e46 100644
--- a/web/models/datasets.ts
+++ b/web/models/datasets.ts
@@ -156,7 +156,7 @@ export type CrawlOptions = {
 
 export type CrawlResultItem = {
   title: string
-  content: string
+  markdown: string
   description: string
   source_url: string
 }
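The `CrawlResultItem.content` field is renamed to `markdown` here, and every producer touched by the patch maps the legacy value across (`markdown: content` in the Jina Reader, document-settings, and pipeline-settings hunks). A sketch of the renamed type plus a small adapter for payloads that still arrive with `content`; the `LegacyCrawlPayload` name is illustrative, not from the codebase:

```ts
// The renamed type as it now stands, plus a small adapter for payloads that
// still carry `content` (the LegacyCrawlPayload name is illustrative).
export type CrawlResultItem = {
  title: string
  markdown: string
  description: string
  source_url: string
}

type LegacyCrawlPayload = {
  title: string
  content: string
  description: string
  source_url: string
}

export const toCrawlResultItem = ({ content, ...rest }: LegacyCrawlPayload): CrawlResultItem => ({
  ...rest,
  markdown: content,
})
```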