mirror of https://github.com/langgenius/dify.git
Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev
commit 52460f6929
@@ -43,6 +43,7 @@ select = [
     "S307", # suspicious-eval-usage, disallow use of `eval` and `ast.literal_eval`
     "S301", # suspicious-pickle-usage, disallow use of `pickle` and its wrappers.
     "S302", # suspicious-marshal-usage, disallow use of `marshal` module
+    "S311", # suspicious-non-cryptographic-random-usage
 ]
 
 ignore = [
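The newly enabled S311 rule ("suspicious-non-cryptographic-random-usage", inherited from flake8-bandit) flags calls into the `random` module's pseudo-random generators, which are seeded and predictable and therefore unsuitable for anything security-sensitive; the stdlib `secrets` module is the usual replacement. A minimal illustration of what the rule rejects and accepts (a sketch, not code from this commit):

import random
import secrets

weak_token = "".join(random.choice("0123456789abcdef") for _ in range(16))   # flagged by S311
safe_token = "".join(secrets.choice("0123456789abcdef") for _ in range(16))  # passes S311

The backend hunks that follow apply exactly this substitution.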
@@ -1,5 +1,5 @@
 import logging
-import random
+import secrets
 from typing import cast
 
 from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity
@@ -38,7 +38,7 @@ def check_moderation(tenant_id: str, model_config: ModelConfigWithCredentialsEnt
     if len(text_chunks) == 0:
         return True
 
-    text_chunk = random.choice(text_chunks)
+    text_chunk = secrets.choice(text_chunks)
 
     try:
         model_provider_factory = ModelProviderFactory(tenant_id)
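`secrets.choice` is a drop-in replacement for `random.choice` when picking one element from a non-empty sequence; it draws from the operating system's CSPRNG (via `random.SystemRandom`) rather than the seeded Mersenne Twister. A small sketch with hypothetical data:

import secrets

text_chunks = ["first paragraph", "second paragraph", "third paragraph"]
text_chunk = secrets.choice(text_chunks)  # uniform pick backed by os.urandom

Like `random.choice`, it raises IndexError on an empty sequence, which is why the `len(text_chunks) == 0` guard above runs before the call.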
@@ -1,8 +1,9 @@
 import base64
 import json
+import secrets
+import string
 from collections.abc import Mapping
 from copy import deepcopy
-from random import randint
 from typing import Any, Literal
 from urllib.parse import urlencode, urlparse
 
@@ -434,4 +435,4 @@ def _generate_random_string(n: int) -> str:
     >>> _generate_random_string(5)
     'abcde'
     """
-    return "".join([chr(randint(97, 122)) for _ in range(n)])
+    return "".join(secrets.choice(string.ascii_lowercase) for _ in range(n))
@@ -1,7 +1,7 @@
 import json
 import logging
-import random
 import re
+import secrets
 import string
 import subprocess
 import time
@@ -176,7 +176,7 @@ def generate_string(n):
     letters_digits = string.ascii_letters + string.digits
     result = ""
     for i in range(n):
-        result += random.choice(letters_digits)
+        result += secrets.choice(letters_digits)
 
     return result
 
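Both `_generate_random_string` (previous hunk) and `generate_string` (above) now build a string one character at a time, each character an independent draw from the OS CSPRNG via `secrets.choice`; the 'abcde' docstring example is purely illustrative since the output varies per run. A compact equivalent of the pattern, with a hypothetical helper name not taken from the diff:

import secrets
import string

def random_string(n: int, alphabet: str = string.ascii_letters + string.digits) -> str:
    # join n independent CSPRNG draws from the alphabet
    return "".join(secrets.choice(alphabet) for _ in range(n))

print(random_string(8))  # e.g. 'k3VqZ9aP' -- differs on every call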
@@ -1,7 +1,6 @@
 import base64
 import json
 import logging
-import random
 import secrets
 import uuid
 from datetime import UTC, datetime, timedelta
@@ -261,7 +260,7 @@ class AccountService:
 
     @staticmethod
    def generate_account_deletion_verification_code(account: Account) -> tuple[str, str]:
-        code = "".join([str(random.randint(0, 9)) for _ in range(6)])
+        code = "".join([str(secrets.randbelow(exclusive_upper_bound=10)) for _ in range(6)])
         token = TokenManager.generate_token(
             account=account, token_type="account_deletion", additional_data={"code": code}
         )
@@ -429,7 +428,7 @@ class AccountService:
         additional_data: dict[str, Any] = {},
     ):
         if not code:
-            code = "".join([str(random.randint(0, 9)) for _ in range(6)])
+            code = "".join([str(secrets.randbelow(exclusive_upper_bound=10)) for _ in range(6)])
         additional_data["code"] = code
         token = TokenManager.generate_token(
             account=account, email=email, token_type="reset_password", additional_data=additional_data
@@ -456,7 +455,7 @@ class AccountService:
 
             raise EmailCodeLoginRateLimitExceededError()
 
-        code = "".join([str(random.randint(0, 9)) for _ in range(6)])
+        code = "".join([str(secrets.randbelow(exclusive_upper_bound=10)) for _ in range(6)])
         token = TokenManager.generate_token(
             account=account, email=email, token_type="email_code_login", additional_data={"code": code}
         )
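The three AccountService hunks all generate a six-digit verification code digit by digit: `secrets.randbelow(exclusive_upper_bound=10)` returns an integer in [0, 10), so joining six of them yields a uniformly distributed code in which leading zeros are allowed. A standalone sketch, including a single-draw alternative that is only illustrative and not what the commit uses:

import secrets

# Digit-by-digit, as in the hunks above
code = "".join(str(secrets.randbelow(10)) for _ in range(6))

# Single draw below 10**6, zero-padded to six digits (equivalent distribution)
code_alt = f"{secrets.randbelow(10**6):06d}"

Both forms produce codes uniformly over '000000'..'999999'.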
@@ -2,7 +2,7 @@ import copy
 import datetime
 import json
 import logging
-import random
+import secrets
 import time
 import uuid
 from collections import Counter
@@ -1137,7 +1137,7 @@ class DocumentService:
                 documents.append(document)
                 batch = document.batch
             else:
-                batch = time.strftime("%Y%m%d%H%M%S") + str(random.randint(100000, 999999))
+                batch = time.strftime("%Y%m%d%H%M%S") + str(100000 + secrets.randbelow(exclusive_upper_bound=900000))
                 # save process rule
                 if not dataset_process_rule:
                     process_rule = knowledge_config.process_rule
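The batch identifier keeps its original shape: a wall-clock timestamp followed by a six-digit suffix in [100000, 999999]. `random.randint(100000, 999999)` is inclusive on both ends (900000 possible values), and `100000 + secrets.randbelow(900000)` covers exactly the same range. A standalone sketch:

import secrets
import time

# randbelow(900000) yields 0..899999, so the suffix lands in 100000..999999
suffix = 100000 + secrets.randbelow(900000)
batch = time.strftime("%Y%m%d%H%M%S") + str(suffix)
print(batch)  # e.g. '20250101093000483201' -- varies per run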
@@ -1,4 +1,4 @@
-import random
+import secrets
 from datetime import UTC, datetime, timedelta
 from typing import Any, Optional, cast
 
@@ -66,7 +66,7 @@ class WebAppAuthService:
         if email is None:
             raise ValueError("Email must be provided.")
 
-        code = "".join([str(random.randint(0, 9)) for _ in range(6)])
+        code = "".join([str(secrets.randbelow(exclusive_upper_bound=10)) for _ in range(6)])
         token = TokenManager.generate_token(
             account=account, email=email, token_type="webapp_email_code_login", additional_data={"code": code}
         )
@@ -1,4 +1,4 @@
-import random
+import secrets
 from unittest.mock import MagicMock, patch
 
 import pytest
@@ -34,7 +34,7 @@ def test_retry_logic_success(mock_request):
     side_effects = []
 
     for _ in range(SSRF_DEFAULT_MAX_RETRIES):
-        status_code = random.choice(STATUS_FORCELIST)
+        status_code = secrets.choice(STATUS_FORCELIST)
         mock_response = MagicMock()
         mock_response.status_code = status_code
         side_effects.append(mock_response)
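In this test the randomness is not security-relevant; switching to `secrets.choice` simply keeps the file clean under the newly enabled rule. An alternative, assuming the project is willing to use targeted suppressions rather than changing modules in test code, is ruff's inline `# noqa` (or a per-file ignore for S311 in the ruff configuration):

import random

STATUS_FORCELIST = [429, 500, 502, 503, 504]  # assumed values, for illustration only

# Non-security randomness in a test; suppress the rule on this line only.
status_code = random.choice(STATUS_FORCELIST)  # noqa: S311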
@@ -18,9 +18,10 @@ const queryDateFormat = 'YYYY-MM-DD HH:mm'
 
 export type IChartViewProps = {
   appId: string
+  headerRight: React.ReactNode
 }
 
-export default function ChartView({ appId }: IChartViewProps) {
+export default function ChartView({ appId, headerRight }: IChartViewProps) {
   const { t } = useTranslation()
   const appDetail = useAppStore(state => state.appDetail)
   const isChatApp = appDetail?.mode !== 'completion' && appDetail?.mode !== 'workflow'
@@ -46,19 +47,22 @@ export default function ChartView({ appId }: IChartViewProps) {
 
   return (
     <div>
-      <div className='system-xl-semibold mb-4 mt-8 flex flex-row items-center text-text-primary'>
-        <span className='mr-3'>{t('appOverview.analysis.title')}</span>
-        <SimpleSelect
-          items={Object.entries(TIME_PERIOD_MAPPING).map(([k, v]) => ({ value: k, name: t(`appLog.filter.period.${v.name}`) }))}
-          className='mt-0 !w-40'
-          onSelect={(item) => {
-            const id = item.value
-            const value = TIME_PERIOD_MAPPING[id]?.value ?? '-1'
-            const name = item.name || t('appLog.filter.period.allTime')
-            onSelect({ value, name })
-          }}
-          defaultValue={'2'}
-        />
+      <div className='mb-4 flex items-center justify-between'>
+        <div className='system-xl-semibold flex flex-row items-center text-text-primary'>
+          <span className='mr-3'>{t('appOverview.analysis.title')}</span>
+          <SimpleSelect
+            items={Object.entries(TIME_PERIOD_MAPPING).map(([k, v]) => ({ value: k, name: t(`appLog.filter.period.${v.name}`) }))}
+            className='mt-0 !w-40'
+            onSelect={(item) => {
+              const id = item.value
+              const value = TIME_PERIOD_MAPPING[id]?.value ?? '-1'
+              const name = item.name || t('appLog.filter.period.allTime')
+              onSelect({ value, name })
+            }}
+            defaultValue={'2'}
+          />
+        </div>
+        {headerRight}
+      </div>
       {!isWorkflow && (
         <div className='mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2'>
@@ -1,6 +1,5 @@
-import React from 'react'
 import ChartView from './chartView'
 import CardView from './cardView'
 import TracingPanel from './tracing/panel'
 import ApikeyInfoPanel from '@/app/components/app/overview/apikey-info-panel'
 
@@ -18,9 +17,10 @@ const Overview = async (props: IDevelopProps) => {
   return (
     <div className="h-full overflow-scroll bg-chatbot-bg px-4 py-6 sm:px-12">
       <ApikeyInfoPanel />
-      <TracingPanel />
       <CardView appId={appId} />
-      <ChartView appId={appId} />
+      <ChartView
+        appId={appId}
+        headerRight={<TracingPanel />}
+      />
     </div>
   )
 }
@@ -154,7 +154,6 @@ const Panel: FC = () => {
   if (!isLoaded) {
     return (
       <div className='mb-3 flex items-center justify-between'>
-        <Title className='h-[41px]' />
         <div className='w-[200px]'>
           <Loading />
         </div>
@@ -163,8 +162,7 @@
   }
 
   return (
-    <div className={cn('mb-3 flex items-center justify-between')}>
-      <Title className='h-[41px]' />
+    <div className={cn('flex items-center justify-between')}>
      <div
        className={cn(
          'flex cursor-pointer items-center rounded-xl border-l-[0.5px] border-t border-effects-highlight bg-background-default-dodge p-2 shadow-xs hover:border-effects-highlight-lightmode-off hover:bg-background-default-lighter',
@@ -1,14 +1,60 @@
-import { memo } from 'react'
+import {
+  memo,
+  useState,
+} from 'react'
 import { useStore } from '../../workflow/store'
 import InputField from './input-field'
 import RagPipelinePanel from './panel'
 import RagPipelineHeader from './rag-pipeline-header'
+import type { EnvironmentVariable } from '@/app/components/workflow/types'
+import { DSL_EXPORT_CHECK } from '@/app/components/workflow/constants'
+import UpdateDSLModal from '@/app/components/workflow/update-dsl-modal'
+import DSLExportConfirmModal from '@/app/components/workflow/dsl-export-confirm-modal'
+import {
+  useDSL,
+  usePanelInteractions,
+} from '@/app/components/workflow/hooks'
+import { useEventEmitterContextContext } from '@/context/event-emitter'
 
 const RagPipelineChildren = () => {
+  const { eventEmitter } = useEventEmitterContextContext()
+  const [secretEnvList, setSecretEnvList] = useState<EnvironmentVariable[]>([])
   const showInputFieldDialog = useStore(state => state.showInputFieldDialog)
+  const showImportDSLModal = useStore(s => s.showImportDSLModal)
+  const setShowImportDSLModal = useStore(s => s.setShowImportDSLModal)
+  const {
+    handlePaneContextmenuCancel,
+  } = usePanelInteractions()
+  const {
+    exportCheck,
+    handleExportDSL,
+  } = useDSL()
+
+  eventEmitter?.useSubscription((v: any) => {
+    if (v.type === DSL_EXPORT_CHECK)
+      setSecretEnvList(v.payload.data as EnvironmentVariable[])
+  })
 
   return (
     <>
+      {
+        showImportDSLModal && (
+          <UpdateDSLModal
+            onCancel={() => setShowImportDSLModal(false)}
+            onBackup={exportCheck!}
+            onImport={handlePaneContextmenuCancel}
+          />
+        )
+      }
+      {
+        secretEnvList.length > 0 && (
+          <DSLExportConfirmModal
+            envList={secretEnvList}
+            onConfirm={handleExportDSL!}
+            onClose={() => setSecretEnvList([])}
+          />
+        )
+      }
       <RagPipelineHeader />
       <RagPipelinePanel />
       {
@@ -6,6 +6,7 @@ import type { WorkflowProps } from '@/app/components/workflow'
 import RagPipelineChildren from './rag-pipeline-children'
 import {
   useAvailableNodesMetaData,
+  useDSL,
   useGetRunAndTraceUrl,
   useNodesSyncDraft,
   usePipelineRefreshDraft,
@@ -37,6 +38,10 @@ const RagPipelineMain = ({
   } = usePipelineStartRun()
   const availableNodesMetaData = useAvailableNodesMetaData()
   const { getWorkflowRunAndTraceUrl } = useGetRunAndTraceUrl()
+  const {
+    exportCheck,
+    handleExportDSL,
+  } = useDSL()
 
   const hooksStore = useMemo(() => {
     return {
@@ -52,6 +57,8 @@ const RagPipelineMain = ({
       handleStartWorkflowRun,
       handleWorkflowStartRunInWorkflow,
      getWorkflowRunAndTraceUrl,
+      exportCheck,
+      handleExportDSL,
     }
   }, [
     availableNodesMetaData,
@@ -66,6 +73,8 @@ const RagPipelineMain = ({
     handleStartWorkflowRun,
     handleWorkflowStartRunInWorkflow,
     getWorkflowRunAndTraceUrl,
+    exportCheck,
+    handleExportDSL,
   ])
 
   return (

@@ -5,3 +5,4 @@ export * from './use-pipeline-run'
 export * from './use-pipeline-start-run'
 export * from './use-pipeline-init'
 export * from './use-get-run-and-trace-url'
+export * from './use-DSL'
@@ -0,0 +1,81 @@
+import {
+  useCallback,
+  useState,
+} from 'react'
+import { useTranslation } from 'react-i18next'
+import {
+  DSL_EXPORT_CHECK,
+} from '@/app/components/workflow/constants'
+import { useNodesSyncDraft } from './use-nodes-sync-draft'
+import { useEventEmitterContextContext } from '@/context/event-emitter'
+import { fetchWorkflowDraft } from '@/service/workflow'
+import { useToastContext } from '@/app/components/base/toast'
+import { useWorkflowStore } from '@/app/components/workflow/store'
+import { useExportPipelineDSL } from '@/service/use-pipeline'
+
+export const useDSL = () => {
+  const { t } = useTranslation()
+  const { notify } = useToastContext()
+  const { eventEmitter } = useEventEmitterContextContext()
+  const [exporting, setExporting] = useState(false)
+  const { doSyncWorkflowDraft } = useNodesSyncDraft()
+  const workflowStore = useWorkflowStore()
+  const { mutateAsync: exportPipelineConfig } = useExportPipelineDSL()
+
+  const handleExportDSL = useCallback(async (include = false) => {
+    const { pipelineId, knowledgeName } = workflowStore.getState()
+    if (!pipelineId)
+      return
+
+    if (exporting)
+      return
+
+    try {
+      setExporting(true)
+      await doSyncWorkflowDraft()
+      const { data } = await exportPipelineConfig({
+        pipelineId,
+        include,
+      })
+      const a = document.createElement('a')
+      const file = new Blob([data], { type: 'application/yaml' })
+      a.href = URL.createObjectURL(file)
+      a.download = `${knowledgeName}.yml`
+      a.click()
+    }
+    catch {
+      notify({ type: 'error', message: t('app.exportFailed') })
+    }
+    finally {
+      setExporting(false)
+    }
+  }, [notify, t, doSyncWorkflowDraft, exporting, exportPipelineConfig, workflowStore])
+
+  const exportCheck = useCallback(async () => {
+    const { pipelineId } = workflowStore.getState()
+    if (!pipelineId)
+      return
+    try {
+      const workflowDraft = await fetchWorkflowDraft(`/rag/pipelines/${pipelineId}/workflows/draft`)
+      const list = (workflowDraft.environment_variables || []).filter(env => env.value_type === 'secret')
+      if (list.length === 0) {
+        handleExportDSL()
+        return
+      }
+      eventEmitter?.emit({
+        type: DSL_EXPORT_CHECK,
+        payload: {
+          data: list,
+        },
+      } as any)
+    }
+    catch {
+      notify({ type: 'error', message: t('app.exportFailed') })
+    }
+  }, [eventEmitter, handleExportDSL, notify, t, workflowStore])
+
+  return {
+    exportCheck,
+    handleExportDSL,
+  }
+}
@@ -24,10 +24,11 @@ export const usePipelineInit = () => {
   const [data, setData] = useState<FetchWorkflowDraftResponse>()
   const [isLoading, setIsLoading] = useState(true)
   const datasetId = useDatasetDetailContextWithSelector(s => s.dataset)?.pipeline_id
+  const knowledgeName = useDatasetDetailContextWithSelector(s => s.dataset)?.name
 
   useEffect(() => {
-    workflowStore.setState({ pipelineId: datasetId })
-  }, [datasetId, workflowStore])
+    workflowStore.setState({ pipelineId: datasetId, knowledgeName })
+  }, [datasetId, workflowStore, knowledgeName])
 
   usePipelineConfig()
 

@@ -8,6 +8,7 @@ import { transformDataSourceToTool } from '@/app/components/workflow/block-selec
 
 export type RagPipelineSliceShape = {
   pipelineId: string
+  knowledgeName: string
   showInputFieldDialog: boolean
   setShowInputFieldDialog: (showInputFieldPanel: boolean) => void
   nodesDefaultConfigs: Record<string, any>
@@ -21,6 +22,7 @@ export type RagPipelineSliceShape = {
 export type CreateRagPipelineSliceSlice = StateCreator<RagPipelineSliceShape>
 export const createRagPipelineSliceSlice: StateCreator<RagPipelineSliceShape> = set => ({
   pipelineId: '',
+  knowledgeName: '',
   showInputFieldDialog: false,
   setShowInputFieldDialog: showInputFieldDialog => set(() => ({ showInputFieldDialog })),
   nodesDefaultConfigs: {},
@@ -46,7 +46,7 @@ const WorkflowChildren = () => {
         showImportDSLModal && (
           <UpdateDSLModal
             onCancel={() => setShowImportDSLModal(false)}
-            onBackup={exportCheck}
+            onBackup={exportCheck!}
             onImport={handlePaneContextmenuCancel}
           />
         )
@@ -55,7 +55,7 @@ const WorkflowChildren = () => {
         secretEnvList.length > 0 && (
           <DSLExportConfirmModal
             envList={secretEnvList}
-            onConfirm={handleExportDSL}
+            onConfirm={handleExportDSL!}
             onClose={() => setSecretEnvList([])}
           />
         )
@@ -8,6 +8,7 @@ import type { WorkflowProps } from '@/app/components/workflow'
 import WorkflowChildren from './workflow-children'
 import {
   useAvailableNodesMetaData,
+  useDSL,
   useGetRunAndTraceUrl,
   useNodesSyncDraft,
   useWorkflowRefreshDraft,
@@ -50,6 +51,10 @@ const WorkflowMain = ({
   } = useWorkflowStartRun()
   const availableNodesMetaData = useAvailableNodesMetaData()
   const { getWorkflowRunAndTraceUrl } = useGetRunAndTraceUrl()
+  const {
+    exportCheck,
+    handleExportDSL,
+  } = useDSL()
 
   const hooksStore = useMemo(() => {
     return {
@@ -66,6 +71,8 @@ const WorkflowMain = ({
       handleWorkflowStartRunInWorkflow,
       availableNodesMetaData,
       getWorkflowRunAndTraceUrl,
+      exportCheck,
+      handleExportDSL,
     }
   }, [
     syncWorkflowDraftWhenPageClose,
@@ -81,6 +88,8 @@ const WorkflowMain = ({
     handleWorkflowStartRunInWorkflow,
     availableNodesMetaData,
     getWorkflowRunAndTraceUrl,
+    exportCheck,
+    handleExportDSL,
   ])
 
   return (

@@ -7,3 +7,4 @@ export * from './use-is-chat-mode'
 export * from './use-available-nodes-meta-data'
 export * from './use-workflow-refresh-draft'
 export * from './use-get-run-and-trace-url'
+export * from './use-DSL'
@@ -0,0 +1,79 @@
+import {
+  useCallback,
+  useState,
+} from 'react'
+import { useTranslation } from 'react-i18next'
+import {
+  DSL_EXPORT_CHECK,
+} from '@/app/components/workflow/constants'
+import { useNodesSyncDraft } from './use-nodes-sync-draft'
+import { useEventEmitterContextContext } from '@/context/event-emitter'
+import { fetchWorkflowDraft } from '@/service/workflow'
+import { exportAppConfig } from '@/service/apps'
+import { useToastContext } from '@/app/components/base/toast'
+import { useStore as useAppStore } from '@/app/components/app/store'
+
+export const useDSL = () => {
+  const { t } = useTranslation()
+  const { notify } = useToastContext()
+  const { eventEmitter } = useEventEmitterContextContext()
+  const [exporting, setExporting] = useState(false)
+  const { doSyncWorkflowDraft } = useNodesSyncDraft()
+
+  const appDetail = useAppStore(s => s.appDetail)
+
+  const handleExportDSL = useCallback(async (include = false) => {
+    if (!appDetail)
+      return
+
+    if (exporting)
+      return
+
+    try {
+      setExporting(true)
+      await doSyncWorkflowDraft()
+      const { data } = await exportAppConfig({
+        appID: appDetail.id,
+        include,
+      })
+      const a = document.createElement('a')
+      const file = new Blob([data], { type: 'application/yaml' })
+      a.href = URL.createObjectURL(file)
+      a.download = `${appDetail.name}.yml`
+      a.click()
+    }
+    catch {
+      notify({ type: 'error', message: t('app.exportFailed') })
+    }
+    finally {
+      setExporting(false)
+    }
+  }, [appDetail, notify, t, doSyncWorkflowDraft, exporting])
+
+  const exportCheck = useCallback(async () => {
+    if (!appDetail)
+      return
+    try {
+      const workflowDraft = await fetchWorkflowDraft(`/apps/${appDetail?.id}/workflows/draft`)
+      const list = (workflowDraft.environment_variables || []).filter(env => env.value_type === 'secret')
+      if (list.length === 0) {
+        handleExportDSL()
+        return
+      }
+      eventEmitter?.emit({
+        type: DSL_EXPORT_CHECK,
+        payload: {
+          data: list,
+        },
+      } as any)
+    }
+    catch {
+      notify({ type: 'error', message: t('app.exportFailed') })
+    }
+  }, [appDetail, eventEmitter, handleExportDSL, notify, t])
+
+  return {
+    exportCheck,
+    handleExportDSL,
+  }
+}
@@ -13,6 +13,7 @@ export const transformDataSourceToTool = (dataSourceItem: DataSourceItem) => {
     type: dataSourceItem.declaration.provider_type,
     team_credentials: {},
     allow_delete: true,
     is_team_authorization: dataSourceItem.is_authorized,
+    is_authorized: dataSourceItem.is_authorized,
     labels: dataSourceItem.declaration.identity.tags || [],
     plugin_id: dataSourceItem.plugin_id,
@@ -37,6 +37,8 @@ export type CommonHooksFnMap = {
   handleWorkflowStartRunInChatflow: () => void
   availableNodesMetaData?: AvailableNodesMetaData
   getWorkflowRunAndTraceUrl: (runId?: string) => { runUrl: string; traceUrl: string }
+  exportCheck?: () => Promise<void>
+  handleExportDSL?: (include?: boolean) => Promise<void>
 }
 
 export type Shape = {
@@ -62,6 +64,8 @@ export const createHooksStore = ({
     runUrl: '',
     traceUrl: '',
   }),
+  exportCheck = async () => noop(),
+  handleExportDSL = async () => noop(),
 }: Partial<Shape>) => {
   return createStore<Shape>(set => ({
     refreshAll: props => set(state => ({ ...state, ...props })),
@@ -78,6 +82,8 @@ export const createHooksStore = ({
     handleWorkflowStartRunInChatflow,
     availableNodesMetaData,
     getWorkflowRunAndTraceUrl,
+    exportCheck,
+    handleExportDSL,
   }))
 }
 
@@ -19,3 +19,4 @@ export * from './use-nodes-meta-data'
 export * from './use-available-blocks'
 export * from './use-workflow-refresh-draft'
 export * from './use-tool-icon'
+export * from './use-DSL'

@@ -0,0 +1,11 @@
+import { useHooksStore } from '@/app/components/workflow/hooks-store'
+
+export const useDSL = () => {
+  const exportCheck = useHooksStore(s => s.exportCheck)
+  const handleExportDSL = useHooksStore(s => s.handleExportDSL)
+
+  return {
+    exportCheck,
+    handleExportDSL,
+  }
+}
@@ -1,13 +1,11 @@
 import {
   useCallback,
-  useState,
 } from 'react'
-import { useTranslation } from 'react-i18next'
 import { useReactFlow, useStoreApi } from 'reactflow'
 import produce from 'immer'
 import { useStore, useWorkflowStore } from '../store'
 import {
-  CUSTOM_NODE, DSL_EXPORT_CHECK,
+  CUSTOM_NODE,
   NODE_LAYOUT_HORIZONTAL_PADDING,
   NODE_LAYOUT_VERTICAL_PADDING,
   WORKFLOW_DATA_UPDATE,
@@ -30,10 +28,6 @@ import { useNodesInteractionsWithoutSync } from './use-nodes-interactions-withou
 import { useNodesSyncDraft } from './use-nodes-sync-draft'
 import { WorkflowHistoryEvent, useWorkflowHistory } from './use-workflow-history'
 import { useEventEmitterContextContext } from '@/context/event-emitter'
-import { fetchWorkflowDraft } from '@/service/workflow'
-import { exportAppConfig } from '@/service/apps'
-import { useToastContext } from '@/app/components/base/toast'
-import { useStore as useAppStore } from '@/app/components/app/store'
 
 export const useWorkflowInteractions = () => {
   const workflowStore = useWorkflowStore()
@@ -336,68 +330,3 @@ export const useWorkflowUpdate = () => {
     handleUpdateWorkflowCanvas,
   }
 }
-
-export const useDSL = () => {
-  const { t } = useTranslation()
-  const { notify } = useToastContext()
-  const { eventEmitter } = useEventEmitterContextContext()
-  const [exporting, setExporting] = useState(false)
-  const { doSyncWorkflowDraft } = useNodesSyncDraft()
-
-  const appDetail = useAppStore(s => s.appDetail)
-
-  const handleExportDSL = useCallback(async (include = false) => {
-    if (!appDetail)
-      return
-
-    if (exporting)
-      return
-
-    try {
-      setExporting(true)
-      await doSyncWorkflowDraft()
-      const { data } = await exportAppConfig({
-        appID: appDetail.id,
-        include,
-      })
-      const a = document.createElement('a')
-      const file = new Blob([data], { type: 'application/yaml' })
-      a.href = URL.createObjectURL(file)
-      a.download = `${appDetail.name}.yml`
-      a.click()
-    }
-    catch {
-      notify({ type: 'error', message: t('app.exportFailed') })
-    }
-    finally {
-      setExporting(false)
-    }
-  }, [appDetail, notify, t, doSyncWorkflowDraft, exporting])
-
-  const exportCheck = useCallback(async () => {
-    if (!appDetail)
-      return
-    try {
-      const workflowDraft = await fetchWorkflowDraft(`/apps/${appDetail?.id}/workflows/draft`)
-      const list = (workflowDraft.environment_variables || []).filter(env => env.value_type === 'secret')
-      if (list.length === 0) {
-        handleExportDSL()
-        return
-      }
-      eventEmitter?.emit({
-        type: DSL_EXPORT_CHECK,
-        payload: {
-          data: list,
-        },
-      } as any)
-    }
-    catch {
-      notify({ type: 'error', message: t('app.exportFailed') })
-    }
-  }, [appDetail, eventEmitter, handleExportDSL, notify, t])
-
-  return {
-    exportCheck,
-    handleExportDSL,
-  }
-}
@@ -112,7 +112,7 @@ const PanelContextmenu = () => {
       <div className='p-1'>
         <div
           className='flex h-8 cursor-pointer items-center justify-between rounded-lg px-3 text-sm text-text-secondary hover:bg-state-base-hover'
-          onClick={() => exportCheck()}
+          onClick={() => exportCheck?.()}
        >
          {t('app.export')}
        </div>
@@ -5,6 +5,18 @@
   "engines": {
     "node": ">=v22.11.0"
   },
+  "browserslist": [
+    "last 1 Chrome version",
+    "last 1 Firefox version",
+    "last 1 Edge version",
+    "last 1 Safari version",
+    "iOS >=15",
+    "Android >= 10",
+    "and_chr >= 126",
+    "and_ff >= 137",
+    "and_uc >= 15.5",
+    "and_qq >= 14.9"
+  ],
   "scripts": {
     "dev": "cross-env NODE_OPTIONS='--inspect' next dev",
     "build": "next build",
@@ -298,3 +298,15 @@ export const usePublishedPipelinePreProcessingParams = (params: PipelinePreProce
     enabled,
   })
 }
+
+export const useExportPipelineDSL = () => {
+  return useMutation({
+    mutationKey: [NAME_SPACE, 'export-pipeline-dsl'],
+    mutationFn: ({
+      pipelineId,
+      include = false,
+    }: { pipelineId: string; include?: boolean }) => {
+      return get<ExportTemplateDSLResponse>(`/rag/pipelines/${pipelineId}/export?include_secret=${include}`)
+    },
+  })
+}