feat(web): fetch evaluation template columns via query instead of mutation

This commit is contained in:
JzoNg 2026-05-05 20:41:53 +08:00
parent 8a72e46ce8
commit dd54ca0cab
12 changed files with 131 additions and 137 deletions

View File

@ -11,7 +11,7 @@ const mockUseDefaultEvaluationMetrics = vi.hoisted(() => vi.fn())
const mockUseEvaluationConfig = vi.hoisted(() => vi.fn())
const mockUseSaveEvaluationConfigMutation = vi.hoisted(() => vi.fn())
const mockUseStartEvaluationRunMutation = vi.hoisted(() => vi.fn())
const mockUseEvaluationTemplateColumnsMutation = vi.hoisted(() => vi.fn())
const mockUseEvaluationTemplateColumns = vi.hoisted(() => vi.fn())
const mockUsePublishedPipelineInfo = vi.hoisted(() => vi.fn())
const mockUseSnippetPublishedWorkflow = vi.hoisted(() => vi.fn())
@ -56,7 +56,7 @@ vi.mock('@/service/use-evaluation', () => ({
useDefaultEvaluationMetrics: (...args: unknown[]) => mockUseDefaultEvaluationMetrics(...args),
useSaveEvaluationConfigMutation: (...args: unknown[]) => mockUseSaveEvaluationConfigMutation(...args),
useStartEvaluationRunMutation: (...args: unknown[]) => mockUseStartEvaluationRunMutation(...args),
useEvaluationTemplateColumnsMutation: (...args: unknown[]) => mockUseEvaluationTemplateColumnsMutation(...args),
useEvaluationTemplateColumns: (...args: unknown[]) => mockUseEvaluationTemplateColumns(...args),
}))
vi.mock('@/service/use-pipeline', () => ({
@ -172,9 +172,17 @@ describe('Evaluation', () => {
isPending: false,
mutate: vi.fn(),
})
mockUseEvaluationTemplateColumnsMutation.mockReturnValue({
mockUseEvaluationTemplateColumns.mockReturnValue({
data: {
columns: [
{ name: 'index', type: 'number' },
{ name: 'query', type: 'string' },
{ name: 'expected_output', type: 'string' },
],
},
isError: false,
isFetching: false,
isPending: false,
mutate: vi.fn(),
})
mockUsePublishedPipelineInfo.mockReturnValue({
data: {
@ -332,72 +340,61 @@ describe('Evaluation', () => {
expect(screen.queryByText('evaluation.batch.noticeDescription')).not.toBeInTheDocument()
})
it('should use published snippet input fields for snippet batch templates', () => {
mockUseSnippetPublishedWorkflow.mockReturnValue({
it('should use template columns for snippet batch templates', () => {
const store = useEvaluationStore.getState()
act(() => {
store.ensureResource('snippets', 'snippet-fields')
store.setJudgeModel('snippets', 'snippet-fields', 'openai::gpt-4o-mini')
store.addBuiltinMetric('snippets', 'snippet-fields', 'answer-correctness', [
{ node_id: 'node-answer', title: 'Answer Node', type: 'llm' },
])
})
mockUseEvaluationTemplateColumns.mockReturnValue({
data: {
graph: {
nodes: [{
id: 'start',
data: {
type: 'start',
variables: [{
variable: 'graph_only',
type: 'text-input',
}],
},
}],
},
input_fields: [
{
label: 'Snippet Topic',
variable: 'snippet_topic',
type: 'text-input',
required: true,
},
{
label: 'Need Summary',
variable: 'need_summary',
type: 'checkbox',
required: false,
},
columns: [
{ name: 'index', type: 'number' },
{ name: 'snippet_topic', type: 'string' },
{ name: 'need_summary', type: 'boolean' },
],
},
isLoading: false,
isError: false,
isFetching: false,
isPending: false,
})
renderWithQueryClient(<Evaluation resourceType="snippets" resourceId="snippet-fields" />)
expect(mockUseSnippetPublishedWorkflow).toHaveBeenCalledWith('snippet-fields')
expect(mockUseEvaluationTemplateColumns).toHaveBeenCalledWith(
'snippets',
'snippet-fields',
expect.any(Object),
true,
)
expect(screen.getByText('snippet_topic')).toBeInTheDocument()
expect(screen.getByText('need_summary')).toBeInTheDocument()
expect(screen.queryByText('graph_only')).not.toBeInTheDocument()
})
it('should show snippet-specific empty input fields copy', () => {
mockUseSnippetPublishedWorkflow.mockReturnValue({
it('should show empty template columns copy', () => {
const store = useEvaluationStore.getState()
act(() => {
store.ensureResource('snippets', 'snippet-empty-fields')
store.setJudgeModel('snippets', 'snippet-empty-fields', 'openai::gpt-4o-mini')
store.addBuiltinMetric('snippets', 'snippet-empty-fields', 'answer-correctness', [
{ node_id: 'node-answer', title: 'Answer Node', type: 'llm' },
])
})
mockUseEvaluationTemplateColumns.mockReturnValue({
data: {
graph: {
nodes: [{
id: 'start',
data: {
type: 'start',
variables: [{
variable: 'graph_only',
type: 'text-input',
}],
},
}],
},
input_fields: [],
columns: [],
},
isLoading: false,
isError: false,
isFetching: false,
isPending: false,
})
renderWithQueryClient(<Evaluation resourceType="snippets" resourceId="snippet-empty-fields" />)
expect(screen.getByText('evaluation.batch.noSnippetInputFields')).toBeInTheDocument()
expect(screen.queryByText('evaluation.batch.noInputFields')).not.toBeInTheDocument()
expect(screen.queryByText('graph_only')).not.toBeInTheDocument()
expect(screen.getByText('evaluation.batch.noTemplateColumns')).toBeInTheDocument()
})
it('should hide the value row for empty operators', () => {
@ -630,14 +627,17 @@ describe('Evaluation', () => {
it('should download the fixed pipeline template columns', () => {
const createElement = document.createElement.bind(document)
const getTemplateColumns = vi.fn((_input: unknown, options?: { onSuccess?: (value: { columns: string[] }) => void }) => {
options?.onSuccess?.({
columns: ['index', 'query', 'expected_output'],
})
})
mockUseEvaluationTemplateColumnsMutation.mockReturnValue({
mockUseEvaluationTemplateColumns.mockReturnValue({
data: {
columns: [
{ name: 'index', type: 'number' },
{ name: 'query', type: 'string' },
{ name: 'expected_output', type: 'string' },
],
},
isError: false,
isFetching: false,
isPending: false,
mutate: getTemplateColumns,
})
let downloadLink: HTMLAnchorElement | undefined
const createElementSpy = vi.spyOn(document, 'createElement').mockImplementation((tagName, options) => {
@ -660,16 +660,15 @@ describe('Evaluation', () => {
const templateContent = decodeURIComponent(downloadLink?.href ?? '').replace('data:text/csv;charset=utf-8,', '')
expect(downloadLink?.download).toBe('pipeline-evaluation-template.csv')
expect(templateContent.trim().split(',')).toEqual(['index', 'query', 'expected_output'])
expect(getTemplateColumns).toHaveBeenCalledWith({
params: {
targetType: 'datasets',
targetId: 'dataset-template',
},
body: expect.objectContaining({
expect(mockUseEvaluationTemplateColumns).toHaveBeenLastCalledWith(
'datasets',
'dataset-template',
expect.objectContaining({
evaluation_model: 'gpt-4o-mini',
evaluation_model_provider: 'openai',
}),
}, expect.any(Object))
true,
)
createElementSpy.mockRestore()
})

View File

@ -5,7 +5,6 @@ import { EVALUATION_TEMPLATE_FILE_NAMES } from '../../store-utils'
import InputFieldsRequirements from './input-fields/input-fields-requirements'
import UploadRunPopover from './input-fields/upload-run-popover'
import { useInputFieldsActions } from './input-fields/use-input-fields-actions'
import { usePublishedInputFields } from './input-fields/use-published-input-fields'
type InputFieldsTabProps = EvaluationResourceProps & {
isPanelReady: boolean
@ -19,11 +18,9 @@ const InputFieldsTab = ({
isRunnable,
}: InputFieldsTabProps) => {
const { t } = useTranslation('evaluation')
const { inputFields, isInputFieldsLoading } = usePublishedInputFields(resourceType, resourceId)
const actions = useInputFieldsActions({
resourceType,
resourceId,
isInputFieldsLoading,
isPanelReady,
isRunnable,
templateFileName: EVALUATION_TEMPLATE_FILE_NAMES[resourceType],
@ -32,9 +29,8 @@ const InputFieldsTab = ({
return (
<div className="space-y-5">
<InputFieldsRequirements
resourceType={resourceType}
inputFields={inputFields}
isLoading={isInputFieldsLoading}
inputFields={actions.templateColumns}
isLoading={actions.isTemplateColumnsLoading}
/>
<div className="space-y-3">
<Button variant="secondary" className="w-full justify-center" disabled={!actions.canDownloadTemplate} onClick={actions.handleDownloadTemplate}>
@ -45,7 +41,7 @@ const InputFieldsTab = ({
open={actions.isUploadPopoverOpen}
onOpenChange={actions.setIsUploadPopoverOpen}
triggerDisabled={actions.uploadButtonDisabled}
inputFields={inputFields}
inputFields={actions.templateColumns}
currentFileName={actions.currentFileName}
currentFileExtension={actions.currentFileExtension}
currentFileSize={actions.currentFileSize}

View File

@ -4,16 +4,16 @@ describe('input fields utils', () => {
describe('buildTemplateCsvContent', () => {
it('should build CSV content from API columns without injecting columns', () => {
expect(buildTemplateCsvContent([
'index',
'query',
'expected_output',
{ name: 'index', type: 'number' },
{ name: 'query', type: 'string' },
{ name: 'expected_output', type: 'string' },
])).toBe('index,query,expected_output\n')
})
it('should escape CSV column names', () => {
expect(buildTemplateCsvContent([
'query,text',
'answer "draft"',
{ name: 'query,text', type: 'string' },
{ name: 'answer "draft"', type: 'string' },
])).toBe('"query,text","answer ""draft"""\n')
})
})

View File

@ -1,22 +1,16 @@
import type { EvaluationResourceType } from '../../../types'
import type { InputField } from './input-fields-utils'
import { useTranslation } from 'react-i18next'
type InputFieldsRequirementsProps = {
resourceType: EvaluationResourceType
inputFields: InputField[]
isLoading: boolean
}
const InputFieldsRequirements = ({
resourceType,
inputFields,
isLoading,
}: InputFieldsRequirementsProps) => {
const { t } = useTranslation('evaluation')
const emptyDescription = resourceType === 'snippets'
? t('batch.noSnippetInputFields')
: t('batch.noInputFields')
return (
<div>
@ -30,7 +24,7 @@ const InputFieldsRequirements = ({
)}
{!isLoading && inputFields.length === 0 && (
<div className="px-1 py-0.5 system-xs-regular text-text-tertiary">
{emptyDescription}
{t('batch.noTemplateColumns')}
</div>
)}
{!isLoading && inputFields.map(field => (

View File

@ -1,5 +1,6 @@
import type { StartNodeType } from '@/app/components/workflow/nodes/start/types'
import type { InputVar, Node } from '@/app/components/workflow/types'
import type { EvaluationTemplateColumn } from '@/types/evaluation'
import type { SnippetInputField } from '@/types/snippet'
import { inputVarTypeToVarType } from '@/app/components/workflow/nodes/_base/components/variable/utils'
import { BlockEnum, InputVarType } from '@/app/components/workflow/types'
@ -64,8 +65,8 @@ const escapeCsvCell = (value: string) => {
return `"${value.replace(/"/g, '""')}"`
}
export const buildTemplateCsvContent = (columns: string[]) => {
return `${columns.map(escapeCsvCell).join(',')}\n`
export const buildTemplateCsvContent = (columns: EvaluationTemplateColumn[]) => {
return `${columns.map(column => escapeCsvCell(column.name)).join(',')}\n`
}
export const getFileExtension = (fileName: string) => {

View File

@ -47,7 +47,7 @@ const UploadRunPopover = ({
const { t } = useTranslation('evaluation')
const { t: tCommon } = useTranslation('common')
const fileInputRef = useRef<HTMLInputElement>(null)
const previewFields = inputFields.slice(0, 3)
const previewFields = inputFields
const booleanExampleValue = t('conditions.boolean.true')
const handleFileChange = (event: ChangeEvent<HTMLInputElement>) => {

View File

@ -1,10 +1,10 @@
import type { EvaluationResourceProps } from '../../../types'
import { toast } from '@langgenius/dify-ui/toast'
import { useMutation } from '@tanstack/react-query'
import { useState } from 'react'
import { useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { upload } from '@/service/base'
import { useEvaluationTemplateColumnsMutation, useStartEvaluationRunMutation } from '@/service/use-evaluation'
import { useEvaluationTemplateColumns, useStartEvaluationRunMutation } from '@/service/use-evaluation'
import { formatFileSize } from '@/utils/format'
import { useEvaluationResource, useEvaluationStore } from '../../../store'
import { buildEvaluationConfigPayload, buildEvaluationRunRequest } from '../../../store-utils'
@ -16,7 +16,6 @@ type UploadedFileMeta = {
}
type UseInputFieldsActionsParams = EvaluationResourceProps & {
isInputFieldsLoading: boolean
isPanelReady: boolean
isRunnable: boolean
templateFileName: string
@ -25,7 +24,6 @@ type UseInputFieldsActionsParams = EvaluationResourceProps & {
export const useInputFieldsActions = ({
resourceType,
resourceId,
isInputFieldsLoading,
isPanelReady,
isRunnable,
templateFileName,
@ -37,7 +35,10 @@ export const useInputFieldsActions = ({
const setUploadedFile = useEvaluationStore(state => state.setUploadedFile)
const setUploadedFileName = useEvaluationStore(state => state.setUploadedFileName)
const startRunMutation = useStartEvaluationRunMutation()
const templateColumnsMutation = useEvaluationTemplateColumnsMutation()
const templateConfigPayload = useMemo(() => {
return isPanelReady ? buildEvaluationConfigPayload(resource, resourceType) : null
}, [isPanelReady, resource, resourceType])
const templateColumnsQuery = useEvaluationTemplateColumns(resourceType, resourceId, templateConfigPayload, isPanelReady)
const [isUploadPopoverOpen, setIsUploadPopoverOpen] = useState(false)
const [uploadedFileMeta, setUploadedFileMeta] = useState<UploadedFileMeta | null>(null)
const uploadMutation = useMutation({
@ -65,43 +66,30 @@ export const useInputFieldsActions = ({
const isFileUploading = uploadMutation.isPending
const isRunning = startRunMutation.isPending
const isTemplateColumnsLoading = templateColumnsQuery.isPending || templateColumnsQuery.isFetching
const templateColumns = templateColumnsQuery.data?.columns ?? []
const uploadedFileId = resource.uploadedFileId
const currentFileName = uploadedFileMeta?.name ?? resource.uploadedFileName
const canDownloadTemplate = isPanelReady && !templateColumnsMutation.isPending
const canDownloadTemplate = isPanelReady && !isTemplateColumnsLoading && templateColumns.length > 0
const isRunDisabled = !isRunnable || !uploadedFileId || isFileUploading || isRunning
const uploadButtonDisabled = !isPanelReady || isInputFieldsLoading || isRunning
const uploadButtonDisabled = !isPanelReady || isTemplateColumnsLoading || isRunning
const handleDownloadTemplate = () => {
const body = buildEvaluationConfigPayload(resource, resourceType)
if (!body) {
toast.warning(t('batch.validation'))
if (templateColumnsQuery.isError) {
toast.error(t('batch.templateColumnsError'))
return
}
templateColumnsMutation.mutate({
params: {
targetType: resourceType,
targetId: resourceId,
},
body,
}, {
onSuccess: ({ columns }) => {
if (!columns.length) {
toast.warning(t('batch.noTemplateColumns'))
return
}
if (!templateColumns.length) {
toast.warning(t('batch.noTemplateColumns'))
return
}
const content = buildTemplateCsvContent(columns)
const link = document.createElement('a')
link.href = `data:text/csv;charset=utf-8,${encodeURIComponent(content)}`
link.download = templateFileName
link.click()
},
onError: () => {
toast.error(t('batch.templateColumnsError'))
},
})
const content = buildTemplateCsvContent(templateColumns)
const link = document.createElement('a')
link.href = `data:text/csv;charset=utf-8,${encodeURIComponent(content)}`
link.download = templateFileName
link.click()
}
const handleRun = () => {
@ -178,8 +166,10 @@ export const useInputFieldsActions = ({
isFileUploading,
isRunning,
isRunDisabled,
isTemplateColumnsLoading,
isUploadPopoverOpen,
setIsUploadPopoverOpen,
templateColumns,
uploadButtonDisabled,
}
}

View File

@ -1,7 +1,6 @@
'use client'
import type { EvaluationResourceProps } from '../../types'
import type { InputField } from '../batch-test-panel/input-fields/input-fields-utils'
import { Button } from '@langgenius/dify-ui/button'
import { useTranslation } from 'react-i18next'
import { isEvaluationRunnable, useEvaluationResource } from '../../store'
@ -9,12 +8,6 @@ import { EVALUATION_TEMPLATE_FILE_NAMES } from '../../store-utils'
import UploadRunPopover from '../batch-test-panel/input-fields/upload-run-popover'
import { useInputFieldsActions } from '../batch-test-panel/input-fields/use-input-fields-actions'
const PIPELINE_INPUT_FIELDS: InputField[] = [
{ name: 'index', type: 'number' },
{ name: 'query', type: 'string' },
{ name: 'expected_output', type: 'string' },
]
const PipelineBatchActions = ({
resourceType,
resourceId,
@ -26,7 +19,6 @@ const PipelineBatchActions = ({
const actions = useInputFieldsActions({
resourceType,
resourceId,
isInputFieldsLoading: false,
isPanelReady: isConfigReady,
isRunnable,
templateFileName: EVALUATION_TEMPLATE_FILE_NAMES[resourceType],
@ -49,7 +41,7 @@ const PipelineBatchActions = ({
onOpenChange={actions.setIsUploadPopoverOpen}
triggerDisabled={actions.uploadButtonDisabled}
triggerLabel={t('pipeline.uploadAndRun')}
inputFields={PIPELINE_INPUT_FIELDS}
inputFields={actions.templateColumns}
currentFileName={actions.currentFileName}
currentFileExtension={actions.currentFileExtension}
currentFileSize={actions.currentFileSize}

View File

@ -4,7 +4,7 @@
"batch.emptyHistory": "No test history yet.",
"batch.example": "Example:",
"batch.fileRequired": "Upload an evaluation dataset file before running the test.",
"batch.loadingInputFields": "Loading input fields...",
"batch.loadingInputFields": "Loading template columns...",
"batch.noInputFields": "No published start node input fields found.",
"batch.noSnippetInputFields": "No published snippet input fields found.",
"batch.noTemplateColumns": "No template columns found.",

View File

@ -4,7 +4,7 @@
"batch.emptyHistory": "还没有测试历史。",
"batch.example": "示例:",
"batch.fileRequired": "请先上传评估数据集文件,再运行测试。",
"batch.loadingInputFields": "正在加载输入字段...",
"batch.loadingInputFields": "正在加载模板列...",
"batch.noInputFields": "未找到已发布 Start 节点的输入字段。",
"batch.noSnippetInputFields": "未找到已发布的片段输入字段。",
"batch.noTemplateColumns": "未找到模板列。",

View File

@ -1,5 +1,5 @@
import type { EvaluationResourceType, NonPipelineEvaluationResourceType } from '@/app/components/evaluation/types'
import type { AvailableEvaluationWorkflowsResponse, EvaluationConfig } from '@/types/evaluation'
import type { AvailableEvaluationWorkflowsResponse, EvaluationConfig, EvaluationConfigData } from '@/types/evaluation'
import {
keepPreviousData,
skipToken,
@ -131,8 +131,25 @@ export const useStartEvaluationRunMutation = () => {
}))
}
export const useEvaluationTemplateColumnsMutation = () => {
return useMutation(consoleQuery.evaluation.templateColumns.mutationOptions())
export const useEvaluationTemplateColumns = (
resourceType: EvaluationResourceType,
resourceId: string,
configPayload: EvaluationConfigData | null,
enabled = true,
) => {
return useQuery(consoleQuery.evaluation.templateColumns.queryOptions({
input: resourceId && configPayload
? {
params: {
targetType: resourceType,
targetId: resourceId,
},
body: configPayload,
}
: skipToken,
enabled: !!resourceId && !!configPayload && enabled,
refetchOnWindowFocus: false,
}))
}
export const useAvailableEvaluationWorkflows = (

View File

@ -59,8 +59,13 @@ export type EvaluationRunRequest = EvaluationConfigData & {
file_id: string
}
export type EvaluationTemplateColumn = {
name: string
type: string
}
export type EvaluationTemplateColumnsResponse = {
columns: string[]
columns: EvaluationTemplateColumn[]
}
export type EvaluationRunStatus = 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'