dify/web/app/components/workflow/nodes/llm/default.spec.ts
yyh bbe975c6bc
feat: enhance model plugin workflow checks and model provider management UX (#33289)
Signed-off-by: yyh <yuanyouhuilyz@gmail.com>
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: CodingOnStar <hanxujiang@dify.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Coding On Star <447357187@qq.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: statxc <tyleradams93226@gmail.com>
2026-03-18 10:16:15 +08:00

48 lines
1.3 KiB
TypeScript

import type { LLMNodeType } from './types'
import { AppModeEnum } from '@/types/app'
import { EditionType, PromptRole } from '../../types'
import nodeDefault from './default'
// Identity i18n stub: returns the translation key unchanged so tests can
// assert error messages against their raw keys (e.g. 'errorMsg.fieldRequired').
function t(key: string): string {
  return key
}
/**
 * Builds an LLMNodeType payload for validation tests.
 *
 * Starts from the node's default value, fills in a configured model and a
 * basic system prompt, then shallow-merges `overrides` last — so a caller
 * passing `model` replaces the whole model object (relied on by the
 * missing-provider test below).
 */
const createPayload = (overrides: Partial<LLMNodeType> = {}): LLMNodeType => {
  const base = {
    ...nodeDefault.defaultValue,
    model: {
      ...nodeDefault.defaultValue.model,
      provider: 'langgenius/openai/gpt-4.1',
      mode: AppModeEnum.CHAT,
    },
    prompt_template: [{
      role: PromptRole.system,
      text: 'You are helpful.',
      edition_type: EditionType.basic,
    }],
  }
  // NOTE(review): cast is needed because Partial<LLMNodeType> spread widens
  // the inferred type; the shape is assumed complete via defaultValue.
  return { ...base, ...overrides } as LLMNodeType
}
describe('llm default node validation', () => {
  it('should require a model provider before validating the prompt', () => {
    // Replace the default model wholesale with one whose provider is blank;
    // checkValid should fail on the provider field before inspecting prompts.
    const payload = createPayload({
      model: {
        ...nodeDefault.defaultValue.model,
        provider: '',
        name: 'gpt-4.1',
        mode: AppModeEnum.CHAT,
        completion_params: {
          temperature: 0.7,
        },
      },
    })
    const { isValid, errorMessage } = nodeDefault.checkValid(payload, t)
    expect(isValid).toBe(false)
    expect(errorMessage).toBe('errorMsg.fieldRequired')
  })
  it('should return a valid result when the provider and prompt are configured', () => {
    // The unmodified factory payload has both a provider and a system prompt.
    const { isValid, errorMessage } = nodeDefault.checkValid(createPayload(), t)
    expect(isValid).toBe(true)
    expect(errorMessage).toBe('')
  })
})