diff --git a/web/app/components/workflow/nodes/llm/default.ts b/web/app/components/workflow/nodes/llm/default.ts
index 3f3e9f7105..758176658d 100644
--- a/web/app/components/workflow/nodes/llm/default.ts
+++ b/web/app/components/workflow/nodes/llm/default.ts
@@ -12,7 +12,7 @@ const nodeDefault: NodeDefault<LLMNodeType> = {
       },
     },
     variables: [],
-    prompt: [{
+    prompt_template: [{
      role: PromptRole.system,
      text: '',
    }],
diff --git a/web/app/components/workflow/nodes/llm/panel.tsx b/web/app/components/workflow/nodes/llm/panel.tsx
index 0f83beb2c3..c57f84e420 100644
--- a/web/app/components/workflow/nodes/llm/panel.tsx
+++ b/web/app/components/workflow/nodes/llm/panel.tsx
@@ -160,7 +160,7 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
         <ConfigPrompt
           readOnly={readOnly}
           isChatModel={isChatModel}
-          payload={inputs.prompt}
+          payload={inputs.prompt_template}
           variables={inputs.variables.map(item => item.variable)}
           onChange={handlePromptChange}
         />
diff --git a/web/app/components/workflow/nodes/llm/types.ts b/web/app/components/workflow/nodes/llm/types.ts
index b24fc7b48b..248db3a063 100644
--- a/web/app/components/workflow/nodes/llm/types.ts
+++ b/web/app/components/workflow/nodes/llm/types.ts
@@ -4,7 +4,7 @@ import type { CommonNodeType, Memory, ModelConfig, PromptItem, ValueSelector, Va
 export type LLMNodeType = CommonNodeType & {
   model: ModelConfig
   variables: Variable[]
-  prompt: PromptItem[] | PromptItem
+  prompt_template: PromptItem[] | PromptItem
   memory: Memory
   context: {
     enabled: boolean
diff --git a/web/app/components/workflow/nodes/llm/use-config.ts b/web/app/components/workflow/nodes/llm/use-config.ts
index f4f23ab520..9231cbf70b 100644
--- a/web/app/components/workflow/nodes/llm/use-config.ts
+++ b/web/app/components/workflow/nodes/llm/use-config.ts
@@ -27,7 +27,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
       draft.model.mode = model.mode!
       const isModeChange = model.mode !== inputs.model.mode
       if (isModeChange)
-        draft.prompt = model.mode === 'chat' ? [{ role: PromptRole.system, text: '' }] : { text: '' }
+        draft.prompt_template = model.mode === 'chat' ? [{ role: PromptRole.system, text: '' }] : { text: '' }
     })
     setInputs(newInputs)
   }, [inputs, setInputs])
@@ -65,7 +65,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
 
   const handlePromptChange = useCallback((newPrompt: PromptItem[] | PromptItem) => {
     const newInputs = produce(inputs, (draft) => {
-      draft.prompt = newPrompt
+      draft.prompt_template = newPrompt
    })
    setInputs(newInputs)
  }, [inputs, setInputs])
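
For reviewers: a minimal sketch of what the renamed field's union type implies for callers. Only the `PromptItem[] | PromptItem` union and the two mode defaults come from the hunks above; the `PromptRole`/`PromptItem` shapes and the `isChatTemplate`/`defaultTemplateFor` helpers are illustrative assumptions, not code from this PR.

```ts
// Shapes implied by the diff; the real definitions live elsewhere in the repo.
enum PromptRole {
  system = 'system',
  user = 'user',
  assistant = 'assistant',
}

type PromptItem = {
  role?: PromptRole
  text: string
}

// Chat mode stores a list of role-tagged messages; completion mode a single item.
type PromptTemplate = PromptItem[] | PromptItem

// Hypothetical type guard for narrowing the union before rendering or editing.
const isChatTemplate = (t: PromptTemplate): t is PromptItem[] => Array.isArray(t)

// Mirrors the defaults the mode-change callback in use-config.ts writes
// when the model mode flips between chat and completion.
const defaultTemplateFor = (mode: 'chat' | 'completion'): PromptTemplate =>
  mode === 'chat' ? [{ role: PromptRole.system, text: '' }] : { text: '' }
```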