mirror of https://github.com/langgenius/dify.git
chore: prompt to prompt template
This commit is contained in:
parent 14d71fb598
commit 3823ae5890
@@ -12,7 +12,7 @@ const nodeDefault: NodeDefault<LLMNodeType> = {
     },
   },
   variables: [],
-  prompt: [{
+  prompt_template: [{
     role: PromptRole.system,
     text: '',
   }],
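
For reference, the renamed key still seeds a new LLM node with one empty system message. A minimal sketch of the resulting default value (PromptRole's members are assumed here; only the fields visible in this hunk are shown):

// Sketch only: the real PromptRole enum and NodeDefault shape live elsewhere in the repo.
enum PromptRole { system = 'system', user = 'user', assistant = 'assistant' }

const defaultValue = {
  variables: [],
  prompt_template: [{
    role: PromptRole.system, // a chat-style template starts from one empty system message
    text: '',
  }],
}
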
@@ -160,7 +160,7 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
       <ConfigPrompt
         readOnly={readOnly}
         isChatModel={isChatModel}
-        payload={inputs.prompt}
+        payload={inputs.prompt_template}
         variables={inputs.variables.map(item => item.variable)}
         onChange={handlePromptChange}
       />
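
The ConfigPrompt interface itself is not part of this diff, but its props can be read off the call site above. A hedged sketch of what they imply (types are inferred from usage, not taken from the component's source):

// Inferred from the JSX above; anything beyond the visible props is an assumption.
type ConfigPromptProps = {
  readOnly: boolean
  isChatModel: boolean
  payload: PromptItem[] | PromptItem // now sourced from inputs.prompt_template
  variables: string[] // variable names mapped from inputs.variables
  onChange: (newPrompt: PromptItem[] | PromptItem) => void
}
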
@@ -4,7 +4,7 @@ import type { CommonNodeType, Memory, ModelConfig, PromptItem, ValueSelector, Va
 export type LLMNodeType = CommonNodeType & {
   model: ModelConfig
   variables: Variable[]
-  prompt: PromptItem[] | PromptItem
+  prompt_template: PromptItem[] | PromptItem
   memory: Memory
   context: {
     enabled: boolean
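
PromptItem is imported above but not defined in this diff. Judging from the literals used in this commit ({ role: PromptRole.system, text: '' } in chat mode, { text: '' } in completion mode), a plausible sketch is:

// Assumed shape, reconstructed from usage in this commit only.
type PromptItem = {
  role?: PromptRole // set for chat-mode messages; absent in the completion-mode object
  text: string
}
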
@@ -27,7 +27,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
       draft.model.mode = model.mode!
       const isModeChange = model.mode !== inputs.model.mode
       if (isModeChange)
-        draft.prompt = model.mode === 'chat' ? [{ role: PromptRole.system, text: '' }] : { text: '' }
+        draft.prompt_template = model.mode === 'chat' ? [{ role: PromptRole.system, text: '' }] : { text: '' }
     })
     setInputs(newInputs)
   }, [inputs, setInputs])
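
The reset here exists because the two model modes store different shapes under the same key: chat mode keeps a list of role-tagged messages, completion mode a single text template. Illustrative values only:

// The two shapes prompt_template can take, per the ternary above.
const chatDefault: PromptItem[] = [{ role: PromptRole.system, text: '' }]
const completionDefault: PromptItem = { text: '' }
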
@@ -65,7 +65,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
 
   const handlePromptChange = useCallback((newPrompt: PromptItem[] | PromptItem) => {
     const newInputs = produce(inputs, (draft) => {
-      draft.prompt = newPrompt
+      draft.prompt_template = newPrompt
     })
     setInputs(newInputs)
   }, [inputs, setInputs])
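
handlePromptChange reuses immer's produce to write the renamed key immutably; only the target field changes in this hunk. For example, ConfigPrompt's onChange might hand it a full chat template (values below are hypothetical):

// Hypothetical call; roles beyond system are assumed to exist on PromptRole.
handlePromptChange([
  { role: PromptRole.system, text: 'You are a helpful assistant.' },
  { role: PromptRole.user, text: 'Answer concisely.' },
])
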