mirror of https://github.com/langgenius/dify.git
feat: llm node support config memory
This commit is contained in:
parent
f95eb2df0d
commit
9c0d44fa09
|
|
@ -6,8 +6,8 @@ import Workflow from '@/app/components/workflow'
|
|||
import { BlockEnum } from '@/app/components/workflow/types'
|
||||
|
||||
const nodes = [
|
||||
BlockEnum.QuestionClassifier/* 5 */, BlockEnum.Tool/* 10 */, BlockEnum.VariableAssigner/* 11 */, BlockEnum.Start/* 1 */, BlockEnum.DirectAnswer/* 2 */, BlockEnum.LLM/* 3 */, BlockEnum.KnowledgeRetrieval/* 4 */,
|
||||
BlockEnum.IfElse/* 6 */, BlockEnum.Code/* 7 */, BlockEnum.TemplateTransform/* 8 */, BlockEnum.HttpRequest/* 9 */,
|
||||
BlockEnum.LLM/* 3 */, BlockEnum.VariableAssigner/* 11 */, BlockEnum.Start/* 1 */, BlockEnum.DirectAnswer/* 2 */, BlockEnum.KnowledgeRetrieval/* 4 */, BlockEnum.QuestionClassifier/* 5 */,
|
||||
BlockEnum.IfElse/* 6 */, BlockEnum.Code/* 7 */, BlockEnum.TemplateTransform/* 8 */, BlockEnum.HttpRequest/* 9 */, BlockEnum.Tool/* 10 */,
|
||||
BlockEnum.End/* 12 */,
|
||||
].map((item, i) => ({
|
||||
id: `${i + 1}`,
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import type { FC } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import MemoryConfig from '../_base/components/memory-config'
|
||||
import useConfig from './use-config'
|
||||
import { mockData } from './mock'
|
||||
import VarList from '@/app/components/workflow/nodes/_base/components/variable/var-list'
|
||||
|
|
@ -23,10 +24,11 @@ const Panel: FC = () => {
|
|||
handleVarListChange,
|
||||
handleAddVariable,
|
||||
toggleContextEnabled,
|
||||
handleMemoryChange,
|
||||
} = useConfig(mockData)
|
||||
const model = inputs.model
|
||||
// const modelMode = inputs.model?.mode
|
||||
// const isChatMode = modelMode === 'chat'
|
||||
const modelMode = inputs.model?.mode
|
||||
const isChatMode = modelMode === 'chat'
|
||||
|
||||
return (
|
||||
<div className='mt-2'>
|
||||
|
|
@ -77,12 +79,28 @@ const Panel: FC = () => {
|
|||
)
|
||||
: null}
|
||||
</Field>
|
||||
|
||||
{/* Prompt */}
|
||||
<Field
|
||||
title={t(`${i18nPrefix}.prompt`)}
|
||||
>
|
||||
Prompt
|
||||
</Field>
|
||||
<Split />
|
||||
|
||||
{/* Memory */}
|
||||
{isChatMode && (
|
||||
<>
|
||||
<MemoryConfig
|
||||
readonly={readOnly}
|
||||
payload={inputs.memory}
|
||||
onChange={handleMemoryChange}
|
||||
canSetRoleName
|
||||
/>
|
||||
<Split />
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Vision: GPT4-vision and so on */}
|
||||
<Field
|
||||
title={t(`${i18nPrefix}.vision`)}
|
||||
inline
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import { useCallback, useState } from 'react'
|
||||
import produce from 'immer'
|
||||
import useVarList from '../_base/hooks/use-var-list'
|
||||
import type { Memory } from '../../types'
|
||||
import type { LLMNodeType } from './types'
|
||||
|
||||
const useConfig = (initInputs: LLMNodeType) => {
|
||||
|
|
@ -37,6 +38,13 @@ const useConfig = (initInputs: LLMNodeType) => {
|
|||
setInputs(newInputs)
|
||||
}, [inputs, setInputs])
|
||||
|
||||
const handleMemoryChange = useCallback((newMemory: Memory) => {
|
||||
const newInputs = produce(inputs, (draft) => {
|
||||
draft.memory = newMemory
|
||||
})
|
||||
setInputs(newInputs)
|
||||
}, [inputs, setInputs])
|
||||
|
||||
return {
|
||||
inputs,
|
||||
handleModelChanged,
|
||||
|
|
@ -44,6 +52,7 @@ const useConfig = (initInputs: LLMNodeType) => {
|
|||
handleVarListChange,
|
||||
handleAddVariable,
|
||||
toggleContextEnabled,
|
||||
handleMemoryChange,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue