fix: memory support switch

This commit is contained in:
Joel 2024-03-20 20:50:07 +08:00
parent 94ca0edb68
commit d4f362164f
8 changed files with 105 additions and 92 deletions

View File

@@ -45,31 +45,38 @@ const RoleItem: FC<RoleItemProps> = ({
type Props = {
className?: string
readonly: boolean
payload: Memory
onChange: (memory: Memory) => void
config: { data?: Memory }
onChange: (memory?: Memory) => void
canSetRoleName?: boolean
}
const MEMORY_DEFAULT: Memory = { window: { enabled: false, size: WINDOW_SIZE_DEFAULT } }
const MemoryConfig: FC<Props> = ({
className,
readonly,
payload = { window: { enabled: false, size: WINDOW_SIZE_DEFAULT } },
config = { data: MEMORY_DEFAULT },
onChange,
canSetRoleName = false,
}) => {
const { t } = useTranslation()
const payload = config.data
const handleMemoryEnabledChange = useCallback((enabled: boolean) => {
onChange(enabled ? MEMORY_DEFAULT : undefined)
}, [onChange])
const handleWindowEnabledChange = useCallback((enabled: boolean) => {
const newPayload = produce(payload, (draft) => {
const newPayload = produce(config.data || MEMORY_DEFAULT, (draft) => {
if (!draft.window)
draft.window = { enabled: false, size: WINDOW_SIZE_DEFAULT }
draft.window.enabled = enabled
})
onChange(newPayload)
}, [payload, onChange])
}, [config, onChange])
const handleWindowSizeChange = useCallback((size: number | string) => {
const newPayload = produce(payload, (draft) => {
const newPayload = produce(payload || MEMORY_DEFAULT, (draft) => {
if (!draft.window)
draft.window = { enabled: true, size: WINDOW_SIZE_DEFAULT }
let limitedSize: null | string | number = size
@@ -94,13 +101,17 @@ const MemoryConfig: FC<Props> = ({
}, [payload, onChange])
const handleBlur = useCallback(() => {
const payload = config.data
if (!payload)
return
if (payload.window.size === '' || payload.window.size === null)
handleWindowSizeChange(WINDOW_SIZE_DEFAULT)
}, [handleWindowSizeChange, payload.window?.size])
}, [handleWindowSizeChange, config])
const handleRolePrefixChange = useCallback((role: MemoryRole) => {
return (value: string) => {
const newPayload = produce(payload, (draft) => {
const newPayload = produce(config.data || MEMORY_DEFAULT, (draft) => {
if (!draft.role_prefix) {
draft.role_prefix = {
user: '',
@@ -111,69 +122,79 @@ const MemoryConfig: FC<Props> = ({
})
onChange(newPayload)
}
}, [payload, onChange])
}, [config, onChange])
return (
<div className={cn(className)}>
<Field
title={t(`${i18nPrefix}.memory`)}
tooltip={t(`${i18nPrefix}.memoryTip`)!}
operations={
<Switch
defaultValue={!!payload}
onChange={handleMemoryEnabledChange}
size='md'
disabled={readonly}
/>
}
>
<>
{/* window size */}
<div className='flex justify-between'>
<div className='flex items-center h-8 space-x-1'>
<Switch
defaultValue={payload.window?.enabled}
onChange={handleWindowEnabledChange}
size='md'
disabled={readonly}
/>
<div className='leading-[18px] text-xs font-medium text-gray-500 uppercase'>{t(`${i18nPrefix}.windowSize`)}</div>
</div>
<div className='flex items-center h-8 space-x-2'>
<Slider
className='w-[144px]'
value={(payload.window?.size || WINDOW_SIZE_DEFAULT) as number}
min={WINDOW_SIZE_MIN}
max={WINDOW_SIZE_MAX}
step={1}
onChange={handleWindowSizeChange}
disabled={readonly}
/>
<input
value={(payload.window?.size || WINDOW_SIZE_DEFAULT) as number}
className='shrink-0 block ml-4 pl-3 w-12 h-8 appearance-none outline-none rounded-lg bg-gray-100 text-[13px] text-gra-900'
type='number'
min={WINDOW_SIZE_MIN}
max={WINDOW_SIZE_MAX}
step={1}
onChange={e => handleWindowSizeChange(e.target.value)}
onBlur={handleBlur}
disabled={readonly}
/>
</div>
</div>
{canSetRoleName && (
<div className='mt-4'>
<div className='leading-6 text-xs font-medium text-gray-500 uppercase'>{t(`${i18nPrefix}.conversationRoleName`)}</div>
<div className='mt-1 space-y-2'>
<RoleItem
readonly={readonly}
title={t(`${i18nPrefix}.user`)}
value={payload.role_prefix?.user || ''}
onChange={handleRolePrefixChange(MemoryRole.user)}
{payload && (
<>
{/* window size */}
<div className='flex justify-between'>
<div className='flex items-center h-8 space-x-1'>
<Switch
defaultValue={payload?.window?.enabled}
onChange={handleWindowEnabledChange}
size='md'
disabled={readonly}
/>
<RoleItem
readonly={readonly}
title={t(`${i18nPrefix}.assistant`)}
value={payload.role_prefix?.assistant || ''}
onChange={handleRolePrefixChange(MemoryRole.assistant)}
<div className='leading-[18px] text-xs font-medium text-gray-500 uppercase'>{t(`${i18nPrefix}.windowSize`)}</div>
</div>
<div className='flex items-center h-8 space-x-2'>
<Slider
className='w-[144px]'
value={(payload.window?.size || WINDOW_SIZE_DEFAULT) as number}
min={WINDOW_SIZE_MIN}
max={WINDOW_SIZE_MAX}
step={1}
onChange={handleWindowSizeChange}
disabled={readonly || !payload.window?.enabled}
/>
<input
value={(payload.window?.size || WINDOW_SIZE_DEFAULT) as number}
className='shrink-0 block ml-4 pl-3 w-12 h-8 appearance-none outline-none rounded-lg bg-gray-100 text-[13px] text-gra-900'
type='number'
min={WINDOW_SIZE_MIN}
max={WINDOW_SIZE_MAX}
step={1}
onChange={e => handleWindowSizeChange(e.target.value)}
onBlur={handleBlur}
disabled={readonly}
/>
</div>
</div>
{canSetRoleName && (
<div className='mt-4'>
<div className='leading-6 text-xs font-medium text-gray-500 uppercase'>{t(`${i18nPrefix}.conversationRoleName`)}</div>
<div className='mt-1 space-y-2'>
<RoleItem
readonly={readonly}
title={t(`${i18nPrefix}.user`)}
value={payload.role_prefix?.user || ''}
onChange={handleRolePrefixChange(MemoryRole.user)}
/>
<RoleItem
readonly={readonly}
title={t(`${i18nPrefix}.assistant`)}
value={payload.role_prefix?.assistant || ''}
onChange={handleRolePrefixChange(MemoryRole.assistant)}
/>
</div>
</div>
)}
</>
)}
)}
</>
</Field>
</div>
)

View File

@@ -16,13 +16,6 @@ const nodeDefault: NodeDefault<LLMNodeType> = {
},
},
variables: [],
memory: {
role_prefix: undefined,
window: {
enabled: false,
size: 50,
},
},
prompt_template: [{
role: PromptRole.system,
text: '',

View File

@@ -180,11 +180,11 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
<div className='text-xs text-gray-300'>Memory examples(Designing)</div>
)} */}
{/* Memory */}
{isChatModel && (
{isChatMode && (
<>
<MemoryConfig
readonly={readOnly}
payload={inputs.memory}
config={{ data: inputs.memory }}
onChange={handleMemoryChange}
canSetRoleName={isCompletionModel}
/>

View File

@@ -5,7 +5,7 @@ export type LLMNodeType = CommonNodeType & {
model: ModelConfig
variables: Variable[]
prompt_template: PromptItem[] | PromptItem
memory: Memory
memory?: Memory
context: {
enabled: boolean
variable_selector: ValueSelector

View File

@@ -1,4 +1,4 @@
import { useCallback, useEffect, useRef } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import produce from 'immer'
import useVarList from '../_base/hooks/use-var-list'
import { VarType } from '../../types'
@@ -23,7 +23,18 @@ const useConfig = (id: string, payload: LLMNodeType) => {
const isChatMode = useIsChatMode()
const defaultConfig = useStore(s => s.nodesDefaultConfigs)[payload.type]
const { inputs, setInputs } = useNodeCrud<LLMNodeType>(id, payload)
const [defaultRolePrefix, setDefaultRolePrefix] = useState<{ user: string; assistant: string }>({ user: '', assistant: '' })
const { inputs, setInputs: doSetInputs } = useNodeCrud<LLMNodeType>(id, payload)
const setInputs = useCallback((newInputs: LLMNodeType) => {
if (newInputs.memory && !newInputs.memory.role_prefix) {
const newPayload = produce(newInputs, (draft) => {
draft.memory!.role_prefix = defaultRolePrefix
})
doSetInputs(newPayload)
return
}
doSetInputs(newInputs)
}, [doSetInputs, defaultRolePrefix])
const inputRef = useRef(inputs)
useEffect(() => {
inputRef.current = inputs
@@ -68,23 +79,11 @@ const useConfig = (id: string, payload: LLMNodeType) => {
}
else {
draft.prompt_template = promptTemplates.completion_model.prompt
if (!draft.memory) {
draft.memory = {
role_prefix: {
user: '',
assistant: '',
},
window: {
enabled: false,
size: '',
},
}
}
draft.memory.role_prefix = {
setDefaultRolePrefix({
user: promptTemplates.completion_model.conversation_histories_role.user_prefix,
assistant: promptTemplates.completion_model.conversation_histories_role.assistant_prefix,
}
})
}
}, [isChatModel])
useEffect(() => {
@@ -165,7 +164,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
setInputs(newInputs)
}, [inputs, setInputs])
const handleMemoryChange = useCallback((newMemory: Memory) => {
const handleMemoryChange = useCallback((newMemory?: Memory) => {
const newInputs = produce(inputs, (draft) => {
draft.memory = newMemory
})

View File

@@ -11,8 +11,8 @@ type Props = {
instruction: string
onInstructionChange: (instruction: string) => void
hideMemorySetting: boolean
memory: Memory
onMemoryChange: (memory: Memory) => void
memory?: Memory
onMemoryChange: (memory?: Memory) => void
readonly?: boolean
}
@@ -46,7 +46,7 @@ const AdvancedSetting: FC<Props> = ({
<MemoryConfig
className='mt-4'
readonly={false}
payload={memory}
config={{ data: memory }}
onChange={onMemoryChange}
canSetRoleName={false}
/>

View File

@@ -10,5 +10,5 @@ export type QuestionClassifierNodeType = CommonNodeType & {
model: ModelConfig
classes: Topic[]
instruction: string
memory: Memory
memory?: Memory
}

View File

@@ -99,7 +99,7 @@ const useConfig = (id: string, payload: QuestionClassifierNodeType) => {
setInputs(newInputs)
}, [inputs, setInputs])
const handleMemoryChange = useCallback((memory: Memory) => {
const handleMemoryChange = useCallback((memory?: Memory) => {
const newInputs = produce(inputs, (draft) => {
draft.memory = memory
})