mirror of https://github.com/langgenius/dify.git
feat: add model selector
This commit is contained in:
parent 56407a910d
commit 45ba3ca07b
@@ -0,0 +1,37 @@
import type { LLMNodeData } from '../../types'
import { MemoryRole } from '../../types'
import { Resolution } from '@/types/app'

export const mockLLMNodeData: LLMNodeData = {
  title: 'Test',
  desc: 'Test',
  type: 'Test',
  model: {
    provider: 'openai',
    name: 'gpt-4',
    mode: 'completion',
    completion_params: {
      temperature: 0.7,
    },
  },
  variables: [],
  prompt: [],
  memory: {
    role_prefix: MemoryRole.assistant,
    window: {
      enabled: false,
      size: 0,
    },
  },
  context: {
    enabled: false,
    size: 0,
  },
  vision: {
    enabled: false,
    variable_selector: [],
    configs: {
      detail: Resolution.low,
    },
  },
}
@@ -1,13 +1,21 @@
import type { FC } from 'react'
import { useTranslation } from 'react-i18next'
import BasePanel from '../_base/panel'
import useInput from './use-input'
import { mockLLMNodeData } from './mock'
import Field from '@/app/components/workflow/nodes/_base/components/field'
import AddButton from '@/app/components/base/button/add-button'
import Split from '@/app/components/workflow/nodes/_base/components/split'
import ModelSelector from '@/app/components/header/account-setting/model-provider-page/model-selector'
import { useTextGenerationCurrentProviderAndModelAndModelList } from '@/app/components/header/account-setting/model-provider-page/hooks'
const i18nPrefix = 'workflow.nodes.llm'

const Panel: FC = () => {
  const { t } = useTranslation()
  const { inputs, handleModelChanged } = useInput(mockLLMNodeData)
  const {
    textGenerationModelList,
  } = useTextGenerationCurrentProviderAndModelAndModelList()
  const handleAddVariable = () => {
    console.log('add variable')
  }
@@ -18,7 +26,16 @@ const Panel: FC = () => {
      <Field
        title={t(`${i18nPrefix}.model`)}
      >
        Model Selector
        <ModelSelector
          defaultModel={(inputs.model?.provider && inputs.model?.name)
            ? {
              provider: inputs.model.provider,
              model: inputs.model.name,
            }
            : undefined}
          modelList={textGenerationModelList}
          onSelect={handleModelChanged}
        />
      </Field>

      <Field
@@ -0,0 +1,26 @@
import { useCallback, useState } from 'react'
import produce from 'immer'
import type { LLMNodeData } from '../../types'
const useInput = (initInputs: LLMNodeData) => {
  const [inputs, setInputs] = useState<LLMNodeData>(initInputs)

  const handleModelChanged = useCallback((model: { provider: string; model: string }) => {
    const newInputs = produce(inputs, (draft) => {
      draft.model.provider = model.provider
      draft.model.name = model.model
    })
    setInputs(newInputs)
  }, [inputs.model])
  return {
    inputs,
    setInputs: (key: string, payload: any) => {
      setInputs({
        ...inputs,
        [key]: payload,
      } as LLMNodeData)
    },
    handleModelChanged,
  }
}

export default useInput
@@ -1,5 +1,5 @@
import type { Node as ReactFlowNode } from 'reactflow'

import type { Resolution } from '@/types/app'
export type NodeData = {
  type: string
  name?: string
@@ -7,3 +7,62 @@ export type NodeData = {
  description?: string
}
export type Node = ReactFlowNode<NodeData>

export type ValueSelector = string[] // [nodeId, key | obj key path]

export type Variable = {
  variable: string
  value_selector: ValueSelector
}

export type ModelConfig = {
  provider: string
  name: string
  mode: string
  completion_params: Record<string, any>
}

export enum PromptRole {
  system = 'system',
  user = 'user',
  assistant = 'assistant',
}

export type PromptItem = {
  role?: PromptRole
  text: string
}

export enum MemoryRole {
  user = 'user',
  assistant = 'assistant',
}

export type Memory = {
  role_prefix: MemoryRole
  window: {
    enabled: boolean
    size: number
  }
}

export type LLMNodeData = {
  title: string
  desc: string
  type: string
  model: ModelConfig
  variables: Variable[]
  prompt: PromptItem[] | PromptItem
  memory: Memory
  context: {
    enabled: boolean
    size: number
  }
  vision: {
    enabled: boolean
    variable_selector: ValueSelector
    configs: {
      detail: Resolution
    }
  }
}
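For reference, a minimal sketch (not part of this commit) of how a component could consume the new use-input hook. Every identifier comes from the files above except the Demo component name and the placeholder model/prompt values, which are illustrative only.

import type { FC } from 'react'
import useInput from './use-input'
import { mockLLMNodeData } from './mock'
import { PromptRole } from '../../types'

const Demo: FC = () => {
  const { inputs, setInputs, handleModelChanged } = useInput(mockLLMNodeData)

  return (
    <div>
      <div>{inputs.model.provider} / {inputs.model.name}</div>
      {/* ModelSelector's onSelect hands back { provider, model }; the hook
          copies them onto inputs.model.provider and inputs.model.name */}
      <button onClick={() => handleModelChanged({ provider: 'openai', model: 'gpt-3.5-turbo' })}>
        switch model
      </button>
      {/* the generic updater replaces a single top-level key of LLMNodeData */}
      <button onClick={() => setInputs('prompt', [{ role: PromptRole.user, text: 'Hello' }])}>
        set prompt
      </button>
    </div>
  )
}

export default Demo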