diff --git a/web/app/components/workflow/nodes/llm/panel.tsx b/web/app/components/workflow/nodes/llm/panel.tsx
index fd08cbc52a..38155f2b56 100644
--- a/web/app/components/workflow/nodes/llm/panel.tsx
+++ b/web/app/components/workflow/nodes/llm/panel.tsx
@@ -20,6 +20,7 @@ const Panel: FC = () => {
 
   const {
     inputs,
+    isShowVisionConfig,
     handleModelChanged,
     handleCompletionParamsChange,
     handleVarListChange,
@@ -88,8 +89,8 @@ const Panel: FC = () => {
           Prompt
-        {/* */}
-        {isChatApp && isChatApp && (
+        {/* Memory examples */}
+        {isChatApp && isChatModel && (
          <div>Memory examples(Designing)</div>
        )}
        {/* Memory */}
@@ -106,18 +107,19 @@ const Panel: FC = () => {
        )}
        {/* Vision: GPT4-vision and so on */}
-        <Field
-          ...
-          }
-        />
+        {isShowVisionConfig && (
+          <Field
+            ...
+            }
+          />
+        )}
-
         <>
diff --git a/web/app/components/workflow/nodes/llm/use-config.ts b/web/app/components/workflow/nodes/llm/use-config.ts
index d2a9ace7c7..169baec22b 100644
--- a/web/app/components/workflow/nodes/llm/use-config.ts
+++ b/web/app/components/workflow/nodes/llm/use-config.ts
@@ -4,11 +4,14 @@ import useVarList from '../_base/hooks/use-var-list'
 import type { Memory, ValueSelector } from '../../types'
 import type { LLMNodeType } from './types'
 import type { Resolution } from '@/types/app'
+import { useTextGenerationCurrentProviderAndModelAndModelList } from '@/app/components/header/account-setting/model-provider-page/hooks'
+import { ModelFeatureEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
 
 const useConfig = (initInputs: LLMNodeType) => {
   const [inputs, setInputs] = useState(initInputs)
 
   // model
+  const model = inputs.model
   const handleModelChanged = useCallback((model: { provider: string; modelId: string; mode?: string }) => {
     const newInputs = produce(inputs, (draft) => {
       draft.model.provider = model.provider
@@ -25,6 +28,16 @@ const useConfig = (initInputs: LLMNodeType) => {
     setInputs(newInputs)
   }, [inputs, setInputs])
 
+  const {
+    currentModel: currModel,
+  } = useTextGenerationCurrentProviderAndModelAndModelList(
+    {
+      provider: model.provider,
+      model: model.name,
+    },
+  )
+  const isShowVisionConfig = !!currModel?.features?.includes(ModelFeatureEnum.vision)
+
   // variables
   const { handleVarListChange, handleAddVariable } = useVarList({
     inputs,
@@ -55,6 +68,7 @@ const useConfig = (initInputs: LLMNodeType) => {
 
   return {
     inputs,
+    isShowVisionConfig,
     handleModelChanged,
     handleCompletionParamsChange,
     handleVarListChange,
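For readers skimming the diff, here is a minimal standalone sketch of the gating logic it introduces: use-config.ts derives `isShowVisionConfig` from whether the currently selected model lists the vision feature, and panel.tsx renders the vision field only when that flag is true. The types below are simplified stand-ins assumed for illustration, not Dify's actual `ModelFeatureEnum` or model-list declarations.

```ts
// Minimal sketch of the feature-gating logic added in this diff.
// The types here are simplified stand-ins for Dify's model declarations
// (ModelFeatureEnum, current-model item); only the boolean derivation mirrors the diff.

type ModelFeature = 'vision' | 'tool-call' | 'agent-thought'

interface CurrentModel {
  provider: string
  model: string
  features?: ModelFeature[]
}

// Written as a pure function of the current model for illustration; it mirrors
// `!!currModel?.features?.includes(ModelFeatureEnum.vision)` in use-config.ts.
// An undefined model or a missing feature list both resolve to false.
const isShowVisionConfig = (currModel?: CurrentModel): boolean =>
  !!currModel?.features?.includes('vision')

// Usage: a vision-capable model shows the config block, a text-only model hides it.
console.log(isShowVisionConfig({ provider: 'openai', model: 'gpt-4-vision-preview', features: ['vision'] })) // true
console.log(isShowVisionConfig({ provider: 'openai', model: 'gpt-3.5-turbo' })) // false
console.log(isShowVisionConfig(undefined)) // false
```

Keeping the derivation inside the hook means the panel stays purely presentational: it only consumes the boolean returned from `useConfig` and never inspects the model's feature list itself.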