From b7360140ee83adebccd6a07fba5375b21352fc83 Mon Sep 17 00:00:00 2001
From: issac2e <90555819+issac2e@users.noreply.github.com>
Date: Thu, 30 Oct 2025 09:38:39 +0800
Subject: [PATCH] fix: resolve stale closure values in LLM node callbacks
 (#27612) (#27614)

Co-authored-by: liuchen15
---
 .../workflow/nodes/llm/use-config.ts | 35 ++++++++++---------
 1 file changed, 19 insertions(+), 16 deletions(-)

diff --git a/web/app/components/workflow/nodes/llm/use-config.ts b/web/app/components/workflow/nodes/llm/use-config.ts
index 44c7096744..c0608865b8 100644
--- a/web/app/components/workflow/nodes/llm/use-config.ts
+++ b/web/app/components/workflow/nodes/llm/use-config.ts
@@ -27,6 +27,9 @@ const useConfig = (id: string, payload: LLMNodeType) => {
   const [defaultRolePrefix, setDefaultRolePrefix] = useState<{ user: string; assistant: string }>({ user: '', assistant: '' })
   const { inputs, setInputs: doSetInputs } = useNodeCrud<LLMNodeType>(id, payload)
   const inputRef = useRef(inputs)
+  useEffect(() => {
+    inputRef.current = inputs
+  }, [inputs])
 
   const { deleteNodeInspectorVars } = useInspectVarsCrud()
 
@@ -117,7 +120,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
   } = useConfigVision(model, {
     payload: inputs.vision,
     onChange: (newPayload) => {
-      const newInputs = produce(inputs, (draft) => {
+      const newInputs = produce(inputRef.current, (draft) => {
         draft.vision = newPayload
       })
       setInputs(newInputs)
@@ -148,11 +151,11 @@ const useConfig = (id: string, payload: LLMNodeType) => {
   }, [model.provider, currentProvider, currentModel, handleModelChanged])
 
   const handleCompletionParamsChange = useCallback((newParams: Record<string, any>) => {
-    const newInputs = produce(inputs, (draft) => {
+    const newInputs = produce(inputRef.current, (draft) => {
       draft.model.completion_params = newParams
     })
     setInputs(newInputs)
-  }, [inputs, setInputs])
+  }, [setInputs])
 
   // change to vision model to set vision enabled, else disabled
   useEffect(() => {
@@ -238,29 +241,29 @@ const useConfig = (id: string, payload: LLMNodeType) => {
 
   // context
   const handleContextVarChange = useCallback((newVar: ValueSelector | string) => {
-    const newInputs = produce(inputs, (draft) => {
+    const newInputs = produce(inputRef.current, (draft) => {
       draft.context.variable_selector = newVar as ValueSelector || []
       draft.context.enabled = !!(newVar && newVar.length > 0)
     })
     setInputs(newInputs)
-  }, [inputs, setInputs])
+  }, [setInputs])
 
   const handlePromptChange = useCallback((newPrompt: PromptItem[] | PromptItem) => {
     const newInputs = produce(inputRef.current, (draft) => {
       draft.prompt_template = newPrompt
     })
     setInputs(newInputs)
-  }, [inputs, setInputs])
+  }, [setInputs])
 
   const handleMemoryChange = useCallback((newMemory?: Memory) => {
-    const newInputs = produce(inputs, (draft) => {
+    const newInputs = produce(inputRef.current, (draft) => {
       draft.memory = newMemory
     })
     setInputs(newInputs)
-  }, [inputs, setInputs])
+  }, [setInputs])
 
   const handleSyeQueryChange = useCallback((newQuery: string) => {
-    const newInputs = produce(inputs, (draft) => {
+    const newInputs = produce(inputRef.current, (draft) => {
       if (!draft.memory) {
         draft.memory = {
           window: {
@@ -275,7 +278,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
       }
     })
     setInputs(newInputs)
-  }, [inputs, setInputs])
+  }, [setInputs])
 
   // structure output
   const { data: modelList } = useModelList(ModelTypeEnum.textGeneration)
@@ -286,22 +289,22 @@ const useConfig = (id: string, payload: LLMNodeType) => {
   const [structuredOutputCollapsed, setStructuredOutputCollapsed] = useState(true)
   const handleStructureOutputEnableChange = useCallback((enabled: boolean) => {
-    const newInputs = produce(inputs, (draft) => {
+    const newInputs = produce(inputRef.current, (draft) => {
       draft.structured_output_enabled = enabled
     })
     setInputs(newInputs)
     if (enabled)
       setStructuredOutputCollapsed(false)
     deleteNodeInspectorVars(id)
-  }, [inputs, setInputs, deleteNodeInspectorVars, id])
+  }, [setInputs, deleteNodeInspectorVars, id])
 
   const handleStructureOutputChange = useCallback((newOutput: StructuredOutput) => {
-    const newInputs = produce(inputs, (draft) => {
+    const newInputs = produce(inputRef.current, (draft) => {
       draft.structured_output = newOutput
     })
     setInputs(newInputs)
     deleteNodeInspectorVars(id)
-  }, [inputs, setInputs, deleteNodeInspectorVars, id])
+  }, [setInputs, deleteNodeInspectorVars, id])
 
   const filterInputVar = useCallback((varPayload: Var) => {
     return [VarType.number, VarType.string, VarType.secret, VarType.arrayString, VarType.arrayNumber, VarType.file, VarType.arrayFile].includes(varPayload.type)
   }, [])
@@ -317,11 +320,11 @@ const useConfig = (id: string, payload: LLMNodeType) => {
 
   // reasoning format
   const handleReasoningFormatChange = useCallback((reasoningFormat: 'tagged' | 'separated') => {
-    const newInputs = produce(inputs, (draft) => {
+    const newInputs = produce(inputRef.current, (draft) => {
       draft.reasoning_format = reasoningFormat
     })
    setInputs(newInputs)
-  }, [inputs, setInputs])
+  }, [setInputs])
 
   const {
     availableVars,
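
Reviewer note (not part of the patch): the stale-closure pattern this patch
removes can be shown with a minimal standalone sketch. The hook and function
names below (useExample, readStale, readFresh) are hypothetical and not from
this repository; only the ref-mirror technique (useRef kept current by a
useEffect) matches what the patch does in useConfig.

  import { useCallback, useEffect, useRef, useState } from 'react'

  function useExample() {
    const [inputs, setInputs] = useState({ count: 0 })

    // Mirror the latest `inputs` into a ref on every change, as the
    // patch does at the top of useConfig.
    const inputRef = useRef(inputs)
    useEffect(() => {
      inputRef.current = inputs
    }, [inputs])

    // Stale: with an empty deps array, `inputs` is captured on the first
    // render, so this callback returns the initial value forever.
    const readStale = useCallback(() => inputs.count, [])

    // Fresh: the ref object is stable, so the callback identity never
    // changes, yet `inputRef.current` always holds the latest state.
    const readFresh = useCallback(() => inputRef.current.count, [])

    return { setInputs, readStale, readFresh }
  }

This is why each handler in the patch switches produce(inputs, ...) to
produce(inputRef.current, ...) and drops `inputs` from its dependency array:
the callbacks keep a stable identity while still reading current state.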