From 4b2baeea655892667a7bde3c246cd6d054172511 Mon Sep 17 00:00:00 2001
From: Stream <1542763342@qq.com>
Date: Thu, 17 Jul 2025 14:19:52 +0800
Subject: [PATCH] fix: use model provided by user in prompt generator (#22541) (#22542)

Co-authored-by: stream
---
 api/core/llm_generator/llm_generator.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py
index e01896a491..f7fd93be4a 100644
--- a/api/core/llm_generator/llm_generator.py
+++ b/api/core/llm_generator/llm_generator.py
@@ -148,9 +148,11 @@ class LLMGenerator:
 
         model_manager = ModelManager()
 
-        model_instance = model_manager.get_default_model_instance(
+        model_instance = model_manager.get_model_instance(
             tenant_id=tenant_id,
             model_type=ModelType.LLM,
+            provider=model_config.get("provider", ""),
+            model=model_config.get("name", ""),
         )
 
         try:
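
For context, a minimal sketch of the behaviour change, not part of the patch itself: before the fix the prompt generator always resolved the tenant's default LLM, while afterwards it honours the provider and model the user selected. The `tenant_id` value and the shape of `model_config` below are hypothetical, and the import paths are assumed from the `api/core` layout shown in the diff header; only the `get_model_instance` call itself is taken directly from the hunk.

```python
from core.model_manager import ModelManager                       # assumed import path
from core.model_runtime.entities.model_entities import ModelType  # assumed import path

tenant_id = "tenant-123"                                  # hypothetical tenant id
model_config = {"provider": "openai", "name": "gpt-4o"}   # hypothetical user selection

model_manager = ModelManager()

# Before the patch: the tenant's default LLM was used regardless of model_config.
# model_instance = model_manager.get_default_model_instance(
#     tenant_id=tenant_id,
#     model_type=ModelType.LLM,
# )

# After the patch: the user-provided provider/model is passed through,
# falling back to empty strings when the keys are missing.
model_instance = model_manager.get_model_instance(
    tenant_id=tenant_id,
    model_type=ModelType.LLM,
    provider=model_config.get("provider", ""),
    model=model_config.get("name", ""),
)
```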