From 88354355589e64767d592b791b011ae8403b9485 Mon Sep 17 00:00:00 2001
From: Garfield Dai
Date: Mon, 13 Nov 2023 23:13:01 +0800
Subject: [PATCH] fix: change model mode. (#1520)

---
 api/core/completion.py              | 2 --
 api/core/prompt/prompt_transform.py | 4 +---
 2 files changed, 1 insertion(+), 5 deletions(-)

diff --git a/api/core/completion.py b/api/core/completion.py
index b4f5e36b4c..64db2ea4ce 100644
--- a/api/core/completion.py
+++ b/api/core/completion.py
@@ -277,7 +277,6 @@ class Completion:
         if app_model_config.prompt_type == 'simple':
             prompt_messages, stop_words = prompt_transform.get_prompt(
                 app_mode=mode,
-                app_model_config=app_model_config,
                 pre_prompt=app_model_config.pre_prompt,
                 inputs=inputs,
                 query=query,
@@ -366,7 +365,6 @@ class Completion:
         if app_model_config.prompt_type == 'simple':
             prompt_messages, _ = prompt_transform.get_prompt(
                 app_mode=mode,
-                app_model_config=app_model_config,
                 pre_prompt=app_model_config.pre_prompt,
                 inputs=inputs,
                 query=query,
diff --git a/api/core/prompt/prompt_transform.py b/api/core/prompt/prompt_transform.py
index cdfe08e9b0..1f7bc6e6e5 100644
--- a/api/core/prompt/prompt_transform.py
+++ b/api/core/prompt/prompt_transform.py
@@ -27,7 +27,6 @@ class AppMode(enum.Enum):
 
 class PromptTransform:
     def get_prompt(self, app_mode: str,
-                   app_model_config: AppModelConfig,
                    pre_prompt: str,
                    inputs: dict,
                    query: str,
@@ -36,10 +35,9 @@ class PromptTransform:
                    memory: Optional[BaseChatMemory],
                    model_instance: BaseLLM) -> \
             Tuple[List[PromptMessage], Optional[List[str]]]:
-        model_mode = app_model_config.model_dict['mode']
 
         app_mode_enum = AppMode(app_mode)
-        model_mode_enum = ModelMode(model_mode)
+        model_mode_enum = model_instance.model_mode
 
         prompt_rules = self._read_prompt_rules_from_file(self._prompt_file_name(app_mode, model_instance))
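
In effect, the patch stops deriving the model mode from the app config (app_model_config.model_dict['mode']) and reads it directly from the model instance (model_instance.model_mode), which is why callers no longer pass app_model_config into get_prompt(). Below is a minimal sketch of that shift, assuming a simplified config dict and a FakeLLM stand-in; these are illustrative names, not the actual Dify BaseLLM / AppModelConfig classes.

    # Illustrative sketch only -- not the Dify implementation.
    import enum


    class ModelMode(enum.Enum):
        COMPLETION = 'completion'
        CHAT = 'chat'


    class FakeLLM:
        """Stand-in for a model instance that carries its own mode."""
        def __init__(self, model_mode: ModelMode):
            self.model_mode = model_mode


    def mode_before(app_model_config: dict) -> ModelMode:
        # Old behaviour: read the mode string out of the app config
        # and convert it to the enum.
        return ModelMode(app_model_config['model']['mode'])


    def mode_after(model_instance: FakeLLM) -> ModelMode:
        # New behaviour: the model instance already exposes the enum,
        # so the app config is no longer needed here.
        return model_instance.model_mode


    if __name__ == '__main__':
        llm = FakeLLM(model_mode=ModelMode.CHAT)
        assert mode_before({'model': {'mode': 'chat'}}) == mode_after(llm)

Keeping the mode on the model instance means get_prompt() depends only on the model it is formatting for, not on the whole app configuration object.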