From 8480b0197b85ddb870591a478f4bf6149f814264 Mon Sep 17 00:00:00 2001
From: takatost
Date: Tue, 10 Oct 2023 13:01:18 +0800
Subject: [PATCH] fix: prompt for baichuan text generation models (#1299)

---
 api/core/model_providers/models/llm/baichuan_model.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/api/core/model_providers/models/llm/baichuan_model.py b/api/core/model_providers/models/llm/baichuan_model.py
index e614547fa3..d2aea36cca 100644
--- a/api/core/model_providers/models/llm/baichuan_model.py
+++ b/api/core/model_providers/models/llm/baichuan_model.py
@@ -37,6 +37,12 @@ class BaichuanModel(BaseLLM):
         prompts = self._get_prompt_from_messages(messages)
         return self._client.generate([prompts], stop, callbacks)
 
+    def prompt_file_name(self, mode: str) -> str:
+        if mode == 'completion':
+            return 'baichuan_completion'
+        else:
+            return 'baichuan_chat'
+
     def get_num_tokens(self, messages: List[PromptMessage]) -> int:
         """
         get num tokens of prompt messages.