From 6fdbc7dbf3158ca5cf87d95a9cc819d5684bd37c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=99=A2=E5=93=8E=E5=93=9F=E5=96=82?=
Date: Thu, 15 Aug 2024 12:14:13 +0000
Subject: [PATCH] fix error when use farui-plus model (#7316)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: 雪风
---
 api/core/model_runtime/model_providers/tongyi/llm/llm.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/api/core/model_runtime/model_providers/tongyi/llm/llm.py b/api/core/model_runtime/model_providers/tongyi/llm/llm.py
index a75db78d8..4e1bb0a5a 100644
--- a/api/core/model_runtime/model_providers/tongyi/llm/llm.py
+++ b/api/core/model_runtime/model_providers/tongyi/llm/llm.py
@@ -159,6 +159,8 @@ You should also complete the text started with ``` but not tell ``` directly.
 """
         if model in ['qwen-turbo-chat', 'qwen-plus-chat']:
             model = model.replace('-chat', '')
+        if model == 'farui-plus':
+            model = 'qwen-farui-plus'
 
         if model in self.tokenizers:
             tokenizer = self.tokenizers[model]
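
Note (not part of the patch): the hunk maps the model name 'farui-plus' to 'qwen-farui-plus' before the tokenizer-cache lookup shown in the surrounding context lines, presumably so that model name resolves in the cache instead of raising. Below is a minimal standalone Python sketch of that normalization pattern; the names _normalize_model_name, load_tokenizer, and get_tokenizer_for are illustrative stand-ins and do not appear in the patched file.

# Minimal sketch of the name-normalization pattern the hunk adds,
# decoupled from the Tongyi provider class. Only the aliasing logic
# mirrors the patch; everything else is illustrative scaffolding.

_tokenizers: dict = {}  # per-process cache, keyed by normalized model name


def _normalize_model_name(model: str) -> str:
    # The '-chat' variants share a tokenizer with their base models.
    if model in ('qwen-turbo-chat', 'qwen-plus-chat'):
        model = model.replace('-chat', '')
    # Map 'farui-plus' to the 'qwen-farui-plus' alias before any lookup,
    # mirroring the two lines the patch adds.
    if model == 'farui-plus':
        model = 'qwen-farui-plus'
    return model


def load_tokenizer(model: str) -> object:
    # Hypothetical stand-in for whatever actually constructs a tokenizer.
    return object()


def get_tokenizer_for(model: str) -> object:
    model = _normalize_model_name(model)
    if model not in _tokenizers:
        _tokenizers[model] = load_tokenizer(model)
    return _tokenizers[model]


if __name__ == '__main__':
    # 'farui-plus' and 'qwen-farui-plus' resolve to the same cached tokenizer.
    assert get_tokenizer_for('farui-plus') is get_tokenizer_for('qwen-farui-plus')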