Skip to content

Commit 6374385

Browse files
committed
Merge branch 'main' into feat/2.4.0
2 parents c30188b + e3e9e72 commit 6374385

5 files changed

Lines changed: 495 additions & 22 deletions

File tree

src/backend/bisheng/core/ai/__init__.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
from langchain_anthropic import ChatAnthropic
2-
from langchain_community.chat_models import ChatTongyi, ChatZhipuAI, MiniMaxChat, MoonshotChat
2+
from langchain_community.chat_models import ChatTongyi, ChatZhipuAI, MoonshotChat
33
from langchain_community.document_compressors import DashScopeRerank
44
from langchain_community.embeddings import DashScopeEmbeddings
55
from langchain_deepseek import ChatDeepSeek
@@ -30,7 +30,6 @@
3030
'AzureChatOpenAI',
3131
'ChatTongyi',
3232
'ChatZhipuAI',
33-
'MiniMaxChat',
3433
'ChatAnthropic',
3534
'ChatDeepSeek',
3635
'MoonshotChat',

src/backend/bisheng/llm/domain/llm/llm.py

Lines changed: 2 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
from bisheng.common.errcode.server import NoLlmModelConfigError, LlmModelConfigDeletedError, LlmProviderDeletedError, \
1616
LlmModelTypeError, LlmModelOfflineError, InitLlmError
1717
from bisheng.core.ai import ChatOllama, ChatOpenAI, ChatOpenAICompatible, \
18-
AzureChatOpenAI, ChatZhipuAI, MiniMaxChat, ChatAnthropic, MoonshotChat
18+
AzureChatOpenAI, ChatZhipuAI, ChatAnthropic, MoonshotChat
1919
from bisheng.core.ai.llm.custom_chat_deepseek import CustomChatDeepSeek
2020
from bisheng.core.ai.llm.custom_chat_tongyi import CustomChatTongYi
2121
from bisheng.llm.domain.const import LLMModelType, LLMServerType
@@ -126,19 +126,6 @@ def _get_qwen_params(params: dict, server_config: dict, model_config: dict) -> d
126126
return user_kwargs
127127

128128

129-
def _get_minimax_params(params: dict, server_config: dict, model_config: dict) -> dict:
130-
params['minimax_api_key'] = server_config.get('openai_api_key')
131-
params['base_url'] = server_config.get('openai_api_base').rstrip('/')
132-
if 'max_tokens' not in params:
133-
params['max_tokens'] = 2048
134-
if '/chat/completions' not in params['base_url']:
135-
params['base_url'] = f"{params['base_url']}/chat/completions"
136-
137-
user_kwargs = _get_user_kwargs(model_config)
138-
user_kwargs.update(params)
139-
return user_kwargs
140-
141-
142129
def _get_anthropic_params(params: dict, server_config: dict, model_config: dict) -> dict:
143130
params.update(server_config)
144131

@@ -182,7 +169,7 @@ def _get_spark_params(params: dict, server_config: dict, model_config: dict) ->
182169
LLMServerType.QWEN.value: {'client': CustomChatTongYi, 'params_handler': _get_qwen_params},
183170
LLMServerType.QIAN_FAN.value: {'client': ChatOpenAICompatible, 'params_handler': _get_openai_params},
184171
LLMServerType.ZHIPU.value: {'client': ChatZhipuAI, 'params_handler': _get_zhipu_params},
185-
LLMServerType.MINIMAX.value: {'client': MiniMaxChat, 'params_handler': _get_minimax_params},
172+
LLMServerType.MINIMAX.value: {'client': ChatOpenAICompatible, 'params_handler': _get_openai_params},
186173
LLMServerType.ANTHROPIC.value: {'client': ChatAnthropic, 'params_handler': _get_anthropic_params},
187174
LLMServerType.DEEPSEEK.value: {'client': CustomChatDeepSeek, 'params_handler': _get_deepseek_params},
188175
LLMServerType.SPARK.value: {'client': ChatOpenAICompatible, 'params_handler': _get_spark_params},

0 commit comments

Comments (0)