
Commit 8870c33

PR Bot committed
feat: upgrade MiniMax provider to OpenAI-compatible API with M2.5 models
- Switch MiniMax LLM client from legacy MiniMaxChat to ChatOpenAICompatible
- Remove custom _get_minimax_params, reuse standard _get_openai_params
- Add MiniMax-M2.5 and MiniMax-M2.5-highspeed models to model templates
- Update default API base URL from api.minimax.chat to api.minimax.io
- Preserve web_search tool support in parse_kwargs for MiniMax
- Add 26 unit tests + 3 integration tests for MiniMax provider
1 parent 39bc019 commit 8870c33

5 files changed

Lines changed: 293 additions & 22 deletions
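Since the provider now targets MiniMax's OpenAI-compatible endpoint, the new models can be exercised with any standard OpenAI-style client. A minimal sketch, mirroring the integration tests added in this commit (the model name and base URL come from the diff below; the prompt and sampling values are illustrative):

import os
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(
    model='MiniMax-M2.5',                  # or 'MiniMax-M2.5-highspeed'
    api_key=os.environ['MINIMAX_API_KEY'],
    base_url='https://api.minimax.io/v1',  # new OpenAI-compatible base URL
    temperature=0.1,
    max_tokens=64,
)
print(llm.invoke('Say hello in one word.').content)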


src/backend/bisheng/core/ai/__init__.py

Lines changed: 1 addition & 2 deletions
@@ -1,5 +1,5 @@
 from langchain_anthropic import ChatAnthropic
-from langchain_community.chat_models import ChatTongyi, ChatZhipuAI, MiniMaxChat, MoonshotChat
+from langchain_community.chat_models import ChatTongyi, ChatZhipuAI, MoonshotChat
 from langchain_community.document_compressors import DashScopeRerank
 from langchain_community.embeddings import DashScopeEmbeddings
 from langchain_deepseek import ChatDeepSeek
@@ -30,7 +30,6 @@
     'AzureChatOpenAI',
     'ChatTongyi',
     'ChatZhipuAI',
-    'MiniMaxChat',
     'ChatAnthropic',
     'ChatDeepSeek',
     'MoonshotChat',

src/backend/bisheng/llm/domain/llm/llm.py

Lines changed: 2 additions & 15 deletions
@@ -15,7 +15,7 @@
 from bisheng.common.errcode.server import NoLlmModelConfigError, LlmModelConfigDeletedError, LlmProviderDeletedError, \
     LlmModelTypeError, LlmModelOfflineError, InitLlmError
 from bisheng.core.ai import ChatOllama, ChatOpenAI, ChatOpenAICompatible, \
-    AzureChatOpenAI, ChatZhipuAI, MiniMaxChat, ChatAnthropic, MoonshotChat
+    AzureChatOpenAI, ChatZhipuAI, ChatAnthropic, MoonshotChat
 from bisheng.core.ai.llm.custom_chat_deepseek import CustomChatDeepSeek
 from bisheng.core.ai.llm.custom_chat_tongyi import CustomChatTongYi
 from bisheng.llm.domain.const import LLMModelType, LLMServerType
@@ -110,19 +110,6 @@ def _get_qwen_params(params: dict, server_config: dict, model_config: dict) -> dict:
     return user_kwargs


-def _get_minimax_params(params: dict, server_config: dict, model_config: dict) -> dict:
-    params['minimax_api_key'] = server_config.get('openai_api_key')
-    params['base_url'] = server_config.get('openai_api_base').rstrip('/')
-    if 'max_tokens' not in params:
-        params['max_tokens'] = 2048
-    if '/chat/completions' not in params['base_url']:
-        params['base_url'] = f"{params['base_url']}/chat/completions"
-
-    user_kwargs = _get_user_kwargs(model_config)
-    user_kwargs.update(params)
-    return user_kwargs
-
-
 def _get_anthropic_params(params: dict, server_config: dict, model_config: dict) -> dict:
     params.update(server_config)

@@ -166,7 +153,7 @@ def _get_spark_params(params: dict, server_config: dict, model_config: dict) -> dict:
     LLMServerType.QWEN.value: {'client': CustomChatTongYi, 'params_handler': _get_qwen_params},
     LLMServerType.QIAN_FAN.value: {'client': ChatOpenAICompatible, 'params_handler': _get_openai_params},
     LLMServerType.ZHIPU.value: {'client': ChatZhipuAI, 'params_handler': _get_zhipu_params},
-    LLMServerType.MINIMAX.value: {'client': MiniMaxChat, 'params_handler': _get_minimax_params},
+    LLMServerType.MINIMAX.value: {'client': ChatOpenAICompatible, 'params_handler': _get_openai_params},
     LLMServerType.ANTHROPIC.value: {'client': ChatAnthropic, 'params_handler': _get_anthropic_params},
     LLMServerType.DEEPSEEK.value: {'client': CustomChatDeepSeek, 'params_handler': _get_openai_params},
     LLMServerType.SPARK.value: {'client': ChatOpenAICompatible, 'params_handler': _get_spark_params},
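Illustrative only, not part of the commit: a rough sketch of how the updated factory entry would resolve at runtime. The lookup-table name NODE_FACTORY, the example configs, and the call sequence are assumptions; only the client class, the params handler, and the openai_api_key / openai_api_base keys appear in the diff above.

entry = NODE_FACTORY[LLMServerType.MINIMAX.value]       # hypothetical lookup-table name
params = entry['params_handler'](
    {'model': 'MiniMax-M2.5', 'temperature': 0.1},      # request params (illustrative)
    {'openai_api_key': 'sk-***',                        # server_config keys, as read by
     'openai_api_base': 'https://api.minimax.io/v1'},   #   the removed _get_minimax_params
    {},                                                 # model_config
)
llm = entry['client'](**params)                         # ChatOpenAICompatible instance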
Lines changed: 274 additions & 0 deletions
@@ -0,0 +1,274 @@ (new test file; all 274 lines are additions)
"""Tests for MiniMax provider upgrade to OpenAI-compatible API (M2.5 support).

These tests verify the MiniMax provider changes:
1. Backend: MiniMax uses ChatOpenAICompatible (OpenAI-compatible) instead of legacy MiniMaxChat
2. Frontend: Model data includes M2.5 models, default API URL updated
"""
import ast
import json
import os
import unittest

# Paths relative to this test file
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
LLM_PY = os.path.join(BASE_DIR, '..', 'bisheng', 'llm', 'domain', 'llm', 'llm.py')
AI_INIT = os.path.join(BASE_DIR, '..', 'bisheng', 'core', 'ai', '__init__.py')
DATA_JSON = os.path.join(BASE_DIR, '..', '..', 'frontend', 'platform', 'public', 'models', 'data.json')
CUSTOM_FORM_TSX = os.path.join(BASE_DIR, '..', '..', 'frontend', 'platform', 'src', 'pages',
                               'ModelPage', 'manage', 'CustomForm.tsx')
EMBEDDING_PY = os.path.join(BASE_DIR, '..', 'bisheng', 'llm', 'domain', 'llm', 'embedding.py')
CONST_PY = os.path.join(BASE_DIR, '..', 'bisheng', 'llm', 'domain', 'const.py')
ADVANCED_PARAMS_TS = os.path.join(BASE_DIR, '..', '..', 'frontend', 'platform', 'src', 'util',
                                  'advancedParamsTemplates.ts')


class TestMiniMaxBackendConfig(unittest.TestCase):
    """Unit tests for MiniMax backend provider configuration via source analysis."""

    def setUp(self):
        with open(os.path.normpath(LLM_PY)) as f:
            self.llm_source = f.read()

    def test_minimax_uses_chat_openai_compatible(self):
        """MiniMax factory entry should use ChatOpenAICompatible client."""
        self.assertIn(
            "LLMServerType.MINIMAX.value: {'client': ChatOpenAICompatible",
            self.llm_source
        )

    def test_minimax_uses_openai_params_handler(self):
        """MiniMax factory entry should use _get_openai_params handler."""
        self.assertIn(
            "'params_handler': _get_openai_params}",
            self.llm_source.split('MINIMAX')[1].split('\n')[0]
        )

    def test_no_legacy_minimax_params_handler(self):
        """Legacy _get_minimax_params function should be removed."""
        self.assertNotIn('def _get_minimax_params', self.llm_source)

    def test_no_minimax_chat_import(self):
        """MiniMaxChat should not be imported in llm.py."""
        self.assertNotIn('MiniMaxChat', self.llm_source)

    def test_chat_openai_compatible_imported(self):
        """ChatOpenAICompatible should be imported in llm.py."""
        self.assertIn('ChatOpenAICompatible', self.llm_source)

    def test_minimax_server_type_defined(self):
        """MiniMax should be defined as a valid server type in const.py."""
        with open(os.path.normpath(CONST_PY)) as f:
            const_source = f.read()
        self.assertIn("MINIMAX = 'minimax'", const_source)

    def test_web_search_support_preserved(self):
        """parse_kwargs should still handle web_search for MINIMAX."""
        self.assertIn('LLMServerType.MINIMAX.value', self.llm_source)
        self.assertIn("'type': 'web_search'", self.llm_source)

    def test_minimax_chat_removed_from_core_ai(self):
        """Legacy MiniMaxChat should be removed from core.ai (no longer needed)."""
        with open(os.path.normpath(AI_INIT)) as f:
            ai_init_source = f.read()
        self.assertNotIn('MiniMaxChat', ai_init_source)

    def test_minimax_embedding_uses_openai(self):
        """MiniMax embedding should use OpenAIEmbeddings with _get_openai_params."""
        with open(os.path.normpath(EMBEDDING_PY)) as f:
            embedding_source = f.read()
        self.assertIn('LLMServerType.MINIMAX.value', embedding_source)
        self.assertIn('OpenAIEmbeddings', embedding_source)


class TestMiniMaxModelData(unittest.TestCase):
    """Unit tests for MiniMax model template data."""

    def setUp(self):
        with open(os.path.normpath(DATA_JSON)) as f:
            self.model_data = json.load(f)

    def test_minimax_key_exists(self):
        """MiniMax provider should be present in data.json."""
        self.assertIn('minimax', self.model_data)

    def test_minimax_has_m25_model(self):
        """MiniMax-M2.5 should be listed as an LLM model."""
        model_names = [m['model_name'] for m in self.model_data['minimax']]
        self.assertIn('MiniMax-M2.5', model_names)

    def test_minimax_has_m25_highspeed_model(self):
        """MiniMax-M2.5-highspeed should be listed as an LLM model."""
        model_names = [m['model_name'] for m in self.model_data['minimax']]
        self.assertIn('MiniMax-M2.5-highspeed', model_names)

    def test_minimax_has_text01_model(self):
        """MiniMax-Text-01 should still be listed for backward compatibility."""
        model_names = [m['model_name'] for m in self.model_data['minimax']]
        self.assertIn('MiniMax-Text-01', model_names)

    def test_minimax_m25_is_first(self):
        """MiniMax-M2.5 should be the first (default) model."""
        self.assertEqual(self.model_data['minimax'][0]['model_name'], 'MiniMax-M2.5')

    def test_minimax_models_are_llm_type(self):
        """All MiniMax models should be of type 'llm'."""
        for model in self.model_data['minimax']:
            self.assertEqual(model['model_type'], 'llm')

    def test_minimax_model_count(self):
        """MiniMax should have 3 models listed."""
        self.assertEqual(len(self.model_data['minimax']), 3)

    def test_data_json_is_valid_json(self):
        """data.json should be valid JSON with all expected providers."""
        expected_providers = [
            'openai', 'azure_openai', 'qwen', 'deepseek', 'minimax',
            'volcengine', 'silicon',
        ]
        for provider in expected_providers:
            self.assertIn(provider, self.model_data)


class TestMiniMaxFrontendConfig(unittest.TestCase):
    """Unit tests for MiniMax frontend configuration."""

    def setUp(self):
        with open(os.path.normpath(CUSTOM_FORM_TSX)) as f:
            self.form_source = f.read()

    def test_minimax_default_api_base_url(self):
        """Default API base URL should be https://api.minimax.io/v1 (new OpenAI-compatible API)."""
        self.assertIn('https://api.minimax.io/v1', self.form_source)

    def test_no_legacy_api_url(self):
        """Legacy API URL api.minimax.chat should not be present."""
        self.assertNotIn('api.minimax.chat', self.form_source)

    def test_minimax_requires_api_key(self):
        """MiniMax provider form should require an API key."""
        # Find minimax section and check for API key field
        minimax_idx = self.form_source.index('minimax:')
        section_end = self.form_source.index('],', minimax_idx) + 2
        minimax_section = self.form_source[minimax_idx:section_end]
        self.assertIn('openai_api_key', minimax_section)

    def test_minimax_requires_api_base(self):
        """MiniMax provider form should require an API base URL."""
        minimax_idx = self.form_source.index('minimax:')
        section_end = self.form_source.index('],', minimax_idx) + 2
        minimax_section = self.form_source[minimax_idx:section_end]
        self.assertIn('openai_api_base', minimax_section)


class TestMiniMaxAdvancedParams(unittest.TestCase):
    """Unit tests for MiniMax advanced parameters template."""

    def setUp(self):
        with open(os.path.normpath(ADVANCED_PARAMS_TS)) as f:
            self.template_source = f.read()

    def test_minimax_llm_template_exists(self):
        """minimax-llm template should exist in advancedParamsTemplates."""
        self.assertIn("'minimax-llm'", self.template_source)

    def test_minimax_mapping_exists(self):
        """minimax should be mapped to minimax-llm template."""
        self.assertIn("'minimax': 'minimax-llm'", self.template_source)

    def test_minimax_embedding_template_exists(self):
        """minimax-embedding template should exist."""
        self.assertIn("'minimax-embedding'", self.template_source)


class TestMiniMaxProviderLinks(unittest.TestCase):
    """Unit tests for MiniMax provider documentation links."""

    def setUp(self):
        use_link_path = os.path.join(
            BASE_DIR, '..', '..', 'frontend', 'platform', 'src', 'pages',
            'ModelPage', 'manage', 'useLink.ts'
        )
        with open(os.path.normpath(use_link_path)) as f:
            self.links_source = f.read()

    def test_minimax_api_key_url(self):
        """MiniMax should have an API key URL."""
        minimax_idx = self.links_source.index('minimax:')
        section_end = self.links_source.index('},', minimax_idx) + 2
        minimax_section = self.links_source[minimax_idx:section_end]
        self.assertIn('apiKeyUrl', minimax_section)
        self.assertIn('platform.minimaxi.com', minimax_section)

    def test_minimax_model_url(self):
        """MiniMax should have a model documentation URL."""
        minimax_idx = self.links_source.index('minimax:')
        section_end = self.links_source.index('},', minimax_idx) + 2
        minimax_section = self.links_source[minimax_idx:section_end]
        self.assertIn('modelUrl', minimax_section)


class TestMiniMaxIntegration(unittest.TestCase):
    """Integration tests for MiniMax provider (require MINIMAX_API_KEY)."""

    @unittest.skipUnless(
        os.environ.get('MINIMAX_API_KEY'),
        'MINIMAX_API_KEY not set'
    )
    def test_minimax_m25_chat_completion(self):
        """Test actual chat completion with MiniMax-M2.5 via OpenAI-compatible API."""
        from langchain_openai import ChatOpenAI

        llm = ChatOpenAI(
            model='MiniMax-M2.5',
            api_key=os.environ['MINIMAX_API_KEY'],
            base_url='https://api.minimax.io/v1',
            temperature=0.1,
            max_tokens=64,
        )
        response = llm.invoke('Say hello in one word.')
        self.assertIsNotNone(response.content)
        self.assertTrue(len(response.content) > 0)

    @unittest.skipUnless(
        os.environ.get('MINIMAX_API_KEY'),
        'MINIMAX_API_KEY not set'
    )
    def test_minimax_m25_highspeed_chat_completion(self):
        """Test actual chat completion with MiniMax-M2.5-highspeed."""
        from langchain_openai import ChatOpenAI

        llm = ChatOpenAI(
            model='MiniMax-M2.5-highspeed',
            api_key=os.environ['MINIMAX_API_KEY'],
            base_url='https://api.minimax.io/v1',
            temperature=0.1,
            max_tokens=64,
        )
        response = llm.invoke('Say hello in one word.')
        self.assertIsNotNone(response.content)
        self.assertTrue(len(response.content) > 0)

    @unittest.skipUnless(
        os.environ.get('MINIMAX_API_KEY'),
        'MINIMAX_API_KEY not set'
    )
    def test_minimax_m25_streaming(self):
        """Test streaming chat completion with MiniMax-M2.5."""
        from langchain_openai import ChatOpenAI

        llm = ChatOpenAI(
            model='MiniMax-M2.5',
            api_key=os.environ['MINIMAX_API_KEY'],
            base_url='https://api.minimax.io/v1',
            temperature=0.1,
            max_tokens=64,
            streaming=True,
        )
        chunks = list(llm.stream('Say hello in one word.'))
        self.assertTrue(len(chunks) > 0)
        full_content = ''.join(c.content for c in chunks)
        self.assertTrue(len(full_content) > 0)


if __name__ == '__main__':
    unittest.main()

src/frontend/platform/public/models/data.json

Lines changed: 15 additions & 4 deletions
@@ -80,10 +80,21 @@
         "model_type": "llm"
     }],
     "minimax": [{
-        "name": "model 1",
-        "model_name": "MiniMax-Text-01",
-        "model_type": "llm"
-    }],
+        "name": "model 1",
+        "model_name": "MiniMax-M2.5",
+        "model_type": "llm"
+    },
+    {
+        "name": "model 2",
+        "model_name": "MiniMax-M2.5-highspeed",
+        "model_type": "llm"
+    },
+    {
+        "name": "model 3",
+        "model_name": "MiniMax-Text-01",
+        "model_type": "llm"
+    }
+    ],
     "volcengine": [{
         "name": "model 1",
         "model_name": "deepseek-v3",

src/frontend/platform/src/pages/ModelPage/manage/CustomForm.tsx

Lines changed: 1 addition & 1 deletion
@@ -233,7 +233,7 @@ const modelProviders = {
         label: "Api Host",
         type: "text",
         placeholder: "",
-        default: "https://api.minimax.chat/v1",
+        default: "https://api.minimax.io/v1",
         required: true,
         key: "openai_api_base",
     },
