Skip to content

Commit 0692cd5

Browse files
author
PR Bot
committed
feat: add MiniMax-M2.7 and M2.7-highspeed models as default
Add MiniMax-M2.7 and MiniMax-M2.7-highspeed as the newest model options, positioned as the default (first) models in the provider list. Existing M2.5 and Text-01 models are retained for backward compatibility. Updated tests to verify M2.7 models and added M2.7 integration tests.
1 parent b84d7d3 commit 0692cd5

2 files changed

Lines changed: 59 additions & 20 deletions

File tree

src/backend/test/test_minimax_provider.py

Lines changed: 47 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
"""Tests for MiniMax provider upgrade to OpenAI-compatible API (M2.5 support).
1+
"""Tests for MiniMax provider upgrade to OpenAI-compatible API (M2.7 support).
22
33
These tests verify the MiniMax provider changes without requiring the full
44
bisheng backend to be installed, by testing the specific files that were changed.
@@ -181,13 +181,23 @@ def test_minimax_models_exist(self):
181181
"""MiniMax models should be present in data.json."""
182182
self.assertIn('minimax', self.model_data)
183183

184+
def test_minimax_has_m27_model(self):
185+
"""MiniMax-M2.7 should be listed as an LLM model."""
186+
model_names = [m['model_name'] for m in self.model_data['minimax']]
187+
self.assertIn('MiniMax-M2.7', model_names)
188+
189+
def test_minimax_has_m27_highspeed_model(self):
190+
"""MiniMax-M2.7-highspeed should be listed as an LLM model."""
191+
model_names = [m['model_name'] for m in self.model_data['minimax']]
192+
self.assertIn('MiniMax-M2.7-highspeed', model_names)
193+
184194
def test_minimax_has_m25_model(self):
185-
"""MiniMax-M2.5 should be listed as an LLM model."""
195+
"""MiniMax-M2.5 should still be listed for backward compatibility."""
186196
model_names = [m['model_name'] for m in self.model_data['minimax']]
187197
self.assertIn('MiniMax-M2.5', model_names)
188198

189199
def test_minimax_has_m25_highspeed_model(self):
190-
"""MiniMax-M2.5-highspeed should be listed as an LLM model."""
200+
"""MiniMax-M2.5-highspeed should still be listed."""
191201
model_names = [m['model_name'] for m in self.model_data['minimax']]
192202
self.assertIn('MiniMax-M2.5-highspeed', model_names)
193203

@@ -196,18 +206,18 @@ def test_minimax_has_text01_model(self):
196206
model_names = [m['model_name'] for m in self.model_data['minimax']]
197207
self.assertIn('MiniMax-Text-01', model_names)
198208

199-
def test_minimax_m25_is_first(self):
200-
"""MiniMax-M2.5 should be the first (default) model."""
201-
self.assertEqual(self.model_data['minimax'][0]['model_name'], 'MiniMax-M2.5')
209+
def test_minimax_m27_is_first(self):
210+
"""MiniMax-M2.7 should be the first (default) model."""
211+
self.assertEqual(self.model_data['minimax'][0]['model_name'], 'MiniMax-M2.7')
202212

203213
def test_minimax_models_are_llm_type(self):
204214
"""All MiniMax models should be of type 'llm'."""
205215
for model in self.model_data['minimax']:
206216
self.assertEqual(model['model_type'], 'llm')
207217

208218
def test_minimax_model_count(self):
209-
"""MiniMax should have 3 models listed."""
210-
self.assertEqual(len(self.model_data['minimax']), 3)
219+
"""MiniMax should have 5 models listed."""
220+
self.assertEqual(len(self.model_data['minimax']), 5)
211221

212222

213223
class TestFrontendCustomForm(unittest.TestCase):
@@ -323,7 +333,7 @@ def test_no_legacy_params(self):
323333

324334
def test_preserves_model_name(self):
325335
"""Model name should be preserved."""
326-
for name in ['MiniMax-M2.5', 'MiniMax-M2.5-highspeed', 'MiniMax-Text-01']:
336+
for name in ['MiniMax-M2.7', 'MiniMax-M2.7-highspeed', 'MiniMax-M2.5', 'MiniMax-M2.5-highspeed', 'MiniMax-Text-01']:
327337
result = self._openai_params_fn(
328338
{'model': name},
329339
{'openai_api_key': 'key', 'openai_api_base': 'https://api.minimax.io/v1'},
@@ -377,12 +387,12 @@ class TestMiniMaxIntegration(unittest.TestCase):
377387
os.environ.get('MINIMAX_API_KEY'),
378388
'MINIMAX_API_KEY not set'
379389
)
380-
def test_minimax_m25_chat_completion(self):
381-
"""Test actual chat completion with MiniMax-M2.5 via OpenAI-compatible API."""
390+
def test_minimax_m27_chat_completion(self):
391+
"""Test actual chat completion with MiniMax-M2.7 via OpenAI-compatible API."""
382392
from langchain_openai import ChatOpenAI
383393

384394
llm = ChatOpenAI(
385-
model='MiniMax-M2.5',
395+
model='MiniMax-M2.7',
386396
api_key=os.environ['MINIMAX_API_KEY'],
387397
base_url='https://api.minimax.io/v1',
388398
temperature=0.1,
@@ -396,12 +406,12 @@ def test_minimax_m25_chat_completion(self):
396406
os.environ.get('MINIMAX_API_KEY'),
397407
'MINIMAX_API_KEY not set'
398408
)
399-
def test_minimax_m25_highspeed_chat_completion(self):
400-
"""Test actual chat completion with MiniMax-M2.5-highspeed."""
409+
def test_minimax_m27_highspeed_chat_completion(self):
410+
"""Test actual chat completion with MiniMax-M2.7-highspeed."""
401411
from langchain_openai import ChatOpenAI
402412

403413
llm = ChatOpenAI(
404-
model='MiniMax-M2.5-highspeed',
414+
model='MiniMax-M2.7-highspeed',
405415
api_key=os.environ['MINIMAX_API_KEY'],
406416
base_url='https://api.minimax.io/v1',
407417
temperature=0.1,
@@ -415,12 +425,12 @@ def test_minimax_m25_highspeed_chat_completion(self):
415425
os.environ.get('MINIMAX_API_KEY'),
416426
'MINIMAX_API_KEY not set'
417427
)
418-
def test_minimax_m25_streaming(self):
419-
"""Test streaming chat completion with MiniMax-M2.5."""
428+
def test_minimax_m27_streaming(self):
429+
"""Test streaming chat completion with MiniMax-M2.7."""
420430
from langchain_openai import ChatOpenAI
421431

422432
llm = ChatOpenAI(
423-
model='MiniMax-M2.5',
433+
model='MiniMax-M2.7',
424434
api_key=os.environ['MINIMAX_API_KEY'],
425435
base_url='https://api.minimax.io/v1',
426436
temperature=0.1,
@@ -432,6 +442,25 @@ def test_minimax_m25_streaming(self):
432442
full_content = ''.join(c.content for c in chunks)
433443
self.assertTrue(len(full_content) > 0)
434444

445+
@unittest.skipUnless(
446+
os.environ.get('MINIMAX_API_KEY'),
447+
'MINIMAX_API_KEY not set'
448+
)
449+
def test_minimax_m25_chat_completion(self):
450+
"""Test actual chat completion with MiniMax-M2.5 via OpenAI-compatible API."""
451+
from langchain_openai import ChatOpenAI
452+
453+
llm = ChatOpenAI(
454+
model='MiniMax-M2.5',
455+
api_key=os.environ['MINIMAX_API_KEY'],
456+
base_url='https://api.minimax.io/v1',
457+
temperature=0.1,
458+
max_tokens=64,
459+
)
460+
response = llm.invoke('Say hello in one word.')
461+
self.assertIsNotNone(response.content)
462+
self.assertTrue(len(response.content) > 0)
463+
435464

436465
if __name__ == '__main__':
437466
unittest.main()

src/frontend/platform/public/models/data.json

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -81,16 +81,26 @@
8181
}],
8282
"minimax": [{
8383
"name": "model 1",
84-
"model_name": "MiniMax-M2.5",
84+
"model_name": "MiniMax-M2.7",
8585
"model_type": "llm"
8686
},
8787
{
8888
"name": "model 2",
89-
"model_name": "MiniMax-M2.5-highspeed",
89+
"model_name": "MiniMax-M2.7-highspeed",
9090
"model_type": "llm"
9191
},
9292
{
9393
"name": "model 3",
94+
"model_name": "MiniMax-M2.5",
95+
"model_type": "llm"
96+
},
97+
{
98+
"name": "model 4",
99+
"model_name": "MiniMax-M2.5-highspeed",
100+
"model_type": "llm"
101+
},
102+
{
103+
"name": "model 5",
94104
"model_name": "MiniMax-Text-01",
95105
"model_type": "llm"
96106
}

0 commit comments

Comments (0)