Refa: add MiniMax-M2 and remove deprecated MiniMax models (#11642)

### What problem does this PR solve?

Add the MiniMax-M2 chat model and remove the deprecated abab-series MiniMax models.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
- [x] Refactoring
Yongteng Lei
2025-12-02 14:43:44 +08:00
committed by GitHub
parent 519f03097e
commit a713f54732
15 changed files with 62 additions and 112 deletions

View File

@@ -1226,39 +1226,14 @@
         {
             "name": "MiniMax",
             "logo": "",
-            "tags": "LLM,TEXT EMBEDDING",
+            "tags": "LLM",
             "status": "1",
             "rank": "810",
             "llm": [
                 {
-                    "llm_name": "abab6.5-chat",
-                    "tags": "LLM,CHAT,8k",
-                    "max_tokens": 8192,
-                    "model_type": "chat"
-                },
-                {
-                    "llm_name": "abab6.5s-chat",
-                    "tags": "LLM,CHAT,245k",
-                    "max_tokens": 245760,
-                    "model_type": "chat",
-                    "is_tools": true
-                },
-                {
-                    "llm_name": "abab6.5t-chat",
-                    "tags": "LLM,CHAT,8k",
-                    "max_tokens": 8192,
-                    "model_type": "chat"
-                },
-                {
-                    "llm_name": "abab6.5g-chat",
-                    "tags": "LLM,CHAT,8k",
-                    "max_tokens": 8192,
-                    "model_type": "chat"
-                },
-                {
-                    "llm_name": "abab5.5s-chat",
-                    "tags": "LLM,CHAT,8k",
-                    "max_tokens": 8192,
+                    "llm_name": "MiniMax-M2",
+                    "tags": "LLM,CHAT,200k",
+                    "max_tokens": 200000,
                     "model_type": "chat"
                 }
             ]

View File

@@ -51,6 +51,7 @@ class SupportedLiteLLMProvider(StrEnum):
     AI_302 = "302.AI"
     JiekouAI = "Jiekou.AI"
     ZHIPU_AI = "ZHIPU-AI"
+    MiniMax = "MiniMax"
 
 
 FACTORY_DEFAULT_BASE_URL = {
@@ -73,6 +74,7 @@ FACTORY_DEFAULT_BASE_URL = {
     SupportedLiteLLMProvider.Anthropic: "https://api.anthropic.com/",
     SupportedLiteLLMProvider.JiekouAI: "https://api.jiekou.ai/openai",
     SupportedLiteLLMProvider.ZHIPU_AI: "https://open.bigmodel.cn/api/paas/v4",
+    SupportedLiteLLMProvider.MiniMax: "https://api.minimaxi.com/v1",
 }
@@ -105,6 +107,7 @@ LITELLM_PROVIDER_PREFIX = {
     SupportedLiteLLMProvider.AI_302: "openai/",
     SupportedLiteLLMProvider.JiekouAI: "openai/",
     SupportedLiteLLMProvider.ZHIPU_AI: "openai/",
+    SupportedLiteLLMProvider.MiniMax: "openai/",
 }
 
 ChatModel = globals().get("ChatModel", {})
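Taken together, these additions register MiniMax as a LiteLLM-routed provider: the enum member names the factory, FACTORY_DEFAULT_BASE_URL points it at MiniMax's OpenAI-compatible endpoint, and LITELLM_PROVIDER_PREFIX addresses its models through litellm's openai/ route. A minimal sketch of how these two lookups could be combined; the resolve_litellm_target helper below is illustrative and not a function in the repository:

from enum import Enum


class SupportedLiteLLMProvider(str, Enum):  # the repo uses strenum.StrEnum; str + Enum keeps this sketch self-contained
    MiniMax = "MiniMax"


FACTORY_DEFAULT_BASE_URL = {SupportedLiteLLMProvider.MiniMax: "https://api.minimaxi.com/v1"}
LITELLM_PROVIDER_PREFIX = {SupportedLiteLLMProvider.MiniMax: "openai/"}


def resolve_litellm_target(factory, model_name, base_url=None):
    """Hypothetical helper: build the litellm model string and effective base URL for a factory."""
    prefix = LITELLM_PROVIDER_PREFIX.get(factory, "")
    return prefix + model_name, base_url or FACTORY_DEFAULT_BASE_URL.get(factory)


# resolve_litellm_target(SupportedLiteLLMProvider.MiniMax, "MiniMax-M2")
# -> ("openai/MiniMax-M2", "https://api.minimaxi.com/v1")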

View File

@@ -28,7 +28,6 @@ from urllib.parse import urljoin
 import json_repair
 import litellm
 import openai
-import requests
 from openai import AsyncOpenAI, OpenAI
 from openai.lib.azure import AzureOpenAI
 from strenum import StrEnum
@@ -1015,86 +1014,6 @@ class VolcEngineChat(Base):
         super().__init__(ark_api_key, model_name, base_url, **kwargs)
 
 
-class MiniMaxChat(Base):
-    _FACTORY_NAME = "MiniMax"
-
-    def __init__(self, key, model_name, base_url="https://api.minimax.chat/v1/text/chatcompletion_v2", **kwargs):
-        super().__init__(key, model_name, base_url=base_url, **kwargs)
-        if not base_url:
-            base_url = "https://api.minimax.chat/v1/text/chatcompletion_v2"
-        self.base_url = base_url
-        self.model_name = model_name
-        self.api_key = key
-
-    def _clean_conf(self, gen_conf):
-        for k in list(gen_conf.keys()):
-            if k not in ["temperature", "top_p", "max_tokens"]:
-                del gen_conf[k]
-        return gen_conf
-
-    def _chat(self, history, gen_conf):
-        headers = {
-            "Authorization": f"Bearer {self.api_key}",
-            "Content-Type": "application/json",
-        }
-        payload = json.dumps({"model": self.model_name, "messages": history, **gen_conf})
-        response = requests.request("POST", url=self.base_url, headers=headers, data=payload)
-        response = response.json()
-        ans = response["choices"][0]["message"]["content"].strip()
-        if response["choices"][0]["finish_reason"] == "length":
-            if is_chinese(ans):
-                ans += LENGTH_NOTIFICATION_CN
-            else:
-                ans += LENGTH_NOTIFICATION_EN
-        return ans, total_token_count_from_response(response)
-
-    def chat_streamly(self, system, history, gen_conf):
-        if system and history and history[0].get("role") != "system":
-            history.insert(0, {"role": "system", "content": system})
-        for k in list(gen_conf.keys()):
-            if k not in ["temperature", "top_p", "max_tokens"]:
-                del gen_conf[k]
-        ans = ""
-        total_tokens = 0
-        try:
-            headers = {
-                "Authorization": f"Bearer {self.api_key}",
-                "Content-Type": "application/json",
-            }
-            payload = json.dumps(
-                {
-                    "model": self.model_name,
-                    "messages": history,
-                    "stream": True,
-                    **gen_conf,
-                }
-            )
-            response = requests.request(
-                "POST",
-                url=self.base_url,
-                headers=headers,
-                data=payload,
-            )
-            for resp in response.text.split("\n\n")[:-1]:
-                resp = json.loads(resp[6:])
-                text = ""
-                if "choices" in resp and "delta" in resp["choices"][0]:
-                    text = resp["choices"][0]["delta"]["content"]
-                ans = text
-                tol = total_token_count_from_response(resp)
-                if not tol:
-                    total_tokens += num_tokens_from_string(text)
-                else:
-                    total_tokens = tol
-                yield ans
-        except Exception as e:
-            yield ans + "\n**ERROR**: " + str(e)
-
-        yield total_tokens
-
-
 class MistralChat(Base):
     _FACTORY_NAME = "Mistral"
@@ -1642,6 +1561,7 @@ class LiteLLMBase(ABC):
"302.AI",
"Jiekou.AI",
"ZHIPU-AI",
"MiniMax",
]
 
     def __init__(self, key, model_name, base_url=None, **kwargs):
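With the requests-based MiniMaxChat gone, MiniMax chat traffic is expected to flow through the shared LiteLLM-backed base class like the other OpenAI-compatible factories listed above. A rough sketch of the equivalent direct call, assuming the openai/ prefix and default base URL added in this PR; this uses litellm's public completion API rather than RAGFlow internals, and the key is a placeholder:

import litellm

# Assumption: MiniMax-M2 is reached via litellm's OpenAI-compatible route,
# using the prefix and default base URL introduced in this PR.
response = litellm.completion(
    model="openai/MiniMax-M2",
    api_key="YOUR_MINIMAX_API_KEY",          # placeholder, not a real key
    api_base="https://api.minimaxi.com/v1",  # default from FACTORY_DEFAULT_BASE_URL
    messages=[{"role": "user", "content": "Hello"}],
    temperature=0.7,
    max_tokens=512,
)
print(response.choices[0].message.content)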

View File

@@ -628,6 +628,10 @@ export default {
'Für chinesische Benutzer ist keine Eingabe erforderlich oder verwenden Sie https://dashscope.aliyuncs.com/compatible-mode/v1. Für internationale Benutzer verwenden Sie https://dashscope-intl.aliyuncs.com/compatible-mode/v1',
tongyiBaseUrlPlaceholder:
'(Nur für internationale Benutzer, bitte Hinweis beachten)',
minimaxBaseUrlTip:
'Nur für internationale Nutzer: https://api.minimax.io/v1 verwenden.',
minimaxBaseUrlPlaceholder:
'(Nur für internationale Benutzer, https://api.minimax.io/v1 eintragen)',
modify: 'Ändern',
systemModelSettings: 'Standardmodelle festlegen',
chatModel: 'Chat-Modell',

View File

@@ -858,6 +858,10 @@ Example: Virtual Hosted Style`,
tongyiBaseUrlTip:
'For Chinese users, no need to fill in or use https://dashscope.aliyuncs.com/compatible-mode/v1. For international users, use https://dashscope-intl.aliyuncs.com/compatible-mode/v1',
tongyiBaseUrlPlaceholder: '(International users only, please see tip)',
minimaxBaseUrlTip:
'International users only: use https://api.minimax.io/v1',
minimaxBaseUrlPlaceholder:
'(International users only, fill in https://api.minimax.io/v1)',
modify: 'Modify',
systemModelSettings: 'Set default models',
chatModel: 'LLM',

View File

@@ -344,6 +344,10 @@ export default {
'Para usuarios chinos, no es necesario rellenar o usar https://dashscope.aliyuncs.com/compatible-mode/v1. Para usuarios internacionales, usar https://dashscope-intl.aliyuncs.com/compatible-mode/v1',
tongyiBaseUrlPlaceholder:
'(Solo para usuarios internacionales, por favor ver consejo)',
minimaxBaseUrlTip:
'Solo usuarios internacionales: utilice https://api.minimax.io/v1.',
minimaxBaseUrlPlaceholder:
'(Solo usuarios internacionales, ingrese https://api.minimax.io/v1)',
modify: 'Modificar',
systemModelSettings: 'Establecer modelos predeterminados',
chatModel: 'Modelo de chat',

View File

@@ -526,6 +526,10 @@ export default {
'Pour les utilisateurs chinois, pas besoin de remplir ou utiliser https://dashscope.aliyuncs.com/compatible-mode/v1. Pour les utilisateurs internationaux, utilisez https://dashscope-intl.aliyuncs.com/compatible-mode/v1',
tongyiBaseUrlPlaceholder:
"(Utilisateurs internationaux uniquement, veuillez consulter l'astuce)",
minimaxBaseUrlTip:
'Utilisateurs internationaux uniquement : utilisez https://api.minimax.io/v1.',
minimaxBaseUrlPlaceholder:
'(Utilisateurs internationaux uniquement, renseignez https://api.minimax.io/v1)',
modify: 'Modifier',
systemModelSettings: 'Définir les modèles par défaut',
chatModel: 'Modèle de chat',

View File

@@ -516,6 +516,10 @@ export default {
'Untuk pengguna Tiongkok, tidak perlu diisi atau gunakan https://dashscope.aliyuncs.com/compatible-mode/v1. Untuk pengguna internasional, gunakan https://dashscope-intl.aliyuncs.com/compatible-mode/v1',
tongyiBaseUrlPlaceholder:
'(Hanya untuk pengguna internasional, silakan lihat tip)',
minimaxBaseUrlTip:
'Hanya untuk pengguna internasional: gunakan https://api.minimax.io/v1.',
minimaxBaseUrlPlaceholder:
'(Hanya untuk pengguna internasional, isi https://api.minimax.io/v1)',
modify: 'Ubah',
systemModelSettings: 'Tetapkan model default',
chatModel: 'Model Obrolan',

View File

@@ -557,6 +557,10 @@ export default {
tongyiBaseUrlTip:
'中国ユーザーの場合、記入不要または https://dashscope.aliyuncs.com/compatible-mode/v1 を使用してください。国際ユーザーは https://dashscope-intl.aliyuncs.com/compatible-mode/v1 を使用してください',
tongyiBaseUrlPlaceholder: '(国際ユーザーのみ、ヒントをご覧ください)',
minimaxBaseUrlTip:
'国際ユーザーのみhttps://api.minimax.io/v1 を使用してください。',
minimaxBaseUrlPlaceholder:
'(国際ユーザーのみ、https://api.minimax.io/v1 を入力してください)',
modify: '変更',
systemModelSettings: 'デフォルトモデルを設定する',
chatModel: 'チャットモデル',

View File

@@ -508,6 +508,10 @@ export default {
'Para usuários chineses, não é necessário preencher ou usar https://dashscope.aliyuncs.com/compatible-mode/v1. Para usuários internacionais, use https://dashscope-intl.aliyuncs.com/compatible-mode/v1',
tongyiBaseUrlPlaceholder:
'(Apenas para usuários internacionais, consulte a dica)',
minimaxBaseUrlTip:
'Somente usuários internacionais: use https://api.minimax.io/v1.',
minimaxBaseUrlPlaceholder:
'(Somente para usuários internacionais, preencha https://api.minimax.io/v1)',
modify: 'Modificar',
systemModelSettings: 'Definir modelos padrão',
chatModel: 'Modelo de chat',

View File

@@ -846,6 +846,10 @@ export default {
'Для китайских пользователей не нужно заполнять или используйте https://dashscope.aliyuncs.com/compatible-mode/v1. Для международных пользователей используйте https://dashscope-intl.aliyuncs.com/compatible-mode/v1',
tongyiBaseUrlPlaceholder:
'(Только для международных пользователей, см. подсказку)',
minimaxBaseUrlTip:
'Только для международных пользователей: используйте https://api.minimax.io/v1.',
minimaxBaseUrlPlaceholder:
'(Только для международных пользователей, введите https://api.minimax.io/v1)',
modify: 'Изменить',
systemModelSettings: 'Установить модели по умолчанию',
chatModel: 'LLM',

View File

@@ -558,6 +558,10 @@ export default {
baseUrl: 'Base-Url',
baseUrlTip:
'Nếu khóa API của bạn từ OpenAI, chỉ cần bỏ qua nó. Bất kỳ nhà cung cấp trung gian nào khác sẽ cung cấp URL cơ sở này với khóa API.',
minimaxBaseUrlTip:
'Chỉ người dùng quốc tế: dùng https://api.minimax.io/v1.',
minimaxBaseUrlPlaceholder:
'(Chỉ dành cho người dùng quốc tế, điền https://api.minimax.io/v1)',
modify: 'Sửa đổi',
systemModelSettings: 'Đặt mô hình mặc định',
chatModel: 'Mô hình trò chuyện',

View File

@@ -596,6 +596,8 @@ export default {
tongyiBaseUrlTip:
'中國用戶無需填寫或使用 https://dashscope.aliyuncs.com/compatible-mode/v1。國際用戶請使用 https://dashscope-intl.aliyuncs.com/compatible-mode/v1',
tongyiBaseUrlPlaceholder: '(僅國際用戶,請參閱提示)',
minimaxBaseUrlTip: '僅國際用戶:使用 https://api.minimax.io/v1。',
minimaxBaseUrlPlaceholder: '(僅國際用戶填寫 https://api.minimax.io/v1)',
modify: '修改',
systemModelSettings: '設定預設模型',
chatModel: '聊天模型',

View File

@@ -813,6 +813,8 @@ General实体和关系提取提示来自 GitHub - microsoft/graphrag基于
tongyiBaseUrlTip:
'对于中国用户,不需要填写或使用 https://dashscope.aliyuncs.com/compatible-mode/v1。对于国际用户使用 https://dashscope-intl.aliyuncs.com/compatible-mode/v1。',
tongyiBaseUrlPlaceholder: '(仅国际用户需要)',
minimaxBaseUrlTip: '仅国际用户:使用 https://api.minimax.io/v1。',
minimaxBaseUrlPlaceholder: '(仅国际用户填写 https://api.minimax.io/v1)',
modify: '修改',
systemModelSettings: '设置默认模型',
chatModel: 'LLM',

View File

@@ -34,6 +34,7 @@ const modelsWithBaseUrl = [
   LLMFactory.OpenAI,
   LLMFactory.AzureOpenAI,
   LLMFactory.TongYiQianWen,
+  LLMFactory.MiniMax,
 ];
 
 const ApiKeyModal = ({
@@ -109,7 +110,16 @@ const ApiKeyModal = ({
name="base_url"
render={({ field }) => (
<FormItem>
<FormLabel className="text-sm font-medium text-text-primary">
<FormLabel
className="text-sm font-medium text-text-primary"
tooltip={
llmFactory === LLMFactory.MiniMax
? t('minimaxBaseUrlTip')
: llmFactory === LLMFactory.TongYiQianWen
? t('tongyiBaseUrlTip')
: t('baseUrlTip')
}
>
{t('baseUrl')}
</FormLabel>
<FormControl>
@@ -118,7 +128,9 @@ const ApiKeyModal = ({
                   placeholder={
                     llmFactory === LLMFactory.TongYiQianWen
                       ? t('tongyiBaseUrlPlaceholder')
-                      : 'https://api.openai.com/v1'
+                      : llmFactory === LLMFactory.MiniMax
+                        ? t('minimaxBaseUrlPlaceholder')
+                        : 'https://api.openai.com/v1'
                   }
                   onKeyDown={handleKeyDown}
                   className="w-full"