Skip to content

Commit

Permalink
Adding the Minimax model (#1009)
Browse files Browse the repository at this point in the history
### What problem does this PR solve?

Added support for MiniMax LLM

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

---------

Co-authored-by: cecilia-uu <[email protected]>
  • Loading branch information
cecilia-uu and cecilia-uu authored May 31, 2024
1 parent 5d2f713 commit 260c68f
Show file tree
Hide file tree
Showing 3 changed files with 58 additions and 1 deletion.
48 changes: 48 additions & 0 deletions api/db/init_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,11 @@ def init_superuser():
"logo": "",
"tags": "TEXT EMBEDDING, TEXT RE-RANK",
"status": "1",
},{
"name": "Minimax",
"logo": "",
"tags": "LLM,TEXT EMBEDDING",
"status": "1",
}
# {
# "name": "文心一言",
Expand Down Expand Up @@ -536,6 +541,49 @@ def init_llm_factory():
"max_tokens": 2048,
"model_type": LLMType.RERANK.value
},
# ------------------------ Minimax -----------------------
{
"fid": factory_infos[13]["name"],
"llm_name": "abab6.5-chat",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": LLMType.CHAT.value
},
{
"fid": factory_infos[13]["name"],
"llm_name": "abab6.5s-chat",
"tags": "LLM,CHAT,245k",
"max_tokens": 245760,
"model_type": LLMType.CHAT.value
},
{
"fid": factory_infos[13]["name"],
"llm_name": "abab6.5t-chat",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": LLMType.CHAT.value
},
{
"fid": factory_infos[13]["name"],
"llm_name": "abab6.5g-chat",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": LLMType.CHAT.value
},
{
"fid": factory_infos[13]["name"],
"llm_name": "abab5.5-chat",
"tags": "LLM,CHAT,16k",
"max_tokens": 16384,
"model_type": LLMType.CHAT.value
},
{
"fid": factory_infos[13]["name"],
"llm_name": "abab5.5s-chat",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": LLMType.CHAT.value
},
]
for info in factory_infos:
try:
Expand Down
3 changes: 2 additions & 1 deletion rag/llm/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,8 @@
"Xinference": XinferenceChat,
"Moonshot": MoonshotChat,
"DeepSeek": DeepSeekChat,
"BaiChuan": BaiChuanChat
"BaiChuan": BaiChuanChat,
"MiniMax": MiniMaxChat
}


Expand Down
8 changes: 8 additions & 0 deletions rag/llm/chat_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -464,3 +464,11 @@ def chat_streamly(self, system, history, gen_conf):
except Exception as e:
yield ans + "\n**ERROR**: " + str(e)
yield tk_count


class MiniMaxChat(Base):
    """Chat-completion client for the MiniMax provider.

    Thin subclass of ``Base`` that only supplies MiniMax's default model
    name and chat-completion endpoint; construction is otherwise delegated
    to the parent class.
    """

    def __init__(self, key, model_name="abab6.5s-chat",
                 base_url="https://api.minimax.chat/v1/text/chatcompletion_v2"):
        # Callers may explicitly pass base_url=None or "" — fall back to the
        # same MiniMax endpoint used as the signature default in that case.
        fallback = "https://api.minimax.chat/v1/text/chatcompletion_v2"
        super().__init__(key, model_name, base_url or fallback)

0 comments on commit 260c68f

Please sign in to comment.