MiniMax Client Usage
2024-08-20
chatcompletion_pro API
Standard request
python
import requests

group_id = "your_group_id_here"
api_key = "your_api_key_here"

url = "https://api.minimax.chat/v1/text/chatcompletion_pro?GroupId=" + group_id
headers = {"Content-Type": "application/json", "Authorization": "Bearer " + api_key}

# Request payload for a standard (non-streaming) chatcompletion_pro call
payload = {
    "model": "abab6.5s-chat",
    "tokens_to_generate": 2048,
    "temperature": 0.1,
    "top_p": 0.9,
    "stream": False,
    "reply_constraints": {"sender_type": "BOT", "sender_name": "MM智能助理"},
    "sample_messages": [],
    "plugins": [],
    "messages": [
        {
            "sender_type": "USER",
            "sender_name": "小明",
            "text": "帮我用英文翻译下面这句话:我是谁",
        }
    ],
    "bot_setting": [
        {
            "bot_name": "MM智能助理",
            "content": "MM智能助理是一款由MiniMax自研的,没有调用其他产品的接口的大型语言模型。MiniMax是一家中国科技公司,一直致力于进行大模型相关的研究。",
        }
    ],
}

response = requests.post(url, headers=headers, json=payload)
print(response.status_code)
print(response.text)
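
The non-streaming response is a JSON body. Below is a minimal sketch of reading it; the "reply" and "base_resp" field names are assumptions about the chatcompletion_pro response schema and should be checked against the official API reference.
python
# Hedged sketch: extract the reply text from the standard (non-streaming) response.
# The "base_resp" / "reply" fields are assumed; verify against the API reference.
data = response.json()
if data.get("base_resp", {}).get("status_code", 0) != 0:
    # A non-zero status code is assumed to indicate an API-level error
    print("Request failed:", data["base_resp"].get("status_msg"))
else:
    print("Reply:", data.get("reply"))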
Streaming request
python
import requests

group_id = "your_group_id_here"
api_key = "your_api_key_here"

url = "https://api.minimax.chat/v1/text/chatcompletion_pro?GroupId=" + group_id
headers = {"Content-Type": "application/json", "Authorization": "Bearer " + api_key}

# Same payload as the standard request, but with "stream" set to True
payload = {
    "model": "abab6.5s-chat",
    "tokens_to_generate": 2048,
    "temperature": 0.1,
    "top_p": 0.9,
    "stream": True,
    "reply_constraints": {"sender_type": "BOT", "sender_name": "MM智能助理"},
    "sample_messages": [],
    "plugins": [],
    "messages": [
        {
            "sender_type": "USER",
            "sender_name": "小明",
            "text": "帮我用英文翻译下面这句话:我是谁",
        }
    ],
    "bot_setting": [
        {
            "bot_name": "MM智能助理",
            "content": "MM智能助理是一款由MiniMax自研的,没有调用其他产品的接口的大型语言模型。MiniMax是一家中国科技公司,一直致力于进行大模型相关的研究。",
        }
    ],
}

response = requests.post(url, headers=headers, json=payload, stream=True)

# Process the response stream line by line
for line in response.iter_lines():
    if line:
        # Handle each received chunk
        print(line.decode("utf-8"))
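
Instead of only printing raw lines, the stream can be parsed incrementally. The following hedged sketch replaces the print loop above; it assumes each non-empty line is an SSE-style "data: {...}" chunk whose incremental text sits under choices[*]["messages"][*]["text"], which should be verified against the actual streaming schema.
python
import json

# Hedged sketch: parse the streamed chunks in place of the raw print loop above.
# The "data: " prefix and choices[*]["messages"][*]["text"] layout are assumptions;
# the final chunk may instead carry the aggregated reply and usage statistics.
for line in response.iter_lines():
    if not line:
        continue
    text = line.decode("utf-8")
    if not text.startswith("data:"):
        continue
    body = text[len("data:"):].strip()
    if not body.startswith("{"):
        continue  # skip any non-JSON control lines
    chunk = json.loads(body)
    for choice in chunk.get("choices") or []:
        for message in choice.get("messages", []):
            print(message.get("text", ""), end="", flush=True)
print()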
chatcompletion_v2 API
Standard request
python
import requests
import json

url = "https://api.minimax.chat/v1/text/chatcompletion_v2"
api_key = "your_api_key_here"

# Request payload for a standard (non-streaming) chatcompletion_v2 call
payload = json.dumps(
    {
        "model": "abab6.5s-chat",  # replace with the specific model name you want to use
        "messages": [
            {
                "role": "system",
                "name": "MM智能助理",  # optional field
                "content": "MM智能助理是一款由MiniMax自研的,没有调用其他产品的接口的大型语言模型。MiniMax是一家中国科技公司,一直致力于进行大模型相关的研究。",
            },
            {
                "role": "user",
                "name": "用户",  # optional field
                "content": "你会按照以下要求回复我的内容:“根据我给出的多段信息分别判断信息文本内容表达了哪类情绪,并给出判断的理由,判断理由务必精简、准确。”我的内容是:“1、学习三星堆正确拍照姿势,留下难忘回忆!2、太可爱啦!换了个小猫图标!3、当代社畜分类图鉴,你是哪种?”",
            },
        ],
        "tool_choice": "none",
        "stream": False,
        "max_tokens": 2048,
        "temperature": 0.1,
        "top_p": 0.9,
    }
)
headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}

response = requests.post(url, headers=headers, data=payload)
print(response.text)
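
The v2 endpoint returns an OpenAI-style body. Below is a minimal sketch of pulling out the assistant message, assuming the usual choices[0]["message"]["content"] layout (confirm against the v2 response reference).
python
# Hedged sketch: extract the assistant's reply from the non-streaming v2 response.
# The "choices" / "message" / "content" layout is an assumption to verify.
data = response.json()
choices = data.get("choices") or []
if choices:
    print(choices[0]["message"]["content"])
else:
    # "base_resp" is assumed to carry error details when no choice is returned
    print("No choices returned:", data.get("base_resp"))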
Streaming request
python
import requests
import json

url = "https://api.minimax.chat/v1/text/chatcompletion_v2"
api_key = "your_api_key_here"

# Same payload as the standard v2 request, but with "stream" set to True
payload = json.dumps(
    {
        "model": "abab6.5s-chat",  # replace with the specific model name you want to use
        "messages": [
            {
                "role": "system",
                "name": "MM智能助理",  # optional field
                "content": "MM智能助理是一款由MiniMax自研的,没有调用其他产品的接口的大型语言模型。MiniMax是一家中国科技公司,一直致力于进行大模型相关的研究。",
            },
            {
                "role": "user",
                "name": "用户",  # optional field
                "content": "你会按照以下要求回复我的内容:“根据我给出的多段信息分别判断信息文本内容表达了哪类情绪,并给出判断的理由,判断理由务必精简、准确。”我的内容是:“1、学习三星堆正确拍照姿势,留下难忘回忆!2、太可爱啦!换了个小猫图标!3、当代社畜分类图鉴,你是哪种?”",
            },
        ],
        "tool_choice": "none",
        "stream": True,
        "max_tokens": 2048,
        "temperature": 0.1,
        "top_p": 0.9,
    }
)
headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}

response = requests.post(url, headers=headers, data=payload, stream=True)

# Process the response stream line by line
for line in response.iter_lines():
    if line:
        # Handle each received chunk
        print(line.decode("utf-8"))
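
For the v2 stream, each "data:" line is expected to carry an OpenAI-compatible incremental delta. The hedged sketch below replaces the print loop above; the choices[*]["delta"]["content"] layout and the "data: " prefix are assumptions to check against the docs.
python
# Hedged sketch: accumulate streamed v2 deltas in place of the raw print loop above.
# The "data: " prefix and choices[*]["delta"]["content"] layout are assumptions.
full_reply = []
for line in response.iter_lines():
    if not line:
        continue
    text = line.decode("utf-8")
    if not text.startswith("data:"):
        continue
    body = text[len("data:"):].strip()
    if not body.startswith("{"):
        continue  # skip any non-JSON control lines (e.g. a possible "[DONE]" sentinel)
    chunk = json.loads(body)
    for choice in chunk.get("choices") or []:
        delta = choice.get("delta", {})
        if "content" in delta:
            full_reply.append(delta["content"])

print("".join(full_reply))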