Add chatglmTurbo model

Kakune55 2023-11-10 16:16:28 +08:00
parent 3c94b93196
commit caccd39395
4 changed files with 44 additions and 6 deletions

View File

@@ -0,0 +1,27 @@
import zhipuai

zhipuai.api_key = "83f977afedc7f414dea579a2551298c2.sydItr1sZT8UPXWb"

def service(prompt, history=""):
    # Single-turn request when no history is supplied
    if history == "":
        response = zhipuai.model_api.invoke(
            model="chatglm_turbo",
            prompt=[
                {"role": "user", "content": prompt},
            ]
        )
    # Multi-turn request: replay the two stored exchanges (history[1], then history[0]) before the new prompt
    else:
        response = zhipuai.model_api.invoke(
            model="chatglm_turbo",
            prompt=[
                {"role": "user", "content": history[1]["user"]},
                {"role": "assistant", "content": history[1]["bot"]},
                {"role": "user", "content": history[0]["user"]},
                {"role": "assistant", "content": history[0]["bot"]},
                {"role": "user", "content": prompt},
            ]
        )
    if response["code"] == 200:
        return 200, response["data"]["choices"][0]["content"], response["data"]["usage"]["total_tokens"]
    else:
        # "code" is an int, so convert it before concatenating with the error message
        return 50, str(response["code"]) + response["msg"], 0
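
For reference, a minimal usage sketch of the new module (not part of the commit). It assumes history is a list of {"user": ..., "bot": ...} dicts with the most recent exchange at index 0, matching how the else branch above indexes it; the prompts below are placeholders.

# Hypothetical usage of chatglmTurbo.service (illustration only)
import chatglmTurbo

# Single-turn call: no history
code, output, tokens_used = chatglmTurbo.service("Explain quicksort in one sentence.")

# Multi-turn call: two stored exchanges, most recent first, plus the new prompt
history = [
    {"user": "What is its average time complexity?", "bot": "On average it runs in O(n log n)."},
    {"user": "What is quicksort?", "bot": "Quicksort is a divide-and-conquer sorting algorithm."},
]
code, output, tokens_used = chatglmTurbo.service("Now show a Python implementation.", history)

if code == 200:
    print(output, tokens_used)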

View File

@@ -1,6 +1,6 @@
 import flask , requests , json
 from flask_cors import CORS
-import db , qwenTurbo
+import db , qwenTurbo , chatglmTurbo
@@ -24,11 +24,21 @@ def post_data():
     elif surplusToken <= 0:
         return {"code":40,"output":"Token has been use up"}
-    if userRequest["model"] == "qwen-turbo":
+    if userRequest["model"] == "qwen-turbo":  # call qwen-turbo
         if userRequest["context"] == 1:  # whether to include conversation context
-            code , output , tokenUsed = qwenTurbo.service(userRequest['userkey'],userRequest['prompt'],userRequest['history'])
+            code , output , tokenUsed = qwenTurbo.service(userRequest['prompt'],userRequest['history'])
         elif userRequest["context"] == 0:
-            code , output , tokenUsed = qwenTurbo.service(userRequest['userkey'],userRequest['prompt'])
+            code , output , tokenUsed = qwenTurbo.service(userRequest['prompt'])
+    if userRequest["model"] == "chatglm-turbo":  # call chatglm-turbo
+        if userRequest["context"] == 1:  # whether to include conversation context
+            code , output , tokenUsed = chatglmTurbo.service(userRequest['prompt'],userRequest['history'])
+        elif userRequest["context"] == 0:
+            code , output , tokenUsed = chatglmTurbo.service(userRequest['prompt'])
     db.reduce_value(userRequest['userkey'], tokenUsed)
     return {"code":code,"output":output,"surplus":surplusToken}

View File

@@ -8,7 +8,7 @@ header = {
     "Authorization":"Bearer sk-69129a5d7fc6468a9f6f30d6935254c6"
 }
-def service(userkey,prompt,history = ""):
+def service(prompt,history = ""):
     # set up the request payload
     if history == "":
         data = {

View File

@@ -108,6 +108,7 @@
             <p>使用的AI模型</p>
             <select id="setUpDropdown" defaultValue="qwen-turbo">
                 <option value="qwen-turbo">qwen-turbo</option>
+                <option value="chatglm-turbo">chatglmTurbo</option>
             </select>
             <hr>
             <h3>当前UserKey</h3>
@@ -261,7 +262,7 @@
         .then(data => {
             // handle the returned JSON data here
             if (data["code"] == 200) {
-                appendMessage("KakuAI",data["output"]);
+                appendMessage("KakuAI"+"("+document.getElementById("setUpDropdown").value+")",data["output"]);
                 document.getElementById("showtoken").innerHTML = "<br>剩余Tokens:" + data["surplus"];
             }
             else{