vLLM integration

yanyuxiyangzk@126.com 2024-04-03 18:25:03 +08:00
parent 405b331bdc
commit 9c8d0b05dc
2 changed files with 61 additions and 0 deletions

llm/GPT.py Normal file

@@ -0,0 +1,23 @@
import openai


class GPT():
    def __init__(self, model_path='gpt-3.5-turbo', api_key=None, base_url=None):
        openai.api_key = api_key
        self.model_path = model_path
        if base_url is not None:
            # The legacy (pre-1.0) openai SDK used by ChatCompletion.create below
            # reads the endpoint from openai.api_base, not openai.base_url.
            openai.api_base = base_url

    def chat(self, message):
        response = openai.ChatCompletion.create(
            model=self.model_path,
            messages=[
                {"role": "user", "content": message}
            ]
        )
        return response['choices'][0]['message']['content']


if __name__ == '__main__':
    # NOTE: hard-coded API key; prefer loading it from an environment variable.
    llm = GPT('gpt-3.5-turbo', 'fk193752-RlcPi2mBQqPOU5u1F8SFkG2z0gtxD0HS', 'https://openai.api2d.net/v1')
    response = llm.chat("如何应对压力?")  # "How should I cope with stress?"
    print(response)
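The GPT class above targets the legacy (pre-1.0) openai SDK. For reference, a minimal sketch of the same chat call through the openai>=1.0 client interface; the placeholder key is an assumption, and the base_url simply mirrors the api2d endpoint used above:

from openai import OpenAI

# Sketch only: "YOUR_API_KEY" is a placeholder, not a value from this commit.
client = OpenAI(api_key="YOUR_API_KEY", base_url="https://openai.api2d.net/v1")
response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "How should I cope with stress?"}],
)
print(response.choices[0].message.content)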

llm/VllmGPT.py Normal file

@@ -0,0 +1,38 @@
import json
import requests

# from core import content_db


def question(cont):
    chat_list = []
    # contentdb = content_db.new_instance()
    # list = contentdb.get_list('all','desc',11)
    # answer_info = dict()
    # chat_list = []
    # i = len(list)-1
    # while i >= 0:
    #     answer_info = dict()
    #     if list[i][0] == "member":
    #         answer_info["role"] = "user"
    #         answer_info["content"] = list[i][2]
    #     elif list[i][0] == "fay":
    #         answer_info["role"] = "bot"
    #         answer_info["content"] = list[i][2]
    #     chat_list.append(answer_info)
    #     i -= 1
    content = {
        "model": "THUDM/chatglm3-6b",
        "prompt": "请简单回复我。" + cont,  # "Please reply briefly." + the user's question
        "history": chat_list}
    url = "http://192.168.1.3:8101/v1/completions"
    req = json.dumps(content)
    headers = {'content-type': 'application/json'}
    r = requests.post(url, headers=headers, data=req)
    res = json.loads(r.text)
    return res['choices'][0]['text']


if __name__ == "__main__":
    req = question("你叫什么名字啊今年多大了")  # "What's your name, and how old are you?"
    print(req)
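question() above posts to the OpenAI-compatible /v1/completions route, and the "history" field it builds is not a standard field of that route. A minimal sketch, assuming the same vLLM server also exposes /v1/chat/completions (which vLLM's OpenAI-compatible server does), of sending the history as chat messages instead; question_chat is a hypothetical name, and the host, port, and model mirror the values hard-coded above:

import json
import requests

def question_chat(cont, chat_list=None):
    # Hypothetical helper: send the accumulated history as OpenAI-style chat messages.
    messages = list(chat_list or [])
    messages.append({"role": "user", "content": cont})
    payload = {
        "model": "THUDM/chatglm3-6b",
        "messages": messages,
    }
    r = requests.post(
        "http://192.168.1.3:8101/v1/chat/completions",
        headers={"content-type": "application/json"},
        data=json.dumps(payload),
    )
    res = json.loads(r.text)
    return res["choices"][0]["message"]["content"]

Note that this route expects the roles "user" and "assistant", rather than the "bot" value used in the commented-out history-building block.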