Switch LLM backend to vLLM-served ChatGLM3-6B (vllm + chatglm3-6b)

This commit is contained in:
yanyuxiyangzk@126.com 2024-04-03 22:02:23 +08:00
parent fe963ed543
commit d5c8b240cc
2 changed files with 3 additions and 2 deletions

3
app.py
View File

@ -125,7 +125,8 @@ def echo_socket(ws):
def llm_response(message):
from llm.LLM import LLM
# llm = LLM().init_model('Gemini', model_path= 'gemini-pro',api_key='Your API Key', proxy_url=None)
llm = LLM().init_model('ChatGPT', model_path= 'gpt-3.5-turbo',api_key='Your API Key')
# llm = LLM().init_model('ChatGPT', model_path= 'gpt-3.5-turbo',api_key='Your API Key')
llm = LLM().init_model('VllmGPT', model_path= 'THUDM/chatglm3-6b')
response = llm.chat(message)
print(response)
return response

View File

@@ -47,6 +47,6 @@ if __name__ == '__main__':
# llm.test_Gemini(api_key='你的API Key', proxy_url=None)
# llm = LLM().init_model('Gemini', model_path= 'gemini-pro',api_key='Your API Key', proxy_url='Your Proxy URL')  # NOTE(review): original line leaked a live Google API key and an internal proxy address — redacted; revoke/rotate the exposed key
# response = llm.chat("如何应对压力?")
llm = LLM().init_model('VllmGPT', model_path= 'THUDM/chatglm3-6b',api_key='', proxy_url='http://172.31.71.58:7890')
llm = LLM().init_model('VllmGPT', model_path= 'THUDM/chatglm3-6b')
response = llm.chat("如何应对压力?")
# print(response)