RoboWaiter/robowaiter/llm_client/ask_llm.py

import os
import ast
import urllib3
from robowaiter.utils import get_root_path
from robowaiter.llm_client.single_round import single_round
from robowaiter.llm_client.tool_api import run_conversation
########################################
# This file implements simple communication with the LLM.
########################################
# Suppress urllib3's warning about unverified HTTPS requests
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
root_path = get_root_path()
# Load test questions (used as a local answer cache)
file_path = os.path.join(root_path, "robowaiter/llm_client/data/test_questions.txt")
with open(file_path, "r", encoding="utf-8") as f:
    # The file holds a dict literal; literal_eval avoids executing arbitrary code
    test_questions_dict = ast.literal_eval(f.read())
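
# Assumed format of test_questions.txt (the file itself is not shown here):
# a Python dict literal mapping question strings to canned answers, e.g.
#   {"测试VLM做一杯咖啡": "...", ...}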

def ask_llm(question):
    # Answer from the local test-question cache when possible;
    # otherwise forward the question to the LLM service
    if question in test_questions_dict:
        ans = test_questions_dict[question]
    else:
        ans = run_conversation(question, stream=False)
        # ans = single_round(question)  # alternative: single-round API
    print(f"LLM output: {ans}")
    return ans

if __name__ == '__main__':
    question = '''测试VLM做一杯咖啡'''  # "Test: have the VLM make a cup of coffee"
    print(ask_llm(question))
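
# Usage sketch of both paths (the second question is hypothetical and assumes
# the LLM service behind run_conversation is reachable):
#   ask_llm("测试VLM做一杯咖啡")  # cache hit: answered from test_questions_dict
#   ask_llm("来一杯拿铁")        # cache miss: forwarded to run_conversation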