add api to human

lipku 2024-05-06 08:14:06 +08:00
parent 8c012c5ab8
commit 3e702b8a12
3 changed files with 278 additions and 3 deletions

app.py (53 lines changed)

@@ -206,6 +206,37 @@ def chat_socket(ws):
#####webrtc###############################
pcs = set()

async def txt_to_audio_async(text_):
    if tts_type == "edgetts":
        voicename = "zh-CN-YunxiaNeural"
        text = text_
        t = time.time()
        #asyncio.get_event_loop().run_until_complete(main(voicename,text,nerfreal))
        await main(voicename,text,nerfreal)
        print(f'-------edge tts time:{time.time()-t:.4f}s')
    elif tts_type == "gpt-sovits": #gpt_sovits
        stream_tts(
            gpt_sovits(
                text_,
                app.config['CHARACTER'], #"test", #character
                "zh", #en args.language,
                app.config['TTS_SERVER'], #"http://127.0.0.1:5000", #args.server_url,
                app.config['EMOTION'], #emotion
            ),
            nerfreal
        )
    else: #xtts
        stream_tts(
            xtts(
                text_,
                gspeaker,
                "zh-cn", #en args.language,
                app.config['TTS_SERVER'], #"http://localhost:9000", #args.server_url,
                "20" #args.stream_chunk_size
            ),
            nerfreal
        )

#@app.route('/offer', methods=['POST'])
async def offer(request):
    params = await request.json()
@@ -239,6 +270,21 @@ async def offer(request):
        ),
    )

async def human(request):
    params = await request.json()

    if params['type']=='echo':
        await txt_to_audio_async(params['text'])
    elif params['type']=='chat':
        res=llm_response(params['text'])
        await txt_to_audio_async(res)

    return web.Response(
        content_type="application/json",
        text=json.dumps(
            {"code": 0, "data":"ok"}
        ),
    )

async def on_shutdown(app):
    # close peer connections
@@ -400,7 +446,7 @@ if __name__ == '__main__':
    parser.add_argument('--CHARACTER', type=str, default='test')
    parser.add_argument('--EMOTION', type=str, default='default')
-    parser.add_argument('--listenport', type=int, default=8000)
+    parser.add_argument('--listenport', type=int, default=8010)

    opt = parser.parse_args()
    app.config.from_object(opt)
@@ -463,13 +509,14 @@ if __name__ == '__main__':
    appasync = web.Application()
    appasync.on_shutdown.append(on_shutdown)
    appasync.router.add_post("/offer", offer)
+    appasync.router.add_post("/human", human)
    appasync.router.add_static('/',path='web')

    def run_server(runner):
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_until_complete(runner.setup())
-        site = web.TCPSite(runner, '0.0.0.0', 8010)
+        site = web.TCPSite(runner, '0.0.0.0', opt.listenport)
        loop.run_until_complete(site.start())
        if opt.transport=='rtcpush':
            loop.run_until_complete(run(opt.push_url))
@@ -479,7 +526,7 @@ if __name__ == '__main__':
    print('start websocket server')
    #app.on_shutdown.append(on_shutdown)
    #app.router.add_post("/offer", offer)
-    server = pywsgi.WSGIServer(('0.0.0.0', opt.listenport), app, handler_class=WebSocketHandler)
+    server = pywsgi.WSGIServer(('0.0.0.0', 8000), app, handler_class=WebSocketHandler)
    server.serve_forever()
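
For reference (not part of this commit): a minimal client sketch for the new /human endpoint. It assumes the server was started locally with the default --listenport 8010 shown in this diff and uses the third-party requests library; "echo" speaks the text directly through the configured TTS backend, while "chat" first routes it through llm_response.

# hypothetical client sketch, not included in the commit
import requests

BASE = "http://127.0.0.1:8010"  # assumption: server running locally on the default port

# type "echo": synthesize and speak the text as-is
r = requests.post(f"{BASE}/human", json={"text": "hello", "type": "echo"})
print(r.json())  # the handler above returns {"code": 0, "data": "ok"}

# type "chat": pass the text to llm_response() first, then speak the reply
r = requests.post(f"{BASE}/human", json={"text": "tell me a joke", "type": "chat"})
print(r.json())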

web/rtcpushapi.html (new file, 135 lines)

@@ -0,0 +1,135 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8"/>
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>WebRTC webcam</title>
    <style>
        button {
            padding: 8px 16px;
        }
        video {
            width: 100%;
        }
        .option {
            margin-bottom: 8px;
        }
        #media {
            max-width: 1280px;
        }
    </style>
</head>
<body>
    <div class="option">
        <input id="use-stun" type="checkbox"/>
        <label for="use-stun">Use STUN server</label>
    </div>
    <button class="btn btn-primary" id="btn_play">Start</button>
    <form class="form-inline" id="echo-form">
        <div class="form-group">
            <p>input text</p>
            <textarea cols="2" rows="3" style="width:600px;height:50px;" class="form-control" id="message">test</textarea>
        </div>
        <button type="submit" class="btn btn-default">Send</button>
    </form>
    <div id="media">
        <h2>Media</h2>
        <video id="rtc_media_player" style="width:600px;" controls autoplay></video>
    </div>
    <script src="srs.sdk.js"></script>
    <script type="text/javascript" src="http://cdn.sockjs.org/sockjs-0.3.4.js"></script>
    <script type="text/javascript" src="https://ajax.aspnetcdn.com/ajax/jquery/jquery-2.1.1.min.js"></script>
</body>
<script type="text/javascript" charset="utf-8">
    $(document).ready(function() {
        // var host = window.location.hostname
        // var ws = new WebSocket("ws://"+host+":8000/humanecho");
        // //document.getElementsByTagName("video")[0].setAttribute("src", aa["video"]);
        // ws.onopen = function() {
        //     console.log('Connected');
        // };
        // ws.onmessage = function(e) {
        //     console.log('Received: ' + e.data);
        //     data = e
        //     var vid = JSON.parse(data.data);
        //     console.log(typeof(vid),vid)
        //     //document.getElementsByTagName("video")[0].setAttribute("src", vid["video"]);
        // };
        // ws.onclose = function(e) {
        //     console.log('Closed');
        // };

        $('#echo-form').on('submit', function(e) {
            e.preventDefault();
            var message = $('#message').val();
            console.log('Sending: ' + message);
            fetch('/human', {
                body: JSON.stringify({
                    text: message,
                    type: 'echo',
                }),
                headers: {
                    'Content-Type': 'application/json'
                },
                method: 'POST'
            });
            //ws.send(message);
            $('#message').val('');
        });
    });

    $(function(){
        var sdk = null; // Global handler to do cleanup when republishing.

        var startPlay = function() {
            $('#rtc_media_player').show();

            // Close PC when user replay.
            if (sdk) {
                sdk.close();
            }
            sdk = new SrsRtcWhipWhepAsync();

            // User should set the stream when publish is done, @see https://webrtc.org/getting-started/media-devices
            // However SRS SDK provides a consist API like https://webrtc.org/getting-started/remote-streams
            $('#rtc_media_player').prop('srcObject', sdk.stream);
            // Optional callback, SDK will add track to stream.
            // sdk.ontrack = function (event) { console.log('Got track', event); sdk.stream.addTrack(event.track); };

            var host = window.location.hostname
            // For example: webrtc://r.ossrs.net/live/livestream
            var url = "http://"+host+":1985/rtc/v1/whep/?app=live&stream=livestream"
            sdk.play(url).then(function(session){
                //$('#sessionid').html(session.sessionid);
                //$('#simulator-drop').attr('href', session.simulator + '?drop=1&username=' + session.sessionid);
            }).catch(function (reason) {
                sdk.close();
                $('#rtc_media_player').hide();
                console.error(reason);
            });
        };

        $('#rtc_media_player').hide();
        // var query = parse_query_string();
        // srs_init_whep("#txt_url", query);
        $("#btn_play").click(startPlay);
        // Never play util windows loaded @see https://github.com/ossrs/srs/issues/2732
        // if (query.autostart === 'true') {
        //     $('#rtc_media_player').prop('muted', true);
        //     console.warn('For autostart, we should mute it, see https://www.jianshu.com/p/c3c6944eed5a ' +
        //         'or https://developers.google.com/web/updates/2017/09/autoplay-policy-changes#audiovideo_elements');
        //     window.addEventListener("load", function(){ startPlay(); });
        // }
    });
</script>
</html>

web/webrtcapi.html (new file, 93 lines)

@@ -0,0 +1,93 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8"/>
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>WebRTC webcam</title>
    <style>
        button {
            padding: 8px 16px;
        }
        video {
            width: 100%;
        }
        .option {
            margin-bottom: 8px;
        }
        #media {
            max-width: 1280px;
        }
    </style>
</head>
<body>
    <div class="option">
        <input id="use-stun" type="checkbox"/>
        <label for="use-stun">Use STUN server</label>
    </div>
    <button id="start" onclick="start()">Start</button>
    <button id="stop" style="display: none" onclick="stop()">Stop</button>
    <form class="form-inline" id="echo-form">
        <div class="form-group">
            <p>input text</p>
            <textarea cols="2" rows="3" style="width:600px;height:50px;" class="form-control" id="message">test</textarea>
        </div>
        <button type="submit" class="btn btn-default">Send</button>
    </form>
    <div id="media">
        <h2>Media</h2>
        <audio id="audio" autoplay="true"></audio>
        <video id="video" style="width:600px;" autoplay="true" playsinline="true"></video>
    </div>
    <script src="client.js"></script>
    <script type="text/javascript" src="http://cdn.sockjs.org/sockjs-0.3.4.js"></script>
    <script type="text/javascript" src="https://ajax.aspnetcdn.com/ajax/jquery/jquery-2.1.1.min.js"></script>
</body>
<script type="text/javascript" charset="utf-8">
    $(document).ready(function() {
        // var host = window.location.hostname
        // var ws = new WebSocket("ws://"+host+":8000/humanecho");
        // //document.getElementsByTagName("video")[0].setAttribute("src", aa["video"]);
        // ws.onopen = function() {
        //     console.log('Connected');
        // };
        // ws.onmessage = function(e) {
        //     console.log('Received: ' + e.data);
        //     data = e
        //     var vid = JSON.parse(data.data);
        //     console.log(typeof(vid),vid)
        //     //document.getElementsByTagName("video")[0].setAttribute("src", vid["video"]);
        // };
        // ws.onclose = function(e) {
        //     console.log('Closed');
        // };

        $('#echo-form').on('submit', function(e) {
            e.preventDefault();
            var message = $('#message').val();
            console.log('Sending: ' + message);
            fetch('/human', {
                body: JSON.stringify({
                    text: message,
                    type: 'echo',
                }),
                headers: {
                    'Content-Type': 'application/json'
                },
                method: 'POST'
            });
            //ws.send(message);
            $('#message').val('');
        });
    });
</script>
</html>