add webrtc push

lipku 2024-04-27 18:08:57 +08:00
parent 027e15201a
commit 4137e5bce6
5 changed files with 873 additions and 4 deletions


@@ -107,18 +107,34 @@ python app.py --fullbody --fullbody_img data/fullbody/img --fullbody_offset_x 10
- In the third (torso) training step of ernerf, if the torso is not trained well there will be a visible seam where the head and torso join. In that case you can add --torso_imgs data/xxx/torso_imgs to the command above; the torso is then not inferred by the model but taken directly from the torso images in the training dataset. With this approach there may be some artifacts around the head and neck.
### 3.6 webrtc
#### 3.6.1 P2P mode
This mode does not require SRS.
```
python app.py --transport webrtc
```
Open http://serverip:8010/webrtc.html in a browser.
#### 3.6.2 One-to-many via SRS
Start SRS:
```
export CANDIDATE='<server public IP>'
docker run --rm --env CANDIDATE=$CANDIDATE \
-p 1935:1935 -p 8080:8080 -p 1985:1985 -p 8000:8000/udp \
registry.cn-hangzhou.aliyuncs.com/ossrs/srs:5 \
objs/srs -c conf/rtc.conf
```
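To confirm that SRS started correctly, you can query its HTTP API before pushing. This is a minimal sketch, not part of the project, and it assumes the API is exposed on the default port 1985 mapped in the docker command above.
```
# Optional sanity check (sketch only): query the SRS HTTP API.
# Assumes the default API port 1985 mapped in the docker command above.
import json, urllib.request

with urllib.request.urlopen('http://localhost:1985/api/v1/versions') as resp:
    print(json.load(resp))  # expect a JSON object with code 0 and the SRS version
```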
Then run:
```
python app.py --transport rtcpush --push_url 'http://localhost:1985/rtc/v1/whip/?app=live&stream=livestream'
```
Open http://serverip:8010/rtcpush.html in a browser.
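Under the hood, --transport rtcpush publishes the generated audio/video to SRS over WHIP: app.py creates an RTCPeerConnection, POSTs its local SDP offer to --push_url, and applies the returned SDP answer (see the run() helper added to app.py below). The following is a minimal, self-contained sketch of that flow; aiortc's MediaPlayer and the file 'sample.mp4' are stand-ins for the project's HumanPlayer source.
```
# Sketch of the WHIP publish flow behind --transport rtcpush.
# MediaPlayer and 'sample.mp4' are stand-ins for the project's HumanPlayer source.
import asyncio
import aiohttp
from aiortc import RTCPeerConnection, RTCSessionDescription
from aiortc.contrib.media import MediaPlayer

async def whip_push(push_url):
    pc = RTCPeerConnection()
    player = MediaPlayer('sample.mp4')  # hypothetical audio/video source
    pc.addTrack(player.audio)
    pc.addTrack(player.video)
    # WHIP: POST the local SDP offer as the request body, receive the SDP answer.
    await pc.setLocalDescription(await pc.createOffer())
    async with aiohttp.ClientSession() as session:
        async with session.post(push_url, data=pc.localDescription.sdp,
                                headers={'Content-Type': 'application/sdp'}) as resp:
            answer = await resp.text()
    await pc.setRemoteDescription(RTCSessionDescription(sdp=answer, type='answer'))
    return pc

# asyncio.run(whip_push('http://localhost:1985/rtc/v1/whip/?app=live&stream=livestream'))
```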
## 4. Docker Run
The installation in step 1 is not needed; run it directly.
```
docker run --gpus all -it --network=host --rm registry.cn-hangzhou.aliyuncs.com/lipku/nerfstream:v1.3
```
The Docker image no longer contains the latest code; it can be used as a base environment, with the latest code copied in and run inside it.
## 5. Data flow
![](/assets/dataflow.png)

app.py

@@ -14,6 +14,7 @@ from threading import Thread,Event
import multiprocessing
from aiohttp import web
import aiohttp
from aiortc import RTCPeerConnection, RTCSessionDescription
from webrtc import HumanPlayer
@@ -244,6 +245,33 @@ async def on_shutdown(app):
coros = [pc.close() for pc in pcs]
await asyncio.gather(*coros)
pcs.clear()
async def post(url,data):
    # POST the local SDP offer to the WHIP endpoint and return the SDP answer text.
    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(url,data=data) as response:
                return await response.text()
    except aiohttp.ClientError as e:
        print(f'Error: {e}')

async def run(push_url):
    # Push the generated audio/video to the media server over WHIP.
    pc = RTCPeerConnection()
    pcs.add(pc)

    @pc.on("connectionstatechange")
    async def on_connectionstatechange():
        print("Connection state is %s" % pc.connectionState)
        if pc.connectionState == "failed":
            await pc.close()
            pcs.discard(pc)

    player = HumanPlayer(nerfreal)
    audio_sender = pc.addTrack(player.audio)
    video_sender = pc.addTrack(player.video)

    await pc.setLocalDescription(await pc.createOffer())
    answer = await post(push_url,pc.localDescription.sdp)
    await pc.setRemoteDescription(RTCSessionDescription(sdp=answer,type='answer'))
##########################################
if __name__ == '__main__':
@@ -344,8 +372,8 @@ if __name__ == '__main__':
# parser.add_argument('--asr_model', type=str, default='facebook/wav2vec2-large-960h-lv60-self')
# parser.add_argument('--asr_model', type=str, default='facebook/hubert-large-ls960-ft')
parser.add_argument('--transport', type=str, default='rtmp') #rtmp webrtc rtcpush
parser.add_argument('--push_url', type=str, default='rtmp://localhost/live/livestream') #http://localhost:1985/rtc/v1/whip/?app=live&stream=livestream
parser.add_argument('--asr_save_feats', action='store_true')
# audio FPS
@@ -437,6 +465,8 @@ if __name__ == '__main__':
loop.run_until_complete(runner.setup())
site = web.TCPSite(runner, '0.0.0.0', 8010)
loop.run_until_complete(site.start())
if opt.transport=='rtcpush':
    loop.run_until_complete(run(opt.push_url))
loop.run_forever()
Thread(target=run_server, args=(web.AppRunner(appasync),)).start()


@@ -191,7 +191,7 @@ class ASR:
if not self.terminated:
self.frames = self.frames[-(self.stride_left_size + self.stride_right_size):]
#print(f'[INFO] frame_to_text... ')
#t = time.time()
logits, labels, text = self.__frame_to_text(inputs)
#print(f'-------wav2vec time:{time.time()-t:.4f}s')

web/rtcpush.html (new file)

@@ -0,0 +1,125 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>WebRTC webcam</title>
<style>
button {
padding: 8px 16px;
}
video {
width: 100%;
}
.option {
margin-bottom: 8px;
}
#media {
max-width: 1280px;
}
</style>
</head>
<body>
<div class="option">
<input id="use-stun" type="checkbox"/>
<label for="use-stun">Use STUN server</label>
</div>
<button class="btn btn-primary" id="btn_play">Start</button>
<form class="form-inline" id="echo-form">
<div class="form-group">
<p>input text</p>
<textarea cols="2" rows="3" style="width:600px;height:50px;" class="form-control" id="message">test</textarea>
</div>
<button type="submit" class="btn btn-default">Send</button>
</form>
<div id="media">
<h2>Media</h2>
<video id="rtc_media_player" style="width:600px;" controls autoplay></video>
</div>
<script src="srs.sdk.js"></script>
<script type="text/javascript" src="http://cdn.sockjs.org/sockjs-0.3.4.js"></script>
<script type="text/javascript" src="https://ajax.aspnetcdn.com/ajax/jquery/jquery-2.1.1.min.js"></script>
</body>
<script type="text/javascript" charset="utf-8">
$(document).ready(function() {
var host = window.location.hostname
var ws = new WebSocket("ws://"+host+":8000/humanecho");
//document.getElementsByTagName("video")[0].setAttribute("src", aa["video"]);
ws.onopen = function() {
console.log('Connected');
};
ws.onmessage = function(e) {
console.log('Received: ' + e.data);
data = e
var vid = JSON.parse(data.data);
console.log(typeof(vid),vid)
//document.getElementsByTagName("video")[0].setAttribute("src", vid["video"]);
};
ws.onclose = function(e) {
console.log('Closed');
};
$('#echo-form').on('submit', function(e) {
e.preventDefault();
var message = $('#message').val();
console.log('Sending: ' + message);
ws.send(message);
$('#message').val('');
});
});
$(function(){
var sdk = null; // Global handler to do cleanup when republishing.
var startPlay = function() {
$('#rtc_media_player').show();
// Close PC when user replay.
if (sdk) {
sdk.close();
}
sdk = new SrsRtcWhipWhepAsync();
// User should set the stream when publish is done, @see https://webrtc.org/getting-started/media-devices
// However the SRS SDK provides a consistent API like https://webrtc.org/getting-started/remote-streams
$('#rtc_media_player').prop('srcObject', sdk.stream);
// Optional callback, SDK will add track to stream.
// sdk.ontrack = function (event) { console.log('Got track', event); sdk.stream.addTrack(event.track); };
var host = window.location.hostname
// For example: webrtc://r.ossrs.net/live/livestream
var url = "http://"+host+":1985/rtc/v1/whep/?app=live&stream=livestream"
sdk.play(url).then(function(session){
//$('#sessionid').html(session.sessionid);
//$('#simulator-drop').attr('href', session.simulator + '?drop=1&username=' + session.sessionid);
}).catch(function (reason) {
sdk.close();
$('#rtc_media_player').hide();
console.error(reason);
});
};
$('#rtc_media_player').hide();
// var query = parse_query_string();
// srs_init_whep("#txt_url", query);
$("#btn_play").click(startPlay);
// Never play until the window is loaded @see https://github.com/ossrs/srs/issues/2732
// if (query.autostart === 'true') {
// $('#rtc_media_player').prop('muted', true);
// console.warn('For autostart, we should mute it, see https://www.jianshu.com/p/c3c6944eed5a ' +
// 'or https://developers.google.com/web/updates/2017/09/autoplay-policy-changes#audiovideo_elements');
// window.addEventListener("load", function(){ startPlay(); });
// }
});
</script>
</html>
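
The page above pulls the stream from SRS over WHEP through srs.sdk.js. For reference, the same pull can be sketched in Python with aiortc (a hedged sketch, not part of this commit); it mirrors what SrsRtcWhipWhepAsync.play() does: recvonly transceivers, the SDP offer POSTed to the /rtc/v1/whep/ URL, and the returned answer applied.
```
# WHEP pull sketch (not part of this commit): receive the SRS stream with aiortc.
import asyncio
import aiohttp
from aiortc import RTCPeerConnection, RTCSessionDescription

async def whep_play(url):
    pc = RTCPeerConnection()
    pc.addTransceiver("audio", direction="recvonly")
    pc.addTransceiver("video", direction="recvonly")

    @pc.on("track")
    def on_track(track):
        print("Receiving", track.kind)  # consume, record, or relay the track here

    await pc.setLocalDescription(await pc.createOffer())
    async with aiohttp.ClientSession() as session:
        async with session.post(url, data=pc.localDescription.sdp,
                                headers={"Content-Type": "application/sdp"}) as resp:
            answer = await resp.text()
    await pc.setRemoteDescription(RTCSessionDescription(sdp=answer, type="answer"))
    return pc

# asyncio.run(whep_play("http://localhost:1985/rtc/v1/whep/?app=live&stream=livestream"))
```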

web/srs.sdk.js (new file)

@@ -0,0 +1,698 @@
//
// Copyright (c) 2013-2021 Winlin
//
// SPDX-License-Identifier: MIT
//
'use strict';
function SrsError(name, message) {
this.name = name;
this.message = message;
this.stack = (new Error()).stack;
}
SrsError.prototype = Object.create(Error.prototype);
SrsError.prototype.constructor = SrsError;
// Depends on adapter-7.4.0.min.js from https://github.com/webrtc/adapter
// Async-await-promise based SRS RTC Publisher.
function SrsRtcPublisherAsync() {
var self = {};
// https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
self.constraints = {
audio: true,
video: {
width: {ideal: 320, max: 576}
}
};
// @see https://github.com/rtcdn/rtcdn-draft
// @url The WebRTC url to play with, for example:
// webrtc://r.ossrs.net/live/livestream
// or specifies the API port:
// webrtc://r.ossrs.net:11985/live/livestream
// or autostart the publish:
// webrtc://r.ossrs.net/live/livestream?autostart=true
// or change the app from live to myapp:
// webrtc://r.ossrs.net:11985/myapp/livestream
// or change the stream from livestream to mystream:
// webrtc://r.ossrs.net:11985/live/mystream
// or set the api server to myapi.domain.com:
// webrtc://myapi.domain.com/live/livestream
// or set the candidate(eip) of answer:
// webrtc://r.ossrs.net/live/livestream?candidate=39.107.238.185
// or force to access https API:
// webrtc://r.ossrs.net/live/livestream?schema=https
// or use plaintext, without SRTP:
// webrtc://r.ossrs.net/live/livestream?encrypt=false
// or any other information, will pass-by in the query:
// webrtc://r.ossrs.net/live/livestream?vhost=xxx
// webrtc://r.ossrs.net/live/livestream?token=xxx
self.publish = async function (url) {
var conf = self.__internal.prepareUrl(url);
self.pc.addTransceiver("audio", {direction: "sendonly"});
self.pc.addTransceiver("video", {direction: "sendonly"});
//self.pc.addTransceiver("video", {direction: "sendonly"});
//self.pc.addTransceiver("audio", {direction: "sendonly"});
if (!navigator.mediaDevices && window.location.protocol === 'http:' && window.location.hostname !== 'localhost') {
throw new SrsError('HttpsRequiredError', `Please use HTTPS or localhost to publish, read https://github.com/ossrs/srs/issues/2762#issuecomment-983147576`);
}
var stream = await navigator.mediaDevices.getUserMedia(self.constraints);
// @see https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addStream#Migrating_to_addTrack
stream.getTracks().forEach(function (track) {
self.pc.addTrack(track);
// Notify about local track when stream is ok.
self.ontrack && self.ontrack({track: track});
});
var offer = await self.pc.createOffer();
await self.pc.setLocalDescription(offer);
var session = await new Promise(function (resolve, reject) {
// @see https://github.com/rtcdn/rtcdn-draft
var data = {
api: conf.apiUrl, tid: conf.tid, streamurl: conf.streamUrl,
clientip: null, sdp: offer.sdp
};
console.log("Generated offer: ", data);
const xhr = new XMLHttpRequest();
xhr.onload = function() {
if (xhr.readyState !== xhr.DONE) return;
if (xhr.status !== 200 && xhr.status !== 201) return reject(xhr);
const data = JSON.parse(xhr.responseText);
console.log("Got answer: ", data);
return data.code ? reject(xhr) : resolve(data);
}
xhr.open('POST', conf.apiUrl, true);
xhr.setRequestHeader('Content-type', 'application/json');
xhr.send(JSON.stringify(data));
});
await self.pc.setRemoteDescription(
new RTCSessionDescription({type: 'answer', sdp: session.sdp})
);
session.simulator = conf.schema + '//' + conf.urlObject.server + ':' + conf.port + '/rtc/v1/nack/';
return session;
};
// Close the publisher.
self.close = function () {
self.pc && self.pc.close();
self.pc = null;
};
// The callback when got local stream.
// @see https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addStream#Migrating_to_addTrack
self.ontrack = function (event) {
// Add track to stream of SDK.
self.stream.addTrack(event.track);
};
// Internal APIs.
self.__internal = {
defaultPath: '/rtc/v1/publish/',
prepareUrl: function (webrtcUrl) {
var urlObject = self.__internal.parse(webrtcUrl);
// If user specifies the schema, use it as API schema.
var schema = urlObject.user_query.schema;
schema = schema ? schema + ':' : window.location.protocol;
var port = urlObject.port || 1985;
if (schema === 'https:') {
port = urlObject.port || 443;
}
// @see https://github.com/rtcdn/rtcdn-draft
var api = urlObject.user_query.play || self.__internal.defaultPath;
if (api.lastIndexOf('/') !== api.length - 1) {
api += '/';
}
var apiUrl = schema + '//' + urlObject.server + ':' + port + api;
for (var key in urlObject.user_query) {
if (key !== 'api' && key !== 'play') {
apiUrl += '&' + key + '=' + urlObject.user_query[key];
}
}
// Replace /rtc/v1/play/&k=v to /rtc/v1/play/?k=v
apiUrl = apiUrl.replace(api + '&', api + '?');
var streamUrl = urlObject.url;
return {
apiUrl: apiUrl, streamUrl: streamUrl, schema: schema, urlObject: urlObject, port: port,
tid: Number(parseInt(new Date().getTime()*Math.random()*100)).toString(16).slice(0, 7)
};
},
parse: function (url) {
// @see: http://stackoverflow.com/questions/10469575/how-to-use-location-object-to-parse-url-without-redirecting-the-page-in-javascri
var a = document.createElement("a");
a.href = url.replace("rtmp://", "http://")
.replace("webrtc://", "http://")
.replace("rtc://", "http://");
var vhost = a.hostname;
var app = a.pathname.substring(1, a.pathname.lastIndexOf("/"));
var stream = a.pathname.slice(a.pathname.lastIndexOf("/") + 1);
// parse the vhost in the params of app, that srs supports.
app = app.replace("...vhost...", "?vhost=");
if (app.indexOf("?") >= 0) {
var params = app.slice(app.indexOf("?"));
app = app.slice(0, app.indexOf("?"));
if (params.indexOf("vhost=") > 0) {
vhost = params.slice(params.indexOf("vhost=") + "vhost=".length);
if (vhost.indexOf("&") > 0) {
vhost = vhost.slice(0, vhost.indexOf("&"));
}
}
}
// when vhost equals to server, and server is ip,
// the vhost is __defaultVhost__
if (a.hostname === vhost) {
var re = /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/;
if (re.test(a.hostname)) {
vhost = "__defaultVhost__";
}
}
// parse the schema
var schema = "rtmp";
if (url.indexOf("://") > 0) {
schema = url.slice(0, url.indexOf("://"));
}
var port = a.port;
if (!port) {
// Figure out the port from the webrtc url: if it contains an http or https port, overwrite the default 1985.
if (schema === 'webrtc' && url.indexOf(`webrtc://${a.host}:`) === 0) {
port = (url.indexOf(`webrtc://${a.host}:80`) === 0) ? 80 : 443;
}
// Guess by schema.
if (schema === 'http') {
port = 80;
} else if (schema === 'https') {
port = 443;
} else if (schema === 'rtmp') {
port = 1935;
}
}
var ret = {
url: url,
schema: schema,
server: a.hostname, port: port,
vhost: vhost, app: app, stream: stream
};
self.__internal.fill_query(a.search, ret);
// For webrtc API, we use 443 if page is https, or schema specified it.
if (!ret.port) {
if (schema === 'webrtc' || schema === 'rtc') {
if (ret.user_query.schema === 'https') {
ret.port = 443;
} else if (window.location.href.indexOf('https://') === 0) {
ret.port = 443;
} else {
// For WebRTC, SRS use 1985 as default API port.
ret.port = 1985;
}
}
}
return ret;
},
fill_query: function (query_string, obj) {
// pure user query object.
obj.user_query = {};
if (query_string.length === 0) {
return;
}
// split again for angularjs.
if (query_string.indexOf("?") >= 0) {
query_string = query_string.split("?")[1];
}
var queries = query_string.split("&");
for (var i = 0; i < queries.length; i++) {
var elem = queries[i];
var query = elem.split("=");
obj[query[0]] = query[1];
obj.user_query[query[0]] = query[1];
}
// alias domain for vhost.
if (obj.domain) {
obj.vhost = obj.domain;
}
}
};
self.pc = new RTCPeerConnection(null);
// To keep api consistent between player and publisher.
// @see https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addStream#Migrating_to_addTrack
// @see https://webrtc.org/getting-started/media-devices
self.stream = new MediaStream();
return self;
}
// Depends on adapter-7.4.0.min.js from https://github.com/webrtc/adapter
// Async-await-promise based SRS RTC Player.
function SrsRtcPlayerAsync() {
var self = {};
// @see https://github.com/rtcdn/rtcdn-draft
// @url The WebRTC url to play with, for example:
// webrtc://r.ossrs.net/live/livestream
// or specifies the API port:
// webrtc://r.ossrs.net:11985/live/livestream
// webrtc://r.ossrs.net:80/live/livestream
// or autostart the play:
// webrtc://r.ossrs.net/live/livestream?autostart=true
// or change the app from live to myapp:
// webrtc://r.ossrs.net:11985/myapp/livestream
// or change the stream from livestream to mystream:
// webrtc://r.ossrs.net:11985/live/mystream
// or set the api server to myapi.domain.com:
// webrtc://myapi.domain.com/live/livestream
// or set the candidate(eip) of answer:
// webrtc://r.ossrs.net/live/livestream?candidate=39.107.238.185
// or force to access https API:
// webrtc://r.ossrs.net/live/livestream?schema=https
// or use plaintext, without SRTP:
// webrtc://r.ossrs.net/live/livestream?encrypt=false
// or any other information, will pass-by in the query:
// webrtc://r.ossrs.net/live/livestream?vhost=xxx
// webrtc://r.ossrs.net/live/livestream?token=xxx
self.play = async function(url) {
var conf = self.__internal.prepareUrl(url);
self.pc.addTransceiver("audio", {direction: "recvonly"});
self.pc.addTransceiver("video", {direction: "recvonly"});
//self.pc.addTransceiver("video", {direction: "recvonly"});
//self.pc.addTransceiver("audio", {direction: "recvonly"});
var offer = await self.pc.createOffer();
await self.pc.setLocalDescription(offer);
var session = await new Promise(function(resolve, reject) {
// @see https://github.com/rtcdn/rtcdn-draft
var data = {
api: conf.apiUrl, tid: conf.tid, streamurl: conf.streamUrl,
clientip: null, sdp: offer.sdp
};
console.log("Generated offer: ", data);
const xhr = new XMLHttpRequest();
xhr.onload = function() {
if (xhr.readyState !== xhr.DONE) return;
if (xhr.status !== 200 && xhr.status !== 201) return reject(xhr);
const data = JSON.parse(xhr.responseText);
console.log("Got answer: ", data);
return data.code ? reject(xhr) : resolve(data);
}
xhr.open('POST', conf.apiUrl, true);
xhr.setRequestHeader('Content-type', 'application/json');
xhr.send(JSON.stringify(data));
});
await self.pc.setRemoteDescription(
new RTCSessionDescription({type: 'answer', sdp: session.sdp})
);
session.simulator = conf.schema + '//' + conf.urlObject.server + ':' + conf.port + '/rtc/v1/nack/';
return session;
};
// Close the player.
self.close = function() {
self.pc && self.pc.close();
self.pc = null;
};
// The callback when got remote track.
// Note that the onaddstream is deprecated, @see https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/onaddstream
self.ontrack = function (event) {
// https://webrtc.org/getting-started/remote-streams
self.stream.addTrack(event.track);
};
// Internal APIs.
self.__internal = {
defaultPath: '/rtc/v1/play/',
prepareUrl: function (webrtcUrl) {
var urlObject = self.__internal.parse(webrtcUrl);
// If user specifies the schema, use it as API schema.
var schema = urlObject.user_query.schema;
schema = schema ? schema + ':' : window.location.protocol;
var port = urlObject.port || 1985;
if (schema === 'https:') {
port = urlObject.port || 443;
}
// @see https://github.com/rtcdn/rtcdn-draft
var api = urlObject.user_query.play || self.__internal.defaultPath;
if (api.lastIndexOf('/') !== api.length - 1) {
api += '/';
}
var apiUrl = schema + '//' + urlObject.server + ':' + port + api;
for (var key in urlObject.user_query) {
if (key !== 'api' && key !== 'play') {
apiUrl += '&' + key + '=' + urlObject.user_query[key];
}
}
// Replace /rtc/v1/play/&k=v to /rtc/v1/play/?k=v
apiUrl = apiUrl.replace(api + '&', api + '?');
var streamUrl = urlObject.url;
return {
apiUrl: apiUrl, streamUrl: streamUrl, schema: schema, urlObject: urlObject, port: port,
tid: Number(parseInt(new Date().getTime()*Math.random()*100)).toString(16).slice(0, 7)
};
},
parse: function (url) {
// @see: http://stackoverflow.com/questions/10469575/how-to-use-location-object-to-parse-url-without-redirecting-the-page-in-javascri
var a = document.createElement("a");
a.href = url.replace("rtmp://", "http://")
.replace("webrtc://", "http://")
.replace("rtc://", "http://");
var vhost = a.hostname;
var app = a.pathname.substring(1, a.pathname.lastIndexOf("/"));
var stream = a.pathname.slice(a.pathname.lastIndexOf("/") + 1);
// parse the vhost in the params of app, that srs supports.
app = app.replace("...vhost...", "?vhost=");
if (app.indexOf("?") >= 0) {
var params = app.slice(app.indexOf("?"));
app = app.slice(0, app.indexOf("?"));
if (params.indexOf("vhost=") > 0) {
vhost = params.slice(params.indexOf("vhost=") + "vhost=".length);
if (vhost.indexOf("&") > 0) {
vhost = vhost.slice(0, vhost.indexOf("&"));
}
}
}
// when vhost equals to server, and server is ip,
// the vhost is __defaultVhost__
if (a.hostname === vhost) {
var re = /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/;
if (re.test(a.hostname)) {
vhost = "__defaultVhost__";
}
}
// parse the schema
var schema = "rtmp";
if (url.indexOf("://") > 0) {
schema = url.slice(0, url.indexOf("://"));
}
var port = a.port;
if (!port) {
// Figure out the port from the webrtc url: if it contains an http or https port, overwrite the default 1985.
if (schema === 'webrtc' && url.indexOf(`webrtc://${a.host}:`) === 0) {
port = (url.indexOf(`webrtc://${a.host}:80`) === 0) ? 80 : 443;
}
// Guess by schema.
if (schema === 'http') {
port = 80;
} else if (schema === 'https') {
port = 443;
} else if (schema === 'rtmp') {
port = 1935;
}
}
var ret = {
url: url,
schema: schema,
server: a.hostname, port: port,
vhost: vhost, app: app, stream: stream
};
self.__internal.fill_query(a.search, ret);
// For webrtc API, we use 443 if page is https, or schema specified it.
if (!ret.port) {
if (schema === 'webrtc' || schema === 'rtc') {
if (ret.user_query.schema === 'https') {
ret.port = 443;
} else if (window.location.href.indexOf('https://') === 0) {
ret.port = 443;
} else {
// For WebRTC, SRS use 1985 as default API port.
ret.port = 1985;
}
}
}
return ret;
},
fill_query: function (query_string, obj) {
// pure user query object.
obj.user_query = {};
if (query_string.length === 0) {
return;
}
// split again for angularjs.
if (query_string.indexOf("?") >= 0) {
query_string = query_string.split("?")[1];
}
var queries = query_string.split("&");
for (var i = 0; i < queries.length; i++) {
var elem = queries[i];
var query = elem.split("=");
obj[query[0]] = query[1];
obj.user_query[query[0]] = query[1];
}
// alias domain for vhost.
if (obj.domain) {
obj.vhost = obj.domain;
}
}
};
self.pc = new RTCPeerConnection(null);
// Create a stream to add track to the stream, @see https://webrtc.org/getting-started/remote-streams
self.stream = new MediaStream();
// https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/ontrack
self.pc.ontrack = function(event) {
if (self.ontrack) {
self.ontrack(event);
}
};
return self;
}
// Depends on adapter-7.4.0.min.js from https://github.com/webrtc/adapter
// Async-await-promise based SRS RTC Publisher by WHIP.
function SrsRtcWhipWhepAsync() {
var self = {};
// https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
self.constraints = {
audio: true,
video: {
width: {ideal: 320, max: 576}
}
};
// See https://datatracker.ietf.org/doc/draft-ietf-wish-whip/
// @url The WebRTC url to publish with, for example:
// http://localhost:1985/rtc/v1/whip/?app=live&stream=livestream
// @options The options to control playing, supports:
// videoOnly: boolean, whether only play video, default to false.
// audioOnly: boolean, whether only play audio, default to false.
self.publish = async function (url, options) {
if (url.indexOf('/whip/') === -1) throw new Error(`invalid WHIP url ${url}`);
if (options?.videoOnly && options?.audioOnly) throw new Error(`The videoOnly and audioOnly in options can't be true at the same time`);
if (!options?.videoOnly) {
self.pc.addTransceiver("audio", {direction: "sendonly"});
} else {
self.constraints.audio = false;
}
if (!options?.audioOnly) {
self.pc.addTransceiver("video", {direction: "sendonly"});
} else {
self.constraints.video = false;
}
if (!navigator.mediaDevices && window.location.protocol === 'http:' && window.location.hostname !== 'localhost') {
throw new SrsError('HttpsRequiredError', `Please use HTTPS or localhost to publish, read https://github.com/ossrs/srs/issues/2762#issuecomment-983147576`);
}
var stream = await navigator.mediaDevices.getUserMedia(self.constraints);
// @see https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addStream#Migrating_to_addTrack
stream.getTracks().forEach(function (track) {
self.pc.addTrack(track);
// Notify about local track when stream is ok.
self.ontrack && self.ontrack({track: track});
});
var offer = await self.pc.createOffer();
await self.pc.setLocalDescription(offer);
const answer = await new Promise(function (resolve, reject) {
console.log(`Generated offer: ${offer.sdp}`);
const xhr = new XMLHttpRequest();
xhr.onload = function() {
if (xhr.readyState !== xhr.DONE) return;
if (xhr.status !== 200 && xhr.status !== 201) return reject(xhr);
const data = xhr.responseText;
console.log("Got answer: ", data);
return data.code ? reject(xhr) : resolve(data);
}
xhr.open('POST', url, true);
xhr.setRequestHeader('Content-type', 'application/sdp');
xhr.send(offer.sdp);
});
await self.pc.setRemoteDescription(
new RTCSessionDescription({type: 'answer', sdp: answer})
);
return self.__internal.parseId(url, offer.sdp, answer);
};
// See https://datatracker.ietf.org/doc/draft-ietf-wish-whip/
// @url The WebRTC url to play with, for example:
// http://localhost:1985/rtc/v1/whep/?app=live&stream=livestream
// @options The options to control playing, supports:
// videoOnly: boolean, whether only play video, default to false.
// audioOnly: boolean, whether only play audio, default to false.
self.play = async function(url, options) {
if (url.indexOf('/whip-play/') === -1 && url.indexOf('/whep/') === -1) throw new Error(`invalid WHEP url ${url}`);
if (options?.videoOnly && options?.audioOnly) throw new Error(`The videoOnly and audioOnly in options can't be true at the same time`);
if (!options?.videoOnly) self.pc.addTransceiver("audio", {direction: "recvonly"});
if (!options?.audioOnly) self.pc.addTransceiver("video", {direction: "recvonly"});
var offer = await self.pc.createOffer();
await self.pc.setLocalDescription(offer);
const answer = await new Promise(function(resolve, reject) {
console.log(`Generated offer: ${offer.sdp}`);
const xhr = new XMLHttpRequest();
xhr.onload = function() {
if (xhr.readyState !== xhr.DONE) return;
if (xhr.status !== 200 && xhr.status !== 201) return reject(xhr);
const data = xhr.responseText;
console.log("Got answer: ", data);
return data.code ? reject(xhr) : resolve(data);
}
xhr.open('POST', url, true);
xhr.setRequestHeader('Content-type', 'application/sdp');
xhr.send(offer.sdp);
});
await self.pc.setRemoteDescription(
new RTCSessionDescription({type: 'answer', sdp: answer})
);
return self.__internal.parseId(url, offer.sdp, answer);
};
// Close the publisher.
self.close = function () {
self.pc && self.pc.close();
self.pc = null;
};
// The callback when got local stream.
// @see https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addStream#Migrating_to_addTrack
self.ontrack = function (event) {
// Add track to stream of SDK.
self.stream.addTrack(event.track);
};
self.pc = new RTCPeerConnection(null);
// To keep api consistent between player and publisher.
// @see https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addStream#Migrating_to_addTrack
// @see https://webrtc.org/getting-started/media-devices
self.stream = new MediaStream();
// Internal APIs.
self.__internal = {
parseId: (url, offer, answer) => {
let sessionid = offer.substr(offer.indexOf('a=ice-ufrag:') + 'a=ice-ufrag:'.length);
sessionid = sessionid.substr(0, sessionid.indexOf('\n') - 1) + ':';
sessionid += answer.substr(answer.indexOf('a=ice-ufrag:') + 'a=ice-ufrag:'.length);
sessionid = sessionid.substr(0, sessionid.indexOf('\n'));
const a = document.createElement("a");
a.href = url;
return {
sessionid: sessionid, // Should be ice-ufrag of answer:offer.
simulator: a.protocol + '//' + a.host + '/rtc/v1/nack/',
};
},
};
// https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/ontrack
self.pc.ontrack = function(event) {
if (self.ontrack) {
self.ontrack(event);
}
};
return self;
}
// Format the codec of RTCRtpSender, kind(audio/video) is optional filter.
// https://developer.mozilla.org/en-US/docs/Web/Media/Formats/WebRTC_codecs#getting_the_supported_codecs
function SrsRtcFormatSenders(senders, kind) {
var codecs = [];
senders.forEach(function (sender) {
var params = sender.getParameters();
params && params.codecs && params.codecs.forEach(function(c) {
if (kind && sender.track.kind !== kind) {
return;
}
if (c.mimeType.indexOf('/red') > 0 || c.mimeType.indexOf('/rtx') > 0 || c.mimeType.indexOf('/fec') > 0) {
return;
}
var s = '';
s += c.mimeType.replace('audio/', '').replace('video/', '');
s += ', ' + c.clockRate + 'HZ';
if (sender.track.kind === "audio") {
s += ', channels: ' + c.channels;
}
s += ', pt: ' + c.payloadType;
codecs.push(s);
});
});
return codecs.join(", ");
}