简体   繁体   English

WebRTC视频通话如何控制带宽?

[英]How to control bandwidth in WebRTC video call?

I am trying to develop a Video Calling/Conferencing application using WebRTC and node.js. Right now there is no facility to control bandwidth during a video call.我正在尝试使用 WebRTC 和 node.js 开发视频通话/会议应用程序。目前在视频通话期间没有控制带宽的工具。 Is there any way to control/reduce bandwidth?有什么方法可以控制/减少带宽。 (like I want to make my whole web application work on 150 kbps while video conferencing). (就像我想让整个 web 应用程序在视频会议时以 150 kbps 的速度工作)。

Any suggestions are highly appreciated.任何建议都非常感谢。 Thanks in advance.提前致谢。

Try this demo .试试这个演示 You can inject bandwidth attributes ( b=AS ) in the session descriptions:您可以在会话描述中注入带宽属性 ( b=AS ):

// Default caps in kbps; declared with `var` (the original assigned implicit
// globals, which throws in strict mode / ES modules).
var audioBandwidth = 50;
var videoBandwidth = 256;

/**
 * Inserts "b=AS:<kbps>" bandwidth lines (RFC 4566 Application-Specific
 * bandwidth) right after the "a=mid:audio" / "a=mid:video" lines of an SDP.
 *
 * @param {string} sdp - Session description to rewrite.
 * @param {number} [audioKbps=audioBandwidth] - Audio cap in kbps.
 * @param {number} [videoKbps=videoBandwidth] - Video cap in kbps.
 * @returns {string} The rewritten SDP (unchanged if no a=mid lines match).
 */
function setBandwidth(sdp, audioKbps = audioBandwidth, videoKbps = videoBandwidth) {
    sdp = sdp.replace(/a=mid:audio\r\n/g, 'a=mid:audio\r\nb=AS:' + audioKbps + '\r\n');
    sdp = sdp.replace(/a=mid:video\r\n/g, 'a=mid:video\r\nb=AS:' + videoKbps + '\r\n');
    return sdp;
}


// ----------------------------------------------------------

// Both the offer and the answer get the same treatment: patch the SDP with
// the bandwidth caps, then install it as the local description.
// (Legacy callback-based createOffer/createAnswer signature.)
var applyBandwidthAndSetLocal = function (description) {
    description.sdp = setBandwidth(description.sdp);
    peer.setLocalDescription(description);
};

peer.createOffer(applyBandwidthAndSetLocal, null, constraints);

peer.createAnswer(applyBandwidthAndSetLocal, null, constraints);

b=AS is already present in sdp for data m-line ; b=AS已经存在于data m-line sdp 中; its default value is 50 .它的默认值为50

Updated at Sept 23, 2015 2015 年 9 月 23 日更新

Here is a library that provides full control over both audio/video tracks' bitrates:这是一个可以完全控制音频/视频轨道比特率的库:

// here is how to use it
var bandwidth = {
    screen: 300, // 300kbits minimum
    audio: 50,   // 50kbits  minimum
    video: 256   // 256kbits (both min-max)
};
var isScreenSharing = false;

sdp = BandwidthHandler.setApplicationSpecificBandwidth(sdp, bandwidth, isScreenSharing);
sdp = BandwidthHandler.setVideoBitrates(sdp, {
    min: bandwidth.video,
    max: bandwidth.video
});
sdp = BandwidthHandler.setOpusAttributes(sdp);

Here is the library code.这是库代码。 It's quite big, but it works!它相当大,但它确实有效!

// BandwidthHandler.js

// BandwidthHandler.js
//
// SDP-munging helpers for capping WebRTC bitrates:
//   * setApplicationSpecificBandwidth - injects "b=AS:<kbps>" lines (RFC 4566)
//   * setVideoBitrates - appends Chrome's x-google-min/max-bitrate to VP8's fmtp line
//   * setOpusAttributes - appends Opus fmtp parameters (RFC 7587)

var BandwidthHandler = (function() {
    // Injects "b=AS:<kbps>" after the "a=mid:audio"/"a=mid:video" lines.
    // Firefox ignores/rejects b=AS, so the SDP is returned unchanged there.
    function setBAS(sdp, bandwidth, isScreen) {
        // typeof-guard so this also runs outside a browser (the original
        // dereferenced `navigator` unconditionally and threw a
        // ReferenceError in non-browser environments).
        var isFirefox = typeof navigator !== 'undefined' && !!navigator.mozGetUserMedia;
        if (isFirefox || !bandwidth) {
            return sdp;
        }

        if (isScreen) {
            if (!bandwidth.screen) {
                console.warn('It seems that you are not using bandwidth for screen. Screen sharing is expected to fail.');
            } else if (bandwidth.screen < 300) {
                console.warn('It seems that you are using wrong bandwidth value for screen. Screen sharing is expected to fail.');
            }
        }

        // if screen; must use at least 300kbs
        if (bandwidth.screen && isScreen) {
            sdp = sdp.replace(/b=AS([^\r\n]+\r\n)/g, '');
            sdp = sdp.replace(/a=mid:video\r\n/g, 'a=mid:video\r\nb=AS:' + bandwidth.screen + '\r\n');
        }

        // remove existing bandwidth lines (including any inserted just above;
        // the video branch below re-adds the screen cap when isScreen is set)
        if (bandwidth.audio || bandwidth.video || bandwidth.data) {
            sdp = sdp.replace(/b=AS([^\r\n]+\r\n)/g, '');
        }

        if (bandwidth.audio) {
            sdp = sdp.replace(/a=mid:audio\r\n/g, 'a=mid:audio\r\nb=AS:' + bandwidth.audio + '\r\n');
        }

        if (bandwidth.video) {
            // When screen sharing, the (higher) screen cap wins over the video cap.
            sdp = sdp.replace(/a=mid:video\r\n/g, 'a=mid:video\r\nb=AS:' + (isScreen ? bandwidth.screen : bandwidth.video) + '\r\n');
        }

        return sdp;
    }

    // Find the line in sdpLines that starts with |prefix|, and, if specified,
    // contains |substr| (case-insensitive search). Returns the index or null.
    function findLine(sdpLines, prefix, substr) {
        return findLineInRange(sdpLines, 0, -1, prefix, substr);
    }

    // Find the line in sdpLines[startLine...endLine - 1] that starts with |prefix|
    // and, if specified, contains |substr| (case-insensitive search).
    // Returns the index or null. NOTE: index 0 is a valid result, so callers
    // must compare against null rather than rely on truthiness.
    function findLineInRange(sdpLines, startLine, endLine, prefix, substr) {
        var realEndLine = endLine !== -1 ? endLine : sdpLines.length;
        for (var i = startLine; i < realEndLine; ++i) {
            if (sdpLines[i].indexOf(prefix) === 0) {
                if (!substr ||
                    sdpLines[i].toLowerCase().indexOf(substr.toLowerCase()) !== -1) {
                    return i;
                }
            }
        }
        return null;
    }

    // Gets the codec payload type (as a string) from an "a=rtpmap:X ..." line,
    // or null when the line does not match the expected shape.
    function getCodecPayloadType(sdpLine) {
        var pattern = new RegExp('a=rtpmap:(\\d+) \\w+\\/\\d+');
        var result = sdpLine.match(pattern);
        return (result && result.length === 2) ? result[1] : null;
    }

    // Appends "x-google-min-bitrate"/"x-google-max-bitrate" (Chrome-specific
    // fmtp attributes, values in kbps) for the VP8 payload. Returns the SDP
    // unchanged when VP8 or its RTX payload cannot be located.
    function setVideoBitrates(sdp, params) {
        params = params || {};
        var xgoogle_min_bitrate = params.min;
        var xgoogle_max_bitrate = params.max;

        var sdpLines = sdp.split('\r\n');

        // VP8. Compare against null: index 0 is a valid match (the original's
        // truthiness check silently rejected a match on the first line).
        var vp8Index = findLine(sdpLines, 'a=rtpmap', 'VP8/90000');
        var vp8Payload = vp8Index !== null ? getCodecPayloadType(sdpLines[vp8Index]) : null;
        if (vp8Payload === null) {
            return sdp;
        }

        // RTX. The null-check on the *payload* (not just the index) also
        // guards the .toString() below — the original could call toString()
        // on null when the rtpmap line was found but failed to parse.
        var rtxIndex = findLine(sdpLines, 'a=rtpmap', 'rtx/90000');
        var rtxPayload = rtxIndex !== null ? getCodecPayloadType(sdpLines[rtxIndex]) : null;
        if (rtxPayload === null) {
            return sdp;
        }

        var rtxFmtpLineIndex = findLine(sdpLines, 'a=fmtp:' + rtxPayload.toString());
        if (rtxFmtpLineIndex !== null) {
            // Append a brand-new fmtp line for VP8 right after RTX's fmtp line.
            var appendrtxNext = '\r\n';
            appendrtxNext += 'a=fmtp:' + vp8Payload + ' x-google-min-bitrate=' + (xgoogle_min_bitrate || '228') + '; x-google-max-bitrate=' + (xgoogle_max_bitrate || '228');
            sdpLines[rtxFmtpLineIndex] = sdpLines[rtxFmtpLineIndex].concat(appendrtxNext);
            sdp = sdpLines.join('\r\n');
        }

        return sdp;
    }

    // Appends Opus fmtp parameters (stereo, FEC, DTX, bitrate caps — see
    // RFC 7587) to the existing "a=fmtp:<opus>" line. Unspecified stereo /
    // sprop-stereo default to '1'.
    function setOpusAttributes(sdp, params) {
        params = params || {};

        var sdpLines = sdp.split('\r\n');

        // Opus. Null-compare for the same index-0 reason as in setVideoBitrates.
        var opusIndex = findLine(sdpLines, 'a=rtpmap', 'opus/48000');
        var opusPayload = opusIndex !== null ? getCodecPayloadType(sdpLines[opusIndex]) : null;
        if (opusPayload === null) {
            return sdp;
        }

        var opusFmtpLineIndex = findLine(sdpLines, 'a=fmtp:' + opusPayload.toString());
        if (opusFmtpLineIndex === null) {
            return sdp;
        }

        var appendOpusNext = '';
        appendOpusNext += '; stereo=' + (typeof params.stereo != 'undefined' ? params.stereo : '1');
        appendOpusNext += '; sprop-stereo=' + (typeof params['sprop-stereo'] != 'undefined' ? params['sprop-stereo'] : '1');

        // NOTE(review): 128 * 1024 * 8 = 1,048,576 bps, far above Opus's
        // maxaveragebitrate ceiling of 510000 (RFC 7587) — the fallback only
        // fires when the caller passes a falsy value, kept for compatibility.
        if (typeof params.maxaveragebitrate != 'undefined') {
            appendOpusNext += '; maxaveragebitrate=' + (params.maxaveragebitrate || 128 * 1024 * 8);
        }

        // NOTE(review): maxplaybackrate is a sample rate in Hz (<= 48000 per
        // RFC 7587), not a bitrate — confirm callers pass sensible values.
        if (typeof params.maxplaybackrate != 'undefined') {
            appendOpusNext += '; maxplaybackrate=' + (params.maxplaybackrate || 128 * 1024 * 8);
        }

        if (typeof params.cbr != 'undefined') {
            // The outer typeof check already guarantees cbr is defined, so the
            // original's inner ternary (`!= 'undefined' ? params.cbr : '1'`)
            // always took the first branch — simplified without behavior change.
            appendOpusNext += '; cbr=' + params.cbr;
        }

        if (typeof params.useinbandfec != 'undefined') {
            appendOpusNext += '; useinbandfec=' + params.useinbandfec;
        }

        if (typeof params.usedtx != 'undefined') {
            appendOpusNext += '; usedtx=' + params.usedtx;
        }

        // maxptime lives on its own "a=maxptime:" line, not inside fmtp.
        if (typeof params.maxptime != 'undefined') {
            appendOpusNext += '\r\na=maxptime:' + params.maxptime;
        }

        sdpLines[opusFmtpLineIndex] = sdpLines[opusFmtpLineIndex].concat(appendOpusNext);

        sdp = sdpLines.join('\r\n');
        return sdp;
    }

    return {
        setApplicationSpecificBandwidth: function(sdp, bandwidth, isScreen) {
            return setBAS(sdp, bandwidth, isScreen);
        },
        setVideoBitrates: function(sdp, params) {
            return setVideoBitrates(sdp, params);
        },
        setOpusAttributes: function(sdp, params) {
            return setOpusAttributes(sdp, params);
        }
    };
})();

Here is how to set advanced Opus bitrate parameters:以下是设置 Opus 高级比特率参数的方法:

// Advanced Opus fmtp parameters, merged into the "a=fmtp:<opus>" line of the SDP.
sdp = BandwidthHandler.setOpusAttributes(sdp, {
    'stereo': 0, // to disable stereo (to force mono audio)
    'sprop-stereo': 1,
    'maxaveragebitrate': 500 * 1024 * 8, // NOTE(review): 500*1024*8 = 4,096,000 bps (~4 Mbps), not "500 kbits"; Opus caps maxaveragebitrate at 510000 — confirm intended value
    'maxplaybackrate': 500 * 1024 * 8, // NOTE(review): maxplaybackrate is a sample rate in Hz (max 48000), not a bitrate — confirm
    'cbr': 0, // disable cbr (i.e. allow variable bitrate)
    'useinbandfec': 1, // use inband fec
    'usedtx': 1, // use dtx (discontinuous transmission)
    'maxptime': 3 // emitted as a separate "a=maxptime:" line, not an fmtp entry
});

不确定这是否有帮助,但您可以通过约束限制来自 getUserMedia 的视频分辨率:请参阅simpl.info/getusermedia/constraints/ 上的演示。

A more up-to-date answer一个更新的答案

// Caps outgoing video by appending a "b=AS" line directly under every
// "m=video ..." line of the already-generated local description — more robust
// than anchoring on "a=mid:video", which is not guaranteed to be present.
const videobitrate = 20000; // NOTE(review): b=AS is in kbps, so 20000 ≈ 20 Mbps — confirm intended unit
var offer = pc.localDescription;
// Set bandwidth for video ($1 re-emits the matched m=video line, then b=AS follows)
offer.sdp = offer.sdp.replace(/(m=video.*\r\n)/g, `$1b=AS:${videobitrate}\r\n`);
pc.setLocalDescription(offer);

Explanation: a=mid:video is not a guaranteed tag.说明: a=mid:video不是保证标签。 For receive only video, you might not see it or see a=mid:0 .对于仅接收视频,您可能看不到它或看到a=mid:0 Generally it's a better bet to look for the m=video xxxx xxxx (or similar audio) tag and append the bandwidth parameters underneath通常最好寻找m=video xxxx xxxx (或类似的音频)标签并在下面附加带宽参数

You should also be able to use bandwidth constraints on the stream ( see this demo ), but it doesn't appear to be working, even in the latest canary (29.0.1529.3).您还应该能够对流使用带宽限制( 请参阅此演示),但它似乎不起作用,即使在最新的金丝雀 (29.0.1529.3) 中也是如此。

There's some discussion of the SDP-based approach on the discuss-webrtc mailing list, which links to WebRTC bug 1846.Discussion-webrtc邮件列表中有一些关于基于 SDP 的方法的讨论,该邮件列表链接到 WebRTC 错误 1846。

I did it Yesterday and it works like a charm!我昨天做到了,它就像一个魅力! in my case, it was needed to prevent slow and old phones get freeze during a videocall!就我而言,需要防止慢速和旧手机在视频通话期间冻结! have a look看一看

/**
 * Rewrites an offer's SDP so that video is capped at 128 kbps and audio at
 * 64 kbps, by inserting "b=AS:<kbps>" lines right below every "m=video" /
 * "m=audio" line. Any pre-existing "b=AS" lines are stripped first.
 *
 * Fixes over the original: ALL existing b=AS lines are removed (the original
 * loop overwrote `position` and only spliced the last one), and the removal
 * no longer misses a match at index 0 (`if (position)` was falsy for 0).
 *
 * @param {{sdp: string}} offer - RTCSessionDescription(-like) object; only
 *   its .sdp string is read, the object itself is not mutated.
 * @param {number} [videoKbps=128] - Video cap in kbps.
 * @param {number} [audioKbps=64] - Audio cap in kbps.
 * @returns {string} The rewritten SDP string (CRLF-terminated lines).
 */
function handle_offer_sdp(offer, videoKbps = 128, audioKbps = 64) {
    // Split into lines; a well-formed SDP ends with "\r\n", so drop the
    // trailing empty element produced by split (the original comment called
    // this "the last comma", but it is the empty final array entry).
    let lines = offer.sdp.split('\r\n').slice(0, -1);

    // Remove every existing bandwidth line before inserting our own.
    lines = lines.filter(line => !line.match(/b=AS:/));

    let new_sdp = '';
    for (const line of lines) {
        new_sdp += line + '\r\n';
        if (line.match(/m=video/)) {
            new_sdp += 'b=AS:' + videoKbps + '\r\n'; // cap video section
        } else if (line.match(/m=audio/)) {
            new_sdp += 'b=AS:' + audioKbps + '\r\n'; // cap audio section
        }
    }
    return new_sdp; // return the new sdp
}
// Create the offer, rewrite its SDP with the bandwidth caps, then apply it
// as the local description (legacy callback-based createOffer signature).
pc.createOffer(function (sdpOffer) {
    sdpOffer.sdp = handle_offer_sdp(sdpOffer); // swap in the capped SDP
    pc.setLocalDescription(sdpOffer);
}, function (err) {
    console.log('error -> ' + err);
});

My answer is not for node.js, but maybe someone stuck with controlling webrtc bandwidth while developing a native phone app (iOS, android).我的答案不适用于 node.js,但也许有人在开发本机手机应用程序(iOS、Android)时坚持控制 webrtc 带宽。

So, at least in version GoogleWebRTC (1.1.31999) for iOS and org.webrtc:google-webrtc:1.0.22672 for android there is method in PeerConnection instance.因此,至少在适用于 iOS 的GoogleWebRTC (1.1.31999)适用于 android 的org.webrtc:google-webrtc:1.0.22672 版本中, PeerConnection实例中有方法。

For iOS:对于 iOS:

// Clamp the peer connection's bandwidth estimator: min 300 kbps, current 1 Mbps, max 3 Mbps.
// The call returns false when the underlying native layer rejects the values.
let updateBitrateSuccessful = pc.setBweMinBitrateBps(300000, currentBitrateBps: 1000000, maxBitrateBps: 3000000)
print("Update rtc connection bitrate " + (updateBitrateSuccessful ? "successful" : "failed"))

Respectively, for Android:分别为Android:

// Clamp the peer connection's bandwidth estimator: min 300 kbps, current 1 Mbps, max 3 Mbps.
// setBitrate returns false when the native layer rejects the request.
boolean updateBitrateSuccessful = pc.setBitrate(300000, 1000000, 3000000);
Log.d("AppLog", "Update rtc connection bitrate " + (updateBitrateSuccessful ? "successful" : "failed"));

It depends on what SFU media server you're using.这取决于您使用的 SFU 媒体服务器。 But in short, your media server needs to tell the client browser what maximum bitrate it should send, by setting the bandwidth attribute in the answer SDP, as well as in the REMB message it periodically sends.但简而言之,您的媒体服务器需要通过在应答SDP 以及它定期发送的 REMB 消息中设置带宽属性来告诉客户端浏览器它应该发送的最大比特率。

The REMB (receiver estimated maximum bitrate) applies separately to audio and video streams (at least on desktop Chrome and Firefox that I tested). REMB(接收器估计的最大比特率)分别适用于音频和视频流(至少在我测试的桌面 Chrome 和 Firefox 上)。 So if REMB is set to 75kps and you have one audio and one video stream, then each will confine to 75kps for a total transport bitrate of 150kps.因此,如果 REMB 设置为 75kps,并且您有一个音频和一个视频流,那么每个流将限制为 75kps,总传输比特率为 150kps。 You should use chrome://webrtc-internals to test and verify this.您应该使用 chrome://webrtc-internals 来测试和验证这一点。

If you are using OPUS as the audio codec, you can control the audio bandwidth separately by setting the maxaveragebitrate attribute in the answer SDP.如果您使用 OPUS 作为音频编解码器,您可以通过在answer SDP 中设置 maxaveragebitrate 属性来单独控制音频带宽。 Setting this attribute will override the REMB value (verified on Chrome).设置此属性将覆盖 REMB 值(已在 Chrome 上验证)。 So you can set audio bitrate to 16kps and the video bitrate (via REMB) to 134kps for a combined transport bitrate of 150.因此,您可以将音频比特率设置为 16kps,将视频比特率(通过 REMB)设置为 134kps,以实现 150 的组合传输比特率。

Note that the REMB is sent by your server, so your server needs to support this.请注意,REMB 是由您的服务器发送的,因此您的服务器需要支持这一点。 The other SDP attributes can be manipulated on the client side by modifying the answer SDP that you receive, right before passing it to setRemoteDescription().其他 SDP 属性可以在客户端通过修改您收到的答案SDP 来操作,就在将其传递给 setRemoteDescription() 之前。

This is my limited understanding based on online research.这是我基于在线研究的有限理解。 It's not based on deep knowledge of the technology stack, so please take it with a grain of salt.它不是基于对技术堆栈的深入了解,所以请持保留态度。

I recommend to change value of maxBitrate property as described here https://stackoverflow.com/a/71223675/1199820我建议按照此处所述更改maxBitrate属性的值https://stackoverflow.com/a/71223675/1199820

Check this, this works for me.检查这个,这对我有用。

Control your bitrate via getSenders(), after peer is connected then you can set your maximum bitrate.通过 getSenders() 控制您的比特率,在对等方连接后,您可以设置最大比特率。

This method allow you to control bitrate without renegotiation.此方法允许您在不重新协商的情况下控制比特率。 so, you can change the streaming quality during a call因此,您可以在通话期间更改流媒体质量

    //bandwidth => "unlimited", 75 kbps, 250 kbps, 1000 kbps, 2000 kbps
    var bandwidth = 75;

    // setParameters() re-applies the sender's encoding settings in place,
    // so the cap can be changed mid-call with no SDP renegotiation.
    const sender = pc1.getSenders()[0];
    const parameters = sender.getParameters();
    parameters.encodings = parameters.encodings || [{}];
    const [encoding] = parameters.encodings;
    if (bandwidth === 'unlimited') {
      delete encoding.maxBitrate;             // remove the cap entirely
    } else {
      encoding.maxBitrate = bandwidth * 1000; // kbps -> bps
    }
    sender.setParameters(parameters)
        .then(() => {
          // cap applied successfully
        })
        .catch(e => console.error(e));

Reference code & demo 参考代码&demo

WebRTC is for peer to peer communication, you cannot control bandwidth in video call. WebRTC用于对等通信,您无法控制视频通话中的带宽。

In google chrome there are these properties on a video element: 在谷歌浏览器中,视频元素上有以下属性:

webkitVideoDecodedByteCount: 0
webkitAudioDecodedByteCount: 0

These are useful to know how fast the client can decode the video. 这些对于了解客户端解码视频的速度非常有用。 As the video plays you would keep track of the delta amount of these bytes which gives you bytes/s the client is processing the video.( SO thread ) 在视频播放时,您将跟踪这些字节的增量,这将为您提供客户端正在处理视频的字节数。( SO线程

you should use Network Information API to know bandwidth ( it is still under implementation). 您应该使用网络信息API来了解带宽(它仍在实施中)。

声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM