[英]How can I get real time update on a webpage using nodejs and socket.io from a mysql database?
[英]How can I stream audio using Socket.io in real time?
我目前正在使用 socket.io 在 HTML 和 JS 中創建聊天應用程序。 我能夠輕松地在兩台計算機之間進行通信,發送書面消息。 我的問題是我無法弄清楚如何將錄制在一台計算機中的實時語音發送到另一台計算機。 我正在使用“navigator.getUserMedia”從麥克風錄制音頻,效果很好,但我無法通過 Socket.io 將其發送到另一台計算機。
我知道我可以開始錄制,然后停止並最終發送結果,但這不是我需要的,我需要的是實時流媒體。
這是服務器的代碼:
var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var path = require('path');
var users = [];
app.get('/', function(req, res){
res.sendFile(__dirname + '/index.html');
});
io.on('connection', function(socket){
socket.on('add user', function(user_id){
users.push(user_id);
});
socket.on('chat message', function(msg){
io.emit('chat message', msg);
});
socket.on('voice sent', function(msg){
io.emit('voice received', msg);
});
});
http.listen(3000, function(){
console.log('listening on *:3000');
});
這是客戶端js:
$(function () {
var socket = io();
var user_id = Math.random();
socket.emit('add user', user_id);
$('form').submit(function(){
socket.emit('chat message', {"message": $('#m').val(), "user": user_id});
$('#m').val('');
return false;
});
socket.on('chat message', function(msg){
if(msg.user == user_id){
$('#messages').append($('<li class="mine">').text(msg.message));
}
else{
$('#messages').append($('<li>').text(msg.message));
}
});
socket.on('voice received', function(msg){
var audio = document.querySelector('audio');
audio.src = window.URL.createObjectURL(stream);
});
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia;
if (navigator.getUserMedia) {
console.log('getUserMedia supported.');
navigator.getUserMedia ({audio: true}, successCallback, errorCallback);
} else {
console.log('getUserMedia not supported on your browser!');
}
function successCallback(stream){
socket.emit('voice sent', stream);
}
function errorCallback(error) {
console.error('An error occurred: [CODE ' + error.code + ']');
}
});
我希望有人能幫助我。 謝謝
我試着這樣做,過了一段時間我這樣做了:
export default {
data() {
return {
audioCtx: new AudioContext(), // for get stream from mic and send via socket
audioC: new AudioContext(), // for receive data from socket and translate and play
}
},
created() {
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia
if (navigator.getUserMedia) {
console.log('getUserMedia supported.')
navigator.getUserMedia(
{ audio: true },
stream => {
const source = this.audioCtx.createMediaStreamSource(stream)
const processor = this.audioCtx.createScriptProcessor(2048, 1, 1)
source.connect(processor)
processor.connect(this.audioCtx.destination)
processor.onaudioprocess = e => {
this.$store.state.mic.Emit('Voice', e.inputBuffer.getChannelData(0))
}
this.$store.state.mic.On('Voice', msg => {
const fbu = new Float32Array(Object.values(JSON.parse(msg)))
const audioBuffer = this.audioC.createBuffer(1, 2048, this.audioC.sampleRate)
audioBuffer.getChannelData(0).set(fbu)
const sourceNode = new AudioBufferSourceNode(this.audioC, { buffer: audioBuffer })
sourceNode.connect(this.audioC.destination)
sourceNode.start(0)
})
this.audioC.resume()
},
err => console.error(err)
)
}
}
}
並且服務器只是將它得到的所有東西都發送給除了發送方客戶端之外的所有東西,但仍然存在問題,由於不同的 AudioBufferSourceNode 通話質量不好,它被分離和切碎。
在這塊代碼中
socket.on('voice received', function(msg){
var audio = document.querySelector('audio');
audio.src = window.URL.createObjectURL(stream);
});
流在任何地方都沒有定義......我認為這可以解決你的問題
socket.on('voice received', function(msg){
var audio = document.querySelector('audio');
audio.src = window.URL.createObjectURL(msg);
});
使用對等或 webrtc .. git 中提供了許多現場示例...
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.