[英]Trying to make audio visualization with circle ripples on Canvas and with javascript
[英]Javascript Audio Visualization
我正在使用用戶/瀏覽器麥克風制作一個小型錄音功能。 當麥克風發出聲音時,會顯示音頻可視化(如均衡器)。 到目前為止一切順利。
但我真的想改變可視化的方式,如下圖所示。 但我以前從未在這個領域工作過,也不知道如何了解它。
我想象這樣的事情: https://images.app.goo.gl/pfKgnGnQz3MJVkbW6
我有兩個問題:
我當前的均衡器可視化代碼
// Build the Web Audio graph used for both recording and visualisation.
// NOTE(review): all of these are implicit globals shared with updateAnalysers();
// declared here without var/let so other functions in the file can read them.
audioContext = new AudioContext();
// Keep a handle on the stream (presumably from getUserMedia — confirm with caller)
// so it can be stopped when recording ends.
gumStream = stream;
input = audioContext.createMediaStreamSource(stream);
// Recorder.js instance capturing a single (mono) channel.
rec = new Recorder(input,{numChannels:1})
rec.record()
// Gain node acting as a common tap point that feeds the analyser.
inputPoint = audioContext.createGain();
audioInput = input;
audioInput.connect(inputPoint);
analyserNode = audioContext.createAnalyser();
// fftSize of 1024 yields frequencyBinCount = 512 bins for the bar display.
analyserNode.fftSize = 1024;
inputPoint.connect( analyserNode );
// Start the requestAnimationFrame drawing loop.
updateAnalysers();
// Draw one frame of the frequency-bar visualisation and reschedule itself
// via requestAnimationFrame. Reads/writes the module-level globals
// analyserContext, canvasWidth, canvasHeight, analyserNode and rafID.
//
// @param {DOMHighResTimeStamp} time - rAF timestamp (unused).
function updateAnalysers(time) {
  // Lazily look up the canvas and cache its 2D context on first call.
  if (!analyserContext) {
    var canvas = document.getElementById("analyser");
    canvasWidth = canvas.width;
    canvasHeight = canvas.height;
    analyserContext = canvas.getContext('2d');
  }

  var SPACING = 5;   // horizontal distance between bar origins, px
  var BAR_WIDTH = 5; // width of each bar, px
  var numBars = Math.round(canvasWidth / SPACING);
  var freqByteData = new Uint8Array(analyserNode.frequencyBinCount);
  analyserNode.getByteFrequencyData(freqByteData);

  analyserContext.clearRect(0, 0, canvasWidth, canvasHeight);
  analyserContext.fillStyle = '#D5E9EB';
  // (dropped lineCap: it only affects stroked paths, never fillRect)

  // Number of frequency bins averaged into each bar.
  var multiplier = analyserNode.frequencyBinCount / numBars;

  // Draw one rectangle per bar, averaging its share of the bins so
  // no frequency data is skipped.
  for (var i = 0; i < numBars; ++i) {
    var magnitude = 0;
    var offset = Math.floor(i * multiplier);
    for (var j = 0; j < multiplier; j++)
      magnitude += freqByteData[offset + j];
    magnitude = magnitude / multiplier;
    // Bars grow upward from the bottom edge (negative height).
    analyserContext.fillRect(i * SPACING, canvasHeight, BAR_WIDTH, -magnitude);
  }

  rafID = window.requestAnimationFrame(updateAnalysers);
}
答案 1:
您的圖像已損壞,因此無法回答,但據我所知,您可以使用音頻數據可視化任何波形
你如何開始這樣的事情? (或者有沒有人做過這樣的事情可以分享例子?)
答案 2:
所以我確實使用了自定義波形。 我正在分享我的代碼
import React, { Component } from "react"; import AudioVisualiser from "./AudioVisualiser"; class AudioAnalyser extends Component { constructor(props) { super(props); this.state = { audioData: new Uint8Array(0) }; this.tick = this.tick.bind(this); } componentDidMount() { this.audioContext = new (window.AudioContext || window.webkitAudioContext)(); this.analyser = this.audioContext.createAnalyser(); this.dataArray = new Uint8Array(this.analyser.frequencyBinCount); this.source = this.audioContext.createMediaStreamSource(this.props.audio); this.source.connect(this.analyser); this.rafId = requestAnimationFrame(this.tick); } tick() { this.analyser.getByteTimeDomainData(this.dataArray); this.setState({ audioData: this.dataArray }); this.rafId = requestAnimationFrame(this.tick); } componentWillUnmount() { cancelAnimationFrame(this.rafId); // this.analyser.disconnect(); // this.source.disconnect(); } render() { return <AudioVisualiser audioData={this.state.audioData} />; } } export default AudioAnalyser;
import React, { Component } from 'react';
class AudioVisualiser extends Component {
constructor(props) {
super(props);
this.canvas = React.createRef();
}
componentDidUpdate() {
this.draw();
}
draw() {
const { audioData } = this.props;
const canvas = this.canvas.current;
const height = canvas.height;
const width = canvas.width;
const context = canvas.getContext('2d');
let x = 0;
const sliceWidth = (width * 1.0) / audioData.length;
context.lineWidth = 2;
context.strokeStyle = '#000000';
context.clearRect(0, 0, width, height);
context.beginPath();
context.moveTo(0, height / 2);
for (const item of audioData) {
const y = (item / 255.0) * height;
context.lineTo(x, y);
x += sliceWidth;
}
context.lineTo(x, height / 2);
context.stroke();
}
render() {
return <canvas width="300" height="300" ref={this.canvas} />;
}
}
export default AudioVisualiser;
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.