虽然 audio 标签使用起来很简单,但也存在一些局限。比如它只控制音频的播放、暂停、音量等。如果我们想进一步控制音频,比如通道合并和拆分、混响、音高和音频幅度压缩等。那么仅仅使用 audio 标签是做不到的。为了解决这个问题,我们需要使用 Web Audio API。
Web Audio API 提供了一个非常高效和通用的系统来控制 Web 上的音频,允许开发人员为音频添加特殊效果、可视化音频、添加空间效果等等。Web Audio API 让用户能够在音频上下文(AudioContext)中进行音频操作,具有模块化路由的特点。基本的音频操作是在音频节点上执行的,这些节点连接在一起形成一个音频路由图。
获取频率数据后,我们就可以使用 Canvas API 实现数据可视化,比如使用 CanvasRenderingContext2D 接口中的 fillRect 方法,对数据进行可视化。
// Render one frame of the frequency-bar visualization and schedule the next
// frame via requestAnimationFrame (runs as a self-perpetuating loop).
// Relies on outer-scope state set up before the first call: analyser,
// frequencyData, bufferLength, canvasContext, canvasWidth, canvasHeight.
function drawBar() {
  requestAnimationFrame(drawBar);
  // Fill frequencyData with the current frequency-domain samples (0-255 per bin).
  analyser.getByteFrequencyData(frequencyData);
  canvasContext.clearRect(0, 0, canvasWidth, canvasHeight);
  // Bar width is constant for the whole frame — hoist it out of the loop
  // (the original recomputed it on every iteration).
  const barWidth = (canvasWidth / bufferLength) * 2;
  let x = 0;
  for (let i = 0; i < bufferLength; i++) {
    const barHeight = frequencyData[i];
    // Color sweep across the spectrum: red tracks amplitude, green rises
    // with bin index, blue is fixed.
    const r = barHeight + 25 * (i / bufferLength);
    const g = 250 * (i / bufferLength);
    const b = 50;
    canvasContext.fillStyle = `rgb(${r},${g},${b})`;
    // Bars grow upward from the bottom edge of the canvas.
    canvasContext.fillRect(x, canvasHeight - barHeight, barWidth, barHeight);
    x += barWidth + 2;
  }
}
分析完上面的处理流程,我们来看一下完整的代码:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Visualizations with Web Audio API</title>
</head>
<body>
<input id="audioFile" type="file" accept="audio/*"/>
<canvas id="canvas"></canvas>
<script>
// Canvas setup: size the canvas to fill the viewport.
const canvas = document.querySelector("#canvas");
const inputFile = document.querySelector("#audioFile");
const canvasWidth = window.innerWidth;
const canvasHeight = window.innerHeight;
const canvasContext = canvas.getContext("2d");
canvas.width = canvasWidth;
canvas.height = canvasHeight;

// Shared visualization state, populated once an audio file is decoded.
let frequencyData = [];
let bufferLength = 0;
let analyser;

inputFile.onchange = function (event) {
  const file = event.target.files[0];
  // Fix: the user may cancel the file picker — files[0] is then undefined
  // and readAsArrayBuffer would throw.
  if (!file) return;
  const reader = new FileReader();
  reader.readAsArrayBuffer(file);
  reader.onload = (evt) => {
    const encodedBuffer = evt.currentTarget.result;
    const context = new AudioContext();
    context.decodeAudioData(
      encodedBuffer,
      (decodedBuffer) => {
        const dataSource = context.createBufferSource();
        dataSource.buffer = decodedBuffer;
        analyser = createAnalyser(context, dataSource);
        // frequencyBinCount is fftSize / 2: one byte of data per frequency bin.
        bufferLength = analyser.frequencyBinCount;
        frequencyData = new Uint8Array(bufferLength);
        dataSource.start();
        drawBar();
      },
      // Fix: report decode failures instead of failing silently
      // (the original passed no error callback).
      (err) => console.error("decodeAudioData failed:", err)
    );
  };

  // Wire source -> analyser -> destination and return the analyser node.
  function createAnalyser(context, dataSource) {
    const analyser = context.createAnalyser();
    analyser.fftSize = 512;
    dataSource.connect(analyser);
    analyser.connect(context.destination);
    return analyser;
  }

  // Render one frame of the frequency bars and schedule the next frame.
  function drawBar() {
    requestAnimationFrame(drawBar);
    analyser.getByteFrequencyData(frequencyData);
    canvasContext.clearRect(0, 0, canvasWidth, canvasHeight);
    // Bar width is constant per frame — hoisted out of the loop.
    const barWidth = (canvasWidth / bufferLength) * 2;
    let x = 0;
    for (let i = 0; i < bufferLength; i++) {
      const barHeight = frequencyData[i];
      const r = barHeight + 25 * (i / bufferLength);
      const g = 250 * (i / bufferLength);
      const b = 50;
      canvasContext.fillStyle = `rgb(${r},${g},${b})`;
      canvasContext.fillRect(x, canvasHeight - barHeight, barWidth, barHeight);
      x += barWidth + 2;
    }
  }
};
</script>
</body>
</html>