HTML5 Audio API Examples

来老师 2018-10-17

A collection of notes. Many thanks to the teachers and experts whose work is referenced here:

Examples 1 and 2 follow the 尚硅谷 video tutorials;

Example 3 is based on: https://blog.csdn.net/u011354613/article/details/51272907

Example 1: Playing a custom tune with pure JavaScript

<!DOCTYPE html>
<html>
<head>
 <meta charset="utf-8">
 <title>Home</title>
 <style>
  
 </style>
</head>
<body>
<button id="button">两只老虎</button>
<script type="text/javascript">
 window.AudioContext = window.AudioContext||window.webkitAudioContext||window.mozAudioContext;
 (function(){
  if(!window.AudioContext){
   alert("当前浏览器不支持哟!");
   return;
  }
  var btn = document.getElementById("button");
  var audioCtx = new AudioContext();
  //note frequencies for "两只老虎" (Frère Jacques)
  var arrFrequency = [261.63, 293.66, 329.63, 261.63, 261.63, 293.66, 329.63, 261.63, 329.63,
  349.23, 392.00, 329.63, 349.23, 392.00, 392.00, 440.00, 392.00, 349.23, 329.63, 261.63, 392.00,
  440.00, 392.00, 349.23, 329.63, 261.63, 293.66, 196.00, 261.63, 293.66, 196.00, 261.63];
  
  var start = 0;
  
  btn.addEventListener('mouseenter',function(){
   var frequency = arrFrequency[start];
   if(!frequency){
    start = 0;
    frequency = arrFrequency[start];
   }
   start++;
   //create the oscillator (tone) node
   var oscillator = audioCtx.createOscillator();
   //create the gain (volume) node
   var gainNode = audioCtx.createGain();
   //connect the oscillator to the gain node
   oscillator.connect(gainNode);
   //connect the gain node to the output device
   gainNode.connect(audioCtx.destination);
   //use a sine waveform
   oscillator.type = "sine";
   //set the pitch frequency
   oscillator.frequency.value = frequency;
   
   //start with the volume at 0
   gainNode.gain.setValueAtTime(0,audioCtx.currentTime);
   //ramp the volume linearly from 0 to 1
   gainNode.gain.linearRampToValueAtTime(1,audioCtx.currentTime + 0.01);
   //start playback
   oscillator.start(audioCtx.currentTime);
   //decay the volume exponentially to 0.01 over 0.5s
   gainNode.gain.exponentialRampToValueAtTime(0.01,audioCtx.currentTime + 0.5);
   //stop the sound
   oscillator.stop(audioCtx.currentTime + 0.5);
   
  })
 })();
 
</script>
</body>

</html>
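
A note on Example 1 that the tutorial does not cover: current Chrome and Safari autoplay policies create the AudioContext in a "suspended" state until a real user gesture occurs, and mouseenter is not always treated as one, so the first notes may be silent. A minimal sketch (my addition, reusing the btn and audioCtx variables from the code above) that resumes the context on a click:

 btn.addEventListener('click', function(){
  //resume the context after a genuine user gesture (a click)
  if(audioCtx.state === 'suspended'){
   audioCtx.resume(); //returns a Promise; sound can play once it resolves
  }
 });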

Example 2: Piano music box

<!DOCTYPE html>
<html>
 <head>
  <meta charset="UTF-8">
  <title>Piano Music Box</title>
  <style>
   html,body{
    margin: 0;
    padding: 0;
   }
   .keys{
    margin: 20px auto;
    background: #F8F8F8;
    padding: 10px;
    font-size: 0;
    text-align: center;
   }
   .item-key{
    float: left;
    width: 40px;
    height: 160px;
    background: #fff;
    border: 1px solid #333;
    z-index: 1;
   }
   .active{
    box-shadow: 0 -2px 3px #333 inset;
    background: linear-gradient(to bottom,#fff,#ddd);
   }
   .area{
    display: inline-block;
   }
   .black{
    background: #333333;
    color: #fff;
    height: 80px;
    width: 20px;
    margin-left: -11px;
    margin-right: -11px;
    z-index: 2;
    position: relative;
   }
   .black.active{
    box-shadow: 0 -2px 3px #fff inset;
    background: linear-gradient(#000,#ddd);
   }
  </style>
 </head>
 <body>
  <div class="keys">
   
  </div>
  <script src="./assets/global/plugins/jquery.min.js" type="text/javascript"></script>
  <script>
   //create the audio context
   var audioCtx = new AudioContext();
   //four octaves, C major (12 semitones per octave)
   const voiceList = [
    [261.63,277.18,293.66,311.13,329.63,349.23,369.99,392,415.3,440,466.16,493.88],
    [523.25,554.37,587.33,622.25,659.26,698.46,739.99,783.99,830.61,880,932.33,987.77],
    [1046.5,1108.7,1174.7,1244.5,1318.5,1396.9,1480,1568,1661.2,1760,1864.7,1975.5],
    [2093,2217.5,2349.3,2489,2637,2793.8,2960,3136,3322.4,3520,3729.3,3951.1]
   ];
   function buildKey(){
    let html = [];
    voiceList.forEach(item => {
     html.push('<span class="area">');
     item.forEach((em,index) => {
      if(index == 1 || index == 3 || index == 6 || index == 8 || index == 10){
       html.push('<a href="javascript:;" class="item-key black" data-frequency="'+em+'">'+(index + 1)+'</a>');
      }else{
       html.push('<a href="javascript:;" class="item-key" data-frequency="'+em+'">'+(index + 1)+'</a>');
      }
     });
     html.push('</span>');
    })
    $('.keys').html(html.join(''));
   }
   function bind(){
    $(document).on('mousedown','.item-key',function(e){
     showVoice.call(this);
    }).on('mouseup','.item-key',function(e){
     stopVoice.call(this);
     $(this).off();
    }).on('mousedown',function(){
     $(this).on('mousemove',function(e){
      return false;
     }).on('mouseenter','.item-key',function(e){
      showVoice.call(this);
     }).on('mouseleave','.item-key',function(e){
      stopVoice.call(this);
     }).on('mouseup',function(){
      stopVoice.call(this);
     })
    });
    $(document).on('mouseup',function(){
     stopVoice.call(this);
    })
   }
   function bindM(){
    $(document).on('touchstart','.item-key',function(e){
     showVoice.call(this);
    }).on('touchend','.item-key',function(e){
     stopVoice.call(this);
     $(this).off();
    }).on('touchstart',function(){
     $(this).on('mousemove',function(e){
      return false;
     }).on('mouseenter','.item-key',function(e){
      showVoice.call(this);
     }).on('mouseleave','.item-key',function(e){
      stopVoice.call(this);
     }).on('touchend',function(){
      stopVoice.call(this);
     })
    });
    $(document).on('touchend',function(){
     stopVoice.call(this);
    })
   }
   function showVoice(){
    let val = $(this).data('frequency');
    $(this).addClass('active');
    this.gainNode && this.gainNode.gain.setValueAtTime(0,audioCtx.currentTime);
//    this.OscillatorNode && this.OscillatorNode.stop(audioCtx.currentTime + 1);
    this.oscillator && this.oscillator.stop(audioCtx.currentTime + 1);
    //create the oscillator (tone) node
    this.oscillator = audioCtx.createOscillator();
    //create the gain (volume) node
    this.gainNode = audioCtx.createGain();
    //connect the oscillator to the gain node
    this.oscillator.connect(this.gainNode);
    //connect the gain node to the output device
    this.gainNode.connect(audioCtx.destination);
    //use a sine waveform
    this.oscillator.type = "sine";
    //set the pitch frequency
    this.oscillator.frequency.value = val;
    //start with the volume at 0
    this.gainNode.gain.setValueAtTime(0,audioCtx.currentTime);
    //start playback
    this.oscillator.start(audioCtx.currentTime);
    //ramp the volume linearly from 0 to 1 over 0.01s
    this.gainNode.gain.linearRampToValueAtTime(1,audioCtx.currentTime + 0.01);
   }
   function stopVoice(){
    $(this).removeClass('active');
    this.gainNode && this.gainNode.gain.exponentialRampToValueAtTime(0.001,audioCtx.currentTime + 0.6);
    //stop the sound after 0.6 seconds
    this.oscillator && this.oscillator.stop(audioCtx.currentTime + 0.6);
    this.oscillator = this.gainNode = null;
    $(this).off('mouseenter').off('mouseleave');
   }
   console.log(navigator.userAgent);
   buildKey();
   
   var u = navigator.userAgent;
   var isAndroid = u.indexOf('Android') > -1 || u.indexOf('Adr') > -1; //Android device
   var isioS = !!u.match(/\(i[^;]+;( U;)? CPU.+Mac OS X/); //iOS device
   if(isAndroid || isioS){
    bindM()
   }else{
    bind();
   }
  </script>
 </body>
</html>
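
The four-octave frequency table in Example 2 is written out by hand. The same values can be derived from the equal-temperament formula f = 440 * 2^((n - 69) / 12), where n is the MIDI note number and A4 (n = 69) is 440 Hz. A minimal sketch under that assumption (midiToFrequency and generatedVoiceList are illustrative names, not part of the original demo); the generated values match the hand-written table to within rounding:

   //equal-temperament frequency for a MIDI note number (A4 = MIDI 69 = 440 Hz)
   function midiToFrequency(note){
    return 440 * Math.pow(2, (note - 69) / 12);
   }
   //rebuild the four octaves used above, starting at C4 (MIDI 60)
   var generatedVoiceList = [];
   for(var octave = 0; octave < 4; octave++){
    var row = [];
    for(var i = 0; i < 12; i++){
     row.push(+midiToFrequency(60 + 12 * octave + i).toFixed(2));
    }
    generatedVoiceList.push(row);
   }
   //generatedVoiceList[0][0] === 261.63 (C4), generatedVoiceList[0][9] === 440 (A4)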

Example 3: Select a file and play it

HTML:

<!DOCTYPE html>
<html>
    <head>
        <meta name="description" content="HTML5 Audio Spectrum Visualizer">
        <meta name="viewport" content="width=device-width, initial-scale=1.0">
        <title>HTML5 Audio API showcase | Audio visualizer</title>
        <link type="text/css" rel="stylesheet" href="style.css">
    </head>  
    <body>
        <div id="wrapper">
            <div id="fileWrapper" class="file_wrapper">
                <div id="info">
                    HTML5 Audio API showcase | An Audio Visualizer
                </div>
                <label for="uploadedFile">Drag&drop or select a file to play:</label>
                <input type="file" id="uploadedFile"></input>
            </div>
            <div id="visualizer_wrapper">
                <canvas id='canvas' width="800" height="350"></canvas>
            </div>
        </div>
        <footer>
            <small>Star me on <a href="https://github.com/Wayou/HTML5_Audio_Visualizer"  target="_blank">GitHub</a></small>
        </footer>
        <script type="text/javascript" src="html5_audio_visualizer.js"></script>
    </body>
</html>

JavaScript:

/**
 * An audio spectrum visualizer built with HTML5 Audio API
 * Author:Wayou
 * License: MIT
 * Feb 15, 2014
 */
window.onload = function() {
    new Visualizer().ini();
};
var Visualizer = function() {
    this.file = null; //the current file
    this.fileName = null; //the current file name
    this.audioContext = null;
    this.source = null; //the audio source
    this.info = document.getElementById('info').innerHTML; //used to update the UI text
    this.infoUpdateId = null; //stores the setTimeout ID so the text animation can be cleared
    this.animationId = null;
    this.status = 0; //flag: 1 while the sound is playing, 0 when stopped
    this.forceStop = false;
    this.allCapsReachBottom = false;
};
Visualizer.prototype = {
    ini: function() {
        this._prepareAPI();
        this._addEventListner();
    },
    _prepareAPI: function() {
        //fix browser vendor prefixes for AudioContext and requestAnimationFrame
        window.AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.msAudioContext;
        window.requestAnimationFrame = window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame || window.msRequestAnimationFrame;
        window.cancelAnimationFrame = window.cancelAnimationFrame || window.webkitCancelAnimationFrame || window.mozCancelAnimationFrame || window.msCancelAnimationFrame;
        try {
            this.audioContext = new AudioContext();
        } catch (e) {
            this._updateInfo('!Your browser does not support AudioContext', false);
            console.log(e);
        }
    },
    _addEventListner: function() {
        var that = this,
            audioInput = document.getElementById('uploadedFile'),
            dropContainer = document.getElementsByTagName("canvas")[0];
        //listen for the file upload
        audioInput.onchange = function() {
            if (that.audioContext===null) {return;};

            //this check handles a cancelled file selection: onchange fires even when the selection is cancelled
            if (audioInput.files.length !== 0) {
                //only process the first file
                that.file = audioInput.files[0];
                that.fileName = that.file.name;
                if (that.status === 1) {
                    //the sound is still playing but we upload another file, so set the forceStop flag to true
                    that.forceStop = true;
                };
                document.getElementById('fileWrapper').style.opacity = 1;
                that._updateInfo('Uploading', true);
                //once the file is ready,start the visualizer
                that._start();
            };
        };
        //listen for drag & drop
        dropContainer.addEventListener("dragenter", function() {
            document.getElementById('fileWrapper').style.opacity = 1;
            that._updateInfo('Drop it on the page', true);
        }, false);
        dropContainer.addEventListener("dragover", function(e) {
            e.stopPropagation();
            e.preventDefault();
            //set the drop mode
            e.dataTransfer.dropEffect = 'copy';
        }, false);
        dropContainer.addEventListener("dragleave", function() {
            document.getElementById('fileWrapper').style.opacity = 0.2;
            that._updateInfo(that.info, false);
        }, false);
        dropContainer.addEventListener("drop", function(e) {
            e.stopPropagation();
            e.preventDefault();
            if (that.audioContext===null) {return;};
            document.getElementById('fileWrapper').style.opacity = 1;
            that._updateInfo('Uploading', true);
            //get the dropped file
            that.file = e.dataTransfer.files[0];
            if (that.status === 1) {
                document.getElementById('fileWrapper').style.opacity = 1;
                that.forceStop = true;
            };
            that.fileName = that.file.name;
            //once the file is ready,start the visualizer
            that._start();
        }, false);
    },
    _start: function() {
        //read and decode the file into audio array buffer
        var that = this,
            file = this.file,
            fr = new FileReader();
        fr.onload = function(e) {
            var fileResult = e.target.result;
            var audioContext = that.audioContext;
            if (audioContext === null) {
                return;
            };
            that._updateInfo('Decoding the audio', true);
            audioContext.decodeAudioData(fileResult, function(buffer) {
                that._updateInfo('Decoded successfully, starting the visualizer', true);
                that._visualize(audioContext, buffer);
            }, function(e) {
                that._updateInfo('!Failed to decode the file', false);
                console.error(e);
            });
        };
        fr.onerror = function(e) {
            that._updateInfo('!Failed to read the file', false);
            console.error(e);
        };
        //assign the file to the reader
        this._updateInfo('Starting to read the file', true);
        fr.readAsArrayBuffer(file);
    },
    _visualize: function(audioContext, buffer) {
        var audioBufferSouceNode = audioContext.createBufferSource(),
            analyser = audioContext.createAnalyser(),
            that = this;
        //connect the source to the analyser
        audioBufferSouceNode.connect(analyser);
        //connect the analyser to the destination(the speaker), or we won't hear the sound
        analyser.connect(audioContext.destination);
        //then assign the buffer to the buffer source node
        audioBufferSouceNode.buffer = buffer;
        //play the source
        if (!audioBufferSouceNode.start) {
            audioBufferSouceNode.start = audioBufferSouceNode.noteOn //in old browsers use noteOn method
            audioBufferSouceNode.stop = audioBufferSouceNode.noteOff //in old browsers use noteOff method
        };
        //stop the previous sound if any
        if (this.animationId !== null) {
            cancelAnimationFrame(this.animationId);
        }
        if (this.source !== null) {
            this.source.stop(0);
        }
        audioBufferSouceNode.start(0);
        this.status = 1;
        this.source = audioBufferSouceNode;
        audioBufferSouceNode.onended = function() {
            that._audioEnd(that);
        };
        this._updateInfo('Playing ' + this.fileName, false);
        this.info = 'Playing ' + this.fileName;
        document.getElementById('fileWrapper').style.opacity = 0.2;
        this._drawSpectrum(analyser);
    },
    _drawSpectrum: function(analyser) {
        var that = this,
            canvas = document.getElementById('canvas'),
            cwidth = canvas.width,
            cheight = canvas.height - 2,
            meterWidth = 10, //width of the meters in the spectrum
            gap = 2, //gap between meters
            capHeight = 2,
            capStyle = '#fff',
            meterNum = 800 / (10 + 2), //count of the meters
            capYPositionArray = [], //store the vertical position of the caps for the previous frame
            ctx = canvas.getContext('2d'),
            gradient = ctx.createLinearGradient(0, 0, 0, 300);
        gradient.addColorStop(1, '#0f0');
        gradient.addColorStop(0.5, '#ff0');
        gradient.addColorStop(0, '#f00');
        var drawMeter = function() {
            var array = new Uint8Array(analyser.frequencyBinCount);
            analyser.getByteFrequencyData(array);
            if (that.status === 0) {
                //when playback stops, some bin values may not have fallen back to zero yet, so zero them out
                for (var i = array.length - 1; i >= 0; i--) {
                    array[i] = 0;
                };
                var allCapsReachBottom = true;
                for (var i = capYPositionArray.length - 1; i >= 0; i--) {
                    allCapsReachBottom = allCapsReachBottom && (capYPositionArray[i] === 0);
                };
                if (allCapsReachBottom) {
                    cancelAnimationFrame(that.animationId); //the sound has stopped and the animation is finished, so cancel the animation loop to prevent a potential memory leak. THIS IS VERY IMPORTANT!
                    return;
                };
            };
            var step = Math.round(array.length / meterNum); //sample limited data from the total array
            ctx.clearRect(0, 0, cwidth, cheight);
            for (var i = 0; i < meterNum; i++) {
                var value = array[i * step];
                if (capYPositionArray.length < Math.round(meterNum)) {
                    capYPositionArray.push(value);
                };
                ctx.fillStyle = capStyle;
                //draw the cap, with transition effect
                if (value < capYPositionArray[i]) {
                    ctx.fillRect(i * 12, cheight - (--capYPositionArray[i]), meterWidth, capHeight);
                } else {
                    ctx.fillRect(i * 12, cheight - value, meterWidth, capHeight);
                    capYPositionArray[i] = value;
                };
                ctx.fillStyle = gradient; //set the fillStyle to the gradient for a better look
                ctx.fillRect(i * 12 /*meterWidth+gap*/ , cheight - value + capHeight, meterWidth, cheight); //the meter
            }
            that.animationId = requestAnimationFrame(drawMeter);
        }
        this.animationId = requestAnimationFrame(drawMeter);
    },
    _audioEnd: function(instance) {
        if (this.forceStop) {
            this.forceStop = false;
            this.status = 1;
            return;
        };
        this.status = 0;
        var text = 'HTML5 Audio API showcase | An Audio Visualizer';
        document.getElementById('fileWrapper').style.opacity = 1;
        document.getElementById('info').innerHTML = text;
        instance.info = text;
        document.getElementById('uploadedFile').value = '';
    },
    _updateInfo: function(text, processing) {
        var infoBar = document.getElementById('info'),
            dots = '...',
            i = 0,
            that = this;
        infoBar.innerHTML = text + dots.substring(0, i++);
        if (this.infoUpdateId !== null) {
            clearTimeout(this.infoUpdateId);
        };
        if (processing) {
            //animate dots at the end of the info text
            var animateDot = function() {
                if (i > 3) {
                    i = 0
                };
                infoBar.innerHTML = text + dots.substring(0, i++);
                that.infoUpdateId = setTimeout(animateDot, 250);
            }
            this.infoUpdateId = setTimeout(animateDot, 250);
        };
    }
}
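
One detail worth calling out in _drawSpectrum above: the analyser keeps the default fftSize of 2048, so analyser.frequencyBinCount is 1024, while the loop only samples roughly one bin per meter (about 67 of them). Lowering fftSize yields a coarser but cheaper spectrum. A standalone sketch of the same read loop under that assumption (the variable names here are illustrative, not from the code above):

    //assumes an existing AudioContext named audioContext
    var analyser = audioContext.createAnalyser();
    analyser.fftSize = 256; //128 frequency bins instead of the default 1024
    //wire it in between a source node and the speakers, e.g.:
    //source.connect(analyser); analyser.connect(audioContext.destination);
    var bins = new Uint8Array(analyser.frequencyBinCount);
    function readFrame() {
        analyser.getByteFrequencyData(bins); //fills bins with 0-255 magnitudes
        //...draw bins onto the canvas here...
        requestAnimationFrame(readFrame);
    }
    requestAnimationFrame(readFrame);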

CSS:

html, body {
    margin: 0;
    font-family: arial, "Microsoft YaHei";
    background-color: #272822;
    color: #FEFEFE;
}
#fileWrapper {
    transition: all 0.5s ease;
}
#fileWrapper:hover {
    opacity: 1 !important;
}

#visualizer_wrapper {
    text-align: center;
}
footer {
    position: fixed;
    bottom: 2px;
    color: #aaa;
}
