【前端】vue+recorder实现录音功能

重要的事情说三遍:要在https下才能实现! 要在https下才能实现!! 要在https下才能实现!!!
我当时就是忽略了这个点折腾了很久,最后根据这篇文章《Vue.js实战——封装浏览器录音组件》,实现了录音功能。
因为本地是http,不能看到效果,建议使用【whistle】强大的web调试代理工具来进行代理本地,随时查看修改的效果

因为原文章不能抛出录音失败的错误,所以在此基础修改下

//recorder.js
export default class Recorder {
  /**
   * Wraps a WebAudio ScriptProcessor pipeline that captures PCM samples
   * from a microphone MediaStream and encodes them as a mono WAV blob.
   *
   * @param {MediaStream} stream - audio stream obtained from getUserMedia
   * @param {Object} [config] - optional settings
   * @param {number} [config.sampleBits=16] - output sample size (8 or 16)
   * @param {number} [config.sampleRate=8000] - output sample rate in Hz
   */
  constructor(stream, config) {
    // Compatibility shims for prefixed browser APIs.
    window.URL = window.URL || window.webkitURL;
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;

    config = config || {};
    config.sampleBits = config.sampleBits || 16;   // output sample size: 8 or 16
    config.sampleRate = config.sampleRate || 8000; // output sample rate (1/6 of 44100)

    // Prefer the standard constructor; fall back to the webkit prefix.
    this.context = new (window.AudioContext || window.webkitAudioContext)();
    this.audioInput = this.context.createMediaStreamSource(stream);
    this.createScript = this.context.createScriptProcessor || this.context.createJavaScriptNode;
    this.recorder = this.createScript.apply(this.context, [4096, 1, 1]);

    this.audioData = {
      size: 0,        // total number of captured samples
      buffer: [],     // captured Float32Array chunks
      inputSampleRate: this.context.sampleRate, // rate the hardware delivers
      inputSampleBits: 16,                      // input sample size
      outputSampleRate: config.sampleRate,      // requested output rate
      oututSampleBits: config.sampleBits,       // output sample size (original property name kept for compatibility)
      // Append one ScriptProcessor chunk (copied — the engine reuses its buffer).
      input: function (data) {
        this.buffer.push(new Float32Array(data));
        this.size += data.length;
      },
      // Merge all chunks into one array, then naively downsample by keeping
      // every Nth sample, where N = inputRate / outputRate.
      compress: function () {
        let data = new Float32Array(this.size);
        let offset = 0;
        for (let i = 0; i < this.buffer.length; i++) {
          data.set(this.buffer[i], offset);
          offset += this.buffer[i].length;
        }
        // Math.max guards against an output rate above the input rate, which
        // would otherwise make the step 0 (original used parseInt on a number).
        let compression = Math.max(1, Math.floor(this.inputSampleRate / this.outputSampleRate));
        // Math.floor: a fractional typed-array length is invalid.
        let length = Math.floor(data.length / compression);
        let result = new Float32Array(length);
        let index = 0, j = 0;
        while (index < length) {
          result[index] = data[j];
          j += compression;
          index++;
        }
        return result;
      },
      // Build a mono PCM WAV file: 44-byte RIFF header + sample data.
      encodeWAV: function () {
        let sampleRate = Math.min(this.inputSampleRate, this.outputSampleRate);
        let sampleBits = Math.min(this.inputSampleBits, this.oututSampleBits);
        let bytes = this.compress();
        let dataLength = bytes.length * (sampleBits / 8);
        let buffer = new ArrayBuffer(44 + dataLength);
        let data = new DataView(buffer);

        let channelCount = 1; // mono
        let offset = 0;

        let writeString = function (str) {
          for (let i = 0; i < str.length; i++) {
            data.setUint8(offset + i, str.charCodeAt(i));
          }
        };

        // RIFF chunk descriptor
        writeString('RIFF');
        offset += 4;
        // file size minus the 8 bytes already written
        data.setUint32(offset, 36 + dataLength, true);
        offset += 4;
        // RIFF type
        writeString('WAVE');
        offset += 4;
        // "fmt " sub-chunk id
        writeString('fmt ');
        offset += 4;
        // fmt chunk size: 16 for PCM
        data.setUint32(offset, 16, true);
        offset += 4;
        // audio format: 1 = PCM
        data.setUint16(offset, 1, true);
        offset += 2;
        // channel count
        data.setUint16(offset, channelCount, true);
        offset += 2;
        // sample rate
        data.setUint32(offset, sampleRate, true);
        offset += 4;
        // byte rate = channels * sampleRate * bytesPerSample
        data.setUint32(offset, channelCount * sampleRate * (sampleBits / 8), true);
        offset += 4;
        // block align = channels * bytesPerSample
        data.setUint16(offset, channelCount * (sampleBits / 8), true);
        offset += 2;
        // bits per sample
        data.setUint16(offset, sampleBits, true);
        offset += 2;
        // "data" sub-chunk id
        writeString('data');
        offset += 4;
        // size of the sample data
        data.setUint32(offset, dataLength, true);
        offset += 4;
        // Write the samples. 8-bit WAV PCM is UNSIGNED (0..255, 128 = silence);
        // the original used setInt8 (with a bogus endian arg), which only stored
        // the right bytes by accident of two's-complement wrapping.
        if (sampleBits === 8) {
          for (let i = 0; i < bytes.length; i++, offset++) {
            let s = Math.max(-1, Math.min(1, bytes[i]));
            let val = s < 0 ? s * 0x8000 : s * 0x7FFF;
            val = Math.floor(255 / (65535 / (val + 32768))); // map [-32768,32767] -> [0,255]
            data.setUint8(offset, val);
          }
        } else {
          for (let i = 0; i < bytes.length; i++, offset += 2) {
            let s = Math.max(-1, Math.min(1, bytes[i]));
            data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
          }
        }
        return new Blob([data], { type: 'audio/wav' });
      }
    };
  }

  // Connect the graph and start collecting samples.
  start () {
    this.audioInput.connect(this.recorder);
    this.recorder.connect(this.context.destination);

    let self = this;
    this.recorder.onaudioprocess = function (e) {
      self.audioData.input(e.inputBuffer.getChannelData(0));
    };
  }

  // Stop collecting samples.
  stop () {
    this.recorder.disconnect();
  }

  // Stop and return the recording encoded as a WAV blob.
  getBlob () {
    this.stop();
    return this.audioData.encodeWAV();
  }

  // Play the recording through the given <audio> element.
  play (audio) {
    audio.src = window.URL.createObjectURL(this.getBlob());
  }

  // Drop all buffered samples and reset the <audio> element.
  clear (audio) {
    this.audioData.buffer = [];
    this.audioData.size = 0;
    audio.src = ''
  }

  // Map a getUserMedia failure to a human-readable message.
  static checkError (e) {
    const { name } = e;
    let errorMsg = ''
    switch (name) {
      case 'AbortError': errorMsg = '录音设备无法被使用'; break;
      case 'NotAllowedError':
      case 'PermissionDeniedError': errorMsg = '用户已禁止网页调用录音设备'; break; // user denied permission
      case 'NotFoundError':
      case 'DevicesNotFoundError': errorMsg = '录音设备未找到'; break;
      case 'NotReadableError': errorMsg = '录音设备无法使用'; break;
      case 'NotSupportedError': errorMsg = '不支持录音功能'; break;
      case 'MandatoryUnsatisfiedError': errorMsg = '无法发现指定的硬件设备'; break;
      default: errorMsg = '录音调用错误'; break;
    }
    return { error: errorMsg }
  }

  /**
   * Acquire the microphone and invoke `callback` with either a Recorder
   * instance or an `{ error }` object.
   *
   * Bug fix: the legacy navigator.getUserMedia API is callback-based, not
   * promise-based — the original called .then() on its return value, which
   * always threw. The legacy path is now wrapped in a Promise.
   */
  static get (callback, config) {
    if (!callback) return;
    const deliver = (promise) => {
      promise.then((stream) => {
        callback(new Recorder(stream, config));
      }).catch((e) => {
        callback(Recorder.checkError(e));
      });
    };
    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
      deliver(navigator.mediaDevices.getUserMedia({ audio: true, video: false }));
    } else if (navigator.getUserMedia) {
      deliver(new Promise((resolve, reject) => {
        navigator.getUserMedia({ audio: true, video: false }, resolve, reject);
      }));
    } else {
      callback({ error: '不支持录音功能' });
    }
  }
}

//record-sdk.js
import Recorder from "./recorder";
export default class Record {
  /**
   * Thin SDK around Recorder: turns its callback/exception style into
   * success/error callback objects.
   */

  /**
   * Start capturing audio from the microphone.
   * @param {{success: Function, error: Function}} param - result callbacks
   */
  startRecord (param) {
    try {
      Recorder.get((rec) => {
        // Recorder.get hands back either a Recorder or an { error } object.
        if (rec.error) {
          return param.error(rec.error);
        }
        this.recorder = rec;
        this.recorder.start();
        param.success("开始录音");
      });
    } catch (e) {
      param.error("开始录音失败" + e);
    }
  }

  /**
   * Stop recording and deliver the encoded WAV blob to `param.success`.
   * @param {{success: Function, error: Function}} param - result callbacks
   */
  stopRecord (param) {
    try {
      const blobData = this.recorder.getBlob();
      param.success(blobData);
    } catch (e) {
      param.error("结束录音失败" + e);
    }
  }

  /** Play the recording through the given <audio> element. */
  play (audio) {
    try {
      this.recorder.play(audio);
    } catch (e) {
      console.error("录音播放失败" + e);
    }
  }

  /** Discard the buffered recording and reset the <audio> element. */
  clear (audio) {
    try {
      this.recorder.clear(audio);
    } catch (e) {
      console.error("清空录音失败" + e);
    }
  }
}

//voice.vue
...
  <div class="record">
      <h1>{{tipMsg}}</h1>
      <button @click="onStartVoice">开始</button>
      <button @click="onEndVoice">结束</button>
      <button @click="onPlayAudio">播放</button>
      <div class="record-play"
           v-show="isFinished">
        <h2>Current voice player is:</h2>
        <audio id="audioVoice"
               controls
               autoplay></audio>
      </div>
    </div>
...
<script>
import Record from '@/plugins/recorder/record-sdk';
export default {
  data () {
    return {
      isVoice: false,      // true while a recording is in progress
      isFinished: false,   // toggles visibility of the playback panel
      tipMsg: '录音',
      audio: "",           // holds the <audio> DOM element once playback starts
      recorder: new Record()
    }
  },
 methods: {
     // Start recording: stop any current playback first, then ask the SDK to record.
    onStartVoice () {
      this.onStopAudio()
      this.isFinished = false;
      this.recorder.startRecord({
        success: res => {
          this.isVoice = true
        },
        error: e => {
          this.isVoice = false
          // NOTE(review): assumes a $toast plugin is installed — confirm.
          this.$toast(e)
        }
      });
    },

    // Stop recording; `res` in the success callback is the encoded audio blob.
    onEndVoice () {
      this.isFinished = false;
      this.recorder.stopRecord({
        success: res => {
          this.isVoice = false
          // The blob (res) can be used here, e.g. uploaded to a server.
          console.log('音频源文件', res)
        },
        error: e => {
          this.isVoice = false
        }
      });
    },

    // Play the recording back through the #audioVoice element.
    onPlayAudio () {
      this.isVoice = false
      this.isFinished = true;
      this.audio = document.getElementById("audioVoice");
      this.recorder.play(this.audio);
    },

    // Stop playback and clear the buffered recording data.
    onStopAudio () {
      this.recorder.clear(this.audio);
    }
  }
}
</script>

因为项目可能要实现语音录菜功能,便提前看下h5是否可以录音,如果后期真要实现语音录菜功能,再来更新。

作者:北极星丶超帅的
链接:https://www.jianshu.com/p/f5637e838af0
来源:简书
著作权归作者所有。商业转载请联系作者获得授权,非商业转载请注明出处。

vue 实现 扫二维码 功能

如何通过 vue 调用 相机 实现 扫一扫的功能,但是查看文档发现,需要获取 getUserMedia 的属性值,但存在兼容性问题。

退而求其次,通过 h5plus 来实现。

QrScanner.vue

<!-- 扫描二维码-->

<template>
  <div>
    <!-- camera preview -->
    <video id="qr-vedio" class="v" autoplay=""></video>
    <!-- hidden canvas the decoder reads frames from -->
    <canvas id="qr-canvas" width="800" height="600" style="width: 800px; height: 600px; display: none;"></canvas>
    <p v-show="result != ''">{{result}}</p>
    <p v-show="errorMes != ''">{{errorMes}}</p>
  </div>
</template>

<script>
// Scans a QR code with the device camera and decodes frames with the global
// `qrcode` library (assumed to be loaded elsewhere — TODO confirm).
// The original paste was whitespace-mangled (e.g. `exportdefault{`, `var_that`,
// `_that` split across lines); this is the reconstructed, parseable version.
export default {
  props: {},
  data () {
    return {
      vedio: '',      // <video> element (original spelling kept)
      canvas: '',     // <canvas> element
      context: '',    // 2D drawing context of the canvas
      stopScan: null, // interval id of the scan loop
      errorMes: '',
      result: ''      // decoded QR payload
    }
  },
  mounted () {
    var _that = this;
    // Compatibility shims for prefixed browser APIs.
    window.URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;

    this.vedio = document.getElementById('qr-vedio');
    this.canvas = document.getElementById('qr-canvas');
    this.context = this.canvas.getContext('2d');

    if (navigator.getUserMedia) {
      var videoSource = [];
      // Prefer a rear-facing camera ("back" in the device label) for scanning.
      navigator.mediaDevices.enumerateDevices().then(function (sourceInfos) {
        for (var i = 0; i !== sourceInfos.length; ++i) {
          var sourceInfo = sourceInfos[i];
          if (sourceInfo.kind === 'videoinput' && sourceInfo.label.indexOf('back') !== -1) {
            videoSource.push(sourceInfo.deviceId);
          }
        }
        var successCallback = function (stream) {
          // NOTE(review): createObjectURL(MediaStream) is deprecated; modern
          // browsers want `video.srcObject = stream` — confirm target browsers.
          _that.vedio.src = (window.URL && window.URL.createObjectURL(stream)) || stream;
          window.localMediaStream = stream;
          _that.vedio.addEventListener('loadstart', function () {
            _that.vedio.play();
          }, false);
          // Try to decode a frame every 500 ms.
          _that.stopScan = setInterval(_that.scan, 500);
        };
        navigator.getUserMedia({
          video: {
            optional: [{ sourceId: videoSource[0] }]
          }
        }, successCallback, function (e) { console.log(e); });
      });
    } else {
      this.errorMes = 'Native web camera streaming (getUserMedia) not supported in this browser.';
    }

    // Invoked by the qrcode library once a frame decodes successfully:
    // publish the result, then stop the stream and the scan loop.
    qrcode.callback = function (data) {
      _that.result = data;
      console.log(data);
      if (window.localMediaStream && window.localMediaStream.stop) {
        window.localMediaStream.stop();
      }
      if (_that.stopScan) {
        clearInterval(_that.stopScan);
      }
    };
  },
  methods: {
    // Draw the current video frame onto the canvas and ask qrcode to decode it.
    scan () {
      if (window.localMediaStream) {
        this.context.drawImage(this.vedio, 0, 0, 100, 100);
      }
      try {
        qrcode.decode();
      } catch (e) {
        console.log('decode has error');
      }
    }
  }
}
</script>

<style lang="less" scoped>
.v {
  width: 320px;
  height: 240px;
}
#qr-canvas {
  width: 800px;
  height: 800px;
}
</style>

记录自己写的一个自定义video标签控制按钮事件的播放器(video标签自定义按钮)

由于UI还没出样式,于是就先写写video自定义按钮事件,附在线mp4视频测试地址
版本:vue,没样式,只有video标签自定义控制按钮事件
在线mp4测试地址:GO

<template>
    <div class="player">
        <video ref="video" :controls="false" controlslist="nodownload">
            <!-- <source src="./../../assets/source/sintel.mp4" type="video/mp4"/> -->
            <source
                src="http://vfx.mtime.cn/Video/2019/03/21/mp4/190321153853126488.mp4"
                type="video/mp4"
            />
        </video>
        <div class="btns">
            <el-button ref="isPlay" class="stop">停止/播放</el-button>
            <el-button ref="enableMute">关闭声音</el-button>
            <el-button ref="disableMute">打开声音</el-button>
            <input type="range" ref="ran" :value="ranVal"/>
            <div ref="current"></div>
            <!-- 当前进度 -->
            <div ref="buffered"></div>
            <!-- 下载进度 秒 -->
            <div ref="duration"></div>
            <!-- 总时长 -->
            <el-button ref="fullScreen">全屏</el-button>
            <!-- 全屏按钮 -->
            <div ref="progress" style="height:10px;background:#f00;">
                <!-- 进度条 -->
                <div ref="bar" style="height:5px;background:#0f0;"></div>
                <!-- 播放进度 -->
                <div ref="buffer" style="height:5px;background:#00f;"></div>
                <!-- 缓存进度 -->
            </div>
        </div>
    </div>
</template>

<script>
import { hasClass } from "@/commons/func";
// Custom controls for a <video>: play/pause, mute, volume slider, time
// display, buffer/progress bars and fullscreen. The options object is bound
// to a named const (still default-exported) so its pure helpers can be
// unit-tested in isolation.
const player = {
    data() {
        return {
            bufferTimer: null, // interval updating the buffered (download) bar
            timer: null,       // interval updating the playback bar
            video: null,       // the <video> element

            isPlay: null,      // play/pause button element (now declared explicitly)
            enableMute: null,
            disableMute: null,
            ran: null,         // the volume <input type="range">
            ranVal: 0,         // slider value bound in the template
            current: null,     // element showing the current playback time
            buffered: null,    // element showing buffered seconds
            duration: null,    // element showing the total duration
            fullScreen: null,
            progress: null,    // clickable progress track
            bar: null,         // playback progress fill
            buffer: null       // buffered progress fill
        };
    },
    methods: {
        // Grab all refs and wire up the event handlers.
        init() {
            this.video = this.$refs.video;
            this.isPlay = this.$refs.isPlay.$el; // element-ui components expose the DOM node via .$el
            this.enableMute = this.$refs.enableMute.$el;
            this.disableMute = this.$refs.disableMute.$el;
            this.ran = this.$refs.ran;
            this.ranVal = this.video.volume * 100;
            this.ran.style.backgroundSize = this.ranVal + '% 100%';
            this.current = this.$refs.current;
            this.buffered = this.$refs.buffered;
            this.duration = this.$refs.duration;
            this.fullScreen = this.$refs.fullScreen.$el;
            this.progress = this.$refs.progress;
            this.bar = this.$refs.bar;
            this.buffer = this.$refs.buffer;
            this.addEvent(this.isPlay, "click", this.playPause);
            this.addEvent(this.video, "timeupdate", this.timeupdate);
            this.addEvent(this.progress, "click", this.changeProgress);
            this.addEvent(this.fullScreen, "click", this.launchFullScreen);
            this.addEvent(this.enableMute, "click", this.closeVolume);
            this.addEvent(this.disableMute, "click", this.openVolume);
            this.rangeSlider(this.ran, { min: 0, max: 100, step: 5, callback: this.setVolume });
        },
        // Left-pad a number below 10 with a zero; always returns a string.
        zeroFill(num) {
            return num < 10 ? "0" + num : String(num);
        },
        // Format a second count as "hh:mm:ss".
        getTime(num) {
            let m = this.zeroFill(Math.floor(num / 60) % 60),
                s = this.zeroFill(Math.floor(num % 60)),
                h = this.zeroFill(Math.floor(Math.floor(num / 60) / 60)),
                time = "" + h + ":" + m + ":" + s + "";
            return time;
        },
        // Request fullscreen, trying each vendor-prefixed variant.
        launchFullScreen() {
            if (this.video.requestFullscreen) {
                this.video.requestFullscreen();
            } else if (this.video.mozRequestFullScreen) {
                this.video.mozRequestFullScreen();
            } else if (this.video.webkitRequestFullscreen) {
                this.video.webkitRequestFullscreen();
            } else if (this.video.msRequestFullscreen) {
                this.video.msRequestFullscreen();
            }
        },
        // Toggle playback; the button's class ("stop"/"play") carries the state.
        playPause() {
            const classStr = this.isPlay.className;
            if (hasClass(this.isPlay, "stop")) {
                this.video.play();
                // Clear any stale intervals first so repeated toggles cannot
                // leak timers (the original only cleared them on pause).
                clearInterval(this.bufferTimer);
                clearInterval(this.timer);
                this.bufferTimer = setInterval(() => {
                    // buffered can be empty right after load — end(0) would throw.
                    if (this.video.buffered.length === 0) return;
                    this.buffer.style.width =
                        (this.video.buffered.end(0) / this.video.duration) * 100 + "%";
                    // Bug fix: the "fully buffered" check used to run once,
                    // synchronously at click time, so the interval never
                    // stopped. Check it on every tick instead.
                    if (this.video.buffered.end(0) === this.video.duration) {
                        this.buffer.style.width = "100%";
                        clearInterval(this.bufferTimer);
                    }
                }, 1000 / 30);
                this.timer = setInterval(() => {
                    this.bar.style.width =
                        (this.video.currentTime / this.video.duration) * 100 + "%";
                }, 1000 / 30);
                this.isPlay.className = classStr.replace("stop", "play");
            } else if (hasClass(this.isPlay, "play")) {
                this.video.pause();
                clearInterval(this.timer);
                this.isPlay.className = classStr.replace("play", "stop");
            }
        },
        // Fired by the video's timeupdate event: refresh the time displays
        // and flip the button back to "stop" when playback finishes.
        timeupdate() {
            this.current.innerHTML = this.getTime(this.video.currentTime);
            this.duration.innerHTML = this.getTime(this.video.duration);
            // Guard: buffered can be empty before any data arrives.
            this.buffered.innerHTML =
                this.video.buffered.length > 0 ? this.video.buffered.end(0) : 0;
            if (this.video.currentTime === this.video.duration) {
                this.isPlay.className = this.isPlay.className.replace(
                    "play",
                    "stop"
                );
            }
        },
        // Click on the progress track: seek and move the playback bar.
        changeProgress(e) {
            let barLength = e.pageX - this.progress.offsetLeft;
            this.video.currentTime =
                (barLength / this.progress.clientWidth) * this.video.duration;
            this.bar.style.width =
                (barLength / this.progress.clientWidth) * 100 + "%";
        },
        // Mute.
        closeVolume() {
            this.video.muted = true;
        },
        // Unmute.
        openVolume() {
            this.video.muted = false;
        },
        // Apply the slider value (0-100) as the video volume, unmuting.
        setVolume() {
            this.video.volume = this.ran.value / 100;
            this.video.muted = false;
        },
        // Configure a range input and repaint its fill on input events.
        rangeSlider(rangeElem, { min, max, step, callback }) {
            min = !isNaN(parseFloat(min)) ? Number(min) : null;
            max = !isNaN(parseFloat(max)) ? Number(max) : null;
            step = !isNaN(parseFloat(step)) ? Number(step) : 1;
            callback = callback ? callback : null;

            rangeElem.setAttribute("min", min);
            rangeElem.setAttribute("max", max);
            rangeElem.setAttribute("step", step);

            rangeElem.addEventListener("input", function(e) {
                var that = e.target;
                that.style.backgroundSize = this.value + "% 100%";
                if (typeof callback == "function") {
                    callback(that);
                }
            });
        }
    },
    mounted() {
        this.init();
    }
};

export default player;
</script>

<style lang="less" scoped>
input[type="range"] {
    -webkit-appearance: none;
    width: 200px;
    height: 5px;
    border-radius: 5px;
    background: -webkit-linear-gradient(#fa03e4, #a5f601) no-repeat;
    background-size: 0% 100%;
}
input[type="range"]::-webkit-slider-thumb {
    -webkit-appearance: none;
    height: 15px;
    width: 5px;
    margin-top: -5px; /*使滑块超出轨道部分的偏移量相等*/
    background: #f5f5f5;
    border-radius: 2px; /*外观设置为圆形*/
    border: solid 1px #a5a5a5; /*设置边框*/
    box-shadow: 0 0px 1px #666666; /*添加底部阴影*/
}
input[type="range"]::-webkit-slider-runnable-track {
    height: 5px;
    border-radius: 2px; /*将轨道设为圆角的*/
    box-shadow: 0 0px 1px #0f00ff, inset 0 0px 2px #00ffff; /* 轨道内置阴影效果 */
}
input[type="range"]:focus {
    outline: none;
}
</style>

有个通用函数是写在外面引入的,现写在这儿,防止用到的兄dei不知道怎么弄

// True when `classm` occurs in the element's className string.
// NOTE: this is a substring match — hasClass(el, "stop") also matches a
// class named "stopped"; use elem.classList.contains() when exact matching
// matters. Declared as a proper function so the snippet is valid standalone
// JS (the original used object-method shorthand outside an object literal).
function hasClass(elem, classm) {
    return elem.className.indexOf(classm) > -1;
}

有几个绑定事件的函数估计会找不到,this.addEvent 找不到的【看这里】,已经封装好了的,可以直接拿去用

获取用户当前位置信息的两种方法——H5、微信

在之前的 调用百度地图API的总结 中获取当前位置信息我用的是 H5 ,其实微信也提供了获取用户地理位置的方法,现将这两种方法都贴出来,看情况选择使用。

一、H5 获取当前地理位置得到经纬度

   // H5 获取当前位置经纬度
    var location_lon = '',location_lat = ''; // 经度,纬度
    if (navigator.geolocation){
        navigator.geolocation.getCurrentPosition(function (position) {
            location_lon = position.coords.longitude;
            location_lat = position.coords.latitude;
           // alert('h5经度:'+location_lon);alert('h5纬度:'+location_lat);
        });
    }else {
        alert("您的设备不支持定位功能");
    }

二、微信公众平台(地理位置)

微信有获取用户地理位置的接口,在 wx.config 中配置就可以用了:

wx.config({
    debug: true,
    appId: '', // 必填,公众号的唯一标识
    timestamp: 0, // 必填,生成签名的时间戳
    nonceStr: '', // 必填,生成签名的随机串
    signature: '', // 必填,签名
    jsApiList: ['getLocation'] // 必填,需要使用的JS接口列表
});

获得权限后就可以直接使用 wx.getLocation() 方法获得用户的地理位置了:

wx.getLocation({
   type: 'wgs84', // 默认为wgs84的gps坐标,如果要返回直接给openLocation用的火星坐标,可传入'gcj02'
   success: function (res) {
      var latitude = res.latitude; // 纬度,浮点数,范围为90 ~ -90
      var longitude = res.longitude; // 经度,浮点数,范围为180 ~ -180。
      var speed = res.speed; // 速度,以米/每秒计
      var accuracy = res.accuracy; // 位置精度
   }
});

三、地理位置配合地图展示

获得的经纬度可以使用百度地图或者高德地图展示,微信内置地图也可以查看位置:

wx.openLocation({
   latitude: 0, // 纬度,浮点数,范围为90 ~ -90
   longitude: 0, // 经度,浮点数,范围为180 ~ -180。
   name: '', // 位置名
   address: '', // 地址详情说明
   scale: 1, // 地图缩放级别,整形值,范围从1~28。默认为最大
   infoUrl: '' // 在查看位置界面底部显示的超链接,可点击跳转
});