這篇文章給大家介紹怎么在html5項目中實現一個錄音功能,內容非常詳細,感興趣的小伙伴們可以參考借鑒,希望對大家能有所幫助。
成都創(chuàng)新互聯(lián)是一家專業(yè)提供長(zhǎng)寧企業(yè)網(wǎng)站建設(shè),專注與成都網(wǎng)站制作、網(wǎng)站設(shè)計(jì)、H5開(kāi)發(fā)、小程序制作等業(yè)務(wù)。10年已為長(zhǎng)寧眾多企業(yè)、政府機(jī)構(gòu)等服務(wù)。創(chuàng)新互聯(lián)專業(yè)網(wǎng)站建設(shè)公司優(yōu)惠進(jìn)行中。步驟1
由于新的api是通過navigator.mediaDevices.getUserMedia調用,且返回一個promise。
而舊的api是navigator.getUserMedia,于是做了一個(gè)兼容性。代碼如下:
// Older browsers may not implement mediaDevices at all, so start from an empty object.
if (navigator.mediaDevices === undefined) {
  navigator.mediaDevices = {};
}

// Some browsers partially support mediaDevices. We cannot just assign
// getUserMedia, because that could overwrite an existing property — only add
// it when it is missing.
if (navigator.mediaDevices.getUserMedia === undefined) {
  // Legacy prefixed callback-style APIs, if any.
  let getUserMedia = navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia;

  navigator.mediaDevices.getUserMedia = function (constraints) {
    // Some browsers implement none of the legacy APIs either — reject so the
    // caller still sees one uniform Promise-based interface.
    if (!getUserMedia) {
      return Promise.reject(new Error('getUserMedia is not implemented in this browser'));
    }
    // Otherwise wrap the old callback-style navigator.getUserMedia in a Promise.
    return new Promise(function (resolve, reject) {
      getUserMedia.call(navigator, constraints, resolve, reject);
    });
  };
} // FIX: this closing brace was missing in the original snippet (unbalanced `if`)
步驟2
這是網上存在的一個方法,封裝了一個HZRecorder?;旧弦昧诉@裡基本上引用了這個方法。調用HZRecorder.get就可以調起錄音接口,這個方法傳入一個callback函數,new HZRecorder後執行callback函數且傳入一個實體化後的HZRecorder對象。可以通過該對象的方法實現開始錄音、暫停、停止、播放等功能。
/**
 * HZRecorder — simple WAV recorder over a microphone MediaStream, built on
 * the Web Audio API (ScriptProcessorNode-based capture).
 *
 * @param {MediaStream} stream  stream obtained from getUserMedia
 * @param {Object} [config]
 * @param {number} [config.sampleBits=8]        output sample size: 8 or 16
 * @param {number} [config.sampleRate=44100/6]  output sample rate
 */
var HZRecorder = function (stream, config) {
  config = config || {};
  config.sampleBits = config.sampleBits || 8;           // sample size in bits: 8 or 16
  config.sampleRate = config.sampleRate || (44100 / 6); // output sample rate (1/6 of 44100)

  // Create the audio context.
  // FIX: the original assigned an undeclared `audioContext`, creating an
  // implicit global (and a ReferenceError in strict mode).
  var AudioCtx = window.AudioContext || window.webkitAudioContext;
  var context = new AudioCtx();

  // Route the microphone stream into the audio graph.
  var audioInput = context.createMediaStreamSource(stream);

  // Gain (volume) node.
  var volume = context.createGain();
  audioInput.connect(volume);

  // Capture node; createScriptProcessor's 2nd/3rd arguments are the number of
  // input and output channels (two each here).
  var bufferSize = 4096;
  var recorder = context.createScriptProcessor(bufferSize, 2, 2);

  var audioData = {
    size: 0,                             // total captured sample count
    buffer: [],                          // captured Float32 chunks
    inputSampleRate: context.sampleRate, // input sample rate
    inputSampleBits: 16,                 // input sample size: 8 or 16
    outputSampleRate: config.sampleRate, // output sample rate
    oututSampleBits: config.sampleBits,  // output sample size (key typo kept for compatibility)

    // Append one chunk of PCM samples.
    input: function (data) {
      this.buffer.push(new Float32Array(data));
      this.size += data.length;
    },

    // Merge all chunks, then downsample by keeping every Nth sample.
    compress: function () {
      // Merge.
      var data = new Float32Array(this.size);
      var offset = 0;
      for (var i = 0; i < this.buffer.length; i++) {
        data.set(this.buffer[i], offset);
        offset += this.buffer[i].length;
      }
      // Downsample.
      var compression = parseInt(this.inputSampleRate / this.outputSampleRate, 10);
      var length = data.length / compression;
      var result = new Float32Array(length);
      var index = 0;
      var j = 0;
      while (index < length) {
        result[index] = data[j];
        j += compression;
        index++;
      }
      return result;
    },

    // Build a mono WAV file (44-byte header + PCM payload) and return a Blob.
    encodeWAV: function () {
      var sampleRate = Math.min(this.inputSampleRate, this.outputSampleRate);
      var sampleBits = Math.min(this.inputSampleBits, this.oututSampleBits);
      var bytes = this.compress();
      var dataLength = bytes.length * (sampleBits / 8);
      var buffer = new ArrayBuffer(44 + dataLength);
      var data = new DataView(buffer);
      var channelCount = 1; // mono
      var offset = 0;
      var writeString = function (str) {
        for (var i = 0; i < str.length; i++) {
          data.setUint8(offset + i, str.charCodeAt(i));
        }
      };
      writeString('RIFF'); offset += 4;                           // RIFF identifier
      data.setUint32(offset, 36 + dataLength, true); offset += 4; // file size minus 8
      writeString('WAVE'); offset += 4;                           // WAVE marker
      writeString('fmt '); offset += 4;                           // fmt sub-chunk
      data.setUint32(offset, 16, true); offset += 4;              // fmt chunk size (16 for PCM)
      data.setUint16(offset, 1, true); offset += 2;               // audio format: 1 = PCM
      data.setUint16(offset, channelCount, true); offset += 2;    // channel count
      data.setUint32(offset, sampleRate, true); offset += 4;      // sample rate
      // byte rate = channels * sampleRate * bytesPerSample
      data.setUint32(offset, channelCount * sampleRate * (sampleBits / 8), true); offset += 4;
      // block align = channels * bytesPerSample
      data.setUint16(offset, channelCount * (sampleBits / 8), true); offset += 2;
      data.setUint16(offset, sampleBits, true); offset += 2;      // bits per sample
      writeString('data'); offset += 4;                           // data sub-chunk
      data.setUint32(offset, dataLength, true); offset += 4;      // PCM byte length
      // Write the samples.
      if (sampleBits === 8) {
        for (var i = 0; i < bytes.length; i++, offset++) {
          var s = Math.max(-1, Math.min(1, bytes[i]));
          var val = s < 0 ? s * 0x8000 : s * 0x7FFF;
          val = parseInt(255 / (65535 / (val + 32768)), 10); // map to unsigned 0..255
          // FIX: 8-bit WAV samples are unsigned; use setUint8 (setInt8 also
          // has no endianness parameter — the extra `true` arg was ignored).
          data.setUint8(offset, val);
        }
      } else {
        for (var i = 0; i < bytes.length; i++, offset += 2) {
          var s = Math.max(-1, Math.min(1, bytes[i]));
          data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
        }
      }
      return new Blob([data], { type: 'audio/wav' });
    }
  };

  // Start recording: wire the capture node into the graph.
  this.start = function () {
    audioInput.connect(recorder);
    recorder.connect(context.destination);
  };

  // Pause/stop capturing.
  this.stop = function () {
    recorder.disconnect();
  };

  // Tear down the audio context entirely.
  this.end = function () {
    context.close();
  };

  // Resume capturing after stop().
  this.again = function () {
    recorder.connect(context.destination);
  };

  // Stop and return the recording as a WAV Blob.
  this.getBlob = function () {
    this.stop();
    return audioData.encodeWAV();
  };

  // Play back through the given <audio> element.
  this.play = function (audio) {
    audio.src = window.URL.createObjectURL(this.getBlob());
  };

  // POST the recording to `url` as multipart form data; `callback` receives
  // ('uploading' | 'ok' | 'error' | 'cancel', event).
  this.upload = function (url, callback) {
    var fd = new FormData();
    fd.append('audioData', this.getBlob());
    var xhr = new XMLHttpRequest();
    if (callback) {
      xhr.upload.addEventListener('progress', function (e) { callback('uploading', e); }, false);
      xhr.addEventListener('load', function (e) { callback('ok', e); }, false);
      xhr.addEventListener('error', function (e) { callback('error', e); }, false);
      xhr.addEventListener('abort', function (e) { callback('cancel', e); }, false);
    }
    xhr.open('POST', url);
    xhr.send(fd);
  };

  // Capture callback: store channel 0 (mono) of each processing quantum.
  recorder.onaudioprocess = function (e) {
    audioData.input(e.inputBuffer.getChannelData(0));
  };
};

// Throw an object whose toString() yields the given message.
HZRecorder.throwError = function (message) {
  throw new function () { this.toString = function () { return message; }; };
};

// Whether recording is supported.
// NOTE(review): this only checks the legacy navigator.getUserMedia; with the
// step-1 polyfill, navigator.mediaDevices.getUserMedia always exists — confirm
// which check callers rely on.
HZRecorder.canRecording = (navigator.getUserMedia != null);

// Acquire the microphone and hand a ready HZRecorder to `callback`.
HZRecorder.get = function (callback, config) {
  if (callback) {
    navigator.mediaDevices
      .getUserMedia({ audio: true })
      .then(function (stream) {
        let rec = new HZRecorder(stream, config);
        callback(rec);
      })
      .catch(function (error) {
        // FIX: the original message literal was mojibake from the article
        // scrape; restored to the intended text ("Cannot record, please
        // check the device status").
        HZRecorder.throwError('無法錄音,請檢查設備狀態');
      });
  }
};

window.HZRecorder = HZRecorder;
以上,已經可以滿足大部分的需求。但是我們要兼容pad端。我們的pad有幾個問題必須解決。
錄音格式必須是mp3才能播放
window.URL.createObjectURL傳入blob數(shù)據(jù)在pad端報(bào)錯(cuò),轉(zhuǎn)不了
以下為解決這兩個(gè)問(wèn)題的方案。
步驟3
以下為我實現「錄音格式為mp3」和「window.URL.createObjectURL傳入blob數據在pad端報錯」這兩個問題的方案。
1、修改HZRecorder里的audioData對(duì)象代碼。并引入網(wǎng)上一位大神的一個(gè)js文件lamejs.js
// Requires the third-party lamejs MP3 encoder to be loaded beforehand.
const lame = new lamejs();

// MP3-capable replacement for the original audioData object: raw Float32 PCM
// frames are converted to 16-bit mono samples and streamed through the lamejs
// encoder; the legacy WAV path (input/compress/encodeWAV) is kept alongside.
let audioData = {
  samplesMono: null, // current frame converted to Int16 PCM
  maxSamples: 1152,  // samples fed to the encoder per call (one MP3 frame)
  // Mono encoder at the context's sample rate (44100 fallback), 128 kbps default.
  mp3Encoder: new lame.Mp3Encoder(1, context.sampleRate || 44100, config.bitRate || 128),
  dataBuffer: [],                       // encoded MP3 chunks
  size: 0,                              // total captured sample count (WAV path)
  buffer: [],                           // raw Float32 chunks (WAV path)
  inputSampleRate: context.sampleRate,  // input sample rate
  inputSampleBits: 16,                  // input sample size: 8 or 16
  outputSampleRate: config.sampleRate,  // output sample rate
  oututSampleBits: config.sampleBits,   // output sample size (key typo preserved from original)
  // Copy a Float32 PCM frame into a freshly-allocated Int16 buffer.
  convertBuffer: function(arrayBuffer) {
    let data = new Float32Array(arrayBuffer);
    let out = new Int16Array(arrayBuffer.length);
    this.floatTo16BitPCM(data, out);
    return out;
  },
  // Scale floats in [-1, 1] to signed 16-bit integer samples.
  floatTo16BitPCM: function(input, output) {
    for (let i = 0; i < input.length; i++) {
      let s = Math.max(-1, Math.min(1, input[i]));
      output[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
    }
  },
  // Store one encoded MP3 chunk.
  appendToBuffer: function(mp3Buf) {
    this.dataBuffer.push(new Int8Array(mp3Buf));
  },
  // Encode one captured frame, slicing it into maxSamples-sized pieces.
  // NOTE(review): the `remaining >= 0` condition performs one extra iteration
  // on an empty subarray when the length is an exact multiple of maxSamples;
  // `remaining > 0` may be the intent — verify against lamejs behavior.
  encode: function(arrayBuffer) {
    this.samplesMono = this.convertBuffer(arrayBuffer);
    let remaining = this.samplesMono.length;
    for (let i = 0; remaining >= 0; i += this.maxSamples) {
      let left = this.samplesMono.subarray(i, i + this.maxSamples);
      let mp3buf = this.mp3Encoder.encodeBuffer(left);
      this.appendToBuffer(mp3buf);
      remaining -= this.maxSamples;
    }
  },
  // Flush the encoder and assemble the final MP3 blob.
  finish: function() {
    this.appendToBuffer(this.mp3Encoder.flush());
    return new Blob(this.dataBuffer, { type: 'audio/mp3' });
  },
  // ---- legacy WAV path below, kept for compatibility ----
  // Append one chunk of PCM samples.
  input: function(data) {
    this.buffer.push(new Float32Array(data));
    this.size += data.length;
  },
  // Merge all chunks, then downsample by keeping every Nth sample.
  compress: function() {
    // Merge.
    let data = new Float32Array(this.size);
    let offset = 0;
    for (let i = 0; i < this.buffer.length; i++) {
      data.set(this.buffer[i], offset);
      offset += this.buffer[i].length;
    }
    // Downsample.
    let compression = parseInt(this.inputSampleRate / this.outputSampleRate, 10);
    let length = data.length / compression;
    let result = new Float32Array(length);
    let index = 0;
    let j = 0;
    while (index < length) {
      result[index] = data[j];
      j += compression;
      index++;
    }
    return result;
  },
  // Build a mono WAV file (44-byte header + PCM payload) and return a Blob.
  encodeWAV: function() {
    let sampleRate = Math.min(this.inputSampleRate, this.outputSampleRate);
    let sampleBits = Math.min(this.inputSampleBits, this.oututSampleBits);
    let bytes = this.compress();
    let dataLength = bytes.length * (sampleBits / 8);
    let buffer = new ArrayBuffer(44 + dataLength);
    let data = new DataView(buffer);
    let channelCount = 1; // mono
    let offset = 0;
    let writeString = function(str) {
      for (let i = 0; i < str.length; i++) {
        data.setUint8(offset + i, str.charCodeAt(i));
      }
    };
    // RIFF identifier
    writeString('RIFF'); offset += 4;
    // file size minus 8
    data.setUint32(offset, 36 + dataLength, true); offset += 4;
    // WAVE marker
    writeString('WAVE'); offset += 4;
    // fmt sub-chunk
    writeString('fmt '); offset += 4;
    // fmt chunk size, 16 for PCM
    data.setUint32(offset, 16, true); offset += 4;
    // audio format: 1 = PCM
    data.setUint16(offset, 1, true); offset += 2;
    // channel count
    data.setUint16(offset, channelCount, true); offset += 2;
    // sample rate
    data.setUint32(offset, sampleRate, true); offset += 4;
    // byte rate = channels * sampleRate * bytesPerSample
    data.setUint32(offset, channelCount * sampleRate * (sampleBits / 8), true); offset += 4;
    // block align = channels * bytesPerSample
    data.setUint16(offset, channelCount * (sampleBits / 8), true); offset += 2;
    // bits per sample
    data.setUint16(offset, sampleBits, true); offset += 2;
    // data sub-chunk
    writeString('data'); offset += 4;
    // PCM byte length
    data.setUint32(offset, dataLength, true); offset += 4;
    // Write the samples.
    if (sampleBits === 8) {
      for (let i = 0; i < bytes.length; i++, offset++) {
        const s = Math.max(-1, Math.min(1, bytes[i]));
        let val = s < 0 ? s * 0x8000 : s * 0x7fff;
        val = parseInt(255 / (65535 / (val + 32768)), 10); // map to unsigned 0..255
        // NOTE(review): setInt8 takes no endianness argument (extra `true` is
        // ignored); byte output matches setUint8 here, but verify intent.
        data.setInt8(offset, val, true);
      }
    } else {
      for (let i = 0; i < bytes.length; i++, offset += 2) {
        const s = Math.max(-1, Math.min(1, bytes[i]));
        data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
      }
    }
    return new Blob([data], { type: 'audio/wav' });
  }
};
2、修改HZRecord的音頻采集的調(diào)用方法。
// 音頻采集 recorder.onaudioprocess = function(e) { audioData.encode(e.inputBuffer.getChannelData(0)); };
3、HZRecord的getBlob方法。
// Stop recording and return the encoded result. With the MP3 pipeline this
// returns audioData.finish() (an audio/mp3 Blob) instead of encodeWAV().
this.getBlob = function() {
  this.stop();
  return audioData.finish();
};
4、HZRecord的play方法。把blob轉(zhuǎn)base64url。
// Playback hook: deliver the recording to `func` as a base64 data: URL
// instead of an object URL (window.URL.createObjectURL fails on some pads).
this.play = function (func) {
  readBlobAsDataURL(this.getBlob(), func);
};

// Read a Blob and invoke `callback` with its contents as a data URL string.
function readBlobAsDataURL(data, callback) {
  const reader = new FileReader();
  reader.onload = (e) => {
    callback(e.target.result);
  };
  reader.readAsDataURL(data);
}
至此,已經(jīng)解決以上兩個(gè)問(wèn)題。
步驟4
這里主要介紹怎么做錄音時(shí)的動(dòng)效。我們的一個(gè)動(dòng)效需求為:
根據(jù)傳入的音量大小,做一個(gè)圓弧動(dòng)態(tài)擴(kuò)展。
// 創(chuàng)建analyser節(jié)點(diǎn),獲取音頻時(shí)間和頻率數(shù)據(jù) const analyser = context.createAnalyser(); audioInput.connect(analyser); const inputAnalyser = new Uint8Array(1); const wrapEle = $this.refs['wrap']; let ctx = wrapEle.getContext('2d'); const width = wrapEle.width; const height = wrapEle.height; const center = { x: width / 2, y: height / 2 }; function drawArc(ctx, color, x, y, radius, beginAngle, endAngle) { ctx.beginPath(); ctx.lineWidth = 1; ctx.strokeStyle = color; ctx.arc(x, y, radius, (Math.PI * beginAngle) / 180, (Math.PI * endAngle) / 180); ctx.stroke(); } (function drawSpectrum() { analyser.getByteFrequencyData(inputAnalyser); // 獲取頻域數(shù)據(jù) ctx.clearRect(0, 0, width, height); // 畫(huà)線條 for (let i = 0; i < 1; i++) { let value = inputAnalyser[i] / 3; // <===獲取數(shù)據(jù) let colors = []; if (value <= 16) { colors = ['#f5A631', '#f5A631', '#e4e4e4', '#e4e4e4', '#e4e4e4', '#e4e4e4']; } else if (value <= 32) { colors = ['#f5A631', '#f5A631', '#f5A631', '#f5A631', '#e4e4e4', '#e4e4e4']; } else { colors = ['#f5A631', '#f5A631', '#f5A631', '#f5A631', '#f5A631', '#f5A631']; } drawArc(ctx, colors[0], center.x, center.y, 52 + 16, -30, 30); drawArc(ctx, colors[1], center.x, center.y, 52 + 16, 150, 210); drawArc(ctx, colors[2], center.x, center.y, 52 + 32, -22.5, 22.5); drawArc(ctx, colors[3], center.x, center.y, 52 + 32, 157.5, 202.5); drawArc(ctx, colors[4], center.x, center.y, 52 + 48, -13, 13); drawArc(ctx, colors[5], center.x, center.y, 52 + 48, 167, 193); } // 請(qǐng)求下一幀 requestAnimationFrame(drawSpectrum); })();
關(guān)于怎么在html5項(xiàng)目中實(shí)現(xiàn)一個(gè)錄音功能就分享到這里了,希望以上內(nèi)容可以對(duì)大家有一定的幫助,可以學(xué)到更多知識(shí)。如果覺(jué)得文章不錯(cuò),可以把它分享出去讓更多的人看到。
分享題目:怎么在html5項(xiàng)目中實(shí)現(xiàn)一個(gè)錄音功能-創(chuàng)新互聯(lián)
文章轉(zhuǎn)載:http://www.rwnh.cn/article18/doehdp.html
成都網(wǎng)站建設(shè)公司_創(chuàng)新互聯(lián),為您提供建站公司、企業(yè)建站、關(guān)鍵詞優(yōu)化、商城網(wǎng)站、品牌網(wǎng)站建設(shè)、搜索引擎優(yōu)化
聲明:本網(wǎng)站發(fā)布的內(nèi)容(圖片、視頻和文字)以用戶投稿、用戶轉(zhuǎn)載內(nèi)容為主,如果涉及侵權(quán)請(qǐng)盡快告知,我們將會(huì)在第一時(shí)間刪除。文章觀點(diǎn)不代表本網(wǎng)站立場(chǎng),如需處理請(qǐng)聯(lián)系客服。電話:028-86922220;郵箱:631063699@qq.com。內(nèi)容未經(jīng)允許不得轉(zhuǎn)載,或轉(zhuǎn)載時(shí)需注明來(lái)源: 創(chuàng)新互聯(lián)
猜你還喜歡下面的內(nèi)容