您的位置:首页 > Web前端 > HTML5

html5 record 录音实例 使用websocket

2018-01-30 15:54 393 查看
http://www.jsjtt.com/webkaifa/html5/2013-08-28/34.html

chrome 支持语音聊天,下面介绍一个 chrome 的录音实例:
<!DOCTYPE HTML>
<html lang="en">
<head>
<meta charset="utf-8"/>
<title>Chat by Web Sockets</title>
<script type="text/javascript" src="js/recorder.js"> </script>
<script type="text/javascript" src="js/jquery-1.10.1.min.js"> </script>

<style type='text/css'>

</style>
</head>
<body>
<!-- id="audio" is required: the page script looks this element up with
     document.querySelector('#audio'); without the id the lookup is null
     and playback of server responses crashes. -->
<audio id="audio" controls autoplay></audio>

<input type="button" id="record" value="Record">
<input type="button" id="export" value="Export">
<div id="message"></div>
</body>

<script type='text/javascript'>
// getUserMedia failure callback: record why microphone access was refused.
var onFail = function (err) {
  console.log('Rejected!', err);
};

// getUserMedia success callback: wire the microphone stream into a Recorder.
var onSuccess = function(s) {
// Prefer the standard AudioContext and fall back to the prefixed
// webkitAudioContext; the original supported only the prefixed name,
// which modern browsers no longer expose.
var AudioContextClass = window.AudioContext || window.webkitAudioContext;
var context = new AudioContextClass();
var mediaStreamSource = context.createMediaStreamSource(s);
rec = new Recorder(mediaStreamSource);   // shared with the button handlers below
//rec.record();

// audio loopback
// mediaStreamSource.connect(context.destination);
}

//window.URL = URL || window.URL || window.webkitURL;
// Normalize the vendor-prefixed getUserMedia implementations onto the
// standard name so the feature check below works across browsers.
navigator.getUserMedia  = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;

var rec;
// BUG FIX: the markup declares <audio controls autoplay> without an id, so
// the original '#audio' selector always returned null and ws.onmessage
// crashed when assigning audio.src. Select the tag itself instead.
var audio = document.querySelector('audio');

// Ask for microphone access; onSuccess builds the Recorder, onFail logs.
function startRecording() {
if (navigator.getUserMedia) {
navigator.getUserMedia({audio: true}, onSuccess, onFail);
} else {
console.log('navigator.getUserMedia not present');
}
}
startRecording();
//--------------------
// Timer handle for the periodic WAV export below. Declared explicitly:
// in the original it was created as an implicit global (a ReferenceError
// under strict mode).
var intervalKey;

// Start recording and stream a WAV chunk to the server every 3 seconds.
$('#record').click(function() {
rec.record();
ws.send("start");                 // tell the server a new recording begins
$("#message").text("Click export to stop recording");

// export a wav chunk periodically, so we can send it using websockets
intervalKey = setInterval(function() {
rec.exportWAV(function(blob) {
rec.clear();                      // drop samples already exported
ws.send(blob);
//audio.src = URL.createObjectURL(blob);
});
}, 3000);
});

// Stop recording, stop the export timer, and ask the server to analyze.
$('#export').click(function() {
// first send the stop command
rec.stop();
ws.send("stop");
clearInterval(intervalKey);

ws.send("analyze");
$("#message").text("");
});

// Open the command/data channel to the backend recorder servlet.
// NOTE(review): this connects to port 8088, but the Java main() in this
// article starts Jetty on 8080 — confirm which port is actually deployed.
var ws = new WebSocket("ws://127.0.0.1:8088/websocket/servlet/record");
ws.onopen = function () {
console.log("Openened connection to websocket");
};
ws.onclose = function (){
console.log("Close connection to websocket");
}
// The server can push audio back; play it by pointing the <audio> element
// at a blob URL for the received binary frame.
ws.onmessage = function(e) {
audio.src = URL.createObjectURL(e.data);
}

</script>
</html>


recorder.js内容:
(function(window){

var WORKER_PATH = 'js/recorderWorker.js';

/**
 * Captures audio flowing through a Web Audio node and hands the samples to
 * a background worker for buffering/WAV encoding.
 * @param {AudioNode} source - node whose output should be recorded.
 * @param {Object} [cfg] - optional settings: bufferLen, workerPath,
 *                         callback, type.
 */
var Recorder = function(source, cfg){
var config = cfg || {};
var bufferLen = config.bufferLen || 4096;
this.context = source.context;
// createJavaScriptNode was renamed to createScriptProcessor; prefer the
// standard name and keep the legacy one as a fallback for old WebKit
// builds (the original only supported the legacy name).
var createProcessor = this.context.createScriptProcessor || this.context.createJavaScriptNode;
this.node = createProcessor.call(this.context, bufferLen, 2, 2);
var worker = new Worker(config.workerPath || WORKER_PATH);
worker.postMessage({
command: 'init',
config: {
sampleRate: this.context.sampleRate   // worker needs it for the WAV header
}
});
var recording = false,
currCallback;

// Ship each audio quantum (both stereo channels) to the worker while recording.
this.node.onaudioprocess = function(e){
if (!recording) return;
worker.postMessage({
command: 'record',
buffer: [
e.inputBuffer.getChannelData(0),
e.inputBuffer.getChannelData(1)
]
});
}

// Merge additional settings into the current config.
this.configure = function(cfg){
for (var prop in cfg){
if (cfg.hasOwnProperty(prop)){
config[prop] = cfg[prop];
}
}
}

this.record = function(){
recording = true;
}

this.stop = function(){
recording = false;
}

// Discard all samples buffered in the worker.
this.clear = function(){
worker.postMessage({ command: 'clear' });
}

// Fetch the raw sample buffers; cb receives [left, right] Float32Arrays.
this.getBuffer = function(cb) {
currCallback = cb || config.callback;
worker.postMessage({ command: 'getBuffer' })
}

// Ask the worker to encode the buffered samples as a WAV blob.
this.exportWAV = function(cb, type){
currCallback = cb || config.callback;
type = type || config.type || 'audio/wav';
if (!currCallback) throw new Error('Callback not set');
worker.postMessage({
command: 'exportWAV',
type: type
});
}

// Every worker reply is the payload for the most recent request.
worker.onmessage = function(e){
var blob = e.data;
currCallback(blob);
}

source.connect(this.node);
this.node.connect(this.context.destination);    //this should not be necessary
};

// Trigger a client-side download of the blob as a .wav file.
Recorder.forceDownload = function(blob, filename){
var url = (window.URL || window.webkitURL).createObjectURL(blob);
var link = window.document.createElement('a');
link.href = url;
link.download = filename || 'output.wav';
var click = document.createEvent("Event");
click.initEvent("click", true, true);
link.dispatchEvent(click);
}

window.Recorder = Recorder;

})(window);


 

recorderWorker.js的内容:
// Recording state shared by the message handlers below.
var recLength = 0,
recBuffersL = [],
recBuffersR = [],
sampleRate;

// Route messages from the main thread to the matching handler; unknown
// commands are silently ignored, exactly as the switch version did.
this.onmessage = function(e){
var msg = e.data;
if (msg.command === 'init') {
init(msg.config);
} else if (msg.command === 'record') {
record(msg.buffer);
} else if (msg.command === 'exportWAV') {
exportWAV(msg.type);
} else if (msg.command === 'getBuffer') {
getBuffer();
} else if (msg.command === 'clear') {
clear();
}
};

// Remember the capture sample rate; encodeWAV writes it into the header.
function init(opts){
sampleRate = opts['sampleRate'];
}

// Append one stereo quantum (a [left, right] pair of Float32Arrays) to the
// channel buffers and advance the total sample count.
function record(inputBuffer){
var left = inputBuffer[0];
var right = inputBuffer[1];
recBuffersL.push(left);
recBuffersR.push(right);
recLength += left.length;
}

// Encode everything recorded so far as a WAV blob of the requested MIME
// type and post it back to the main thread.
function exportWAV(type){
var left = mergeBuffers(recBuffersL, recLength);
var right = mergeBuffers(recBuffersR, recLength);
var samples = interleave(left, right);
var wavView = encodeWAV(samples);
var audioBlob = new Blob([wavView], { type: type });

this.postMessage(audioBlob);
}

// Post back the raw (un-encoded) [left, right] sample buffers.
function getBuffer() {
this.postMessage([
mergeBuffers(recBuffersL, recLength),
mergeBuffers(recBuffersR, recLength)
]);
}

// Reset all recording state so a fresh capture can start.
function clear(){
recBuffersL = [];
recBuffersR = [];
recLength = 0;
}

// Concatenate a list of Float32Array chunks into a single Float32Array of
// recLength samples, preserving chunk order.
function mergeBuffers(recBuffers, recLength){
var merged = new Float32Array(recLength);
var writePos = 0;
recBuffers.forEach(function(chunk){
merged.set(chunk, writePos);
writePos += chunk.length;
});
return merged;
}

// Interleave the two channels into a single L,R,L,R,... sample stream.
function interleave(inputL, inputR){
var total = inputL.length + inputR.length;
var result = new Float32Array(total);

for (var read = 0, write = 0; write < total; read++) {
result[write++] = inputL[read];
result[write++] = inputR[read];
}
return result;
}

// Convert float samples in [-1, 1] to little-endian signed 16-bit PCM,
// clamping out-of-range values, and write them into `output` at `offset`.
function floatTo16BitPCM(output, offset, input){
var pos = offset;
for (var i = 0; i < input.length; i++) {
var clamped = Math.min(1, Math.max(-1, input[i]));
output.setInt16(pos, clamped < 0 ? clamped * 0x8000 : clamped * 0x7FFF, true);
pos += 2;
}
}

// Write an ASCII string into the DataView one byte per character.
function writeString(view, offset, string){
var i = 0;
while (i < string.length) {
view.setUint8(offset + i, string.charCodeAt(i));
i += 1;
}
}

// Build a complete stereo 16-bit PCM WAV file image (44-byte header plus
// data) from interleaved samples. Returns a DataView over the whole file.
function encodeWAV(samples){
var buffer = new ArrayBuffer(44 + samples.length * 2);
var view = new DataView(buffer);

/* RIFF identifier */
writeString(view, 0, 'RIFF');
/* RIFF chunk length = total file size - 8. The original wrote
   32 + dataLen, understating the size by 4 bytes, which makes strict
   WAV parsers reject or truncate the file; the spec value is 36 + dataLen. */
view.setUint32(4, 36 + samples.length * 2, true);
/* RIFF type */
writeString(view, 8, 'WAVE');
/* format chunk identifier */
writeString(view, 12, 'fmt ');
/* format chunk length */
view.setUint32(16, 16, true);
/* sample format (1 = integer PCM) */
view.setUint16(20, 1, true);
/* channel count */
view.setUint16(22, 2, true);
/* sample rate */
view.setUint32(24, sampleRate, true);
/* byte rate (sample rate * block align) */
view.setUint32(28, sampleRate * 4, true);
/* block align (channel count * bytes per sample) */
view.setUint16(32, 4, true);
/* bits per sample */
view.setUint16(34, 16, true);
/* data chunk identifier */
writeString(view, 36, 'data');
/* data chunk length */
view.setUint32(40, samples.length * 2, true);

floatTo16BitPCM(view, 44, samples);

return view;
}


 

后台websocket代码:
package com.test;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.SequenceInputStream;

import javax.servlet.http.HttpServletRequest;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.eclipse.jetty.io.Connection;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.nio.SelectChannelConnector;
import org.eclipse.jetty.websocket.WebSocket;
import org.eclipse.jetty.websocket.WebSocketHandler;

public class TestRecordServlet extends Server {

private final static Log LOG =  LogFactory.getLog( TestRecordServlet.class );

/**
 * Builds a Jetty server listening on the given port whose only handler
 * upgrades each incoming request to a recording websocket.
 */
public TestRecordServlet(int port) {
SelectChannelConnector connector = new SelectChannelConnector();
connector.setPort(port);
addConnector(connector);

WebSocketHandler wsHandler = new WebSocketHandler() {
public WebSocket doWebSocketConnect(HttpServletRequest request, String protocol) {
return new FaceDetectWebSocket();
}
};
setHandler(wsHandler);
}

/**
 * Simple innerclass that is used to handle websocket connections.
 * Text frames carry control commands ("start"/"stop"/"clear"/"analyze");
 * binary frames carry WAV chunks that are persisted to out.wav.
 *
 * @author jos
 */
private static class FaceDetectWebSocket implements WebSocket,
WebSocket.OnBinaryMessage, WebSocket.OnTextMessage {
// Last control command received; binary chunks are only persisted
// while this is "start".
private String currentCommand = "";

private Connection connection;
//private FaceDetection faceDetection = new FaceDetection();

public FaceDetectWebSocket() {
super();
}

/**
 * On open we set the connection locally, and enable binary support
 * (512 KiB max frame size for the client's periodic WAV chunks).
 */
public void onOpen(Connection connection) {
this.connection = connection;
this.connection.setMaxBinaryMessageSize(1024 * 512);
}

/**
 * Cleanup if needed. Not used for this example.
 */
public void onClose(int code, String message) {}

/**
 * A binary message is one WAV chunk from the browser. While recording
 * is active we either start out.wav or concatenate the chunk onto it.
 */
public void onMessage(byte[] data, int offset, int length) {
// Guard clause replaces the original's deep nesting: ignore binary
// frames unless a "start" command is in effect.
if (!currentCommand.equals("start")) {
return;
}
try {
// The temporary file that contains our captured audio stream
File f = new File("out.wav");
if (f.exists()) {
appendChunk(f, data);
} else {
startNewRecording(data);
}
} catch (Exception e) {
// Keep the connection alive but record why the chunk was lost.
LOG.error("Failed to persist audio chunk", e);
}
}

/** Concatenates one received WAV chunk onto the existing recording file. */
private void appendChunk(File f, byte[] data) throws Exception {
LOG.info("Adding received block to existing file.");

// two clips are used to concat the data
AudioInputStream clip1 = AudioSystem.getAudioInputStream(f);
AudioInputStream clip2 = AudioSystem.getAudioInputStream(new ByteArrayInputStream(data));

// use a SequenceInputStream to cat them together
AudioInputStream appendedFiles =
new AudioInputStream(
new SequenceInputStream(clip1, clip2),
clip1.getFormat(),
clip1.getFrameLength() + clip2.getFrameLength());

try {
// write out the concatenation to a temporary file
AudioSystem.write(appendedFiles,
AudioFileFormat.Type.WAVE,
new File("out2.wav"));
} finally {
// The original leaked all three streams; closing the outermost one
// also closes the SequenceInputStream, but close the clips
// explicitly as well (double close is harmless).
appendedFiles.close();
clip1.close();
clip2.close();
}

// swap the temporary file into place, checking the results the
// original silently ignored
File f2 = new File("out2.wav");
if (!f.delete() || !f2.renameTo(new File("out.wav"))) {
LOG.error("Could not replace out.wav with the appended recording");
}
}

/** Writes the first chunk of a brand-new recording to out.wav. */
private void startNewRecording(byte[] data) throws IOException {
LOG.info("Starting new recording.");
FileOutputStream fOut = new FileOutputStream("out.wav", true);
try {
fOut.write(data);
} finally {
fOut.close();   // the original closed only on the success path
}
}

public void onMessage(String data) {
if (data.startsWith("start")) {
// before we start we cleanup anything left over
//cleanup();
currentCommand = "start";
} else if (data.startsWith("stop")) {
currentCommand = "stop";
} else if (data.startsWith("clear")) {
// just remove the current recording
//cleanup();
} else if (data.startsWith("analyze")) {
// analysis hook - not implemented in this example
}
}
}

/**
 * Start the server on port 8080.
 * NOTE(review): the original comment said port 999, and the browser
 * client in this article connects to 8088 - align these before deploying.
 */
public static void main(String[] args) throws Exception {
TestRecordServlet server = new TestRecordServlet(8080);
server.start();
server.join();
}
}
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: