Nun habe ich eine Lösung programmiert, um ein Mikrofon-Signal im Browser aufzunehmen, an einen Node-JS-Server zu senden und es dort abzuspielen. Der Code sieht wie folgt aus:

<!DOCTYPE html>


<!-- Page shell: UTF-8 charset and German title ("record the microphone with
     HTML5 and play it back with Node JS"). -->
<html> <head>  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">  <title>Mikrofon mit HTML5 aufnehmen und mit Node JS wiedergeben.</title> </head> <body>


<h1>Mikrofon aufnehmen</h1>

<!-- Usage notes (requires Chrome Canary >= 25 with the Web Audio input flag
     enabled under chrome://flags; use a headset to avoid feedback), the three
     control buttons, and a form that POSTs the captured samples to the Node
     server at 192.168.1.184:8888. -->
<p>Benutzen Sie für dieses Beispiel Google Chrome (min. Version Canary / 25). Zuerst muss das Tool "Webaudioeingabe" in den Einstellungen aktiviert werden: chrome://flags</p>   <p>Benutzen Sie ein Headset, wenn Sie Rückkopplung vermeiden möchten.</p>   <button onclick="startRecording(this);">Aufnehmen</button>   <button onclick="stopRecording(this);" disabled>Stop</button>   <button onclick="sendStream()">Senden</button>   <form id="sender" action="http://192.168.1.184:8888" method="post">

<!-- Hidden field filled by getStream() with the comma-separated samples,
     followed by the recordings list and the log output area. -->
<input type="hidden" name="sendData" id="sendData" value="">   </form>   <h2>Aufnahmen</h2>   <ul id="recordingslist"></ul>     <h2>Log</h2>   <pre id="log"></pre>

<script>   function __log(e, data) {     log.innerHTML += "n" + e + " " + (data || '');   }

// Shared page state:
//   audio_context – the Web Audio AudioContext created in init()
//   recorder      – Recorder.js instance wrapping the mic source (startUserMedia)
//   sendbuffer    – channel-0 sample buffer captured by getStream()
var audio_context;   var recorder;   var sendbuffer;


// getUserMedia success callback: wires the live microphone stream into the
// audio graph and hands it to Recorder.js for capture.
function startUserMedia(stream) {
  // Wrap the raw MediaStream in a Web Audio source node.
  var sourceNode = audio_context.createMediaStreamSource(stream);
  __log('Media stream created.');

  // Route the live input straight to the speakers (headset recommended,
  // otherwise this monitoring path causes feedback).
  sourceNode.connect(audio_context.destination);
  __log('Input connected to audio context destination.');

  // The Recorder instance is shared module state used by the button handlers.
  recorder = new Recorder(sourceNode);
  __log('Recorder initialised.');
}

// Click handler for the "Aufnehmen" button: starts capture and swaps the
// enabled state of the record/stop button pair.
function startRecording(button) {
  // Only record if Recorder.js was successfully initialised.
  if (recorder) {
    recorder.record();
  }
  button.disabled = true;
  // The "Stop" button is the record button's direct DOM sibling.
  button.nextElementSibling.disabled = false;
  __log('Recording...');
}

// Click handler for "Stop": ends capture, re-enables the record button and
// publishes the take as a playable WAV link.
function stopRecording(button) {
  recorder && recorder.stop();
  button.disabled = true;
  button.previousElementSibling.disabled = false;
  __log('Stopped recording.');

  // create WAV download link using audio data blob
  createDownloadLink();
}

// Recorder.getBuffer() callback: copies channel 0 into the hidden form field
// as a comma-separated list of float samples.
function getStream(b) {
  sendbuffer = b[0];
  var sendarray = new Array();
  for (var i = 0; i < sendbuffer.length; i++) {
    sendarray.push(sendbuffer[i]);
  }
  document.getElementById("sendData").value = sendarray.join(",");
}

// Click handler for "Senden": serialises the recorded samples and submits
// the form to the Node server.
function sendStream() {
  recorder.getBuffer(getStream);
  // BUG FIX: pass a real function to setTimeout instead of a code string
  // (string form is an implicit eval and breaks under CSP).
  // NOTE(review): the fixed 1 s delay merely hopes getBuffer's async callback
  // has filled the field by then — submitting from inside getStream would be
  // deterministic; kept as-is to preserve the original flow.
  setTimeout(function () {
    document.getElementById("sender").submit();
  }, 1000);
}

// Export the last take as a WAV blob and append an <li> with an inline
// player plus a timestamp-named download link to #recordingslist.
function createDownloadLink() {
  if (!recorder) {
    return; // nothing has been recorded yet
  }
  recorder.exportWAV(function (blob) {
    var url = URL.createObjectURL(blob);

    var item = document.createElement('li');

    // Inline player for the freshly encoded WAV.
    var player = document.createElement('audio');
    player.controls = true;
    player.src = url;

    // Download link named after the current timestamp.
    var link = document.createElement('a');
    link.href = url;
    link.download = new Date().toISOString() + '.wav';
    link.innerHTML = link.download;

    item.appendChild(player);
    item.appendChild(link);
    recordingslist.appendChild(item);
  });
}

window.onload = function init() {     try {       // webkit shim       window.AudioContext = window.AudioContext || window.webkitAudioContext;       navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia;       window.URL = window.URL || window.webkitURL;             audio_context = new AudioContext;       __log('Audio context set up.');       __log('navigator.getUserMedia ' + (navigator.getUserMedia ? 'available.' : 'not present!'));     } catch (e) {       alert('No web audio support in this browser!');     }         navigator.getUserMedia({audio: true}, startUserMedia, function(e) {       __log('No live audio input: ' + e);     });   };   </script>

<!-- Recorder.js (Matt Diamond) supplies the Recorder class used above. -->
<script src="recorder.js"></script> </body> </html>

Das war der Code der Client-Seite; er stammt zum grössten Teil von Matt Diamonds GitHub-Beispiel. Dazu werden noch zusätzliche Bibliotheken benötigt, welche in einem meiner vorherigen Posts verlinkt sind. Nun folgt die Server-Seite in Node JS:

// Minimal HTTP endpoint: receives the POSTed sample buffer from the browser
// page, writes it as a WAV file and plays it back locally (see handlerequest).
var http = require('http');

var url = require("url"); // NOTE(review): currently unused, kept for compatibility

var fs = require('fs');

// Sample rate written into the WAV header. Assumes the capturing browser
// records at 48 kHz — TODO confirm; a mismatch changes playback speed.
var sampleRate = 48000;

// BUG FIX: declare with `var` — the original assigned an undeclared
// identifier, creating an implicit global (a ReferenceError in strict mode).
var httpServer = http.createServer();

httpServer.on("request", handlerequest);

httpServer.listen(8888, "0.0.0.0");

console.log('Server running at 0.0.0.0:8888');

// Handle one HTTP request. POST bodies are expected to be a urlencoded form
// submission "sendData=<f0>,<f1>,..." containing the float samples; the
// samples are encoded as a WAV file and played via mplayer.
function handlerequest(req, resp) {
  console.log("Transmission...");

  if (req.method == "POST") {
    var Buffer = require("buffer").Buffer;

    // BUG FIX: `new Buffer([], "utf-8")` is deprecated and unsafe — use
    // Buffer.alloc(0) as the empty accumulator.
    var postDataBuffer = Buffer.alloc(0);

    // Accumulate the chunked request body.
    req.on("data", function (postChunkedBuffer) {
      postDataBuffer = Buffer.concat([postDataBuffer, postChunkedBuffer]);
    });

    req.on("end", function () {
      // substring(9) strips the "sendData=" form-field prefix; the remaining
      // comma-separated decimals become Float32 samples.
      var floatWaveArray = new Float32Array(
        require("querystring").unescape(postDataBuffer.toString()).substring(9).split(","));

      fs.writeFile("output.wav", encodeWAV32(floatWaveArray), function (err) {
        // BUG FIX: the original ignored write errors and spawned the player
        // on a possibly missing file.
        if (err) {
          console.error("Failed to write output.wav:", err);
          return;
        }
        var c = require('child_process').spawn;
        c("mplayer", ['output.wav']);
      });
    });

    resp.writeHead(200, {'Content-Type': 'text/html'});
    resp.write("Data received");
    resp.end();
  } else {
    // BUG FIX: the original never responded to non-POST requests, leaving
    // the connection hanging until the client timed out.
    resp.writeHead(405, {'Content-Type': 'text/html'});
    resp.end("Only POST is supported");
  }
}

// Convert normalised float samples in [-1, 1] to signed 32-bit little-endian
// PCM, writing 4 bytes per sample into `output` (a DataView) from `offset`.
function floatTo32BitPCM(output, offset, input) {
  for (var i = 0; i < input.length; i++, offset += 4) {
    // Clamp out-of-range samples to the valid [-1, 1] window.
    var s = Math.max(-1, Math.min(1, input[i]));
    // BUG FIX: the original multiplied every sample by 2147483648 (2^31), so
    // a full-scale +1.0 produced 2^31, which setInt32 wraps to -2^31 — a
    // positive peak became the most negative value. Scale positives by
    // 2^31 - 1 and negatives by 2^31 (asymmetric, like the classic 16-bit
    // 0x7FFF / 0x8000 conversion).
    output.setInt32(offset, s < 0 ? s * 2147483648 : s * 2147483647, true);
  }
}

// Write the ASCII characters of `string` into `view` starting at `offset`,
// one byte per character (used for the RIFF/WAVE chunk identifiers).
function writeString(view, offset, string) {
  var length = string.length;
  for (var pos = 0; pos < length; pos++) {
    view.setUint8(offset + pos, string.charCodeAt(pos));
  }
}

// Build a complete mono, 32-bit-PCM WAV file for `samples` (array of floats
// in [-1, 1]) and return it as a Node Buffer: a 44-byte RIFF header followed
// by 4 bytes per sample. Reads module-level `sampleRate`.
//
// BUG FIX: the original did `new DataView(new Buffer(...))` — DataView
// requires an ArrayBuffer, so this throws a TypeError (and `new Buffer(n)`
// is deprecated besides). Build the file in an ArrayBuffer and wrap it in a
// Buffer at the end.
function encodeWAV32(samples) {
  var arrayBuffer = new ArrayBuffer(44 + samples.length * 4);
  var view = new DataView(arrayBuffer);

  /* RIFF identifier */
  writeString(view, 0, 'RIFF');
  /* file length: RIFF chunk size = 36 + data bytes.
     BUG FIX: the original wrote 32 + data, understating the size by 4. */
  view.setUint32(4, 36 + samples.length * 4, true);
  /* RIFF type */
  writeString(view, 8, 'WAVE');
  /* format chunk identifier */
  writeString(view, 12, 'fmt ');
  /* format chunk length */
  view.setUint32(16, 16, true);
  /* sample format (raw PCM) */
  view.setUint16(20, 1, true);
  /* channel count */
  view.setUint16(22, 1, true);
  /* sample rate */
  view.setUint32(24, sampleRate, true);
  /* byte rate (sample rate * block align) */
  view.setUint32(28, sampleRate * 4, true);
  /* block align (channel count * bytes per sample) */
  view.setUint16(32, 4, true);
  /* bits per sample */
  view.setUint16(34, 32, true);
  /* data chunk identifier */
  writeString(view, 36, 'data');
  /* data chunk length */
  view.setUint32(40, samples.length * 4, true);

  floatTo32BitPCM(view, 44, samples);
  return Buffer.from(arrayBuffer);
}

Auch dieser Code enthält Teile aus Matt Diamonds JS-Beispielen. Bei Fragen senden Sie mir einfach eine Nachricht via Kontakt-Formular.