Original source: https://gist.github.com/meziantou/edb7217fddfbb70e899e#file-index-html
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title></title>
</head>
<body>
<h1>Audio</h1>
<button id="startRecordingButton">Start recording</button>
<button id="stopRecordingButton">Stop recording</button>
<button id="playButton">Play</button>
<button id="downloadButton">Download</button>
<script>
var startRecordingButton = document.getElementById("startRecordingButton");
var stopRecordingButton = document.getElementById("stopRecordingButton");
var playButton = document.getElementById("playButton");
var downloadButton = document.getElementById("downloadButton");
var leftchannel = [];
var rightchannel = [];
var recorder = null;
var recordingLength = 0;
var volume = null;
var mediaStream = null;
var sampleRate = 44100;
var context = null;
var blob = null;
startRecordingButton.addEventListener("click", function () {
// Initialize recorder
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
navigator.getUserMedia(
{
audio: true
},
function (e) {
console.log("user consent");
// creates the audio context
window.AudioContext = window.AudioContext || window.webkitAudioContext;
context = new AudioContext();
// creates an audio node from the microphone incoming stream
mediaStream = context.createMediaStreamSource(e);
// https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/createScriptProcessor
// bufferSize: the onaudioprocess event is called when the buffer is full
var bufferSize = 2048;
var numberOfInputChannels = 2;
var numberOfOutputChannels = 2;
if (context.createScriptProcessor) {
recorder = context.createScriptProcessor(bufferSize, numberOfInputChannels, numberOfOutputChannels);
} else {
recorder = context.createJavaScriptNode(bufferSize, numberOfInputChannels, numberOfOutputChannels);
}
recorder.onaudioprocess = function (e) {
leftchannel.push(new Float32Array(e.inputBuffer.getChannelData(0)));
rightchannel.push(new Float32Array(e.inputBuffer.getChannelData(1)));
recordingLength += bufferSize;
}
// we connect the recorder
mediaStream.connect(recorder);
recorder.connect(context.destination);
},
function (e) {
console.error(e);
});
});
stopRecordingButton.addEventListener("click", function () {
// stop recording
recorder.disconnect(context.destination);
mediaStream.disconnect(recorder);
// we flat the left and right channels down
// Float32Array[] => Float32Array
var leftBuffer = flattenArray(leftchannel, recordingLength);
var rightBuffer = flattenArray(rightchannel, recordingLength);
// we interleave both channels together
// [left[0],right[0],left[1],right[1],...]
var interleaved = interleave(leftBuffer, rightBuffer);
// we create our wav file
var buffer = new ArrayBuffer(44 + interleaved.length * 2);
var view = new DataView(buffer);
// RIFF chunk descriptor
writeUTFBytes(view, 0, 'RIFF');
view.setUint32(4, 44 + interleaved.length * 2, true);
writeUTFBytes(view, 8, 'WAVE');
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
view.setUint32(16, 16, true); // chunkSize
view.setUint16(20, 1, true); // wFormatTag
view.setUint16(22, 2, true); // wChannels: stereo (2 channels)
view.setUint32(24, sampleRate, true); // dwSamplesPerSec
view.setUint32(28, sampleRate * 4, true); // dwAvgBytesPerSec
view.setUint16(32, 4, true); // wBlockAlign
view.setUint16(34, 16, true); // wBitsPerSample
// data sub-chunk
writeUTFBytes(view, 36, 'data');
view.setUint32(40, interleaved.length * 2, true);
// write the PCM samples
var index = 44;
var volume = 1;
for (var i = 0; i < interleaved.length; i++) {
view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
index += 2;
}
// our final blob
blob = new Blob([view], { type: 'audio/wav' });
});
playButton.addEventListener("click", function () {
if (blob == null) {
return;
}
var url = window.URL.createObjectURL(blob);
var audio = new Audio(url);
audio.play();
});
downloadButton.addEventListener("click", function () {
if (blob == null) {
return;
}
var url = URL.createObjectURL(blob);
var a = document.createElement("a");
document.body.appendChild(a);
a.style = "display: none";
a.href = url;
a.download = "sample.wav";
a.click();
window.URL.revokeObjectURL(url);
});
function flattenArray(channelBuffer, recordingLength) {
var result = new Float32Array(recordingLength);
var offset = 0;
for (var i = 0; i < channelBuffer.length; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float32Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function writeUTFBytes(view, offset, string) {
for (var i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
</script>
</body>
</html>
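The original gist uses the prefixed, callback-style navigator.getUserMedia and createScriptProcessor, both of which are deprecated in current browsers, and it hardcodes sampleRate = 44100 even though the real capture rate is whatever context.sampleRate reports. A minimal sketch of the same microphone setup with the promise-based navigator.mediaDevices.getUserMedia (the startCapture name is just illustrative; the rest of the recording pipeline can stay unchanged):

// Sketch: promise-based microphone capture, the modern replacement for navigator.getUserMedia.
async function startCapture() {
  // ask for microphone permission and obtain a MediaStream
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  const context = new (window.AudioContext || window.webkitAudioContext)();
  // context.sampleRate is the real capture rate (commonly 44100 or 48000);
  // using it for the WAV header is safer than hardcoding 44100
  console.log("actual sample rate:", context.sampleRate);
  const source = context.createMediaStreamSource(stream);
  return { context: context, source: source, stream: stream };
}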
Sample screen: (screenshot not included)
Converting to 16,000 Hz / 16-bit / mono: modify the source above as shown below.
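Note that the modified listing below reaches the target rate by keeping every 3rd sample, which is only exact when the AudioContext captures at 48 kHz (48000 / 3 = 16000); at 44.1 kHz the real output rate is 14700 Hz even though the header declares 16000. A small sketch, assuming the AudioContext from the recording code is available as context, of deriving the decimation step from the actual capture rate instead of hardcoding 3 (targetRate, step and outputRate are illustrative names):

// Sketch: derive the decimation step from the real capture rate.
// Simple decimation only works for integer ratios; otherwise proper resampling is needed.
var targetRate = 16000;
var captureRate = context.sampleRate;            // e.g. 48000 or 44100
var step = Math.round(captureRate / targetRate); // 3 when capturing at 48000
var outputRate = captureRate / step;             // write this value into the WAV header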
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title></title>
</head>
<body>
<h1>Audio</h1>
<button id="startRecordingButton">Start recording</button>
<button id="stopRecordingButton">Stop recording</button>
<button id="playButton">Play</button>
<button id="downloadButton">Download</button>
<script>
var startRecordingButton = document.getElementById("startRecordingButton");
var stopRecordingButton = document.getElementById("stopRecordingButton");
var playButton = document.getElementById("playButton");
var downloadButton = document.getElementById("downloadButton");
var leftchannel = [];
var rightchannel = [];
var recorder = null;
var recordingLength = 0;
var volume = null;
var mediaStream = null;
var sampleRate = 16000;
var context = null;
var blob = null;
startRecordingButton.addEventListener("click", function () {
// Initialize recorder
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
navigator.getUserMedia(
{
audio: true
},
function (e) {
console.log("user consent");
// creates the audio context
window.AudioContext = window.AudioContext || window.webkitAudioContext;
context = new AudioContext();
// creates an audio node from the microphone incoming stream
mediaStream = context.createMediaStreamSource(e);
// https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/createScriptProcessor
// bufferSize: the onaudioprocess event is called when the buffer is full
var bufferSize = 2048;
var numberOfInputChannels = 2;
var numberOfOutputChannels = 2;
if (context.createScriptProcessor) {
recorder = context.createScriptProcessor(bufferSize, numberOfInputChannels, numberOfOutputChannels);
} else {
recorder = context.createJavaScriptNode(bufferSize, numberOfInputChannels, numberOfOutputChannels);
}
recorder.onaudioprocess = function (e) {
leftchannel.push(new Float32Array(e.inputBuffer.getChannelData(0)));
rightchannel.push(new Float32Array(e.inputBuffer.getChannelData(1)));
recordingLength += bufferSize;
}
// we connect the recorder
mediaStream.connect(recorder);
recorder.connect(context.destination);
},
function (e) {
console.error(e);
});
});
stopRecordingButton.addEventListener("click", function () {
// stop recording
recorder.disconnect(context.destination);
mediaStream.disconnect(recorder);
// we flat the left and right channels down
// Float32Array[] => Float32Array
var leftBuffer = flattenArray(leftchannel, recordingLength);
var rightBuffer = flattenArray(rightchannel, recordingLength);
// we interleave both channels together
// [left[0],right[0],left[1],right[1],...]
var interleaved = interleave(leftBuffer, rightBuffer);
// we create our wav file
// number of mono samples after keeping every 3rd left-channel sample of the interleaved stream,
// and the resulting data size in bytes (16-bit PCM = 2 bytes per sample)
var numSamples = Math.ceil(interleaved.length / (3 * 2));
var dataSize = numSamples * 2;
var buffer = new ArrayBuffer(44 + dataSize);
var view = new DataView(buffer);
// RIFF chunk descriptor
writeUTFBytes(view, 0, 'RIFF');
view.setUint32(4, 36 + dataSize, true); // RIFF chunk size = file size - 8
writeUTFBytes(view, 8, 'WAVE');
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
view.setUint32(16, 16, true); // chunkSize
view.setUint16(20, 1, true); // wFormatTag
view.setUint16(22, 1, true); // wChannels: mono (1 channel)
view.setUint32(24, sampleRate, true); // dwSamplesPerSec
view.setUint32(28, sampleRate * 2, true); // dwAvgBytesPerSec = sampleRate * blockAlign
view.setUint16(32, 2, true); // wBlockAlign = channels * bytesPerSample
view.setUint16(34, 16, true); // wBitsPerSample
// data sub-chunk
writeUTFBytes(view, 36, 'data');
view.setUint32(40, dataSize, true);
// write the PCM samples: keep every 3rd left-channel sample (naive decimation to mono)
var index = 44;
var volume = 1;
for (var i = 0; i < interleaved.length; i += 3 * 2) {
//view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
view.setInt16(index, interleaved[i] < 0 ? interleaved[i] * 0x8000 : interleaved[i] * 0x7FFF, true);
index += 2;
}
// our final blob
blob = new Blob([view], { type: 'audio/wav' });
});
playButton.addEventListener("click", function () {
if (blob == null) {
return;
}
var url = window.URL.createObjectURL(blob);
var audio = new Audio(url);
audio.play();
});
downloadButton.addEventListener("click", function () {
if (blob == null) {
return;
}
var url = URL.createObjectURL(blob);
var a = document.createElement("a");
document.body.appendChild(a);
a.style = "display: none";
a.href = url;
a.download = "sample.wav";
a.click();
window.URL.revokeObjectURL(url);
});
function flattenArray(channelBuffer, recordingLength) {
var result = new Float32Array(recordingLength);
var offset = 0;
for (var i = 0; i < channelBuffer.length; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float32Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function writeUTFBytes(view, offset, string) {
for (var i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
</script>
</body>
</html>
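Dropping samples is the simplest conversion but it introduces aliasing. The browser can also do the rate conversion itself: a minimal sketch, assuming the flattened mono samples and the real capture rate are already available (resampleTo16k is a hypothetical helper, not part of the original post):

// Sketch: resample a mono Float32Array to 16 kHz with OfflineAudioContext
// instead of discarding samples; returns a Promise of the resampled Float32Array.
function resampleTo16k(monoSamples, sourceRate) {
  var frames = Math.ceil(monoSamples.length * 16000 / sourceRate);
  var offline = new OfflineAudioContext(1, frames, 16000);
  // wrap the raw samples in an AudioBuffer at the original rate
  var buffer = offline.createBuffer(1, monoSamples.length, sourceRate);
  buffer.copyToChannel(monoSamples, 0);
  var source = offline.createBufferSource();
  source.buffer = buffer;
  source.connect(offline.destination);
  source.start();
  return offline.startRendering().then(function (rendered) {
    return rendered.getChannelData(0); // 16 kHz samples, ready for the WAV writer above
  });
}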