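/**
 * AudioManager — records microphone audio in the browser, renders a live
 * frequency-bar visualization on a canvas, and converts the captured
 * WebM/Opus chunks into a 16-bit PCM WAV blob for upload.
 */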
class AudioManager {
constructor() {
// Core audio configuration
this.config = {
sampleRate: 44100,
channels: 1,
bitDepth: 16,
maxRecordingTime: 30000, // 30 seconds in milliseconds
minRecordingTime: 15000 // 15 seconds in milliseconds
};
// Recording state management
this.state = {
isRecording: false,
startTime: null,
recorder: null,
stream: null,
audioChunks: [],
audioContext: null,
analyser: null
};
// Audio visualization settings
this.visualizer = {
canvasContext: null,
dataArray: null,
bufferLength: null,
width: 0,
height: 0
};
// Initialize audio context with error handling
try {
this.state.audioContext = new (window.AudioContext || window.webkitAudioContext)();
    } catch (error) {
      console.error('AudioContext not supported in this browser:', error);
    }
// Bind methods to maintain context
this.startRecording = this.startRecording.bind(this);
this.stopRecording = this.stopRecording.bind(this);
this.processAudio = this.processAudio.bind(this);
}
  /**
   * Initialize audio visualization on a canvas element.
   * Call this before startRecording() so the analyser node exists when the
   * microphone stream is connected to it.
   * @param {HTMLCanvasElement} canvas - The canvas element for visualization
   */
initializeVisualizer(canvas) {
    if (!canvas || !this.state.audioContext) return;
this.visualizer.canvasContext = canvas.getContext('2d');
this.visualizer.width = canvas.width;
this.visualizer.height = canvas.height;
// Set up audio analyser for visualization
this.state.analyser = this.state.audioContext.createAnalyser();
this.state.analyser.fftSize = 2048;
this.visualizer.bufferLength = this.state.analyser.frequencyBinCount;
this.visualizer.dataArray = new Uint8Array(this.visualizer.bufferLength);
}
/**
* Start recording audio with visualization
* @returns {Promise<void>}
*/
  async startRecording() {
    try {
      // Discard any chunks left over from a previous recording
      this.state.audioChunks = [];
      // Request microphone access
      this.state.stream = await navigator.mediaDevices.getUserMedia({
        audio: {
          channelCount: this.config.channels,
          sampleRate: this.config.sampleRate
        }
      });
      // Create and configure MediaRecorder, falling back to the browser
      // default when Opus-in-WebM is unsupported (e.g. Safari)
      const mimeType = 'audio/webm;codecs=opus';
      this.state.recorder = MediaRecorder.isTypeSupported(mimeType)
        ? new MediaRecorder(this.state.stream, { mimeType })
        : new MediaRecorder(this.state.stream);
      // Set up recording event handlers
      this.state.recorder.ondataavailable = (event) => {
        if (event.data.size > 0) {
          this.state.audioChunks.push(event.data);
        }
      };
      // Autoplay policies can leave the context suspended until a user gesture
      if (this.state.audioContext && this.state.audioContext.state === 'suspended') {
        await this.state.audioContext.resume();
      }
      // Connect audio nodes for visualization (only if a visualizer was set up)
      if (this.state.analyser) {
        const source = this.state.audioContext.createMediaStreamSource(this.state.stream);
        source.connect(this.state.analyser);
      }
      // Start recording
      this.state.recorder.start(100); // Collect data every 100ms
      this.state.isRecording = true;
      this.state.startTime = Date.now();
      // Start visualization if canvas is set up
      if (this.visualizer.canvasContext) {
        this.drawVisualization();
      }
      // Set up automatic recording stop
      setTimeout(() => {
        if (this.state.isRecording) {
          this.stopRecording();
        }
      }, this.config.maxRecordingTime);
    } catch (error) {
      console.error('Error starting recording:', error);
      throw new Error('Failed to start recording');
    }
  }
/**
* Stop recording and process the audio
* @returns {Promise<Blob>} The processed audio blob
*/
  async stopRecording() {
    return new Promise((resolve, reject) => {
      try {
        if (!this.state.isRecording || !this.state.recorder) {
          throw new Error('No recording in progress');
        }
        const recordingDuration = Date.now() - this.state.startTime;
        // Reject if the recording is below the minimum duration; the recorder
        // keeps running so the caller can retry or the max-time stop can fire
        if (recordingDuration < this.config.minRecordingTime) {
          throw new Error('Recording too short');
        }
        this.state.recorder.onstop = async () => {
          try {
            const audioBlob = await this.processAudio();
            resolve(audioBlob);
          } catch (error) {
            reject(error);
          }
        };
        // Stop recording and release the microphone
        this.state.recorder.stop();
        this.state.stream.getTracks().forEach(track => track.stop());
        this.state.isRecording = false;
      } catch (error) {
        reject(error);
      }
    });
  }
/**
* Process recorded audio chunks into a single blob
* @returns {Promise<Blob>}
*/
  async processAudio() {
    try {
      // Combine audio chunks into a single blob, using the recorder's actual
      // MIME type in case the Opus/WebM default was unavailable
      const mimeType = (this.state.recorder && this.state.recorder.mimeType) || 'audio/webm;codecs=opus';
      const audioBlob = new Blob(this.state.audioChunks, { type: mimeType });
      this.state.audioChunks = [];
      // Decode with the existing AudioContext instead of leaking a new one
      const arrayBuffer = await audioBlob.arrayBuffer();
      const audioBuffer = await this.state.audioContext.decodeAudioData(arrayBuffer);
      // Convert to the WAV format expected by the ElevenLabs API
      const wavBuffer = this.createWAVBuffer(audioBuffer);
      return new Blob([wavBuffer], { type: 'audio/wav' });
    } catch (error) {
      console.error('Error processing audio:', error);
      throw new Error('Failed to process audio');
    }
  }
  /**
   * Convert an AudioBuffer into a complete 16-bit PCM WAV file buffer
   * @param {AudioBuffer} audioBuffer
   * @returns {ArrayBuffer} 44-byte header followed by interleaved samples
   */
createWAVBuffer(audioBuffer) {
const numChannels = audioBuffer.numberOfChannels;
const length = audioBuffer.length * numChannels * 2;
const buffer = new ArrayBuffer(44 + length);
const view = new DataView(buffer);
// Write WAV header
this.writeWAVHeader(view, length, numChannels, audioBuffer.sampleRate);
// Write audio data
const channels = [];
for (let i = 0; i < numChannels; i++) {
channels.push(audioBuffer.getChannelData(i));
}
let offset = 44;
for (let i = 0; i < audioBuffer.length; i++) {
for (let channel = 0; channel < numChannels; channel++) {
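        // Clamp each float sample to [-1, 1], then scale to the signed
        // 16-bit range (asymmetric: -32768 for negatives, 32767 for positives)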
const sample = Math.max(-1, Math.min(1, channels[channel][i]));
view.setInt16(offset, sample < 0 ? sample * 0x8000 : sample * 0x7FFF, true);
offset += 2;
}
}
return buffer;
}
  /**
   * Write the canonical 44-byte PCM WAV header into the DataView
   * @param {DataView} view
   * @param {number} length - Byte length of the PCM data chunk
   * @param {number} numChannels
   * @param {number} sampleRate
   */
writeWAVHeader(view, length, numChannels, sampleRate) {
// RIFF identifier
this.writeString(view, 0, 'RIFF');
// RIFF chunk length
view.setUint32(4, 36 + length, true);
// RIFF type
this.writeString(view, 8, 'WAVE');
// Format chunk identifier
this.writeString(view, 12, 'fmt ');
// Format chunk length
view.setUint32(16, 16, true);
    // Audio format (1 = uncompressed PCM)
    view.setUint16(20, 1, true);
// Channel count
view.setUint16(22, numChannels, true);
// Sample rate
view.setUint32(24, sampleRate, true);
// Byte rate (sample rate * block align)
view.setUint32(28, sampleRate * numChannels * 2, true);
// Block align (channel count * bytes per sample)
view.setUint16(32, numChannels * 2, true);
// Bits per sample
view.setUint16(34, 16, true);
// Data chunk identifier
this.writeString(view, 36, 'data');
// Data chunk length
view.setUint32(40, length, true);
}
/**
* Write string to DataView
* @param {DataView} view
* @param {number} offset
* @param {string} string
*/
writeString(view, offset, string) {
for (let i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
/**
* Draw audio visualization
*/
drawVisualization() {
if (!this.state.isRecording || !this.visualizer.canvasContext) return;
requestAnimationFrame(() => this.drawVisualization());
// Get frequency data
this.state.analyser.getByteFrequencyData(this.visualizer.dataArray);
// Clear canvas
this.visualizer.canvasContext.fillStyle = 'rgb(10, 10, 10)';
this.visualizer.canvasContext.fillRect(0, 0, this.visualizer.width, this.visualizer.height);
// Draw frequency bars
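    // The 2.5x factor widens each bar, so only the lower-frequency bins fit
    // on the canvas; bars past the right edge are clipped and never visible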
const barWidth = (this.visualizer.width / this.visualizer.bufferLength) * 2.5;
let barHeight;
let x = 0;
for (let i = 0; i < this.visualizer.bufferLength; i++) {
barHeight = (this.visualizer.dataArray[i] / 255) * this.visualizer.height;
this.visualizer.canvasContext.fillStyle = `rgb(${barHeight + 100},50,50)`;
this.visualizer.canvasContext.fillRect(
x,
this.visualizer.height - barHeight,
barWidth,
barHeight
);
x += barWidth + 1;
}
}
/**
* Clean up audio resources
*/
  cleanup() {
    // Stop an in-progress recorder before releasing the microphone
    if (this.state.recorder && this.state.recorder.state !== 'inactive') {
      this.state.recorder.stop();
    }
    if (this.state.stream) {
      this.state.stream.getTracks().forEach(track => track.stop());
    }
    this.state.audioChunks = [];
    this.state.isRecording = false;
  }
}
// Export the AudioManager class
export default AudioManager;
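
// --- Usage sketch (illustrative only) ---
// A minimal example of wiring AudioManager to record/stop buttons. The
// element IDs ('#waveform', '#record', '#stop') and the upload step are
// hypothetical and not part of this module; adapt them to your own markup.
//
//   import AudioManager from './AudioManager.js';
//
//   const audio = new AudioManager();
//   audio.initializeVisualizer(document.querySelector('#waveform'));
//
//   document.querySelector('#record').addEventListener('click', () => {
//     audio.startRecording().catch(console.error);
//   });
//
//   document.querySelector('#stop').addEventListener('click', async () => {
//     try {
//       const wavBlob = await audio.stopRecording(); // resolves with a WAV blob
//       // e.g. POST wavBlob to your server or the ElevenLabs endpoint here
//     } finally {
//       audio.cleanup();
//     }
//   });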