about summary refs log tree commit diff stats
path: root/html/voice-memos/app.js
diff options
context:
space:
mode:
Diffstat (limited to 'html/voice-memos/app.js')
-rw-r--r-- html/voice-memos/app.js 606
1 files changed, 606 insertions, 0 deletions
diff --git a/html/voice-memos/app.js b/html/voice-memos/app.js
new file mode 100644
index 0000000..d21d1fa
--- /dev/null
+++ b/html/voice-memos/app.js
@@ -0,0 +1,606 @@
/**
 * @typedef {Object} AudioState
 * @property {MediaRecorder|null} mediaRecorder - The MediaRecorder instance (null until a recording starts)
 * @property {AudioContext|null} audioContext - The Web Audio API context (null until a recording starts)
 * @property {AnalyserNode|null} analyser - The analyser node for waveform visualization (null until a recording starts)
 * @property {Array<Blob>} audioChunks - Array of recorded audio data chunks
 * @property {boolean} isRecording - Recording state flag
 * @property {number} countdown - Countdown value (seconds) before recording starts; 0 when idle
 * @property {boolean} isPlaying - Playback state flag
 * @property {number} recordingStartTime - Epoch-ms timestamp when recording started; 0 before the first recording
 * @property {string} lastError - Last error message shown to the user ('' when none)
 * @property {string|null} mimeType - MIME type MediaRecorder actually used (null before the first recording)
 */

/**
 * Initial application state. The state manager copies this object
 * (spread) rather than mutating it, so it stays pristine.
 * @type {AudioState}
 */
const initialState = {
    mediaRecorder: null,
    audioContext: null,
    analyser: null,
    audioChunks: [],
    isRecording: false,
    countdown: 0,
    isPlaying: false,
    recordingStartTime: 0,
    lastError: '',
    mimeType: null
};
+
/**
 * Keys used for persisting preferences in localStorage.
 * Frozen so the shared constant cannot be mutated at runtime.
 */
const STORAGE_KEYS = Object.freeze({
    LAST_DEVICE_ID: 'voice_memo_last_device_id'
});
+
/**
 * Cached references to the DOM nodes the app manipulates.
 * Looked up once at load time; the ids must exist in the page markup.
 */
const elements = Object.fromEntries(
    ['inputSource', 'startBtn', 'stopBtn', 'playBtn', 'saveBtn', 'waveform', 'status']
        .map((id) => [id, document.getElementById(id)])
);
+
/**
 * Builds a minimal store that holds the app's AudioState.
 * `getState` hands back a shallow copy so callers cannot mutate the
 * store directly; `setState` merges a partial update and refreshes
 * the UI afterwards.
 * @returns {Object} State management functions (`getState`, `setState`)
 */
const createStateManager = () => {
    let current = { ...initialState };

    const getState = () => ({ ...current });

    const setState = (patch) => {
        current = { ...current, ...patch };
        updateUI();
    };

    return { getState, setState };
};

const stateManager = createStateManager();
+
/**
 * Formats an elapsed time as MM:SS.
 * @param {number} seconds - Time in seconds (fractions are truncated)
 * @returns {string} Zero-padded "MM:SS" string
 */
const formatTime = (seconds) => {
    const mm = String(Math.floor(seconds / 60)).padStart(2, '0');
    const ss = String(Math.floor(seconds % 60)).padStart(2, '0');
    return `${mm}:${ss}`;
};
+
/**
 * Synchronizes buttons, the recording badge, and the status line with
 * the current application state. Called by the state manager after
 * every setState.
 */
const updateUI = () => {
    const {
        isRecording,
        audioChunks,
        countdown,
        isPlaying,
        recordingStartTime,
        lastError
    } = stateManager.getState();

    const hasRecording = audioChunks.length > 0;

    // Enable/disable controls according to the current phase.
    elements.startBtn.disabled = isRecording || countdown > 0;
    elements.stopBtn.disabled = !isRecording;
    elements.playBtn.disabled = !hasRecording || isRecording || isPlaying;
    elements.saveBtn.disabled = !hasRecording || isRecording;

    // Toggle the "recording" badge if the page provides one.
    const indicator = document.getElementById('recordingIndicator');
    if (indicator) {
        indicator.style.display = isRecording ? 'block' : 'none';
    }

    // Pick the single most relevant status message
    // (priority: error > countdown > recording > playback > done > setup).
    const statusMessage = (() => {
        if (lastError) return `Error: ${lastError}`;
        if (countdown > 0) return `Get ready! Recording starts in ${countdown}...`;
        if (isRecording) {
            const elapsedTime = formatTime((Date.now() - recordingStartTime) / 1000);
            return `Recording in progress... (${elapsedTime})`;
        }
        if (isPlaying) return 'Playing recording...';
        if (hasRecording) return 'Recording complete. You can play it back or save it.';
        if (!elements.inputSource.value) return 'Please allow microphone access and select an input source';
        return 'Ready to record. Click "Record" to begin.';
    })();

    elements.status.textContent = statusMessage;
};
+
/**
 * Persists the chosen audio-input device ID so it can be restored on
 * the next visit. Storage failures (private mode, quota, disabled
 * cookies) are logged and otherwise ignored.
 * @param {string} deviceId - The device ID to remember
 */
const saveSelectedDevice = (deviceId) => {
    const key = STORAGE_KEYS.LAST_DEVICE_ID;
    try {
        localStorage.setItem(key, deviceId);
    } catch (error) {
        console.warn('Could not save device preference:', error);
    }
};
+
/**
 * Reads the previously saved audio-input device ID so the user does
 * not have to reselect the same device every time.
 * @returns {string|null} The last selected device ID, or null when
 *     nothing was saved or storage is unavailable
 */
const getLastSelectedDevice = () => {
    let deviceId = null;
    try {
        deviceId = localStorage.getItem(STORAGE_KEYS.LAST_DEVICE_ID);
    } catch (error) {
        console.warn('Could not retrieve device preference:', error);
    }
    return deviceId;
};
+
/**
 * Requests microphone permission, enumerates the audio input devices,
 * and fills the device <select>. Restores the last-used device when it
 * is still attached. Updates the status line on success or failure.
 *
 * Fixes over the original: the permission-probe stream is stopped so
 * the microphone is not left open, and <option> elements are built via
 * DOM APIs so device labels cannot inject markup.
 */
const populateInputSources = async () => {
    try {
        // First, request microphone permission so enumerateDevices()
        // returns human-readable labels — then immediately release the
        // probe stream so the mic indicator turns off.
        const probeStream = await navigator.mediaDevices.getUserMedia({ audio: true });
        probeStream.getTracks().forEach(track => track.stop());

        const devices = await navigator.mediaDevices.enumerateDevices();
        const audioInputs = devices.filter(device => device.kind === 'audioinput');

        if (audioInputs.length === 0) {
            elements.status.textContent = 'No audio input devices found';
            return;
        }

        // Build options with createElement/textContent instead of
        // innerHTML string interpolation (labels are untrusted text).
        elements.inputSource.innerHTML = '';
        for (const device of audioInputs) {
            const option = document.createElement('option');
            option.value = device.deviceId;
            option.textContent = device.label || `Microphone ${device.deviceId.slice(0, 5)}`;
            elements.inputSource.appendChild(option);
        }

        // Restore the previously selected device if it still exists.
        const lastDeviceId = getLastSelectedDevice();
        if (lastDeviceId && audioInputs.some(device => device.deviceId === lastDeviceId)) {
            elements.inputSource.value = lastDeviceId;
        }

        elements.status.textContent = 'Ready to record. Click "Start Recording" to begin.';
    } catch (error) {
        console.error('Error getting audio devices:', error);
        elements.status.textContent = 'Please allow microphone access to continue';
    }
};
+
/**
 * Creates a Web Audio context plus an analyser node configured for
 * waveform drawing (fftSize 2048).
 * @returns {Promise<{audioContext: AudioContext, analyser: AnalyserNode}>}
 */
const createAudioContext = async () => {
    // webkit prefix covers older Safari.
    const AudioContextCtor = window.AudioContext || window.webkitAudioContext;
    const audioContext = new AudioContextCtor();

    const analyser = audioContext.createAnalyser();
    analyser.fftSize = 2048;

    return { audioContext, analyser };
};
+
/**
 * Sets up the live waveform visualization for an in-progress recording.
 * Creates a canvas inside the #waveform container and runs a
 * requestAnimationFrame loop that draws the analyser's time-domain
 * data (plus a faded mirrored "reflection") until the recording flag
 * in the state manager turns false.
 * @param {AnalyserNode} analyser - The analyzer node fed by the mic stream
 */
const setupWaveformVisualization = (analyser) => {
    const bufferLength = analyser.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);
    const canvas = document.createElement('canvas');
    const canvasCtx = canvas.getContext('2d');
    // NOTE(review): unlike updateUI, no null-check here — assumes
    // #recordingIndicator exists in the page markup; confirm.
    const recordingIndicator = document.getElementById('recordingIndicator');
    
    // Show recording indicator
    recordingIndicator.style.display = 'block';
    
    // Replace any previous canvas with a fresh one.
    elements.waveform.innerHTML = '';
    elements.waveform.appendChild(canvas);
    
    const draw = () => {
        // Recording ended: hide the badge and stop the animation loop.
        if (!stateManager.getState().isRecording) {
            recordingIndicator.style.display = 'none';
            return;
        }
        
        requestAnimationFrame(draw);
        analyser.getByteTimeDomainData(dataArray);
        
        // Resize every frame so the canvas tracks its container
        // (setting width/height also clears the canvas).
        canvas.width = elements.waveform.clientWidth;
        canvas.height = elements.waveform.clientHeight;
        
        // Clear the canvas with a gradient background
        const gradient = canvasCtx.createLinearGradient(0, 0, 0, canvas.height);
        gradient.addColorStop(0, '#f8f8f8');
        gradient.addColorStop(1, '#f2f2f7');
        canvasCtx.fillStyle = gradient;
        canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
        
        // Draw center line
        canvasCtx.beginPath();
        canvasCtx.strokeStyle = '#e5e5ea';
        canvasCtx.lineWidth = 1;
        canvasCtx.moveTo(0, canvas.height / 2);
        canvasCtx.lineTo(canvas.width, canvas.height / 2);
        canvasCtx.stroke();
        
        // Draw waveform
        canvasCtx.beginPath();
        canvasCtx.lineWidth = 2;
        canvasCtx.strokeStyle = '#ff3b30';
        
        const sliceWidth = canvas.width / bufferLength;
        let x = 0;
        
        // Map each byte (0..255) to -1..1, then apply light exponential
        // smoothing: each sample mixes 20% of the previous smoothed
        // value with 80% of the raw sample.
        const smoothedData = [];
        const smoothingFactor = 0.2;
        
        for (let i = 0; i < bufferLength; i++) {
            const raw = dataArray[i] / 128.0 - 1.0;
            
            if (i > 0) {
                smoothedData.push(smoothedData[i-1] * smoothingFactor + raw * (1 - smoothingFactor));
            } else {
                smoothedData.push(raw);
            }
        }
        
        // Plot the smoothed samples; amplitude scaled to a quarter of
        // the canvas height around the center line.
        for (let i = 0; i < bufferLength; i++) {
            const v = smoothedData[i];
            const y = (v * canvas.height / 4) + canvas.height / 2;
            
            if (i === 0) {
                canvasCtx.moveTo(x, y);
            } else {
                canvasCtx.lineTo(x, y);
            }
            
            x += sliceWidth;
        }
        
        canvasCtx.stroke();
        
        // Draw a reflection of the waveform
        canvasCtx.beginPath();
        canvasCtx.strokeStyle = 'rgba(255, 59, 48, 0.3)';
        x = 0;
        
        for (let i = 0; i < bufferLength; i++) {
            const v = -smoothedData[i]; // Mirror the waveform
            const y = (v * canvas.height / 4) + canvas.height / 2;
            
            if (i === 0) {
                canvasCtx.moveTo(x, y);
            } else {
                canvasCtx.lineTo(x, y);
            }
            
            x += sliceWidth;
        }
        
        canvasCtx.stroke();
    };
    
    draw();
};
+
/**
 * Probes MediaRecorder support and returns the first usable MIME type.
 * Codec-qualified types are tried before bare container types for
 * better cross-browser fidelity.
 * @returns {string|null} The best supported MIME type, or null if none found
 */
const getSupportedMimeType = () => {
    // Codec-qualified candidates first, then plain container types.
    const candidates = [
        'audio/webm;codecs=opus',
        'audio/webm;codecs=pcm',
        'audio/mp4;codecs=mp4a.40.2',
        'audio/webm',
        'audio/mp4',
        'audio/ogg',
        'audio/wav',
        'audio/mpeg'
    ];

    const supported = candidates.find((type) => MediaRecorder.isTypeSupported(type));

    if (supported) {
        console.log(`Browser supports recording with MIME type: ${supported}`);
        return supported;
    }

    console.warn('No supported MIME types found for MediaRecorder');
    return null;
};
+
/**
 * Runs a 3-second countdown, acquires the selected microphone, and
 * starts a MediaRecorder session wired to the waveform visualizer.
 *
 * Fix over the original: on failure after the stream/audio context
 * were acquired, the microphone tracks are stopped and the context is
 * closed so resources don't leak when e.g. MediaRecorder construction
 * fails.
 */
const startRecording = async () => {
    let stream = null;
    let audioContext = null;
    try {
        stateManager.setState({
            countdown: 3,
            lastError: ''
        });

        // Countdown loop: displays 3, 2, 1 at one-second intervals.
        for (let i = 3; i > 0; i--) {
            await new Promise(resolve => setTimeout(resolve, 1000));
            stateManager.setState({ countdown: i - 1 });
        }

        // Request exactly the device the user picked, or any mic.
        const deviceId = elements.inputSource.value;
        const constraints = {
            audio: deviceId ? { deviceId: { exact: deviceId } } : true
        };

        stream = await navigator.mediaDevices.getUserMedia(constraints);

        // Route the stream through an analyser for the live waveform.
        const audioGraph = await createAudioContext();
        audioContext = audioGraph.audioContext;
        const analyser = audioGraph.analyser;
        const source = audioContext.createMediaStreamSource(stream);
        source.connect(analyser);

        // Get supported MIME type
        const mimeType = getSupportedMimeType();

        // Create MediaRecorder with options if mimeType is supported
        const options = mimeType ? { mimeType } : undefined;
        const mediaRecorder = new MediaRecorder(stream, options);
        const audioChunks = [];

        mediaRecorder.ondataavailable = (event) => {
            if (event.data.size > 0) {
                audioChunks.push(event.data);
                stateManager.setState({ audioChunks });
            }
        };

        mediaRecorder.onstop = () => {
            const finalChunks = [...audioChunks];
            stateManager.setState({
                audioChunks: finalChunks,
                isRecording: false,
                countdown: 0
            });
        };

        // Start recording with 1 second timeslices to ensure we get data chunks
        // FIXME: experiment with different chunk sizes
        mediaRecorder.start(1000);

        stateManager.setState({
            mediaRecorder,
            audioContext,
            analyser,
            audioChunks,
            isRecording: true,
            countdown: 0,
            recordingStartTime: Date.now(),
            mimeType: mediaRecorder.mimeType
        });

        setupWaveformVisualization(analyser);
    } catch (error) {
        console.error('Error starting recording:', error);
        // Release anything acquired before the failure so the mic
        // indicator turns off and the audio context doesn't leak.
        if (stream) {
            stream.getTracks().forEach(track => track.stop());
        }
        if (audioContext && audioContext.state !== 'closed') {
            audioContext.close().catch(() => {});
        }
        stateManager.setState({
            countdown: 0,
            lastError: 'Failed to start recording. Please check microphone permissions.'
        });
    }
};
+
/**
 * Stops an in-progress recording: halts the MediaRecorder (which fires
 * onstop and finalizes the chunks), releases the microphone tracks,
 * and closes the Web Audio context used for the waveform.
 *
 * Fix over the original: the AudioContext created for the analyser is
 * now closed, freeing audio resources (browsers cap live contexts).
 */
const stopRecording = () => {
    const { mediaRecorder, audioContext } = stateManager.getState();

    if (mediaRecorder && mediaRecorder.state !== 'inactive') {
        mediaRecorder.stop();
        mediaRecorder.stream.getTracks().forEach(track => track.stop());
    }

    // The analyser's context is no longer needed once recording ends;
    // startRecording always creates a fresh one.
    if (audioContext && audioContext.state !== 'closed') {
        audioContext.close().catch(() => {});
    }
};
+
/**
 * Plays back the recorded audio chunks through an <audio> element.
 * Falls back to Web Audio decoding (Safari) when the element reports
 * the format as unsupported.
 *
 * Fix over the original: an onerror handler is attached so a media
 * error that fires after play() resolves (e.g. a decode failure
 * mid-stream) doesn't leave the UI stuck in "playing" or leak the
 * object URL.
 */
const playRecording = () => {
    const { audioChunks, mimeType } = stateManager.getState();

    // Use the detected MIME type or fallback to a generic audio type
    const blobType = mimeType || 'audio/webm';
    const audioBlob = new Blob(audioChunks, { type: blobType });
    const audioUrl = URL.createObjectURL(audioBlob);

    const audio = new Audio(audioUrl);

    stateManager.setState({ isPlaying: true });

    // play() returns a Promise in modern browsers; handle rejection.
    const playPromise = audio.play();

    if (playPromise !== undefined) {
        playPromise
            .then(() => {
                console.log('Audio playback started successfully');
            })
            .catch(error => {
                console.error('Error playing audio:', error);
                URL.revokeObjectURL(audioUrl);
                stateManager.setState({
                    isPlaying: false,
                    lastError: `Playback error: ${error.message || 'Could not play recording in this browser'}`
                });

                // Try alternative playback method for Safari
                if (error.name === 'NotSupportedError') {
                    tryAlternativePlayback(audioBlob);
                }
            });
    }

    audio.onended = () => {
        URL.revokeObjectURL(audioUrl);
        stateManager.setState({ isPlaying: false });
    };

    // Media errors can fire after play() resolves; reset state so the
    // play button doesn't stay disabled forever.
    audio.onerror = () => {
        URL.revokeObjectURL(audioUrl);
        stateManager.setState({
            isPlaying: false,
            lastError: 'Playback error: media element failed to play the recording'
        });
    };
};
+
/**
 * Safari fallback: decodes the blob with the Web Audio API and plays
 * it through an AudioBufferSourceNode when the <audio> element rejects
 * the recorded format.
 *
 * Fix over the original: the per-playback AudioContext is closed when
 * playback ends or fails, instead of leaking (browsers limit the
 * number of simultaneously open contexts).
 * @param {Blob} audioBlob - The audio blob to play
 */
const tryAlternativePlayback = async (audioBlob) => {
    let audioContext = null;
    try {
        // Create a context just for this playback.
        audioContext = new (window.AudioContext || window.webkitAudioContext)();

        // Convert blob to array buffer and decode it to PCM.
        const arrayBuffer = await audioBlob.arrayBuffer();
        const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);

        // Play the decoded buffer through the default output.
        const source = audioContext.createBufferSource();
        source.buffer = audioBuffer;
        source.connect(audioContext.destination);
        source.start(0);

        stateManager.setState({ isPlaying: true });

        // Handle playback completion and release the context.
        source.onended = () => {
            stateManager.setState({ isPlaying: false });
            audioContext.close().catch(() => {});
        };

        console.log('Using alternative playback method for Safari');
    } catch (error) {
        console.error('Alternative playback failed:', error);
        if (audioContext && audioContext.state !== 'closed') {
            audioContext.close().catch(() => {});
        }
        stateManager.setState({
            isPlaying: false,
            lastError: 'Could not play recording in this browser. Try saving and playing externally.'
        });
    }
};
+
/**
 * Downloads the recording as an audio file. Browsers expose no API for
 * embedding tags inside audio containers, so descriptive metadata
 * (timestamp, device, duration) is encoded into the filename and
 * logged to the console instead.
 *
 * Fix over the original: the AudioContext created to inspect decodable
 * formats is now closed in a finally block instead of leaking.
 */
const saveRecording = async () => {
    const { audioChunks, mimeType, recordingStartTime } = stateManager.getState();

    // Use the detected MIME type or fallback to a generic audio type
    const blobType = mimeType || 'audio/webm';
    const audioBlob = new Blob(audioChunks, { type: blobType });

    // Calculate recording duration in whole seconds.
    const recordingDuration = recordingStartTime ?
        Math.floor((Date.now() - recordingStartTime) / 1000) : 0;

    // Resolve the human-readable label of the selected input device.
    const deviceId = elements.inputSource.value;
    const deviceLabel = Array.from(elements.inputSource.options)
        .find(option => option.value === deviceId)?.text || 'Unknown Device';

    // ISO timestamp with ':' and '.' replaced so it is filename-safe.
    const now = new Date();
    const formattedDate = now.toISOString().replace(/[:.]/g, '-');

    // Determine file extension based on MIME type
    let fileExtension = 'webm';
    if (mimeType) {
        if (mimeType.includes('mp4')) fileExtension = 'mp4';
        else if (mimeType.includes('mp3') || mimeType.includes('mpeg')) fileExtension = 'mp3';
        else if (mimeType.includes('ogg')) fileExtension = 'ogg';
        else if (mimeType.includes('wav')) fileExtension = 'wav';
    }

    // For formats the Web Audio API can decode, inspect the recording so
    // sample rate / channel count can at least be logged.
    if (fileExtension === 'wav' || fileExtension === 'mp3') {
        let audioContext = null;
        try {
            audioContext = new (window.AudioContext || window.webkitAudioContext)();

            const arrayBuffer = await audioBlob.arrayBuffer();
            const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);

            const metadata = {
                title: `Voice Memo ${formattedDate}`,
                artist: 'Audio Gremlins',
                album: 'Voice Recordings',
                date: now.toISOString(),
                device: deviceLabel,
                duration: recordingDuration,
                sampleRate: audioBuffer.sampleRate,
                numberOfChannels: audioBuffer.numberOfChannels
            };

            console.log('Audio metadata:', metadata);

            // Note: Web browsers don't provide direct API for writing metadata to audio files
            // We're logging the metadata and including what we can in the filename
            // For a complete solution, server-side processing or a dedicated library would be needed
        } catch (error) {
            console.warn('Could not process audio metadata:', error);
        } finally {
            // The context was only needed for decoding; release it.
            if (audioContext && audioContext.state !== 'closed') {
                audioContext.close().catch(() => {});
            }
        }
    }

    // Include some metadata in the filename since browser APIs don't allow direct metadata embedding
    const filename = `voice-memo_${formattedDate}_${deviceLabel.replace(/[^a-z0-9]/gi, '-')}_${recordingDuration}s.${fileExtension}`;

    // Trigger the download via a temporary anchor element.
    const audioUrl = URL.createObjectURL(audioBlob);
    const a = document.createElement('a');
    a.href = audioUrl;
    a.download = filename;
    document.body.appendChild(a);
    a.click();
    document.body.removeChild(a);
    URL.revokeObjectURL(audioUrl);

    // Log metadata for debugging
    console.log('Saved recording with filename:', filename);
    console.log('Recording details:', {
        timestamp: now.toISOString(),
        duration: recordingDuration + 's',
        device: deviceLabel,
        mimeType: blobType,
        fileSize: Math.round(audioBlob.size / 1024) + 'KB'
    });
};
+
// --- Wiring ---------------------------------------------------------------

// Hook the transport buttons to their handlers and persist the device
// choice whenever the user changes the input source.
elements.startBtn.addEventListener('click', startRecording);
elements.stopBtn.addEventListener('click', stopRecording);
elements.playBtn.addEventListener('click', playRecording);
elements.saveBtn.addEventListener('click', saveRecording);
elements.inputSource.addEventListener('change', (event) => {
    saveSelectedDevice(event.target.value);
});

// Kick off device discovery on load.
populateInputSources();
\ No newline at end of file