about summary refs log tree commit diff stats
path: root/html
diff options
context:
space:
mode:
authorelioat <{ID}+{username}@users.noreply.github.com>2025-03-10 17:23:01 -0400
committerelioat <{ID}+{username}@users.noreply.github.com>2025-03-10 17:23:01 -0400
commit64b83168fd3b361e40f114a79aac494a4afb4598 (patch)
treed52c633adc4236516cdbf711303211454ef0127f /html
parentc4ac3c0a0da715c6b2925a07d898471fccf08036 (diff)
downloadtour-64b83168fd3b361e40f114a79aac494a4afb4598.tar.gz
*
Diffstat (limited to 'html')
-rw-r--r--html/voice-memos/app.js380
-rw-r--r--html/voice-memos/index.html75
2 files changed, 455 insertions, 0 deletions
diff --git a/html/voice-memos/app.js b/html/voice-memos/app.js
new file mode 100644
index 0000000..d9a300e
--- /dev/null
+++ b/html/voice-memos/app.js
@@ -0,0 +1,380 @@
/**
 * @typedef {Object} AudioState
 * @property {MediaRecorder|null} mediaRecorder - Active MediaRecorder, if any
 * @property {AudioContext|null} audioContext - Web Audio API context for the session
 * @property {AnalyserNode|null} analyser - Analyser feeding the waveform display
 * @property {Array<Blob>} audioChunks - Recorded audio data chunks
 * @property {boolean} isRecording - True while a recording is in progress
 * @property {number} countdown - Seconds remaining before recording starts
 * @property {boolean} isPlaying - True while playback is in progress
 * @property {number} recordingStartTime - Epoch ms at which recording began
 * @property {string} lastError - Most recent error message ('' when none)
 */

/**
 * Baseline application state. All state objects are derived from this via
 * shallow copies inside the state manager.
 * @type {AudioState}
 */
const initialState = {
    mediaRecorder: null,
    audioContext: null,
    analyser: null,
    audioChunks: [],
    isRecording: false,
    isPlaying: false,
    countdown: 0,
    recordingStartTime: 0,
    lastError: ''
};
+
/**
 * localStorage key names used by this app.
 */
const STORAGE_KEYS = {
    // Remembers which input device the user last picked.
    LAST_DEVICE_ID: 'voice_memo_last_device_id'
};
+
/**
 * Cached DOM element references, looked up once at load time.
 * Each element's id matches its property name here.
 */
const elements = Object.fromEntries(
    ['inputSource', 'startBtn', 'stopBtn', 'playBtn', 'saveBtn', 'waveform', 'status']
        .map((id) => [id, document.getElementById(id)])
);
+
/**
 * Creates a closure-based state container.
 *
 * `getState` hands out defensive shallow copies; `setState` merges a patch
 * into the current state and refreshes the UI.
 * @returns {{getState: Function, setState: Function}} State accessors
 */
const createStateManager = () => {
    let current = { ...initialState };

    const getState = () => ({ ...current });

    const setState = (patch) => {
        current = { ...current, ...patch };
        updateUI();
    };

    return { getState, setState };
};

// Single shared state manager instance for the whole app.
const stateManager = createStateManager();
+
/**
 * Formats a duration in seconds as a zero-padded MM:SS string.
 * @param {number} seconds - Elapsed time in seconds (may be fractional)
 * @returns {string} Duration formatted as "MM:SS"
 */
const formatTime = (seconds) => {
    const mm = String(Math.floor(seconds / 60)).padStart(2, '0');
    const ss = String(Math.floor(seconds % 60)).padStart(2, '0');
    return `${mm}:${ss}`;
};
+
/**
 * Synchronizes button enabled/disabled state and the status line with the
 * current application state.
 */
const updateUI = () => {
    const {
        isRecording,
        audioChunks,
        countdown,
        isPlaying,
        recordingStartTime,
        lastError
    } = stateManager.getState();

    const hasRecording = audioChunks.length > 0;
    const isCountingDown = countdown > 0;

    // Buttons reflect the current phase of the record/play cycle.
    elements.startBtn.disabled = isRecording || isCountingDown;
    elements.stopBtn.disabled = !isRecording;
    elements.playBtn.disabled = !hasRecording || isRecording || isPlaying;
    elements.saveBtn.disabled = !hasRecording || isRecording;

    // Pick the single most relevant status message, highest priority first.
    const statusMessage = (() => {
        if (lastError) return `Error: ${lastError}`;
        if (isCountingDown) return `Get ready! Recording starts in ${countdown}...`;
        if (isRecording) {
            const elapsedTime = formatTime((Date.now() - recordingStartTime) / 1000);
            return `Recording in progress... (${elapsedTime})`;
        }
        if (isPlaying) return 'Playing recording...';
        if (hasRecording) return 'Recording complete. You can play it back or save it.';
        if (!elements.inputSource.value) return 'Please allow microphone access and select an input source';
        return 'Ready to record. Click "Start Recording" to begin.';
    })();

    elements.status.textContent = statusMessage;
};
+
/**
 * Persists the chosen input device ID to localStorage.
 * Failures (private browsing, quota, missing storage) are logged, not thrown.
 * @param {string} deviceId - The device ID to remember
 */
const saveSelectedDevice = (deviceId) => {
    try {
        localStorage.setItem(STORAGE_KEYS.LAST_DEVICE_ID, deviceId);
    } catch (err) {
        // Best-effort only: losing the preference is not fatal.
        console.warn('Could not save device preference:', err);
    }
};
+
/**
 * Reads the previously chosen input device ID from localStorage.
 * @returns {string|null} The stored device ID, or null if unavailable
 */
const getLastSelectedDevice = () => {
    try {
        return localStorage.getItem(STORAGE_KEYS.LAST_DEVICE_ID);
    } catch (err) {
        // Storage may be blocked (private browsing etc.); degrade gracefully.
        console.warn('Could not retrieve device preference:', err);
        return null;
    }
};
+
/**
 * Populates the input source dropdown with available audio devices.
 *
 * Requests microphone permission first so enumerateDevices() returns
 * human-readable labels, then restores the last-used device if it is
 * still connected.
 */
const populateInputSources = async () => {
    try {
        // Request mic permission so device labels become visible.
        const permissionStream = await navigator.mediaDevices.getUserMedia({ audio: true });
        // Fix: release the mic immediately — the original never stopped this
        // stream, leaving the microphone (and its indicator light) active.
        permissionStream.getTracks().forEach((track) => track.stop());

        const devices = await navigator.mediaDevices.enumerateDevices();
        const audioInputs = devices.filter((device) => device.kind === 'audioinput');

        if (audioInputs.length === 0) {
            elements.status.textContent = 'No audio input devices found';
            return;
        }

        // Fix: build <option> elements via the DOM instead of innerHTML so a
        // device label containing markup cannot inject HTML.
        elements.inputSource.innerHTML = '';
        for (const device of audioInputs) {
            const option = document.createElement('option');
            option.value = device.deviceId;
            option.textContent = device.label || `Microphone ${device.deviceId.slice(0, 5)}`;
            elements.inputSource.appendChild(option);
        }

        // Restore the last selected device if it still exists.
        const lastDeviceId = getLastSelectedDevice();
        if (lastDeviceId && audioInputs.some((device) => device.deviceId === lastDeviceId)) {
            elements.inputSource.value = lastDeviceId;
        }

        elements.status.textContent = 'Ready to record. Click "Start Recording" to begin.';
    } catch (error) {
        console.error('Error getting audio devices:', error);
        elements.status.textContent = 'Please allow microphone access to continue';
    }
};
+
/**
 * Creates the Web Audio context and an analyser node for waveform display.
 * @returns {Promise<{audioContext: AudioContext, analyser: AnalyserNode}>}
 *          The new context and its configured analyser
 */
const createAudioContext = async () => {
    // webkitAudioContext covers older Safari.
    const AudioContextCtor = window.AudioContext || window.webkitAudioContext;
    const audioContext = new AudioContextCtor();

    const analyser = audioContext.createAnalyser();
    analyser.fftSize = 2048;

    return { audioContext, analyser };
};
+
/**
 * Renders a live time-domain waveform of the audio being recorded into the
 * #waveform container. The animation loop runs until recording stops.
 * @param {AnalyserNode} analyser - Analyser node attached to the mic stream
 */
const setupWaveformVisualization = (analyser) => {
    const bufferLength = analyser.frequencyBinCount;
    const samples = new Uint8Array(bufferLength);

    const canvas = document.createElement('canvas');
    const ctx = canvas.getContext('2d');
    elements.waveform.innerHTML = '';
    elements.waveform.appendChild(canvas);

    const draw = () => {
        // Stop animating once recording has ended.
        if (!stateManager.getState().isRecording) return;
        requestAnimationFrame(draw);

        analyser.getByteTimeDomainData(samples);

        // Size every frame so the canvas tracks its container.
        canvas.width = elements.waveform.clientWidth;
        canvas.height = elements.waveform.clientHeight;

        // Clear to the background colour, then trace the waveform.
        ctx.fillStyle = '#f8f8f8';
        ctx.fillRect(0, 0, canvas.width, canvas.height);

        ctx.lineWidth = 2;
        ctx.strokeStyle = '#007AFF';
        ctx.beginPath();

        const sliceWidth = canvas.width / bufferLength;
        let x = 0;
        for (let i = 0; i < bufferLength; i++) {
            // Samples are unsigned bytes centred on 128; map to canvas y.
            const y = (samples[i] / 128.0) * (canvas.height / 2);
            if (i === 0) {
                ctx.moveTo(x, y);
            } else {
                ctx.lineTo(x, y);
            }
            x += sliceWidth;
        }

        ctx.lineTo(canvas.width, canvas.height / 2);
        ctx.stroke();
    };

    draw();
};
+
/**
 * Starts a recording after a 3-second countdown.
 *
 * Acquires the selected microphone, wires it into an analyser for the live
 * waveform, and begins recording with 1s timeslices so data chunks arrive
 * continuously. On failure, releases any partially-acquired resources and
 * surfaces an error message via state.
 */
const startRecording = async () => {
    let stream = null;
    let audioContext = null;
    try {
        stateManager.setState({
            countdown: 3,
            lastError: ''
        });

        // Countdown loop: tick once per second, updating the UI each time.
        for (let i = 3; i > 0; i--) {
            await new Promise(resolve => setTimeout(resolve, 1000));
            stateManager.setState({ countdown: i - 1 });
        }

        const deviceId = elements.inputSource.value;
        const constraints = {
            audio: deviceId ? { deviceId: { exact: deviceId } } : true
        };

        stream = await navigator.mediaDevices.getUserMedia(constraints);

        let analyser;
        ({ audioContext, analyser } = await createAudioContext());
        const source = audioContext.createMediaStreamSource(stream);
        source.connect(analyser);

        const mediaRecorder = new MediaRecorder(stream);
        const audioChunks = [];

        mediaRecorder.ondataavailable = (event) => {
            if (event.data.size > 0) {
                audioChunks.push(event.data);
                stateManager.setState({ audioChunks });
            }
        };

        mediaRecorder.onstop = () => {
            // Snapshot the chunks so later mutation can't affect saved state.
            const finalChunks = [...audioChunks];
            stateManager.setState({
                audioChunks: finalChunks,
                isRecording: false,
                countdown: 0
            });
        };

        // 1s timeslices ensure ondataavailable fires during long recordings.
        mediaRecorder.start(1000);

        stateManager.setState({
            mediaRecorder,
            audioContext,
            analyser,
            audioChunks,
            isRecording: true,
            countdown: 0,
            recordingStartTime: Date.now()
        });

        setupWaveformVisualization(analyser);
    } catch (error) {
        console.error('Error starting recording:', error);
        // Fix: release anything acquired before the failure so the mic
        // indicator doesn't stay lit and the AudioContext doesn't leak.
        if (stream) {
            stream.getTracks().forEach(track => track.stop());
        }
        if (audioContext) {
            audioContext.close().catch(() => {});
        }
        stateManager.setState({
            countdown: 0,
            lastError: 'Failed to start recording. Please check microphone permissions.'
        });
    }
};
+
/**
 * Stops the recording, releases the microphone, and closes the Web Audio
 * context so playback/saving can proceed.
 */
const stopRecording = () => {
    const { mediaRecorder, audioContext } = stateManager.getState();

    if (mediaRecorder && mediaRecorder.state !== 'inactive') {
        mediaRecorder.stop();
        // Release the microphone so the capture indicator turns off.
        mediaRecorder.stream.getTracks().forEach(track => track.stop());
    }

    // Fix: close the AudioContext created for this recording; without this,
    // every recording leaked a live context (browsers cap the number allowed).
    if (audioContext && audioContext.state !== 'closed') {
        audioContext.close().catch(() => {});
    }
};
+
/**
 * Plays back the recorded audio.
 *
 * Builds a temporary object URL over the recorded WebM chunks, plays it,
 * and revokes the URL when playback ends or fails.
 */
const playRecording = () => {
    const { audioChunks } = stateManager.getState();
    const audioBlob = new Blob(audioChunks, { type: 'audio/webm' });
    const audioUrl = URL.createObjectURL(audioBlob);

    const audio = new Audio(audioUrl);

    // Shared failure path: release the URL and surface the error.
    const handleFailure = () => {
        URL.revokeObjectURL(audioUrl);
        stateManager.setState({
            isPlaying: false,
            lastError: 'Error playing back the recording'
        });
    };

    audio.onended = () => {
        URL.revokeObjectURL(audioUrl);
        stateManager.setState({ isPlaying: false });
    };

    audio.onerror = handleFailure;

    stateManager.setState({ isPlaying: true });

    // Fix: play() returns a promise that can reject (e.g. autoplay policy);
    // the original left that rejection unhandled.
    audio.play().catch(handleFailure);
};
+
/**
 * Saves the recording as a WebM audio file via a temporary download link.
 * (The recording is WebM — the original comment incorrectly said MP3.)
 */
const saveRecording = () => {
    const { audioChunks } = stateManager.getState();
    const audioBlob = new Blob(audioChunks, { type: 'audio/webm' });
    const audioUrl = URL.createObjectURL(audioBlob);

    // Fix: ISO timestamps contain ':' which is illegal in Windows filenames;
    // replace ':' and '.' with '-'.
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');

    const a = document.createElement('a');
    a.href = audioUrl;
    a.download = `voice-memo-${timestamp}.webm`;
    document.body.appendChild(a);
    a.click();
    document.body.removeChild(a);

    // Fix: revoke on the next tick — revoking synchronously after click()
    // can cancel the in-flight download in some browsers.
    setTimeout(() => URL.revokeObjectURL(audioUrl), 0);
};
+
// Wire up the UI: one click handler per transport button.
const clickHandlers = [
    [elements.startBtn, startRecording],
    [elements.stopBtn, stopRecording],
    [elements.playBtn, playRecording],
    [elements.saveBtn, saveRecording]
];
for (const [button, handler] of clickHandlers) {
    button.addEventListener('click', handler);
}

// Remember the user's device choice whenever it changes.
elements.inputSource.addEventListener('change', (event) => saveSelectedDevice(event.target.value));

// Kick off device discovery (handles its own errors internally).
populateInputSources();
\ No newline at end of file
diff --git a/html/voice-memos/index.html b/html/voice-memos/index.html
new file mode 100644
index 0000000..e328ee5
--- /dev/null
+++ b/html/voice-memos/index.html
@@ -0,0 +1,75 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Voice Memos</title>
    <style>
        /* Centered single-column layout on a beige page background. */
        body {
            font-family: sans-serif;
            max-width: 800px;
            margin: 0 auto;
            padding: 20px;
            background-color:beige;
        }
        /* White card holding all controls. */
        .container {
            background-color: white;
            padding: 20px;
            border-radius: 8px;
            box-shadow: 0 2px 4px rgba(0,0,0,0.1);
        }
        /* Horizontal row of transport buttons. */
        .controls {
            display: flex;
            gap: 10px;
            margin: 20px 0;
        }
        button {
            padding: 8px 16px;
            border: none;
            border-radius: 4px;
            background-color: #007AFF;
            color: white;
            cursor: pointer;
            transition: background-color 0.2s;
        }
        button:hover {
            background-color: #0056b3;
        }
        /* app.js toggles the disabled attribute to drive this state. */
        button:disabled {
            background-color: #ccc;
            cursor: not-allowed;
        }
        select {
            padding: 8px;
            border-radius: 4px;
            border: 1px solid #ddd;
            margin-bottom: 20px;
        }
        /* Container into which app.js injects the waveform <canvas>. */
        #waveform {
            width: 100%;
            height: 100px;
            background-color: #f8f8f8;
            border-radius: 4px;
            margin: 20px 0;
        }
        .status {
            margin: 10px 0;
            font-weight: 500;
        }
    </style>
</head>
<body>
    <div class="container">
        <!-- Populated with available audio input devices by app.js -->
        <select id="inputSource"></select>
        <div class="controls">
            <button id="startBtn">Start Recording</button>
            <button id="stopBtn" disabled>Stop</button>
            <button id="playBtn" disabled>Play</button>
            <button id="saveBtn" disabled>Save</button>
        </div>
        <!-- Live waveform canvas is inserted here while recording -->
        <div id="waveform"></div>
        <!-- Status line kept in sync by updateUI() in app.js -->
        <div id="status" class="status">Select an input source to begin</div>
    </div>
    <script src="app.js"></script>
</body>
</html>
\ No newline at end of file