feat(matrix): add voice note recording and playback

- Add voice recording with MediaRecorder API
- Show recording indicator with duration
- Replace send button with microphone when input is empty
- Add audio player UI for voice messages with play/pause and progress

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Till-JS 2026-01-29 15:12:44 +01:00
parent 69d405ca84
commit aca66b2014
2 changed files with 259 additions and 12 deletions

View file

@ -11,6 +11,7 @@
DownloadSimple,
File as FileIcon,
Play,
Pause,
Image as ImageIcon,
Lock,
Warning,
@ -43,6 +44,50 @@
// Image loading lifecycle flags for the media preview.
let imageLoading = $state(true);
let imageError = $state(false);
// Audio player state
// Bound to the hidden <audio> element once the voice-note branch renders.
let audioElement: HTMLAudioElement | null = $state(null);
// Mirrors the element's play/pause state via its onplay/onpause events.
let isPlaying = $state(false);
// Current playback position in seconds (driven by timeupdate events).
let audioProgress = $state(0);
// Total clip length in seconds (set from loadedmetadata; 0 until known).
let audioDuration = $state(0);
// Toggle playback of the bound <audio> element; isPlaying itself is
// updated by the element's onplay/onpause handlers, not here.
function toggleAudio() {
	if (!audioElement) return;
	audioElement[isPlaying ? 'pause' : 'play']();
}
// Keep the progress state in sync with the element's playback position.
function handleAudioTimeUpdate() {
	if (audioElement) {
		audioProgress = audioElement.currentTime;
	}
}
// Capture the clip length once metadata is available.
// Fix: MediaRecorder-produced WebM often reports duration === Infinity at
// loadedmetadata (no duration header is written while streaming). Storing
// Infinity breaks the time label and seeking, so only accept finite values;
// audioDuration then stays 0 and the UI falls back to message.media?.duration.
function handleAudioLoadedMetadata() {
	if (!audioElement) return;
	if (Number.isFinite(audioElement.duration)) {
		audioDuration = audioElement.duration;
	}
}
// When playback finishes, reset the UI back to the start.
function handleAudioEnded() {
	audioProgress = 0;
	isPlaying = false;
}
/**
 * Seek to the position corresponding to a click on the progress bar.
 * No-op until both the element and a known duration exist.
 */
function seekAudio(e: MouseEvent) {
	if (!audioElement || !audioDuration) return;
	const rect = (e.currentTarget as HTMLElement).getBoundingClientRect();
	// Clamp to [0, 1]: clicks on the very edge (or sub-pixel overshoot on the
	// border) can yield a ratio slightly outside the track.
	const ratio = (e.clientX - rect.left) / rect.width;
	const percent = Math.min(1, Math.max(0, ratio));
	audioElement.currentTime = percent * audioDuration;
}
/**
 * Format a duration in seconds as "m:ss" (e.g. 65 -> "1:05").
 * Fix: non-finite input previously rendered "Infinity:NaN" (WebM voice notes
 * can report Infinity as duration) and negatives rendered "-1:-5"; both now
 * fall back to "0:00".
 */
function formatAudioTime(seconds: number): string {
	if (!Number.isFinite(seconds) || seconds <= 0) return '0:00';
	const mins = Math.floor(seconds / 60);
	const secs = Math.floor(seconds % 60);
	return `${mins}:${secs.toString().padStart(2, '0')}`;
}
let formattedTime = $derived(format(message.timestamp, 'HH:mm'));
let formattedDate = $derived(() => {
@ -201,8 +246,72 @@
</span>
{/if}
</div>
{:else if message.type === 'm.file' || message.type === 'm.audio'}
<!-- File/Audio message -->
{:else if message.type === 'm.audio'}
<!-- Audio message (voice note) -->
<div
class="flex items-center gap-3 rounded-lg {message.isOwn
? 'bg-white/20'
: 'bg-black/5 dark:bg-white/5'} p-3 min-w-[220px]"
>
<!-- Hidden audio element -->
{#if mediaUrl}
<audio
bind:this={audioElement}
src={mediaUrl}
onplay={() => (isPlaying = true)}
onpause={() => (isPlaying = false)}
ontimeupdate={handleAudioTimeUpdate}
onloadedmetadata={handleAudioLoadedMetadata}
onended={handleAudioEnded}
></audio>
{/if}
<!-- Play/Pause button -->
<button
class="flex-shrink-0 rounded-full {message.isOwn
? 'bg-white/20 hover:bg-white/30'
: 'bg-primary/10 hover:bg-primary/20'} p-2.5 transition-colors"
onclick={toggleAudio}
>
{#if isPlaying}
<Pause
class="h-5 w-5 {message.isOwn ? 'text-white' : 'text-primary'}"
weight="fill"
/>
{:else}
<Play class="h-5 w-5 {message.isOwn ? 'text-white' : 'text-primary'}" weight="fill" />
{/if}
</button>
<!-- Waveform/Progress -->
<div class="flex-1 flex flex-col gap-1">
<!-- Progress bar -->
<button
class="relative h-1.5 w-full rounded-full {message.isOwn
? 'bg-white/20'
: 'bg-black/10 dark:bg-white/10'} overflow-hidden cursor-pointer"
onclick={seekAudio}
>
<div
class="absolute inset-y-0 left-0 {message.isOwn
? 'bg-white'
: 'bg-primary'} rounded-full transition-all"
style="width: {audioDuration > 0 ? (audioProgress / audioDuration) * 100 : 0}%"
></div>
</button>
<!-- Duration -->
<div
class="flex justify-between text-xs {message.isOwn
? 'text-white/70'
: 'text-muted-foreground'}"
>
<span>{formatAudioTime(audioProgress)}</span>
<span>{formatAudioTime(audioDuration || message.media?.duration || 0)}</span>
</div>
</div>
</div>
{:else if message.type === 'm.file'}
<!-- File message -->
<a
href={mediaUrl}
target="_blank"

View file

@ -8,6 +8,8 @@
Image,
File,
CircleNotch,
Microphone,
Stop,
} from '@manacore/shared-icons';
interface Props {
@ -27,6 +29,13 @@
// Upload lifecycle for attachments (also reused by voice notes).
let uploading = $state(false);
let uploadProgress = $state(0);
// Voice recording state
// True while the MediaRecorder is capturing; drives the indicator UI.
let isRecording = $state(false);
// Elapsed seconds, incremented by recordingInterval once per second.
let recordingDuration = $state(0);
// Plain (non-reactive) handles: only read imperatively, never rendered.
let mediaRecorder: MediaRecorder | null = null;
let audioChunks: Blob[] = [];
let recordingInterval: ReturnType<typeof setInterval> | null = null;
// Set message content when editing
$effect(() => {
if (editMessage) {
@ -135,6 +144,95 @@
console.error('Failed to upload file');
}
}
// Voice recording functions
/**
 * Request the microphone and start capturing a voice note.
 * On stop, the captured chunks are assembled and sent via sendVoiceMessage.
 * Errors (permission denied, unsupported mimeType) are logged, not surfaced.
 */
async function startRecording() {
	// Fix: guard against double-starts (e.g. rapid taps) which would open a
	// second getUserMedia stream and orphan the first recorder.
	if (isRecording) return;
	try {
		const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
		// NOTE(review): 'audio/webm' is not supported by Safari's MediaRecorder —
		// consider a MediaRecorder.isTypeSupported() fallback. Left as-is here
		// because the blob type is also hard-coded downstream.
		mediaRecorder = new MediaRecorder(stream, { mimeType: 'audio/webm' });
		audioChunks = [];
		mediaRecorder.ondataavailable = (event) => {
			if (event.data.size > 0) {
				audioChunks.push(event.data);
			}
		};
		mediaRecorder.onstop = async () => {
			// Stop all tracks so the mic (and browser indicator) is released.
			stream.getTracks().forEach((track) => track.stop());
			// Create blob and send
			const audioBlob = new Blob(audioChunks, { type: 'audio/webm' });
			await sendVoiceMessage(audioBlob);
		};
		mediaRecorder.start(100); // Collect data every 100ms
		isRecording = true;
		recordingDuration = 0;
		// Start duration counter
		recordingInterval = setInterval(() => {
			recordingDuration++;
		}, 1000);
	} catch (err) {
		console.error('Failed to start recording:', err);
	}
}
// Finish the recording: MediaRecorder.stop() fires the onstop handler set in
// startRecording, which releases the mic and uploads the captured audio.
function stopRecording() {
	if (!mediaRecorder || !isRecording) return;
	mediaRecorder.stop();
	isRecording = false;
	if (recordingInterval !== null) {
		clearInterval(recordingInterval);
		recordingInterval = null;
	}
}
function cancelRecording() {
if (mediaRecorder && isRecording) {
// Stop without sending
mediaRecorder.ondataavailable = null;
mediaRecorder.onstop = () => {
// Just clean up, don't send
};
mediaRecorder.stop();
isRecording = false;
if (recordingInterval) {
clearInterval(recordingInterval);
recordingInterval = null;
}
}
}
/**
 * Upload a recorded voice note via the regular file-send path.
 * @param blob - the recorded audio (audio/webm) assembled from chunks.
 */
async function sendVoiceMessage(blob: Blob) {
	uploading = true;
	uploadProgress = 0;
	try {
		// Create a File from the Blob so sendFile gets a name and mime type.
		const file = new File([blob], `voice-${Date.now()}.webm`, { type: 'audio/webm' });
		const success = await matrixStore.sendFile(file, (progress) => {
			uploadProgress = progress;
		});
		if (!success) {
			console.error('Failed to send voice message');
		}
	} finally {
		// Fix: reset the upload state even if sendFile throws; previously an
		// exception left `uploading` stuck at true and the composer disabled.
		uploading = false;
		uploadProgress = 0;
	}
}
// Render an elapsed-seconds counter as "m:ss", e.g. 75 -> "1:15".
function formatDuration(seconds: number): string {
	const minutes = Math.floor(seconds / 60);
	const padded = String(seconds % 60).padStart(2, '0');
	return `${minutes}:${padded}`;
}
</script>
<div class="p-4">
@ -184,6 +282,26 @@
</div>
{/if}
<!-- Recording Indicator -->
{#if isRecording}
<div class="mb-3 flex items-center gap-3 rounded-xl glass-card px-4 py-3">
<div class="h-3 w-3 rounded-full bg-red-500 animate-pulse"></div>
<div class="flex-1">
<p class="text-sm font-medium">Aufnahme läuft...</p>
</div>
<span class="text-sm font-mono text-muted-foreground"
>{formatDuration(recordingDuration)}</span
>
<button
class="p-1.5 rounded-lg hover:bg-black/5 dark:hover:bg-white/10 transition-colors"
onclick={cancelRecording}
title="Abbrechen"
>
<X class="h-4 w-4" />
</button>
</div>
{/if}
<!-- Input Area - Glassmorphic Pill -->
<div class="flex flex-col gap-2 rounded-2xl glass p-2 shadow-lg">
<!-- Input Row -->
@ -253,16 +371,36 @@
></textarea>
</div>
<!-- Send button -->
<button
class="flex-shrink-0 p-3 rounded-xl glass-button shadow-md text-primary
disabled:opacity-50 disabled:cursor-not-allowed"
onclick={handleSend}
disabled={!message.trim() || uploading}
title={editMessage ? 'Speichern' : 'Senden'}
>
<PaperPlaneTilt class="h-5 w-5" weight="bold" />
</button>
<!-- Voice/Send button -->
{#if isRecording}
<button
class="flex-shrink-0 p-3 rounded-xl glass-button shadow-md text-red-500"
onclick={stopRecording}
title="Aufnahme beenden und senden"
>
<Stop class="h-5 w-5" weight="fill" />
</button>
{:else if message.trim()}
<button
class="flex-shrink-0 p-3 rounded-xl glass-button shadow-md text-primary
disabled:opacity-50 disabled:cursor-not-allowed"
onclick={handleSend}
disabled={uploading}
title={editMessage ? 'Speichern' : 'Senden'}
>
<PaperPlaneTilt class="h-5 w-5" weight="bold" />
</button>
{:else}
<button
class="flex-shrink-0 p-3 rounded-xl glass-button shadow-md text-primary
disabled:opacity-50 disabled:cursor-not-allowed"
onclick={startRecording}
disabled={uploading}
title="Sprachnotiz aufnehmen"
>
<Microphone class="h-5 w-5" weight="bold" />
</button>
{/if}
</div>
</div>