fix(audio-message): fix iOS recording and migrate from expo-av to expo-audio

This commit is contained in:
devthejo 2026-01-15 18:55:03 +01:00
parent bc5129f7bf
commit 4d71c229d6
4 changed files with 389 additions and 175 deletions

View file

@ -120,7 +120,6 @@
"eventemitter3": "^5.0.1",
"expo": "~53.0.23",
"expo-audio": "~0.4.9",
"expo-av": "~15.1.7",
"expo-build-properties": "~0.14.8",
"expo-constants": "~17.1.7",
"expo-contacts": "~14.2.5",

View file

@ -1,15 +1,9 @@
import React, { useState, useCallback, useEffect, useRef } from "react";
import { View, Text, TouchableOpacity, Platform, Alert } from "react-native";
import * as Sentry from "@sentry/react-native";
import { MaterialCommunityIcons } from "@expo/vector-icons";
import {
useAudioRecorder,
createAudioPlayer,
setAudioModeAsync,
requestRecordingPermissionsAsync,
RecordingPresets,
IOSOutputFormat,
AudioQuality,
} from "expo-audio";
import { createAudioPlayer, setAudioModeAsync } from "expo-audio";
import * as Device from "expo-device";
import {
check,
@ -27,6 +21,7 @@ import network from "~/network";
import TextArea from "./TextArea";
import useInsertMessage from "~/hooks/useInsertMessage";
import useVoiceRecorder from "~/hooks/useVoiceRecorder";
import { announceForA11y } from "~/lib/a11y";
const MODE = {
@ -43,63 +38,8 @@ const rightButtonIconNames = {
const RECORDING_TIMEOUT = 59;
// Speech-optimized profile (smaller files, good voice quality)
const recordingOptionsSpeech = {
...RecordingPresets.HIGH_QUALITY,
// Voice-friendly sample rate & bitrate
sampleRate: 22050,
numberOfChannels: 1,
bitRate: 24000,
ios: {
...RecordingPresets.HIGH_QUALITY.ios,
outputFormat: IOSOutputFormat.MPEG4AAC,
// Medium is enough for voice; final quality driven by bitRate above
audioQuality: AudioQuality.MEDIUM,
},
android: {
...RecordingPresets.HIGH_QUALITY.android,
outputFormat: "mpeg4",
audioEncoder: "aac",
},
};
// Fallback profile (broader device compatibility if speech profile fails)
const recordingOptionsFallback = {
...RecordingPresets.HIGH_QUALITY,
sampleRate: 44100,
numberOfChannels: 1,
bitRate: 64000,
ios: {
...RecordingPresets.HIGH_QUALITY.ios,
outputFormat: IOSOutputFormat.MPEG4AAC,
audioQuality: AudioQuality.MAX,
},
android: {
...RecordingPresets.HIGH_QUALITY.android,
outputFormat: "mpeg4",
audioEncoder: "aac",
},
};
const activeOpacity = 0.7;
const withTimeout = (promise, ms = 10000) =>
new Promise((resolve, reject) => {
const id = setTimeout(
() => reject(new Error("Permission request timeout")),
ms,
);
promise
.then((v) => {
clearTimeout(id);
resolve(v);
})
.catch((e) => {
clearTimeout(id);
reject(e);
});
});
const ensureMicPermission = async () => {
if (Platform.OS !== "android") {
return { granted: true, status: RESULTS.UNAVAILABLE };
@ -150,11 +90,16 @@ export default React.memo(function ChatInput({
const { hasMessages } = useAlertState(["hasMessages"]);
const autoFocus = !hasMessages;
const [isRecording, setIsRecording] = useState(false);
const recorder = useAudioRecorder(recordingOptionsSpeech);
const [player, setPlayer] = useState(null);
const requestingMicRef = useRef(false);
const {
isRecording: isVoiceRecording,
uri: recordingUri,
start: startVoiceRecorder,
stop: stopVoiceRecorder,
} = useVoiceRecorder();
// A11y: avoid repeated announcements while recording (e.g. every countdown tick)
const lastRecordingAnnouncementRef = useRef(null);
@ -171,7 +116,11 @@ export default React.memo(function ChatInput({
}, [player]);
const hasText = text.length > 0;
const mode = isRecording ? MODE.RECORDING : hasText ? MODE.TEXT : MODE.EMPTY;
const mode = isVoiceRecording
? MODE.RECORDING
: hasText
? MODE.TEXT
: MODE.EMPTY;
const sendTextMessage = useCallback(async () => {
if (!text) {
@ -215,6 +164,18 @@ export default React.memo(function ChatInput({
}
requestingMicRef.current = true;
try {
console.log("[ChatInput] startRecording invoked", {
platform: Platform.OS,
});
if (Platform.OS === "ios" && Device.isDevice === false) {
Alert.alert(
"Microphone indisponible",
"L'enregistrement audio n'est pas supporté sur le simulateur iOS.",
);
return;
}
console.log("Requesting microphone permission..");
if (Platform.OS === "android") {
const { granted, status } = await ensureMicPermission();
@ -236,24 +197,8 @@ export default React.memo(function ChatInput({
return;
}
} else {
try {
await withTimeout(requestRecordingPermissionsAsync(), 10000);
} catch (permErr) {
console.log(
"Microphone permission request failed/timed out:",
permErr,
);
return;
}
// iOS microphone permission is handled inside useVoiceRecorder via expo-audio
}
await setAudioModeAsync({
allowsRecording: true,
interruptionMode: "doNotMix",
playsInSilentMode: true,
interruptionModeAndroid: "doNotMix",
shouldRouteThroughEarpiece: false,
shouldPlayInBackground: true,
});
// stop playback
if (player !== null) {
try {
@ -262,27 +207,11 @@ export default React.memo(function ChatInput({
setPlayer(null);
}
console.log("Starting recording..");
await setAudioModeAsync({
allowsRecording: true,
interruptionMode: "doNotMix",
playsInSilentMode: true,
interruptionModeAndroid: "doNotMix",
shouldRouteThroughEarpiece: false,
shouldPlayInBackground: true,
});
try {
// Try speech-optimized settings first
try {
await recorder.prepareToRecordAsync(recordingOptionsSpeech);
} catch (optErr) {
console.log("Speech-optimized profile failed, falling back:", optErr);
await recorder.prepareToRecordAsync(recordingOptionsFallback);
}
recorder.record();
console.log("recording");
setIsRecording(true);
console.log(
"[ChatInput] startRecording delegating to useVoiceRecorder.start",
);
await startVoiceRecorder();
// Announce once when recording starts.
if (lastRecordingAnnouncementRef.current !== "started") {
@ -291,82 +220,224 @@ export default React.memo(function ChatInput({
}
} catch (error) {
console.log("error while recording:", error);
Sentry.captureException(error, {
tags: {
feature: "audio-message",
stage: "startRecording",
},
extra: {
platform: Platform.OS,
alertId,
recordingUri,
},
});
announceForA11y("Échec du démarrage de l'enregistrement audio");
return;
}
console.log("Recording started");
console.log("[ChatInput] Recording started");
} catch (err) {
console.log("Failed to start recording", err);
Sentry.captureException(err, {
tags: {
feature: "audio-message",
stage: "startRecording-outer",
},
extra: {
platform: Platform.OS,
alertId,
recordingUri,
},
});
} finally {
requestingMicRef.current = false;
}
}, [player, recorder]);
}, [alertId, player, recordingUri, startVoiceRecorder]);
const stopRecording = useCallback(async () => {
console.log("[ChatInput] stopRecording invoked", {
platform: Platform.OS,
isRecordingBefore: isVoiceRecording,
});
let uri = null;
try {
await recorder.stop();
uri = await stopVoiceRecorder();
} catch (_error) {
// Do nothing -- already stopped/unloaded.
console.log("[ChatInput] stopVoiceRecorder threw (ignored)", _error);
}
if (isRecording) {
setIsRecording(false);
const effectiveUri = uri || recordingUri;
console.log("[ChatInput] stopRecording completed", {
platform: Platform.OS,
isRecordingAfter: false,
recordingUri: effectiveUri,
});
if (isVoiceRecording) {
// Announce once when recording stops.
if (lastRecordingAnnouncementRef.current !== "stopped") {
lastRecordingAnnouncementRef.current = "stopped";
announceForA11y("Enregistrement arrêté");
}
}
}, [recorder, isRecording]);
return effectiveUri;
}, [isVoiceRecording, recordingUri, stopVoiceRecorder]);
const recordedToSound = useCallback(async () => {
await setAudioModeAsync({
allowsRecording: false,
interruptionMode: "doNotMix",
playsInSilentMode: true,
interruptionModeAndroid: "doNotMix",
shouldRouteThroughEarpiece: false,
shouldPlayInBackground: true,
});
const status = recorder.getStatus();
const url = status?.url;
if (url) {
const _player = createAudioPlayer(url);
setPlayer(_player);
}
}, [recorder]);
const recordedToSound = useCallback(
async (uriOverride) => {
console.log("[ChatInput] recordedToSound invoked", {
platform: Platform.OS,
});
try {
await setAudioModeAsync({
allowsRecording: false,
playsInSilentMode: true,
interruptionMode: "doNotMix",
interruptionModeAndroid: "doNotMix",
shouldRouteThroughEarpiece: false,
// Foreground-first: do not keep audio session alive in background.
shouldPlayInBackground: false,
});
} catch (error) {
console.log(
"[ChatInput] Audio.setAudioModeAsync for playback failed",
error,
);
}
const uploadAudio = useCallback(async () => {
const { url } = recorder.getStatus();
const uri = url;
if (!uri) {
throw new Error("No recording URL available");
}
const fd = new FormData();
fd.append("data[alertId]", alertId);
fd.append("data[file]", {
uri,
type: "audio/mp4",
name: "audioRecord.m4a",
});
await network.oaFilesKy.post("audio/upload", {
body: fd,
});
}, [alertId, recorder]);
const url = uriOverride || recordingUri;
console.log("[ChatInput] recordedToSound status after recording", {
platform: Platform.OS,
url,
});
if (url) {
const _player = createAudioPlayer(url);
setPlayer(_player);
console.log("[ChatInput] recordedToSound created player", {
hasPlayer: !!_player,
});
}
},
[recordingUri],
);
// Upload the recorded audio file for the current alert.
// Accepts an optional `uriOverride` (the URI just returned by the recorder)
// and falls back to the last known `recordingUri` from useVoiceRecorder.
// Throws when no URI is available or when the upload request fails; both
// failure paths are logged and reported to Sentry with platform context.
const uploadAudio = useCallback(
  async (uriOverride) => {
    const rawUrl = uriOverride ?? recordingUri ?? null;
    // iOS recorders may report a bare filesystem path; FormData file uploads
    // need a `file://` scheme, so prefix it when missing (iOS only).
    const uri =
      Platform.OS === "ios" && rawUrl && !rawUrl.startsWith("file:")
        ? `file://${rawUrl}`
        : rawUrl;
    console.log("[ChatInput] uploadAudio invoked", {
      platform: Platform.OS,
      recordingUri,
      rawUrl,
      uri,
    });
    if (!uri) {
      // No recording to upload: surface the error to the caller after
      // logging and reporting it, so sendRecording can announce the failure.
      const error = new Error("No recording URL available");
      console.error("[ChatInput] uploadAudio error: missing uri", error, {
        platform: Platform.OS,
        recordingUri,
      });
      Sentry.captureException(error, {
        tags: {
          feature: "audio-message",
          stage: "uploadAudio",
        },
        extra: {
          platform: Platform.OS,
          recordingUri,
        },
      });
      throw error;
    }
    // Multipart body expected by the files backend: alert id + audio file.
    const fd = new FormData();
    fd.append("data[alertId]", alertId);
    const fileField = {
      uri,
      // Keep Android behavior, but this remains valid for iOS (AAC in MP4 container).
      type: "audio/mp4",
      name: "audioRecord.m4a",
    };
    console.log("[ChatInput] uploadAudio FormData file field", fileField);
    fd.append("data[file]", fileField);
    try {
      const response = await network.oaFilesKy.post("audio/upload", {
        body: fd,
      });
      console.log("[ChatInput] uploadAudio response", {
        status: response.status,
        statusText: response.statusText,
      });
      return response;
    } catch (error) {
      // Network/HTTP failure: extract status info when the HTTP client
      // attached a response, report to Sentry, then rethrow for the caller.
      const statusCode = error?.response?.status;
      const statusText = error?.response?.statusText;
      console.error("[ChatInput] uploadAudio network error", error, {
        platform: Platform.OS,
        statusCode,
        statusText,
      });
      Sentry.captureException(error, {
        tags: {
          feature: "audio-message",
          stage: "uploadAudio",
        },
        extra: {
          platform: Platform.OS,
          statusCode,
          statusText,
          recordingUri,
          uri,
        },
      });
      throw error;
    }
  },
  [alertId, recordingUri],
);
const sendRecording = useCallback(async () => {
try {
await stopRecording();
await recordedToSound();
await uploadAudio();
console.log("[ChatInput] sendRecording start", {
platform: Platform.OS,
});
const uri = await stopRecording();
await recordedToSound(uri);
await uploadAudio(uri);
// Keep focus stable: return focus to input after finishing recording flow.
setTimeout(() => {
textInputRef.current?.focus?.();
}, 0);
console.log("[ChatInput] sendRecording completed successfully");
} catch (error) {
console.error("Failed to send recording:", error);
const statusCode = error?.response?.status;
const statusText = error?.response?.statusText;
console.error("[ChatInput] Failed to send recording", error, {
platform: Platform.OS,
statusCode,
statusText,
});
Sentry.captureException(error, {
tags: {
feature: "audio-message",
stage: "sendRecording",
},
extra: {
platform: Platform.OS,
statusCode,
statusText,
alertId,
recordingUri,
},
});
announceForA11y("Échec de l'envoi de l'enregistrement audio");
}
}, [stopRecording, recordedToSound, uploadAudio]);
}, [alertId, recordingUri, stopRecording, recordedToSound, uploadAudio]);
const deleteRecording = useCallback(async () => {
await stopRecording();
@ -376,18 +447,16 @@ export default React.memo(function ChatInput({
}, [stopRecording]);
const triggerMicrophoneClick = useCallback(async () => {
if (isRecording) {
if (isVoiceRecording) {
await sendRecording();
} else {
await startRecording();
}
}, [isRecording, startRecording, sendRecording]);
}, [isVoiceRecording, startRecording, sendRecording]);
const onRecordingCountDownComplete = useCallback(async () => {
await stopRecording();
await recordedToSound();
await sendRecording();
}, [sendRecording, stopRecording, recordedToSound]);
}, [sendRecording]);
// reset on alert change
const dataRef = useRef(null);
@ -488,20 +557,20 @@ export default React.memo(function ChatInput({
accessibilityLabel={
hasText
? "Envoyer le message"
: isRecording
: isVoiceRecording
? "Envoyer l'enregistrement audio"
: "Démarrer l'enregistrement audio"
}
accessibilityHint={
hasText
? "Envoie le message."
: isRecording
: isVoiceRecording
? "Envoie l'enregistrement audio."
: "Démarre l'enregistrement audio."
}
accessibilityState={{
disabled: false,
...(isRecording ? { selected: true } : null),
...(isVoiceRecording ? { selected: true } : null),
}}
onPress={hasText ? sendTextMessage : triggerMicrophoneClick}
>

View file

@ -0,0 +1,162 @@
import { useCallback, useEffect, useRef, useState } from "react";
import {
RecordingPresets,
requestRecordingPermissionsAsync,
setAudioModeAsync,
setIsAudioActiveAsync,
useAudioRecorder,
} from "expo-audio";
let hasLoggedAudioMode = false;
/**
 * Voice-recording hook built on expo-audio's `useAudioRecorder`.
 *
 * Exposes a minimal API for the chat input:
 * - `isRecording` (boolean): whether a recording is in progress.
 * - `uri` (string|null): file URI of the last completed recording.
 * - `start()`: requests mic permission, configures the audio session,
 *   prepares and starts the recorder (with one controlled retry).
 *   Throws if permission is denied or both start attempts fail.
 * - `stop()`: stops the recorder and returns its URI (or null).
 * - `reset()`: clears `uri` and `isRecording` state only.
 *
 * Stops any in-flight recording on unmount (best effort).
 */
export default function useVoiceRecorder() {
  // Latest native recorder instance, kept in a ref so async callbacks
  // (start/stop/cleanup) always see the current instance across re-renders.
  const recorderRef = useRef(null);
  const [isRecording, setIsRecording] = useState(false);
  // File URI of the most recently completed recording; null until stop().
  const [uri, setUri] = useState(null);

  // NOTE: `expo-audio` doesn't export `AudioRecorder` as a runtime JS class.
  // The supported API is `useAudioRecorder`, which returns a native-backed SharedObject.
  const preset =
    RecordingPresets?.HIGH_QUALITY || RecordingPresets?.LOW_QUALITY;
  if (!preset) {
    // Fail loudly at render time rather than at record time if the
    // expo-audio module is missing its presets entirely.
    throw new Error(
      "expo-audio RecordingPresets are not available; cannot start recording",
    );
  }
  const recorder = useAudioRecorder({
    ...preset,
    isMeteringEnabled: true,
  });

  // Mirror the hook-provided recorder into the ref; clear it on teardown
  // only if it still points at this instance (avoids clobbering a newer one).
  useEffect(() => {
    recorderRef.current = recorder;
    return () => {
      if (recorderRef.current === recorder) {
        recorderRef.current = null;
      }
    };
  }, [recorder]);

  // Best-effort stop of any active recording; never throws.
  const cleanupRecording = useCallback(async () => {
    const recorder = recorderRef.current;
    if (recorder) {
      try {
        if (recorder.isRecording) {
          await recorder.stop();
        }
      } catch (_e) {
        // no-op
      }
    }
    setIsRecording(false);
  }, []);

  const start = useCallback(async () => {
    // Reset any previous recording before starting a new one
    await cleanupRecording();
    setUri(null);

    // Permission prompt is handled here (covers iOS; Android callers may
    // have pre-checked via react-native-permissions before calling start).
    const permission = await requestRecordingPermissionsAsync();
    if (!permission?.granted) {
      throw new Error("Microphone permission not granted");
    }

    // Configure audio mode for recording (iOS & Android)
    const recordingAudioMode = {
      allowsRecording: true,
      playsInSilentMode: true,
      interruptionMode: "doNotMix",
      interruptionModeAndroid: "doNotMix",
      shouldRouteThroughEarpiece: false,
      // Foreground-first: keep the audio session inactive in background.
      shouldPlayInBackground: false,
    };
    // Log the audio-mode config once per app session to avoid spam.
    if (!hasLoggedAudioMode) {
      console.log("[useVoiceRecorder] audio mode set", recordingAudioMode);
      hasLoggedAudioMode = true;
    }
    await setAudioModeAsync(recordingAudioMode);

    // Activate the audio session (ignoring activation errors), then
    // prepare and start the native recorder.
    const prepareAndStart = async () => {
      await setIsAudioActiveAsync(true).catch(() => {});
      console.log("[useVoiceRecorder] preparing recorder");
      await recorder.prepareToRecordAsync();
      console.log("[useVoiceRecorder] starting recorder");
      recorder.record();
      setIsRecording(true);
    };

    try {
      await prepareAndStart();
    } catch (error) {
      console.log("[useVoiceRecorder] recorder start failed", error);
      // One controlled retry for iOS: reset the audio session and try once more.
      try {
        await cleanupRecording();
        await setAudioModeAsync(recordingAudioMode);
        // Brief pause to let the native audio session settle before retrying.
        await new Promise((r) => setTimeout(r, 150));
        await prepareAndStart();
        return;
      } catch (_retryError) {
        console.log("[useVoiceRecorder] recorder retry failed", _retryError);
      }
      // Both attempts failed: stop any partially-started recording, reset
      // state, and rethrow the ORIGINAL error to the caller.
      try {
        if (recorderRef.current?.isRecording) {
          await recorderRef.current.stop();
        }
      } catch (_e) {
        // ignore cleanup failures
      } finally {
        // keep recorder instance; hook will manage its lifecycle
        setIsRecording(false);
      }
      throw error;
    }
  }, [cleanupRecording, recorder]);

  // Stop the current recording and return its file URI (null if there is
  // no recorder or the native layer reported no URI). Stop errors from an
  // already-stopped recorder are swallowed; the URI is still read after.
  const stop = useCallback(async () => {
    const recorder = recorderRef.current;
    if (!recorder) {
      setIsRecording(false);
      return null;
    }
    try {
      await recorder.stop();
    } catch (_e) {
      // ignore errors from already-stopped/unloaded recordings
    }
    const recordingUri = recorder.uri;
    setUri(recordingUri ?? null);
    setIsRecording(false);
    return recordingUri ?? null;
  }, []);

  // Clear hook state only; does not touch the native recorder.
  const reset = useCallback(() => {
    setUri(null);
    setIsRecording(false);
  }, []);

  // On unmount, stop any in-flight recording (fire-and-forget).
  useEffect(() => {
    return () => {
      const recorder = recorderRef.current;
      if (recorder) {
        if (recorder.isRecording) {
          recorder.stop().catch(() => {});
        }
      }
    };
  }, []);

  return {
    isRecording,
    uri,
    start,
    stop,
    reset,
  };
}

View file

@ -7066,7 +7066,6 @@ __metadata:
eventemitter3: "npm:^5.0.1"
expo: "npm:~53.0.23"
expo-audio: "npm:~0.4.9"
expo-av: "npm:~15.1.7"
expo-build-properties: "npm:~0.14.8"
expo-constants: "npm:~17.1.7"
expo-contacts: "npm:~14.2.5"
@ -10594,21 +10593,6 @@ __metadata:
languageName: node
linkType: hard
"expo-av@npm:~15.1.7":
version: 15.1.7
resolution: "expo-av@npm:15.1.7"
peerDependencies:
expo: "*"
react: "*"
react-native: "*"
react-native-web: "*"
peerDependenciesMeta:
react-native-web:
optional: true
checksum: 10/8f3055b68cac76b627116cf93a63bebdacb8c0d22f630f9fdae7dc74633a945d9a469421fce0f4c345c730eda0d039d78c88e59c09e2219bfde7e259d42981ba
languageName: node
linkType: hard
"expo-build-properties@npm:~0.14.8":
version: 0.14.8
resolution: "expo-build-properties@npm:0.14.8"