Compare commits
No commits in common. "fc53d1d91a4c43ecdc159b4bbf0d2f712f86001b" and "bc5129f7bfb0b637f8d4e130c7f4e4cc7879e22a" have entirely different histories.
fc53d1d91a...bc5129f7bf
14 changed files with 212 additions and 545 deletions
7  .vscode/settings.json  vendored

@@ -1,6 +1 @@
{
  "i18n-ally.localesPaths": [
    "src/i18n",
    "src/i18n/locales"
  ]
}
{}
@@ -199,6 +199,7 @@
  A891237ADBD54747890A99FB /* Fix Xcode 15 Bug */,
  9E272D599E42446BB8DFD8D0 /* Fix Xcode 15 Bug */,
  884372829D0E4436B7BC6F91 /* Fix Xcode 15 Bug */,
  F778935A7C8642C4851AE054 /* Remove signature files (Xcode workaround) */,
  A1D9EF7E0B4442DF8860F6A5 /* Fix Xcode 15 Bug */,
  B6A277D5FA3848ECACDAF93C /* Remove signature files (Xcode workaround) */,
);
@@ -120,6 +120,7 @@
    "eventemitter3": "^5.0.1",
    "expo": "~53.0.23",
    "expo-audio": "~0.4.9",
    "expo-av": "~15.1.7",
    "expo-build-properties": "~0.14.8",
    "expo-constants": "~17.1.7",
    "expo-contacts": "~14.2.5",
@@ -282,4 +283,4 @@
  }
  },
  "packageManager": "yarn@4.5.3"
}
}
@@ -29,7 +29,6 @@ import {
} from "react-native-safe-area-context";

import useTrackLocation from "~/hooks/useTrackLocation";
import useWsWatchdog from "~/hooks/useWsWatchdog";
// import { initializeBackgroundFetch } from "~/services/backgroundFetch";
import useMount from "~/hooks/useMount";

@@ -225,7 +224,6 @@ function AppContent() {
  useUpdates();
  useNetworkListener();
  useTrackLocation();
  useWsWatchdog();

  // useMount(() => {
  //   const setupBackgroundFetch = async () => {
@@ -1,9 +1,15 @@
import React, { useState, useCallback, useEffect, useRef } from "react";
import { View, Text, TouchableOpacity, Platform, Alert } from "react-native";
import * as Sentry from "@sentry/react-native";
import { MaterialCommunityIcons } from "@expo/vector-icons";
import { createAudioPlayer, setAudioModeAsync } from "expo-audio";
import * as Device from "expo-device";
import {
  useAudioRecorder,
  createAudioPlayer,
  setAudioModeAsync,
  requestRecordingPermissionsAsync,
  RecordingPresets,
  IOSOutputFormat,
  AudioQuality,
} from "expo-audio";

import {
  check,
@@ -21,7 +27,6 @@ import network from "~/network";

import TextArea from "./TextArea";
import useInsertMessage from "~/hooks/useInsertMessage";
import useVoiceRecorder from "~/hooks/useVoiceRecorder";
import { announceForA11y } from "~/lib/a11y";

const MODE = {
@@ -38,8 +43,63 @@ const rightButtonIconNames = {

const RECORDING_TIMEOUT = 59;

// Speech-optimized profile (smaller files, good voice quality)
const recordingOptionsSpeech = {
  ...RecordingPresets.HIGH_QUALITY,
  // Voice-friendly sample rate & bitrate
  sampleRate: 22050,
  numberOfChannels: 1,
  bitRate: 24000,
  ios: {
    ...RecordingPresets.HIGH_QUALITY.ios,
    outputFormat: IOSOutputFormat.MPEG4AAC,
    // Medium is enough for voice; final quality driven by bitRate above
    audioQuality: AudioQuality.MEDIUM,
  },
  android: {
    ...RecordingPresets.HIGH_QUALITY.android,
    outputFormat: "mpeg4",
    audioEncoder: "aac",
  },
};

// Fallback profile (broader device compatibility if speech profile fails)
const recordingOptionsFallback = {
  ...RecordingPresets.HIGH_QUALITY,
  sampleRate: 44100,
  numberOfChannels: 1,
  bitRate: 64000,
  ios: {
    ...RecordingPresets.HIGH_QUALITY.ios,
    outputFormat: IOSOutputFormat.MPEG4AAC,
    audioQuality: AudioQuality.MAX,
  },
  android: {
    ...RecordingPresets.HIGH_QUALITY.android,
    outputFormat: "mpeg4",
    audioEncoder: "aac",
  },
};

const activeOpacity = 0.7;

const withTimeout = (promise, ms = 10000) =>
  new Promise((resolve, reject) => {
    const id = setTimeout(
      () => reject(new Error("Permission request timeout")),
      ms,
    );
    promise
      .then((v) => {
        clearTimeout(id);
        resolve(v);
      })
      .catch((e) => {
        clearTimeout(id);
        reject(e);
      });
  });

const ensureMicPermission = async () => {
  if (Platform.OS !== "android") {
    return { granted: true, status: RESULTS.UNAVAILABLE };
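For context, a minimal usage sketch of the withTimeout wrapper above, applied to an arbitrary promise-returning call (loadSomething is hypothetical, not part of this codebase):

// Sketch: any awaited call can be capped so it cannot hang forever.
async function loadWithDeadline() {
  try {
    // Rejects with the helper's fixed "Permission request timeout" message after 5 s
    // if loadSomething never settles.
    return await withTimeout(loadSomething(), 5000);
  } catch (error) {
    console.log("call failed or timed out:", error);
    return null;
  }
}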
@@ -90,16 +150,11 @@ export default React.memo(function ChatInput({
  const { hasMessages } = useAlertState(["hasMessages"]);
  const autoFocus = !hasMessages;

  const [isRecording, setIsRecording] = useState(false);
  const recorder = useAudioRecorder(recordingOptionsSpeech);
  const [player, setPlayer] = useState(null);
  const requestingMicRef = useRef(false);

  const {
    isRecording: isVoiceRecording,
    uri: recordingUri,
    start: startVoiceRecorder,
    stop: stopVoiceRecorder,
  } = useVoiceRecorder();

  // A11y: avoid repeated announcements while recording (e.g. every countdown tick)
  const lastRecordingAnnouncementRef = useRef(null);
@@ -116,11 +171,7 @@ export default React.memo(function ChatInput({
  }, [player]);

  const hasText = text.length > 0;
  const mode = isVoiceRecording
    ? MODE.RECORDING
    : hasText
      ? MODE.TEXT
      : MODE.EMPTY;
  const mode = isRecording ? MODE.RECORDING : hasText ? MODE.TEXT : MODE.EMPTY;

  const sendTextMessage = useCallback(async () => {
    if (!text) {
@@ -164,18 +215,6 @@ export default React.memo(function ChatInput({
    }
    requestingMicRef.current = true;
    try {
      console.log("[ChatInput] startRecording invoked", {
        platform: Platform.OS,
      });

      if (Platform.OS === "ios" && Device.isDevice === false) {
        Alert.alert(
          "Microphone indisponible",
          "L'enregistrement audio n'est pas supporté sur le simulateur iOS.",
        );
        return;
      }

      console.log("Requesting microphone permission..");
      if (Platform.OS === "android") {
        const { granted, status } = await ensureMicPermission();
@@ -197,8 +236,24 @@ export default React.memo(function ChatInput({
        return;
      }
    } else {
      // iOS microphone permission is handled inside useVoiceRecorder via expo-audio
      try {
        await withTimeout(requestRecordingPermissionsAsync(), 10000);
      } catch (permErr) {
        console.log(
          "Microphone permission request failed/timed out:",
          permErr,
        );
        return;
      }
    }
    await setAudioModeAsync({
      allowsRecording: true,
      interruptionMode: "doNotMix",
      playsInSilentMode: true,
      interruptionModeAndroid: "doNotMix",
      shouldRouteThroughEarpiece: false,
      shouldPlayInBackground: true,
    });
    // stop playback
    if (player !== null) {
      try {
@@ -207,11 +262,27 @@ export default React.memo(function ChatInput({
      setPlayer(null);
    }

    console.log("Starting recording..");
    await setAudioModeAsync({
      allowsRecording: true,
      interruptionMode: "doNotMix",
      playsInSilentMode: true,
      interruptionModeAndroid: "doNotMix",
      shouldRouteThroughEarpiece: false,
      shouldPlayInBackground: true,
    });

    try {
      console.log(
        "[ChatInput] startRecording delegating to useVoiceRecorder.start",
      );
      await startVoiceRecorder();
      // Try speech-optimized settings first
      try {
        await recorder.prepareToRecordAsync(recordingOptionsSpeech);
      } catch (optErr) {
        console.log("Speech-optimized profile failed, falling back:", optErr);
        await recorder.prepareToRecordAsync(recordingOptionsFallback);
      }
      recorder.record();
      console.log("recording");
      setIsRecording(true);

      // Announce once when recording starts.
      if (lastRecordingAnnouncementRef.current !== "started") {
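The try/catch pair above (speech profile first, fallback on failure) can be read as one small helper; a sketch under the assumption that recorder is the object returned by expo-audio's useAudioRecorder:

// Sketch: prepare with the speech-optimized profile, fall back on failure, then record.
async function prepareAndRecord(recorder) {
  try {
    await recorder.prepareToRecordAsync(recordingOptionsSpeech);
  } catch (optErr) {
    console.log("speech profile rejected, using fallback:", optErr);
    await recorder.prepareToRecordAsync(recordingOptionsFallback);
  }
  recorder.record(); // starts recording; no await needed, matching the diff above
}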
@@ -220,224 +291,82 @@ export default React.memo(function ChatInput({
      }
    } catch (error) {
      console.log("error while recording:", error);
      Sentry.captureException(error, {
        tags: {
          feature: "audio-message",
          stage: "startRecording",
        },
        extra: {
          platform: Platform.OS,
          alertId,
          recordingUri,
        },
      });
      announceForA11y("Échec du démarrage de l'enregistrement audio");
      return;
    }
    console.log("[ChatInput] Recording started");
    console.log("Recording started");
  } catch (err) {
    console.log("Failed to start recording", err);
    Sentry.captureException(err, {
      tags: {
        feature: "audio-message",
        stage: "startRecording-outer",
      },
      extra: {
        platform: Platform.OS,
        alertId,
        recordingUri,
      },
    });
  } finally {
    requestingMicRef.current = false;
  }
  }, [alertId, player, recordingUri, startVoiceRecorder]);
  }, [player, recorder]);

  const stopRecording = useCallback(async () => {
    console.log("[ChatInput] stopRecording invoked", {
      platform: Platform.OS,
      isRecordingBefore: isVoiceRecording,
    });
    let uri = null;
    try {
      uri = await stopVoiceRecorder();
      await recorder.stop();
    } catch (_error) {
      // Do nothing -- already stopped/unloaded.
      console.log("[ChatInput] stopVoiceRecorder threw (ignored)", _error);
    }
    const effectiveUri = uri || recordingUri;
    console.log("[ChatInput] stopRecording completed", {
      platform: Platform.OS,
      isRecordingAfter: false,
      recordingUri: effectiveUri,
    });
    if (isVoiceRecording) {
    if (isRecording) {
      setIsRecording(false);

      // Announce once when recording stops.
      if (lastRecordingAnnouncementRef.current !== "stopped") {
        lastRecordingAnnouncementRef.current = "stopped";
        announceForA11y("Enregistrement arrêté");
      }
    }
    return effectiveUri;
  }, [isVoiceRecording, recordingUri, stopVoiceRecorder]);
  }, [recorder, isRecording]);

  const recordedToSound = useCallback(
    async (uriOverride) => {
      console.log("[ChatInput] recordedToSound invoked", {
        platform: Platform.OS,
      });
      try {
        await setAudioModeAsync({
          allowsRecording: false,
          playsInSilentMode: true,
          interruptionMode: "doNotMix",
          interruptionModeAndroid: "doNotMix",
          shouldRouteThroughEarpiece: false,
          // Foreground-first: do not keep audio session alive in background.
          shouldPlayInBackground: false,
        });
      } catch (error) {
        console.log(
          "[ChatInput] Audio.setAudioModeAsync for playback failed",
          error,
        );
      }
  const recordedToSound = useCallback(async () => {
    await setAudioModeAsync({
      allowsRecording: false,
      interruptionMode: "doNotMix",
      playsInSilentMode: true,
      interruptionModeAndroid: "doNotMix",
      shouldRouteThroughEarpiece: false,
      shouldPlayInBackground: true,
    });
    const status = recorder.getStatus();
    const url = status?.url;
    if (url) {
      const _player = createAudioPlayer(url);
      setPlayer(_player);
    }
  }, [recorder]);

      const url = uriOverride || recordingUri;
      console.log("[ChatInput] recordedToSound status after recording", {
        platform: Platform.OS,
        url,
      });
      if (url) {
        const _player = createAudioPlayer(url);
        setPlayer(_player);
        console.log("[ChatInput] recordedToSound created player", {
          hasPlayer: !!_player,
        });
      }
    },
    [recordingUri],
  );

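Playback of the finished recording is not shown in the hunk above; a minimal sketch of what consuming the created player could look like (play and remove are standard expo-audio AudioPlayer methods, but this exact flow is an assumption):

// Sketch: read the recorder status, build a player from its URL, and play it.
const status = recorder.getStatus();
if (status?.url) {
  const _player = createAudioPlayer(status.url);
  setPlayer(_player);
  _player.play();     // start playback
  // _player.remove(); // release the native player when it is no longer needed
}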
  const uploadAudio = useCallback(
    async (uriOverride) => {
      const rawUrl = uriOverride ?? recordingUri ?? null;
      const uri =
        Platform.OS === "ios" && rawUrl && !rawUrl.startsWith("file:")
          ? `file://${rawUrl}`
          : rawUrl;

      console.log("[ChatInput] uploadAudio invoked", {
        platform: Platform.OS,
        recordingUri,
        rawUrl,
        uri,
      });

      if (!uri) {
        const error = new Error("No recording URL available");
        console.error("[ChatInput] uploadAudio error: missing uri", error, {
          platform: Platform.OS,
          recordingUri,
        });
        Sentry.captureException(error, {
          tags: {
            feature: "audio-message",
            stage: "uploadAudio",
          },
          extra: {
            platform: Platform.OS,
            recordingUri,
          },
        });
        throw error;
      }

      const fd = new FormData();
      fd.append("data[alertId]", alertId);
      const fileField = {
        uri,
        // Keep Android behavior, but this remains valid for iOS (AAC in MP4 container).
        type: "audio/mp4",
        name: "audioRecord.m4a",
      };
      console.log("[ChatInput] uploadAudio FormData file field", fileField);
      fd.append("data[file]", fileField);

      try {
        const response = await network.oaFilesKy.post("audio/upload", {
          body: fd,
        });
        console.log("[ChatInput] uploadAudio response", {
          status: response.status,
          statusText: response.statusText,
        });
        return response;
      } catch (error) {
        const statusCode = error?.response?.status;
        const statusText = error?.response?.statusText;
        console.error("[ChatInput] uploadAudio network error", error, {
          platform: Platform.OS,
          statusCode,
          statusText,
        });
        Sentry.captureException(error, {
          tags: {
            feature: "audio-message",
            stage: "uploadAudio",
          },
          extra: {
            platform: Platform.OS,
            statusCode,
            statusText,
            recordingUri,
            uri,
          },
        });
        throw error;
      }
    },
    [alertId, recordingUri],
  );
  const uploadAudio = useCallback(async () => {
    const { url } = recorder.getStatus();
    const uri = url;
    if (!uri) {
      throw new Error("No recording URL available");
    }
    const fd = new FormData();
    fd.append("data[alertId]", alertId);
    fd.append("data[file]", {
      uri,
      type: "audio/mp4",
      name: "audioRecord.m4a",
    });
    await network.oaFilesKy.post("audio/upload", {
      body: fd,
    });
  }, [alertId, recorder]);

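The multipart shape used by the new uploadAudio is illustrated below; plain fetch and a placeholder base URL stand in for the project's network.oaFilesKy wrapper, so treat the endpoint wiring as an assumption:

// Sketch: React Native multipart upload of the recorded file.
async function uploadRecording({ uri, alertId, baseUrl }) {
  const fd = new FormData();
  fd.append("data[alertId]", alertId);
  fd.append("data[file]", {
    uri,               // file:// URI produced by the recorder
    type: "audio/mp4", // AAC in an MP4 container
    name: "audioRecord.m4a",
  });
  // React Native's fetch sets the multipart boundary automatically for FormData bodies.
  const response = await fetch(`${baseUrl}/audio/upload`, { method: "POST", body: fd });
  if (!response.ok) throw new Error(`Upload failed with status ${response.status}`);
  return response;
}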
  const sendRecording = useCallback(async () => {
    try {
      console.log("[ChatInput] sendRecording start", {
        platform: Platform.OS,
      });
      const uri = await stopRecording();
      await recordedToSound(uri);
      await uploadAudio(uri);
      await stopRecording();
      await recordedToSound();
      await uploadAudio();

      // Keep focus stable: return focus to input after finishing recording flow.
      setTimeout(() => {
        textInputRef.current?.focus?.();
      }, 0);
      console.log("[ChatInput] sendRecording completed successfully");
    } catch (error) {
      const statusCode = error?.response?.status;
      const statusText = error?.response?.statusText;
      console.error("[ChatInput] Failed to send recording", error, {
        platform: Platform.OS,
        statusCode,
        statusText,
      });
      Sentry.captureException(error, {
        tags: {
          feature: "audio-message",
          stage: "sendRecording",
        },
        extra: {
          platform: Platform.OS,
          statusCode,
          statusText,
          alertId,
          recordingUri,
        },
      });
      console.error("Failed to send recording:", error);
      announceForA11y("Échec de l'envoi de l'enregistrement audio");
    }
  }, [alertId, recordingUri, stopRecording, recordedToSound, uploadAudio]);
  }, [stopRecording, recordedToSound, uploadAudio]);

  const deleteRecording = useCallback(async () => {
    await stopRecording();
@@ -447,16 +376,18 @@ export default React.memo(function ChatInput({
  }, [stopRecording]);

  const triggerMicrophoneClick = useCallback(async () => {
    if (isVoiceRecording) {
    if (isRecording) {
      await sendRecording();
    } else {
      await startRecording();
    }
  }, [isVoiceRecording, startRecording, sendRecording]);
  }, [isRecording, startRecording, sendRecording]);

  const onRecordingCountDownComplete = useCallback(async () => {
    await stopRecording();
    await recordedToSound();
    await sendRecording();
  }, [sendRecording]);
  }, [sendRecording, stopRecording, recordedToSound]);

  // reset on alert change
  const dataRef = useRef(null);
@@ -557,20 +488,20 @@ export default React.memo(function ChatInput({
          accessibilityLabel={
            hasText
              ? "Envoyer le message"
              : isVoiceRecording
              : isRecording
                ? "Envoyer l'enregistrement audio"
                : "Démarrer l'enregistrement audio"
          }
          accessibilityHint={
            hasText
              ? "Envoie le message."
              : isVoiceRecording
              : isRecording
                ? "Envoie l'enregistrement audio."
                : "Démarre l'enregistrement audio."
          }
          accessibilityState={{
            disabled: false,
            ...(isVoiceRecording ? { selected: true } : null),
            ...(isRecording ? { selected: true } : null),
          }}
          onPress={hasText ? sendTextMessage : triggerMicrophoneClick}
        >
@@ -47,7 +47,6 @@ export default function useLatestWithSubscription(
  const retryCountRef = useRef(0);
  const subscriptionErrorRef = useRef(null);
  const timeoutIdRef = useRef(null);
  const unsubscribeRef = useRef(null);

  useEffect(() => {
    const currentVarsHash = JSON.stringify(variables);
@@ -135,17 +134,6 @@ export default function useLatestWithSubscription(
    if (!subscribeToMore) return;
    if (highestIdRef.current === null) return; // Wait until we have the highest ID

    // Always cleanup any previous active subscription before creating a new one.
    // React only runs the cleanup returned directly from the effect.
    if (unsubscribeRef.current) {
      try {
        unsubscribeRef.current();
      } catch (_e) {
        // ignore
      }
      unsubscribeRef.current = null;
    }

    // Check if max retries reached and we have an error
    if (retryCountRef.current >= maxRetries && subscriptionErrorRef.current) {
      console.error(
@@ -295,7 +283,15 @@ export default function useLatestWithSubscription(
        },
      });

      unsubscribeRef.current = unsubscribe;
      // Cleanup on unmount or re-run
      return () => {
        console.log(`[${subscriptionKey}] Cleaning up subscription`);
        if (timeoutIdRef.current) {
          clearTimeout(timeoutIdRef.current);
          timeoutIdRef.current = null;
        }
        unsubscribe();
      };
    } catch (error) {
      // Handle setup errors (like malformed queries)
      console.error(
@@ -335,24 +331,22 @@ export default function useLatestWithSubscription(
          console.error("Failed to report to Sentry:", sentryError);
        }
      }

      return () => {
        if (timeoutIdRef.current) {
          clearTimeout(timeoutIdRef.current);
          timeoutIdRef.current = null;
        }
      };
    }
    }, backoffDelay);

    // Cleanup function that will run when component unmounts or effect re-runs
    return () => {
      console.log(`[${subscriptionKey}] Cleaning up subscription`);
      if (timeoutIdRef.current) {
        clearTimeout(timeoutIdRef.current);
        timeoutIdRef.current = null;
      }
      if (unsubscribeRef.current) {
        try {
          unsubscribeRef.current();
        } catch (_e) {
          // ignore
        }
        unsubscribeRef.current = null;
      }
    };
  }, [
    skip,
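The ref-based teardown used throughout this hook reduces to the pattern below; a simplified sketch where subscribe stands in for the subscribeToMore call (names and shape are illustrative, not the hook's actual signature):

import { useEffect, useRef } from "react";

// Sketch: keep the active unsubscribe function in a ref so both the next effect run
// and the effect cleanup can tear it down, even when the subscription is created
// later inside an async callback or a setTimeout.
function useSafeSubscription(subscribe, deps) {
  const unsubscribeRef = useRef(null);

  useEffect(() => {
    // Tear down any previous subscription before creating a new one.
    if (unsubscribeRef.current) {
      try { unsubscribeRef.current(); } catch (_e) { /* ignore */ }
      unsubscribeRef.current = null;
    }
    unsubscribeRef.current = subscribe();

    return () => {
      // React only runs the cleanup returned synchronously from the effect,
      // so the ref is the single source of truth for what to tear down.
      if (unsubscribeRef.current) {
        try { unsubscribeRef.current(); } catch (_e) { /* ignore */ }
        unsubscribeRef.current = null;
      }
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, deps);
}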
@@ -40,7 +40,6 @@ export default function useStreamQueryWithSubscription(
  const retryCountRef = useRef(0);
  const subscriptionErrorRef = useRef(null);
  const timeoutIdRef = useRef(null);
  const unsubscribeRef = useRef(null);

  useEffect(() => {
    const currentVarsHash = JSON.stringify(variables);
@@ -125,18 +124,6 @@ export default function useStreamQueryWithSubscription(
    if (skip) return; // If skipping, do nothing
    if (!subscribeToMore) return;

    // If we're about to (re)subscribe, always cleanup any previous subscription first.
    // This is critical because React effect cleanups must be returned synchronously
    // from the effect, not from inside async callbacks.
    if (unsubscribeRef.current) {
      try {
        unsubscribeRef.current();
      } catch (_e) {
        // ignore
      }
      unsubscribeRef.current = null;
    }

    // Check if max retries reached and we have an error - this check must be done regardless of other conditions
    if (retryCountRef.current >= maxRetries && subscriptionErrorRef.current) {
      console.error(
@@ -302,7 +289,15 @@ export default function useStreamQueryWithSubscription(
        },
      });

      unsubscribeRef.current = unsubscribe;
      // Cleanup on unmount or re-run
      return () => {
        console.log(`[${subscriptionKey}] Cleaning up subscription`);
        if (timeoutIdRef.current) {
          clearTimeout(timeoutIdRef.current);
          timeoutIdRef.current = null;
        }
        unsubscribe();
      };
    } catch (error) {
      // Handle setup errors (like malformed queries)
      console.error(
@@ -342,24 +337,22 @@ export default function useStreamQueryWithSubscription(
          console.error("Failed to report to Sentry:", sentryError);
        }
      }

      return () => {
        if (timeoutIdRef.current) {
          clearTimeout(timeoutIdRef.current);
          timeoutIdRef.current = null;
        }
      };
    }
    }, backoffDelay);

    // Cleanup function that will run when component unmounts or effect re-runs
    return () => {
      console.log(`[${subscriptionKey}] Cleaning up subscription`);
      if (timeoutIdRef.current) {
        clearTimeout(timeoutIdRef.current);
        timeoutIdRef.current = null;
      }
      if (unsubscribeRef.current) {
        try {
          unsubscribeRef.current();
        } catch (_e) {
          // ignore
        }
        unsubscribeRef.current = null;
      }
    };
  }, [
    skip,
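Both subscription hooks reschedule through setTimeout(..., backoffDelay) together with a retry counter; a minimal sketch of the kind of capped exponential backoff involved (the constants are illustrative, not the hooks' actual values):

// Sketch: capped exponential backoff for subscription retries.
const BASE_DELAY_MS = 1000;  // illustrative
const MAX_DELAY_MS = 30000;  // illustrative

function backoffDelayFor(retryCount) {
  // 1 s, 2 s, 4 s, 8 s, ... capped at MAX_DELAY_MS
  return Math.min(BASE_DELAY_MS * 2 ** retryCount, MAX_DELAY_MS);
}

// Usage sketch: remember the timer in a ref so the effect cleanup can cancel it.
// timeoutIdRef.current = setTimeout(resubscribe, backoffDelayFor(retryCountRef.current));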
@@ -1,162 +0,0 @@
import { useCallback, useEffect, useRef, useState } from "react";
import {
  RecordingPresets,
  requestRecordingPermissionsAsync,
  setAudioModeAsync,
  setIsAudioActiveAsync,
  useAudioRecorder,
} from "expo-audio";

let hasLoggedAudioMode = false;

export default function useVoiceRecorder() {
  const recorderRef = useRef(null);
  const [isRecording, setIsRecording] = useState(false);
  const [uri, setUri] = useState(null);

  // NOTE: `expo-audio` doesn't export `AudioRecorder` as a runtime JS class.
  // The supported API is `useAudioRecorder`, which returns a native-backed SharedObject.
  const preset =
    RecordingPresets?.HIGH_QUALITY || RecordingPresets?.LOW_QUALITY;
  if (!preset) {
    throw new Error(
      "expo-audio RecordingPresets are not available; cannot start recording",
    );
  }
  const recorder = useAudioRecorder({
    ...preset,
    isMeteringEnabled: true,
  });

  useEffect(() => {
    recorderRef.current = recorder;
    return () => {
      if (recorderRef.current === recorder) {
        recorderRef.current = null;
      }
    };
  }, [recorder]);

  const cleanupRecording = useCallback(async () => {
    const recorder = recorderRef.current;
    if (recorder) {
      try {
        if (recorder.isRecording) {
          await recorder.stop();
        }
      } catch (_e) {
        // no-op
      }
    }
    setIsRecording(false);
  }, []);

  const start = useCallback(async () => {
    // Reset any previous recording before starting a new one
    await cleanupRecording();
    setUri(null);

    const permission = await requestRecordingPermissionsAsync();
    if (!permission?.granted) {
      throw new Error("Microphone permission not granted");
    }

    // Configure audio mode for recording (iOS & Android)
    const recordingAudioMode = {
      allowsRecording: true,
      playsInSilentMode: true,
      interruptionMode: "doNotMix",
      interruptionModeAndroid: "doNotMix",
      shouldRouteThroughEarpiece: false,
      // Foreground-first: keep the audio session inactive in background.
      shouldPlayInBackground: false,
    };

    if (!hasLoggedAudioMode) {
      console.log("[useVoiceRecorder] audio mode set", recordingAudioMode);
      hasLoggedAudioMode = true;
    }

    await setAudioModeAsync(recordingAudioMode);

    const prepareAndStart = async () => {
      await setIsAudioActiveAsync(true).catch(() => {});
      console.log("[useVoiceRecorder] preparing recorder");
      await recorder.prepareToRecordAsync();
      console.log("[useVoiceRecorder] starting recorder");
      recorder.record();
      setIsRecording(true);
    };
    try {
      await prepareAndStart();
    } catch (error) {
      console.log("[useVoiceRecorder] recorder start failed", error);

      // One controlled retry for iOS: reset the audio session and try once more.
      try {
        await cleanupRecording();
        await setAudioModeAsync(recordingAudioMode);
        await new Promise((r) => setTimeout(r, 150));
        await prepareAndStart();
        return;
      } catch (_retryError) {
        console.log("[useVoiceRecorder] recorder retry failed", _retryError);
      }

      try {
        if (recorderRef.current?.isRecording) {
          await recorderRef.current.stop();
        }
      } catch (_e) {
        // ignore cleanup failures
      } finally {
        // keep recorder instance; hook will manage its lifecycle
        setIsRecording(false);
      }
      throw error;
    }
  }, [cleanupRecording, recorder]);

  const stop = useCallback(async () => {
    const recorder = recorderRef.current;
    if (!recorder) {
      setIsRecording(false);
      return null;
    }

    try {
      await recorder.stop();
    } catch (_e) {
      // ignore errors from already-stopped/unloaded recordings
    }

    const recordingUri = recorder.uri;
    setUri(recordingUri ?? null);
    setIsRecording(false);
    return recordingUri ?? null;
  }, []);

  const reset = useCallback(() => {
    setUri(null);
    setIsRecording(false);
  }, []);

  useEffect(() => {
    return () => {
      const recorder = recorderRef.current;
      if (recorder) {
        if (recorder.isRecording) {
          recorder.stop().catch(() => {});
        }
      }
    };
  }, []);

  return {
    isRecording,
    uri,
    start,
    stop,
    reset,
  };
}
@@ -1,74 +0,0 @@
import { useEffect, useRef } from "react";
import { useNetworkState, networkActions } from "~/stores";
import network from "~/network";
import { createLogger } from "~/lib/logger";
import { NETWORK_SCOPES } from "~/lib/logger/scopes";

const watchdogLogger = createLogger({
  module: NETWORK_SCOPES.WEBSOCKET,
  feature: "watchdog",
});

const HEARTBEAT_STALE_MS = 45_000;
const CHECK_EVERY_MS = 10_000;
const MIN_RESTART_INTERVAL_MS = 30_000;

export default function useWsWatchdog({ enabled = true } = {}) {
  const { wsConnected, wsLastHeartbeatDate, hasInternetConnection } =
    useNetworkState([
      "wsConnected",
      "wsLastHeartbeatDate",
      "hasInternetConnection",
    ]);

  const lastRestartRef = useRef(0);

  useEffect(() => {
    if (!enabled) return;

    const interval = setInterval(() => {
      if (!hasInternetConnection) return;
      if (!wsConnected) return;
      if (!wsLastHeartbeatDate) return;

      const last = Date.parse(wsLastHeartbeatDate);
      if (!Number.isFinite(last)) return;

      const age = Date.now() - last;
      if (age < HEARTBEAT_STALE_MS) return;

      const now = Date.now();
      if (now - lastRestartRef.current < MIN_RESTART_INTERVAL_MS) return;
      lastRestartRef.current = now;

      watchdogLogger.warn("WS heartbeat stale, triggering recovery", {
        ageMs: age,
        lastHeartbeatDate: wsLastHeartbeatDate,
      });

      try {
        // First line recovery: restart websocket transport
        network.apolloClient?.restartWS?.();
      } catch (error) {
        watchdogLogger.error("WS restart failed", { error });
      }

      // Second line recovery: if WS stays stale, do a full client reload
      setTimeout(() => {
        const last2 = Date.parse(wsLastHeartbeatDate);
        const age2 = Number.isFinite(last2) ? Date.now() - last2 : Infinity;
        if (age2 >= HEARTBEAT_STALE_MS) {
          watchdogLogger.warn(
            "WS still stale after restart, triggering reload",
            {
              ageMs: age2,
            },
          );
          networkActions.triggerReload();
        }
      }, 10_000);
    }, CHECK_EVERY_MS);

    return () => clearInterval(interval);
  }, [enabled, hasInternetConnection, wsConnected, wsLastHeartbeatDate]);
}
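The removed watchdog keys everything off one staleness comparison; a condensed sketch of that check, with the 45-second threshold inlined from HEARTBEAT_STALE_MS above:

// Sketch: a heartbeat is "stale" when the last WS signal is older than the threshold.
function isHeartbeatStale(wsLastHeartbeatDate, staleMs = 45_000 /* HEARTBEAT_STALE_MS */) {
  const last = Date.parse(wsLastHeartbeatDate);
  if (!Number.isFinite(last)) return false; // no parsable heartbeat yet
  return Date.now() - last >= staleMs;
}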
@@ -81,7 +81,6 @@ export default function createWsLink({ store, GRAPHQL_WS_URL }) {
        activeSocket = socket;
        reconnectAttempts = 0; // Reset attempts on successful connection
        networkActions.WSConnected();
        networkActions.WSTouch();
        cancelReconnect(); // Cancel any pending reconnects

        // Clear any lingering ping timeouts
@@ -115,7 +114,6 @@ export default function createWsLink({ store, GRAPHQL_WS_URL }) {
      },
      ping: (received) => {
        // wsLogger.debug("WebSocket ping", { received });
        networkActions.WSTouch();
        if (!received) {
          // Clear any existing ping timeout
          if (pingTimeout) {
@@ -140,7 +138,6 @@ export default function createWsLink({ store, GRAPHQL_WS_URL }) {
      },
      pong: (received) => {
        // wsLogger.debug("WebSocket pong", { received });
        networkActions.WSTouch();
        if (received) {
          clearTimeout(pingTimeout); // pong is received, clear connection close timeout
        }
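The ping/pong handlers above match graphql-ws's event callbacks; a hedged sketch of how such handlers are typically wired into createClient (the URL, keep-alive interval, and timeout values are placeholders, not this project's configuration):

import { createClient } from "graphql-ws";

let activeSocket = null;
let pingTimeout = null;

const client = createClient({
  url: "wss://example.test/graphql", // placeholder
  keepAlive: 10_000,                 // send a ping every 10 s while idle
  on: {
    connected: (socket) => {
      activeSocket = socket;
    },
    ping: (received) => {
      if (!received) {
        // We sent this ping: if no pong arrives within 5 s, drop the socket so
        // the client's retry logic can reconnect.
        pingTimeout = setTimeout(() => activeSocket?.close?.(4408, "Ping timeout"), 5_000);
      }
    },
    pong: (received) => {
      if (received) clearTimeout(pingTimeout);
    },
  },
});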
@@ -17,14 +17,11 @@ export default withConnectivity(function Params() {
      deviceId,
    },
  });
  if (loading) {
  if (loading || !data) {
    return <Loader />;
  }
  if (error) {
    return <Error />;
  }
  if (!data) {
    return <Error />;
  }
  return <ParamsView data={data} />;
});
@@ -4,7 +4,6 @@ import { ScrollView, View } from "react-native";

import Loader from "~/components/Loader";
import { useSubscription } from "@apollo/client";
import Error from "~/components/Error";

import { LOAD_PROFILE_SUBSCRIPTION } from "./gql";

@@ -24,7 +23,7 @@ const profileLogger = createLogger({
export default withConnectivity(function Profile({ navigation, route }) {
  const { userId } = useSessionState(["userId"]);
  // profileLogger.debug("Profile user ID", { userId });
  const { data, loading, error, restart } = useSubscription(
  const { data, loading, restart } = useSubscription(
    LOAD_PROFILE_SUBSCRIPTION,
    {
      variables: {
@@ -45,21 +44,10 @@ export default withConnectivity(function Profile({ navigation, route }) {
    });
  }, [navigation]);

  if (loading) {
  if (loading || !data?.selectOneUser) {
    return <Loader />;
  }

  if (error) {
    profileLogger.error("Profile subscription error", { error });
    return <Error />;
  }

  if (!data?.selectOneUser) {
    // No error surfaced, but no payload either. Avoid infinite loader.
    profileLogger.error("Profile subscription returned no user", { userId });
    return <Error />;
  }

  return (
    <ScrollView
      style={{
@@ -6,7 +6,6 @@ export default createAtom(({ merge, get }) => {
    wsConnected: false,
    wsConnectedDate: null,
    wsClosedDate: null,
    wsLastHeartbeatDate: null,
    triggerReload: false,
    initialized: true,
    hasInternetConnection: true,
@@ -28,7 +27,6 @@ export default createAtom(({ merge, get }) => {
      merge({
        wsConnected: true,
        wsConnectedDate: new Date().toISOString(),
        wsLastHeartbeatDate: new Date().toISOString(),
      });
    },
    WSClosed: () => {
@@ -42,12 +40,6 @@ export default createAtom(({ merge, get }) => {
        wsClosedDate: new Date().toISOString(),
      });
    },
    WSTouch: () => {
      // Update whenever we get any WS-level signal: connected, ping/pong, or a message.
      merge({
        wsLastHeartbeatDate: new Date().toISOString(),
      });
    },
    setHasInternetConnection: (status) =>
      merge({ hasInternetConnection: status }),
  },
16  yarn.lock

@@ -7066,6 +7066,7 @@ __metadata:
    eventemitter3: "npm:^5.0.1"
    expo: "npm:~53.0.23"
    expo-audio: "npm:~0.4.9"
    expo-av: "npm:~15.1.7"
    expo-build-properties: "npm:~0.14.8"
    expo-constants: "npm:~17.1.7"
    expo-contacts: "npm:~14.2.5"
@@ -10593,6 +10594,21 @@ __metadata:
  languageName: node
  linkType: hard

"expo-av@npm:~15.1.7":
  version: 15.1.7
  resolution: "expo-av@npm:15.1.7"
  peerDependencies:
    expo: "*"
    react: "*"
    react-native: "*"
    react-native-web: "*"
  peerDependenciesMeta:
    react-native-web:
      optional: true
  checksum: 10/8f3055b68cac76b627116cf93a63bebdacb8c0d22f630f9fdae7dc74633a945d9a469421fce0f4c345c730eda0d039d78c88e59c09e2219bfde7e259d42981ba
  languageName: node
  linkType: hard

"expo-build-properties@npm:~0.14.8":
  version: 0.14.8
  resolution: "expo-build-properties@npm:0.14.8"