Compare commits

...

3 commits

Author SHA1 Message Date
fc53d1d91a
chore: wip 2026-01-15 19:18:13 +01:00
147e514d03
fix(ws): stabilization try 1 2026-01-15 19:17:58 +01:00
4d71c229d6 fix(audio-message): ios + up to expo-audio 2026-01-15 18:56:02 +01:00
14 changed files with 545 additions and 212 deletions

View file

@ -1 +1,6 @@
{}
{
"i18n-ally.localesPaths": [
"src/i18n",
"src/i18n/locales"
]
}

View file

@ -199,7 +199,6 @@
A891237ADBD54747890A99FB /* Fix Xcode 15 Bug */,
9E272D599E42446BB8DFD8D0 /* Fix Xcode 15 Bug */,
884372829D0E4436B7BC6F91 /* Fix Xcode 15 Bug */,
F778935A7C8642C4851AE054 /* Remove signature files (Xcode workaround) */,
A1D9EF7E0B4442DF8860F6A5 /* Fix Xcode 15 Bug */,
B6A277D5FA3848ECACDAF93C /* Remove signature files (Xcode workaround) */,
);

View file

@ -120,7 +120,6 @@
"eventemitter3": "^5.0.1",
"expo": "~53.0.23",
"expo-audio": "~0.4.9",
"expo-av": "~15.1.7",
"expo-build-properties": "~0.14.8",
"expo-constants": "~17.1.7",
"expo-contacts": "~14.2.5",

View file

@ -29,6 +29,7 @@ import {
} from "react-native-safe-area-context";
import useTrackLocation from "~/hooks/useTrackLocation";
import useWsWatchdog from "~/hooks/useWsWatchdog";
// import { initializeBackgroundFetch } from "~/services/backgroundFetch";
import useMount from "~/hooks/useMount";
@ -224,6 +225,7 @@ function AppContent() {
useUpdates();
useNetworkListener();
useTrackLocation();
useWsWatchdog();
// useMount(() => {
// const setupBackgroundFetch = async () => {

View file

@ -1,15 +1,9 @@
import React, { useState, useCallback, useEffect, useRef } from "react";
import { View, Text, TouchableOpacity, Platform, Alert } from "react-native";
import * as Sentry from "@sentry/react-native";
import { MaterialCommunityIcons } from "@expo/vector-icons";
import {
useAudioRecorder,
createAudioPlayer,
setAudioModeAsync,
requestRecordingPermissionsAsync,
RecordingPresets,
IOSOutputFormat,
AudioQuality,
} from "expo-audio";
import { createAudioPlayer, setAudioModeAsync } from "expo-audio";
import * as Device from "expo-device";
import {
check,
@ -27,6 +21,7 @@ import network from "~/network";
import TextArea from "./TextArea";
import useInsertMessage from "~/hooks/useInsertMessage";
import useVoiceRecorder from "~/hooks/useVoiceRecorder";
import { announceForA11y } from "~/lib/a11y";
const MODE = {
@ -43,63 +38,8 @@ const rightButtonIconNames = {
const RECORDING_TIMEOUT = 59;
// Speech-optimized profile (smaller files, good voice quality)
const recordingOptionsSpeech = {
...RecordingPresets.HIGH_QUALITY,
// Voice-friendly sample rate & bitrate
sampleRate: 22050,
numberOfChannels: 1,
bitRate: 24000,
ios: {
...RecordingPresets.HIGH_QUALITY.ios,
outputFormat: IOSOutputFormat.MPEG4AAC,
// Medium is enough for voice; final quality driven by bitRate above
audioQuality: AudioQuality.MEDIUM,
},
android: {
...RecordingPresets.HIGH_QUALITY.android,
outputFormat: "mpeg4",
audioEncoder: "aac",
},
};
// Fallback profile (broader device compatibility if speech profile fails)
const recordingOptionsFallback = {
...RecordingPresets.HIGH_QUALITY,
sampleRate: 44100,
numberOfChannels: 1,
bitRate: 64000,
ios: {
...RecordingPresets.HIGH_QUALITY.ios,
outputFormat: IOSOutputFormat.MPEG4AAC,
audioQuality: AudioQuality.MAX,
},
android: {
...RecordingPresets.HIGH_QUALITY.android,
outputFormat: "mpeg4",
audioEncoder: "aac",
},
};
const activeOpacity = 0.7;
const withTimeout = (promise, ms = 10000) =>
new Promise((resolve, reject) => {
const id = setTimeout(
() => reject(new Error("Permission request timeout")),
ms,
);
promise
.then((v) => {
clearTimeout(id);
resolve(v);
})
.catch((e) => {
clearTimeout(id);
reject(e);
});
});
const ensureMicPermission = async () => {
if (Platform.OS !== "android") {
return { granted: true, status: RESULTS.UNAVAILABLE };
@ -150,11 +90,16 @@ export default React.memo(function ChatInput({
const { hasMessages } = useAlertState(["hasMessages"]);
const autoFocus = !hasMessages;
const [isRecording, setIsRecording] = useState(false);
const recorder = useAudioRecorder(recordingOptionsSpeech);
const [player, setPlayer] = useState(null);
const requestingMicRef = useRef(false);
const {
isRecording: isVoiceRecording,
uri: recordingUri,
start: startVoiceRecorder,
stop: stopVoiceRecorder,
} = useVoiceRecorder();
// A11y: avoid repeated announcements while recording (e.g. every countdown tick)
const lastRecordingAnnouncementRef = useRef(null);
@ -171,7 +116,11 @@ export default React.memo(function ChatInput({
}, [player]);
const hasText = text.length > 0;
const mode = isRecording ? MODE.RECORDING : hasText ? MODE.TEXT : MODE.EMPTY;
const mode = isVoiceRecording
? MODE.RECORDING
: hasText
? MODE.TEXT
: MODE.EMPTY;
const sendTextMessage = useCallback(async () => {
if (!text) {
@ -215,6 +164,18 @@ export default React.memo(function ChatInput({
}
requestingMicRef.current = true;
try {
console.log("[ChatInput] startRecording invoked", {
platform: Platform.OS,
});
if (Platform.OS === "ios" && Device.isDevice === false) {
Alert.alert(
"Microphone indisponible",
"L'enregistrement audio n'est pas supporté sur le simulateur iOS.",
);
return;
}
console.log("Requesting microphone permission..");
if (Platform.OS === "android") {
const { granted, status } = await ensureMicPermission();
@ -236,24 +197,8 @@ export default React.memo(function ChatInput({
return;
}
} else {
try {
await withTimeout(requestRecordingPermissionsAsync(), 10000);
} catch (permErr) {
console.log(
"Microphone permission request failed/timed out:",
permErr,
);
return;
}
// iOS microphone permission is handled inside useVoiceRecorder via expo-audio
}
await setAudioModeAsync({
allowsRecording: true,
interruptionMode: "doNotMix",
playsInSilentMode: true,
interruptionModeAndroid: "doNotMix",
shouldRouteThroughEarpiece: false,
shouldPlayInBackground: true,
});
// stop playback
if (player !== null) {
try {
@ -262,27 +207,11 @@ export default React.memo(function ChatInput({
setPlayer(null);
}
console.log("Starting recording..");
await setAudioModeAsync({
allowsRecording: true,
interruptionMode: "doNotMix",
playsInSilentMode: true,
interruptionModeAndroid: "doNotMix",
shouldRouteThroughEarpiece: false,
shouldPlayInBackground: true,
});
try {
// Try speech-optimized settings first
try {
await recorder.prepareToRecordAsync(recordingOptionsSpeech);
} catch (optErr) {
console.log("Speech-optimized profile failed, falling back:", optErr);
await recorder.prepareToRecordAsync(recordingOptionsFallback);
}
recorder.record();
console.log("recording");
setIsRecording(true);
console.log(
"[ChatInput] startRecording delegating to useVoiceRecorder.start",
);
await startVoiceRecorder();
// Announce once when recording starts.
if (lastRecordingAnnouncementRef.current !== "started") {
@ -291,82 +220,224 @@ export default React.memo(function ChatInput({
}
} catch (error) {
console.log("error while recording:", error);
Sentry.captureException(error, {
tags: {
feature: "audio-message",
stage: "startRecording",
},
extra: {
platform: Platform.OS,
alertId,
recordingUri,
},
});
announceForA11y("Échec du démarrage de l'enregistrement audio");
return;
}
console.log("Recording started");
console.log("[ChatInput] Recording started");
} catch (err) {
console.log("Failed to start recording", err);
Sentry.captureException(err, {
tags: {
feature: "audio-message",
stage: "startRecording-outer",
},
extra: {
platform: Platform.OS,
alertId,
recordingUri,
},
});
} finally {
requestingMicRef.current = false;
}
}, [player, recorder]);
}, [alertId, player, recordingUri, startVoiceRecorder]);
const stopRecording = useCallback(async () => {
console.log("[ChatInput] stopRecording invoked", {
platform: Platform.OS,
isRecordingBefore: isVoiceRecording,
});
let uri = null;
try {
await recorder.stop();
uri = await stopVoiceRecorder();
} catch (_error) {
// Do nothing -- already stopped/unloaded.
console.log("[ChatInput] stopVoiceRecorder threw (ignored)", _error);
}
if (isRecording) {
setIsRecording(false);
const effectiveUri = uri || recordingUri;
console.log("[ChatInput] stopRecording completed", {
platform: Platform.OS,
isRecordingAfter: false,
recordingUri: effectiveUri,
});
if (isVoiceRecording) {
// Announce once when recording stops.
if (lastRecordingAnnouncementRef.current !== "stopped") {
lastRecordingAnnouncementRef.current = "stopped";
announceForA11y("Enregistrement arrêté");
}
}
}, [recorder, isRecording]);
return effectiveUri;
}, [isVoiceRecording, recordingUri, stopVoiceRecorder]);
const recordedToSound = useCallback(async () => {
await setAudioModeAsync({
allowsRecording: false,
interruptionMode: "doNotMix",
playsInSilentMode: true,
interruptionModeAndroid: "doNotMix",
shouldRouteThroughEarpiece: false,
shouldPlayInBackground: true,
});
const status = recorder.getStatus();
const url = status?.url;
if (url) {
const _player = createAudioPlayer(url);
setPlayer(_player);
}
}, [recorder]);
const recordedToSound = useCallback(
async (uriOverride) => {
console.log("[ChatInput] recordedToSound invoked", {
platform: Platform.OS,
});
try {
await setAudioModeAsync({
allowsRecording: false,
playsInSilentMode: true,
interruptionMode: "doNotMix",
interruptionModeAndroid: "doNotMix",
shouldRouteThroughEarpiece: false,
// Foreground-first: do not keep audio session alive in background.
shouldPlayInBackground: false,
});
} catch (error) {
console.log(
"[ChatInput] Audio.setAudioModeAsync for playback failed",
error,
);
}
const uploadAudio = useCallback(async () => {
const { url } = recorder.getStatus();
const uri = url;
if (!uri) {
throw new Error("No recording URL available");
}
const fd = new FormData();
fd.append("data[alertId]", alertId);
fd.append("data[file]", {
uri,
type: "audio/mp4",
name: "audioRecord.m4a",
});
await network.oaFilesKy.post("audio/upload", {
body: fd,
});
}, [alertId, recorder]);
const url = uriOverride || recordingUri;
console.log("[ChatInput] recordedToSound status after recording", {
platform: Platform.OS,
url,
});
if (url) {
const _player = createAudioPlayer(url);
setPlayer(_player);
console.log("[ChatInput] recordedToSound created player", {
hasPlayer: !!_player,
});
}
},
[recordingUri],
);
const uploadAudio = useCallback(
async (uriOverride) => {
const rawUrl = uriOverride ?? recordingUri ?? null;
const uri =
Platform.OS === "ios" && rawUrl && !rawUrl.startsWith("file:")
? `file://${rawUrl}`
: rawUrl;
console.log("[ChatInput] uploadAudio invoked", {
platform: Platform.OS,
recordingUri,
rawUrl,
uri,
});
if (!uri) {
const error = new Error("No recording URL available");
console.error("[ChatInput] uploadAudio error: missing uri", error, {
platform: Platform.OS,
recordingUri,
});
Sentry.captureException(error, {
tags: {
feature: "audio-message",
stage: "uploadAudio",
},
extra: {
platform: Platform.OS,
recordingUri,
},
});
throw error;
}
const fd = new FormData();
fd.append("data[alertId]", alertId);
const fileField = {
uri,
// Keep Android behavior, but this remains valid for iOS (AAC in MP4 container).
type: "audio/mp4",
name: "audioRecord.m4a",
};
console.log("[ChatInput] uploadAudio FormData file field", fileField);
fd.append("data[file]", fileField);
try {
const response = await network.oaFilesKy.post("audio/upload", {
body: fd,
});
console.log("[ChatInput] uploadAudio response", {
status: response.status,
statusText: response.statusText,
});
return response;
} catch (error) {
const statusCode = error?.response?.status;
const statusText = error?.response?.statusText;
console.error("[ChatInput] uploadAudio network error", error, {
platform: Platform.OS,
statusCode,
statusText,
});
Sentry.captureException(error, {
tags: {
feature: "audio-message",
stage: "uploadAudio",
},
extra: {
platform: Platform.OS,
statusCode,
statusText,
recordingUri,
uri,
},
});
throw error;
}
},
[alertId, recordingUri],
);
const sendRecording = useCallback(async () => {
try {
await stopRecording();
await recordedToSound();
await uploadAudio();
console.log("[ChatInput] sendRecording start", {
platform: Platform.OS,
});
const uri = await stopRecording();
await recordedToSound(uri);
await uploadAudio(uri);
// Keep focus stable: return focus to input after finishing recording flow.
setTimeout(() => {
textInputRef.current?.focus?.();
}, 0);
console.log("[ChatInput] sendRecording completed successfully");
} catch (error) {
console.error("Failed to send recording:", error);
const statusCode = error?.response?.status;
const statusText = error?.response?.statusText;
console.error("[ChatInput] Failed to send recording", error, {
platform: Platform.OS,
statusCode,
statusText,
});
Sentry.captureException(error, {
tags: {
feature: "audio-message",
stage: "sendRecording",
},
extra: {
platform: Platform.OS,
statusCode,
statusText,
alertId,
recordingUri,
},
});
announceForA11y("Échec de l'envoi de l'enregistrement audio");
}
}, [stopRecording, recordedToSound, uploadAudio]);
}, [alertId, recordingUri, stopRecording, recordedToSound, uploadAudio]);
const deleteRecording = useCallback(async () => {
await stopRecording();
@ -376,18 +447,16 @@ export default React.memo(function ChatInput({
}, [stopRecording]);
const triggerMicrophoneClick = useCallback(async () => {
if (isRecording) {
if (isVoiceRecording) {
await sendRecording();
} else {
await startRecording();
}
}, [isRecording, startRecording, sendRecording]);
}, [isVoiceRecording, startRecording, sendRecording]);
const onRecordingCountDownComplete = useCallback(async () => {
await stopRecording();
await recordedToSound();
await sendRecording();
}, [sendRecording, stopRecording, recordedToSound]);
}, [sendRecording]);
// reset on alert change
const dataRef = useRef(null);
@ -488,20 +557,20 @@ export default React.memo(function ChatInput({
accessibilityLabel={
hasText
? "Envoyer le message"
: isRecording
: isVoiceRecording
? "Envoyer l'enregistrement audio"
: "Démarrer l'enregistrement audio"
}
accessibilityHint={
hasText
? "Envoie le message."
: isRecording
: isVoiceRecording
? "Envoie l'enregistrement audio."
: "Démarre l'enregistrement audio."
}
accessibilityState={{
disabled: false,
...(isRecording ? { selected: true } : null),
...(isVoiceRecording ? { selected: true } : null),
}}
onPress={hasText ? sendTextMessage : triggerMicrophoneClick}
>

View file

@ -47,6 +47,7 @@ export default function useLatestWithSubscription(
const retryCountRef = useRef(0);
const subscriptionErrorRef = useRef(null);
const timeoutIdRef = useRef(null);
const unsubscribeRef = useRef(null);
useEffect(() => {
const currentVarsHash = JSON.stringify(variables);
@ -134,6 +135,17 @@ export default function useLatestWithSubscription(
if (!subscribeToMore) return;
if (highestIdRef.current === null) return; // Wait until we have the highest ID
// Always cleanup any previous active subscription before creating a new one.
// React only runs the cleanup returned directly from the effect.
if (unsubscribeRef.current) {
try {
unsubscribeRef.current();
} catch (_e) {
// ignore
}
unsubscribeRef.current = null;
}
// Check if max retries reached and we have an error
if (retryCountRef.current >= maxRetries && subscriptionErrorRef.current) {
console.error(
@ -283,15 +295,7 @@ export default function useLatestWithSubscription(
},
});
// Cleanup on unmount or re-run
return () => {
console.log(`[${subscriptionKey}] Cleaning up subscription`);
if (timeoutIdRef.current) {
clearTimeout(timeoutIdRef.current);
timeoutIdRef.current = null;
}
unsubscribe();
};
unsubscribeRef.current = unsubscribe;
} catch (error) {
// Handle setup errors (like malformed queries)
console.error(
@ -331,22 +335,24 @@ export default function useLatestWithSubscription(
console.error("Failed to report to Sentry:", sentryError);
}
}
return () => {
if (timeoutIdRef.current) {
clearTimeout(timeoutIdRef.current);
timeoutIdRef.current = null;
}
};
}
}, backoffDelay);
// Cleanup function that will run when component unmounts or effect re-runs
return () => {
console.log(`[${subscriptionKey}] Cleaning up subscription`);
if (timeoutIdRef.current) {
clearTimeout(timeoutIdRef.current);
timeoutIdRef.current = null;
}
if (unsubscribeRef.current) {
try {
unsubscribeRef.current();
} catch (_e) {
// ignore
}
unsubscribeRef.current = null;
}
};
}, [
skip,

View file

@ -40,6 +40,7 @@ export default function useStreamQueryWithSubscription(
const retryCountRef = useRef(0);
const subscriptionErrorRef = useRef(null);
const timeoutIdRef = useRef(null);
const unsubscribeRef = useRef(null);
useEffect(() => {
const currentVarsHash = JSON.stringify(variables);
@ -124,6 +125,18 @@ export default function useStreamQueryWithSubscription(
if (skip) return; // If skipping, do nothing
if (!subscribeToMore) return;
// If we're about to (re)subscribe, always cleanup any previous subscription first.
// This is critical because React effect cleanups must be returned synchronously
// from the effect, not from inside async callbacks.
if (unsubscribeRef.current) {
try {
unsubscribeRef.current();
} catch (_e) {
// ignore
}
unsubscribeRef.current = null;
}
// Check if max retries reached and we have an error - this check must be done regardless of other conditions
if (retryCountRef.current >= maxRetries && subscriptionErrorRef.current) {
console.error(
@ -289,15 +302,7 @@ export default function useStreamQueryWithSubscription(
},
});
// Cleanup on unmount or re-run
return () => {
console.log(`[${subscriptionKey}] Cleaning up subscription`);
if (timeoutIdRef.current) {
clearTimeout(timeoutIdRef.current);
timeoutIdRef.current = null;
}
unsubscribe();
};
unsubscribeRef.current = unsubscribe;
} catch (error) {
// Handle setup errors (like malformed queries)
console.error(
@ -337,22 +342,24 @@ export default function useStreamQueryWithSubscription(
console.error("Failed to report to Sentry:", sentryError);
}
}
return () => {
if (timeoutIdRef.current) {
clearTimeout(timeoutIdRef.current);
timeoutIdRef.current = null;
}
};
}
}, backoffDelay);
// Cleanup function that will run when component unmounts or effect re-runs
return () => {
console.log(`[${subscriptionKey}] Cleaning up subscription`);
if (timeoutIdRef.current) {
clearTimeout(timeoutIdRef.current);
timeoutIdRef.current = null;
}
if (unsubscribeRef.current) {
try {
unsubscribeRef.current();
} catch (_e) {
// ignore
}
unsubscribeRef.current = null;
}
};
}, [
skip,

View file

@ -0,0 +1,162 @@
import { useCallback, useEffect, useRef, useState } from "react";
import {
RecordingPresets,
requestRecordingPermissionsAsync,
setAudioModeAsync,
setIsAudioActiveAsync,
useAudioRecorder,
} from "expo-audio";
// Module-level guard: log the recording audio-mode payload at most once per JS
// session, even across multiple hook instances / re-renders.
let hasLoggedAudioMode = false;

/**
 * Voice-recording hook built on `expo-audio`.
 *
 * Encapsulates recorder lifecycle (permission request, audio-session
 * configuration, prepare/record/stop) and exposes:
 *   - isRecording {boolean}  true between a successful start() and stop()
 *   - uri        {string|null}  URI of the last completed recording
 *   - start      {() => Promise<void>}  throws if permission denied or the
 *                recorder fails to start after one retry
 *   - stop       {() => Promise<string|null>}  resolves the recording URI
 *   - reset      {() => void}  clears uri/isRecording state only
 */
export default function useVoiceRecorder() {
  // Latest native recorder instance; kept in a ref so async callbacks
  // (start/stop/unmount cleanup) always act on the current SharedObject.
  const recorderRef = useRef(null);
  const [isRecording, setIsRecording] = useState(false);
  // URI of the last completed recording (null until stop() resolves one).
  const [uri, setUri] = useState(null);

  // NOTE: `expo-audio` doesn't export `AudioRecorder` as a runtime JS class.
  // The supported API is `useAudioRecorder`, which returns a native-backed SharedObject.
  const preset =
    RecordingPresets?.HIGH_QUALITY || RecordingPresets?.LOW_QUALITY;
  if (!preset) {
    // Fail loudly at render time rather than at record time: without a preset
    // the recorder cannot be configured at all.
    throw new Error(
      "expo-audio RecordingPresets are not available; cannot start recording",
    );
  }
  const recorder = useAudioRecorder({
    ...preset,
    isMeteringEnabled: true,
  });

  // Keep the ref pointing at the recorder instance for this render; on
  // teardown only null it out if it still refers to the same instance (a
  // newer render may already have replaced it).
  useEffect(() => {
    recorderRef.current = recorder;
    return () => {
      if (recorderRef.current === recorder) {
        recorderRef.current = null;
      }
    };
  }, [recorder]);

  // Best-effort stop of any in-flight recording; never throws, always leaves
  // isRecording false.
  const cleanupRecording = useCallback(async () => {
    const recorder = recorderRef.current;
    if (recorder) {
      try {
        if (recorder.isRecording) {
          await recorder.stop();
        }
      } catch (_e) {
        // no-op
      }
    }
    setIsRecording(false);
  }, []);

  const start = useCallback(async () => {
    // Reset any previous recording before starting a new one
    await cleanupRecording();
    setUri(null);

    const permission = await requestRecordingPermissionsAsync();
    if (!permission?.granted) {
      throw new Error("Microphone permission not granted");
    }

    // Configure audio mode for recording (iOS & Android)
    const recordingAudioMode = {
      allowsRecording: true,
      playsInSilentMode: true,
      interruptionMode: "doNotMix",
      interruptionModeAndroid: "doNotMix",
      shouldRouteThroughEarpiece: false,
      // Foreground-first: keep the audio session inactive in background.
      shouldPlayInBackground: false,
    };
    if (!hasLoggedAudioMode) {
      console.log("[useVoiceRecorder] audio mode set", recordingAudioMode);
      hasLoggedAudioMode = true;
    }
    await setAudioModeAsync(recordingAudioMode);

    // Ordering matters here: activate the session, prepare, then record.
    const prepareAndStart = async () => {
      // Activation failure is tolerated — presumably prepare can still
      // succeed on some platforms; TODO confirm against expo-audio docs.
      await setIsAudioActiveAsync(true).catch(() => {});
      console.log("[useVoiceRecorder] preparing recorder");
      await recorder.prepareToRecordAsync();
      console.log("[useVoiceRecorder] starting recorder");
      recorder.record();
      setIsRecording(true);
    };

    try {
      await prepareAndStart();
    } catch (error) {
      console.log("[useVoiceRecorder] recorder start failed", error);
      // One controlled retry for iOS: reset the audio session and try once more.
      try {
        await cleanupRecording();
        await setAudioModeAsync(recordingAudioMode);
        // Short settle delay — presumably gives the native audio session
        // time to release before re-preparing; TODO confirm necessity.
        await new Promise((r) => setTimeout(r, 150));
        await prepareAndStart();
        return;
      } catch (_retryError) {
        console.log("[useVoiceRecorder] recorder retry failed", _retryError);
      }
      // Retry also failed: make sure nothing is left recording, then
      // surface the ORIGINAL error to the caller.
      try {
        if (recorderRef.current?.isRecording) {
          await recorderRef.current.stop();
        }
      } catch (_e) {
        // ignore cleanup failures
      } finally {
        // keep recorder instance; hook will manage its lifecycle
        setIsRecording(false);
      }
      throw error;
    }
  }, [cleanupRecording, recorder]);

  // Stop the current recording and resolve its URI (null if no recorder or
  // no recording was produced). Safe to call when not recording.
  const stop = useCallback(async () => {
    const recorder = recorderRef.current;
    if (!recorder) {
      setIsRecording(false);
      return null;
    }
    try {
      await recorder.stop();
    } catch (_e) {
      // ignore errors from already-stopped/unloaded recordings
    }
    const recordingUri = recorder.uri;
    setUri(recordingUri ?? null);
    setIsRecording(false);
    return recordingUri ?? null;
  }, []);

  // Clears hook state only; does NOT stop an in-flight native recording —
  // callers should stop() first if one may be active.
  const reset = useCallback(() => {
    setUri(null);
    setIsRecording(false);
  }, []);

  // Unmount safety net: best-effort stop of any recording still running.
  useEffect(() => {
    return () => {
      const recorder = recorderRef.current;
      if (recorder) {
        if (recorder.isRecording) {
          recorder.stop().catch(() => {});
        }
      }
    };
  }, []);

  return {
    isRecording,
    uri,
    start,
    stop,
    reset,
  };
}

View file

@ -0,0 +1,74 @@
import { useEffect, useRef } from "react";
import { useNetworkState, networkActions } from "~/stores";
import network from "~/network";
import { createLogger } from "~/lib/logger";
import { NETWORK_SCOPES } from "~/lib/logger/scopes";
const watchdogLogger = createLogger({
module: NETWORK_SCOPES.WEBSOCKET,
feature: "watchdog",
});
const HEARTBEAT_STALE_MS = 45_000;
const CHECK_EVERY_MS = 10_000;
const MIN_RESTART_INTERVAL_MS = 30_000;
/**
 * Watchdog for the GraphQL websocket connection.
 *
 * Every CHECK_EVERY_MS it compares the last heartbeat timestamp against
 * HEARTBEAT_STALE_MS. When the heartbeat is stale (and we are online and
 * nominally connected) it:
 *   1. restarts the websocket transport (first-line recovery), and
 *   2. schedules a re-check 10s later that triggers a full client reload
 *      ONLY if the heartbeat is still stale (second-line recovery).
 *
 * Restarts are throttled to one per MIN_RESTART_INTERVAL_MS.
 *
 * @param {object}  [options]
 * @param {boolean} [options.enabled=true] - disable the watchdog entirely.
 */
export default function useWsWatchdog({ enabled = true } = {}) {
  const { wsConnected, wsLastHeartbeatDate, hasInternetConnection } =
    useNetworkState([
      "wsConnected",
      "wsLastHeartbeatDate",
      "hasInternetConnection",
    ]);

  const lastRestartRef = useRef(0);

  // Always points at the MOST RECENT heartbeat date. Reading it through a
  // ref (instead of the effect closure) fixes two defects:
  //   1. the delayed "still stale?" re-check used to capture the stale
  //      pre-restart date and would trigger a full reload even after the
  //      heartbeat had recovered;
  //   2. the polling interval no longer needs wsLastHeartbeatDate in its
  //      dependency list, so it isn't torn down and recreated on every
  //      single heartbeat touch (WSTouch fires on every ping/pong).
  const lastHeartbeatRef = useRef(wsLastHeartbeatDate);
  lastHeartbeatRef.current = wsLastHeartbeatDate;

  useEffect(() => {
    if (!enabled) return undefined;

    // Pending second-line-recovery timer; tracked so cleanup can cancel it.
    // The original leaked this timeout across unmount/effect re-runs.
    let reloadTimeoutId = null;

    const interval = setInterval(() => {
      if (!hasInternetConnection) return;
      if (!wsConnected) return;

      const heartbeatDate = lastHeartbeatRef.current;
      if (!heartbeatDate) return;
      const last = Date.parse(heartbeatDate);
      if (!Number.isFinite(last)) return;

      const age = Date.now() - last;
      if (age < HEARTBEAT_STALE_MS) return;

      // Throttle recovery attempts.
      const now = Date.now();
      if (now - lastRestartRef.current < MIN_RESTART_INTERVAL_MS) return;
      lastRestartRef.current = now;

      watchdogLogger.warn("WS heartbeat stale, triggering recovery", {
        ageMs: age,
        lastHeartbeatDate: heartbeatDate,
      });

      try {
        // First line recovery: restart websocket transport
        network.apolloClient?.restartWS?.();
      } catch (error) {
        watchdogLogger.error("WS restart failed", { error });
      }

      // Second line recovery: if WS stays stale, do a full client reload.
      reloadTimeoutId = setTimeout(() => {
        reloadTimeoutId = null;
        // Re-read the CURRENT heartbeat so a recovered connection cancels
        // the reload instead of reloading on the stale pre-restart value.
        const last2 = Date.parse(lastHeartbeatRef.current);
        const age2 = Number.isFinite(last2) ? Date.now() - last2 : Infinity;
        if (age2 >= HEARTBEAT_STALE_MS) {
          watchdogLogger.warn(
            "WS still stale after restart, triggering reload",
            {
              ageMs: age2,
            },
          );
          networkActions.triggerReload();
        }
      }, 10_000);
    }, CHECK_EVERY_MS);

    return () => {
      clearInterval(interval);
      if (reloadTimeoutId) {
        clearTimeout(reloadTimeoutId);
      }
    };
  }, [enabled, hasInternetConnection, wsConnected]);
}

View file

@ -81,6 +81,7 @@ export default function createWsLink({ store, GRAPHQL_WS_URL }) {
activeSocket = socket;
reconnectAttempts = 0; // Reset attempts on successful connection
networkActions.WSConnected();
networkActions.WSTouch();
cancelReconnect(); // Cancel any pending reconnects
// Clear any lingering ping timeouts
@ -114,6 +115,7 @@ export default function createWsLink({ store, GRAPHQL_WS_URL }) {
},
ping: (received) => {
// wsLogger.debug("WebSocket ping", { received });
networkActions.WSTouch();
if (!received) {
// Clear any existing ping timeout
if (pingTimeout) {
@ -138,6 +140,7 @@ export default function createWsLink({ store, GRAPHQL_WS_URL }) {
},
pong: (received) => {
// wsLogger.debug("WebSocket pong", { received });
networkActions.WSTouch();
if (received) {
clearTimeout(pingTimeout); // pong is received, clear connection close timeout
}

View file

@ -17,11 +17,14 @@ export default withConnectivity(function Params() {
deviceId,
},
});
if (loading || !data) {
if (loading) {
return <Loader />;
}
if (error) {
return <Error />;
}
if (!data) {
return <Error />;
}
return <ParamsView data={data} />;
});

View file

@ -4,6 +4,7 @@ import { ScrollView, View } from "react-native";
import Loader from "~/components/Loader";
import { useSubscription } from "@apollo/client";
import Error from "~/components/Error";
import { LOAD_PROFILE_SUBSCRIPTION } from "./gql";
@ -23,7 +24,7 @@ const profileLogger = createLogger({
export default withConnectivity(function Profile({ navigation, route }) {
const { userId } = useSessionState(["userId"]);
// profileLogger.debug("Profile user ID", { userId });
const { data, loading, restart } = useSubscription(
const { data, loading, error, restart } = useSubscription(
LOAD_PROFILE_SUBSCRIPTION,
{
variables: {
@ -44,10 +45,21 @@ export default withConnectivity(function Profile({ navigation, route }) {
});
}, [navigation]);
if (loading || !data?.selectOneUser) {
if (loading) {
return <Loader />;
}
if (error) {
profileLogger.error("Profile subscription error", { error });
return <Error />;
}
if (!data?.selectOneUser) {
// No error surfaced, but no payload either. Avoid infinite loader.
profileLogger.error("Profile subscription returned no user", { userId });
return <Error />;
}
return (
<ScrollView
style={{

View file

@ -6,6 +6,7 @@ export default createAtom(({ merge, get }) => {
wsConnected: false,
wsConnectedDate: null,
wsClosedDate: null,
wsLastHeartbeatDate: null,
triggerReload: false,
initialized: true,
hasInternetConnection: true,
@ -27,6 +28,7 @@ export default createAtom(({ merge, get }) => {
merge({
wsConnected: true,
wsConnectedDate: new Date().toISOString(),
wsLastHeartbeatDate: new Date().toISOString(),
});
},
WSClosed: () => {
@ -40,6 +42,12 @@ export default createAtom(({ merge, get }) => {
wsClosedDate: new Date().toISOString(),
});
},
WSTouch: () => {
// Update whenever we get any WS-level signal: connected, ping/pong, or a message.
merge({
wsLastHeartbeatDate: new Date().toISOString(),
});
},
setHasInternetConnection: (status) =>
merge({ hasInternetConnection: status }),
},

View file

@ -7066,7 +7066,6 @@ __metadata:
eventemitter3: "npm:^5.0.1"
expo: "npm:~53.0.23"
expo-audio: "npm:~0.4.9"
expo-av: "npm:~15.1.7"
expo-build-properties: "npm:~0.14.8"
expo-constants: "npm:~17.1.7"
expo-contacts: "npm:~14.2.5"
@ -10594,21 +10593,6 @@ __metadata:
languageName: node
linkType: hard
"expo-av@npm:~15.1.7":
version: 15.1.7
resolution: "expo-av@npm:15.1.7"
peerDependencies:
expo: "*"
react: "*"
react-native: "*"
react-native-web: "*"
peerDependenciesMeta:
react-native-web:
optional: true
checksum: 10/8f3055b68cac76b627116cf93a63bebdacb8c0d22f630f9fdae7dc74633a945d9a469421fce0f4c345c730eda0d039d78c88e59c09e2219bfde7e259d42981ba
languageName: node
linkType: hard
"expo-build-properties@npm:~0.14.8":
version: 0.14.8
resolution: "expo-build-properties@npm:0.14.8"