diff --git a/app.config.js b/app.config.js
index 09fc17a..d9806a7 100644
--- a/app.config.js
+++ b/app.config.js
@@ -195,6 +195,7 @@ let config = {
         },
       },
     ],
+    "expo-audio",
     "./plugins/withXcode15Fix",
     "./plugins/withCustomScheme", // Preserve URL schemes during prebuild
   ],
diff --git a/ios/AlerteSecours.xcodeproj/project.pbxproj b/ios/AlerteSecours.xcodeproj/project.pbxproj
index 8cbcbac..e65689a 100644
--- a/ios/AlerteSecours.xcodeproj/project.pbxproj
+++ b/ios/AlerteSecours.xcodeproj/project.pbxproj
@@ -180,7 +180,9 @@
 				F3F5A8D7A73545D78A4D8467 /* Fix Xcode 15 Bug */,
 				BC7FCBEF8C354C749AB11067 /* Fix Xcode 15 Bug */,
 				59A6E29E61A94EC98E5B50A7 /* Fix Xcode 15 Bug */,
-				822458BA69944A72BCDBEB3B /* Remove signature files (Xcode workaround) */,
+				94EB5593B5404563AA1FB51E /* Fix Xcode 15 Bug */,
+				976C0290112E4DD89069ADCE /* Fix Xcode 15 Bug */,
+				0976B7D5E5CF42E49F388927 /* Remove signature files (Xcode workaround) */,
 			);
 			buildRules = (
 			);
@@ -1171,6 +1173,74 @@ fi";
 			shellScript = "
 echo \"Remove signature files (Xcode workaround)\";
 rm -rf \"$CONFIGURATION_BUILD_DIR/MapLibre.xcframework-ios.signature\";
+";
+		};
+		94EB5593B5404563AA1FB51E /* Fix Xcode 15 Bug */ = {
+			isa = PBXShellScriptBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+			name = "Fix Xcode 15 Bug";
+			inputPaths = (
+			);
+			outputPaths = (
+			);
+			shellPath = /bin/sh;
+			shellScript = "if [ \"$XCODE_VERSION_MAJOR\" = \"1500\" ]; then
+  echo \"Remove signature files (Xcode 15 workaround)\"
+  find \"$BUILD_DIR/${CONFIGURATION}-iphoneos\" -name \"*.signature\" -type f | xargs -r rm
+fi";
+		};
+		CEAB2B1E58724CE999FACC27 /* Remove signature files (Xcode workaround) */ = {
+			isa = PBXShellScriptBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+			name = "Remove signature files (Xcode workaround)";
+			inputPaths = (
+			);
+			outputPaths = (
+			);
+			shellPath = /bin/sh;
+			shellScript = "
+echo \"Remove signature files (Xcode workaround)\";
+rm -rf \"$CONFIGURATION_BUILD_DIR/MapLibre.xcframework-ios.signature\";
+";
+		};
+		976C0290112E4DD89069ADCE /* Fix Xcode 15 Bug */ = {
+			isa = PBXShellScriptBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+			name = "Fix Xcode 15 Bug";
+			inputPaths = (
+			);
+			outputPaths = (
+			);
+			shellPath = /bin/sh;
+			shellScript = "if [ \"$XCODE_VERSION_MAJOR\" = \"1500\" ]; then
+  echo \"Remove signature files (Xcode 15 workaround)\"
+  find \"$BUILD_DIR/${CONFIGURATION}-iphoneos\" -name \"*.signature\" -type f | xargs -r rm
+fi";
+		};
+		0976B7D5E5CF42E49F388927 /* Remove signature files (Xcode workaround) */ = {
+			isa = PBXShellScriptBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+			name = "Remove signature files (Xcode workaround)";
+			inputPaths = (
+			);
+			outputPaths = (
+			);
+			shellPath = /bin/sh;
+			shellScript = "
+echo \"Remove signature files (Xcode workaround)\";
+rm -rf \"$CONFIGURATION_BUILD_DIR/MapLibre.xcframework-ios.signature\";
 ";
 		};
 /* End PBXShellScriptBuildPhase section */
diff --git a/package.json b/package.json
index c31681e..3cffcfc 100644
--- a/package.json
+++ b/package.json
@@ -116,6 +116,7 @@
     "delay": "^6.0.0",
     "eventemitter3": "^5.0.1",
     "expo": "~53.0.23",
+    "expo-audio": "~0.4.9",
     "expo-av": "~15.1.7",
     "expo-build-properties": "~0.14.8",
     "expo-constants": "~17.1.7",
@@ -279,4 +280,4 @@
     }
   },
   "packageManager": "yarn@4.5.3"
-}
\ No newline at end of file
+}
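Note on the app.config.js change above: the bare "expo-audio" entry runs the config plugin with its defaults. If the recording flow ever needs a custom iOS microphone prompt, the entry can be expanded to the array form; a minimal sketch (the microphonePermission option is the one documented for the expo-audio config plugin, nothing in this diff sets it):

// app.config.js (sketch, not part of this change)
plugins: [
  [
    "expo-audio",
    {
      microphonePermission: "Allow $(PRODUCT_NAME) to access your microphone.",
    },
  ],
],
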
diff --git a/src/containers/ChatInput/index.js b/src/containers/ChatInput/index.js
index 789c9dc..edc3d56 100644
--- a/src/containers/ChatInput/index.js
+++ b/src/containers/ChatInput/index.js
@@ -1,13 +1,8 @@
 import React, { useState, useCallback, useEffect, useRef } from "react";
 import { View, Text, TouchableOpacity } from "react-native";
 import { MaterialCommunityIcons } from "@expo/vector-icons";
-import { Audio, InterruptionModeIOS, InterruptionModeAndroid } from "expo-av";
-import {
-  AndroidOutputFormat,
-  IOSOutputFormat,
-  AndroidAudioEncoder,
-  IOSAudioQuality,
-} from "expo-av/build/Audio";
+import * as Audio from "expo-audio";
+import { IOSOutputFormat, AudioQuality } from "expo-audio";
 
 import Countdown from "react-countdown";
 
@@ -35,8 +30,8 @@ const RECORDING_TIMEOUT = 59;
 const recordingSettings = {
   android: {
     extension: ".m4a",
-    outputFormat: AndroidOutputFormat.MPEG_4,
-    audioEncoder: AndroidAudioEncoder.AAC,
+    outputFormat: "mpeg4",
+    audioEncoder: "aac",
     sampleRate: 44100,
     numberOfChannels: 1,
     bitRate: 64000,
@@ -44,7 +39,7 @@ const recordingSettings = {
   ios: {
     extension: ".m4a",
     outputFormat: IOSOutputFormat.MPEG4AAC,
-    audioQuality: IOSAudioQuality.MAX,
+    audioQuality: AudioQuality.MAX,
     sampleRate: 44100,
     numberOfChannels: 1,
     bitRate: 64000,
@@ -128,15 +123,14 @@ export default React.memo(function ChatInput({
   const startRecording = useCallback(async () => {
     try {
       console.log("Requesting permissions..");
-      await Audio.requestPermissionsAsync();
+      await Audio.requestRecordingPermissionsAsync();
       await Audio.setAudioModeAsync({
-        allowsRecordingIOS: true,
-        interruptionModeIOS: InterruptionModeIOS.DoNotMix,
-        playsInSilentModeIOS: true,
-        shouldDuckAndroid: true,
-        interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
-        playThroughEarpieceAndroid: false,
-        staysActiveInBackground: true,
+        allowsRecording: true,
+        interruptionMode: "doNotMix",
+        playsInSilentMode: true,
+        interruptionModeAndroid: "doNotMix",
+        shouldRouteThroughEarpiece: false,
+        shouldPlayInBackground: true,
       });
       // stop playback
       if (sound !== null) {
@@ -147,13 +141,12 @@
 
     console.log("Starting recording..");
     await Audio.setAudioModeAsync({
-      allowsRecordingIOS: true,
-      interruptionModeIOS: InterruptionModeIOS.DoNotMix,
-      playsInSilentModeIOS: true,
-      shouldDuckAndroid: true,
-      interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
-      playThroughEarpieceAndroid: false,
-      staysActiveInBackground: true,
+      allowsRecording: true,
+      interruptionMode: "doNotMix",
+      playsInSilentMode: true,
+      interruptionModeAndroid: "doNotMix",
+      shouldRouteThroughEarpiece: false,
+      shouldPlayInBackground: true,
     });
     const _recording = new Audio.Recording();
     try {
@@ -184,14 +177,12 @@
 
   const recordedToSound = useCallback(async () => {
     await Audio.setAudioModeAsync({
-      allowsRecordingIOS: false,
-      interruptionModeIOS: InterruptionModeIOS.DoNotMix,
-      playsInSilentModeIOS: true,
-      playsInSilentLockedModeIOS: true,
-      shouldDuckAndroid: true,
-      interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
-      playThroughEarpieceAndroid: false,
-      staysActiveInBackground: true,
+      allowsRecording: false,
+      interruptionMode: "doNotMix",
+      playsInSilentMode: true,
+      interruptionModeAndroid: "doNotMix",
+      shouldRouteThroughEarpiece: false,
+      shouldPlayInBackground: true,
     });
     const { sound: _sound } = await recording.createNewLoadedSoundAsync({
       isLooping: false,
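Worth flagging for review: startRecording above still does new Audio.Recording(), which is an expo-av class and does not appear to be part of expo-audio's documented surface. A minimal sketch of the equivalent flow with expo-audio's hook-based recorder (names taken from the expo-audio docs; RecordingPresets.HIGH_QUALITY stands in for the custom recordingSettings in this file):

import {
  AudioModule,
  RecordingPresets,
  setAudioModeAsync,
  useAudioRecorder,
} from "expo-audio";

// Custom hook: the recorder object replaces the expo-av Audio.Recording instance.
function useVoiceMessageRecorder() {
  const recorder = useAudioRecorder(RecordingPresets.HIGH_QUALITY);

  const start = async () => {
    const { granted } = await AudioModule.requestRecordingPermissionsAsync();
    if (!granted) return;
    await setAudioModeAsync({ allowsRecording: true, playsInSilentMode: true });
    await recorder.prepareToRecordAsync();
    recorder.record();
  };

  const stop = async () => {
    await recorder.stop();
    return recorder.uri; // file:// path of the recorded m4a
  };

  return { start, stop };
}
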
diff --git a/src/lib/expo-audio-player/index.js b/src/lib/expo-audio-player/index.js
index 3b3dadc..f8d8ce2 100644
--- a/src/lib/expo-audio-player/index.js
+++ b/src/lib/expo-audio-player/index.js
@@ -1,14 +1,7 @@
-import React, { PureComponent } from "react";
-import {
-  TouchableOpacity,
-  Animated,
-  PanResponder,
-  View,
-  Easing,
-} from "react-native";
-import { Audio } from "expo-av";
+import React, { useEffect, useMemo, useRef, useState } from "react";
+import { TouchableOpacity, Animated, PanResponder, View } from "react-native";
+import { useAudioPlayer, useAudioPlayerStatus } from "expo-audio";
 import { MaterialCommunityIcons } from "@expo/vector-icons";
-import sleep from "./sleep";
 import DigitalTimeString from "./DigitalTimeString";
 
 import useStyles from "./styles";
@@ -17,405 +10,319 @@ import withHooks from "~/hoc/withHooks";
 const TRACK_SIZE = 4;
 const THUMB_SIZE = 20;
 
-class AudioSlider extends PureComponent {
-  constructor(props) {
-    super(props);
-    this.state = {
-      playing: false,
-      currentTime: 0, // miliseconds; value interpolated by animation.
-      duration: 0,
-      trackLayout: {},
-      dotOffset: new Animated.ValueXY(),
-      xDotOffsetAtAnimationStart: 0,
-    };
+function clamp(n, min, max) {
+  return Math.max(min, Math.min(n, max));
+}
 
-    this._updateProps();
+function AudioSlider(props) {
+  // Props mapping (kept compatible with previous class component)
+  const { audio: audioUrl, registry, style: styleProp } = props;
+  const pauseAllBeforePlay =
+    props.pauseAllBeforePlay === undefined ? true : props.pauseAllBeforePlay;
 
-    // Important:
-    // this.state.dotOffset.x is the actual offset
-    // this.state.dotOffset.x._value is the offset from the point where the animation started
-    // However, since this.state.dotOffset.x is an object and not a value, it is difficult
-    // to compare it with other numbers. Therefore, the const currentOffsetX is used.
-    // To print all attributes of the object see https://stackoverflow.com/questions/9209747/printing-all-the-hidden-properties-of-an-object
-    this._panResponder = PanResponder.create({
-      onMoveShouldSetResponderCapture: () => true,
-      onMoveShouldSetPanResponderCapture: () => true,
-      onPanResponderGrant: async (e, gestureState) => {
-        if (this.state.playing) {
-          await this.pause();
-        }
-        await this.setState({
-          xDotOffsetAtAnimationStart: this.state.dotOffset.x._value,
-        });
-        await this.state.dotOffset.setOffset({
-          x: this.state.dotOffset.x._value,
-        });
-        await this.state.dotOffset.setValue({ x: 0, y: 0 });
-      },
-      onPanResponderMove: (e, gestureState) => {
-        Animated.event([
-          null,
-          { dx: this.state.dotOffset.x, dy: this.state.dotOffset.y },
-        ])(e, gestureState);
-      },
-      onPanResponderTerminationRequest: () => false,
-      onPanResponderTerminate: async (evt, gestureState) => {
-        // Another component has become the responder, so this gesture is cancelled.
+  // Styles injected by withHooks HOC
+  const styles = props.styles;
 
-        const currentOffsetX =
-          this.state.xDotOffsetAtAnimationStart + this.state.dotOffset.x._value;
-        if (
-          currentOffsetX < 0 ||
-          currentOffsetX > this.state.trackLayout.width
-        ) {
-          await this.state.dotOffset.setValue({
-            x: -this.state.xDotOffsetAtAnimationStart,
-            y: 0,
-          });
-        }
-        await this.state.dotOffset.flattenOffset();
-        await this.mapAudioToCurrentTime();
-      },
-      onPanResponderRelease: async (e, { vx }) => {
-        const currentOffsetX =
-          this.state.xDotOffsetAtAnimationStart + this.state.dotOffset.x._value;
-        if (
-          currentOffsetX < 0 ||
-          currentOffsetX > this.state.trackLayout.width
-        ) {
-          await this.state.dotOffset.setValue({
-            x: -this.state.xDotOffsetAtAnimationStart,
-            y: 0,
-          });
-        }
-        await this.state.dotOffset.flattenOffset();
-        await this.mapAudioToCurrentTime();
-      },
-    });
-  }
+  // Track layout (for computing pixel & time mappings)
+  const [trackLayout, setTrackLayout] = useState({ width: 0, height: 0 });
 
-  _updateProps() {
-    const props = this.props;
-    this.registry = props.registry;
-    this.style = props.style || {};
-    if (this.registry) {
-      this.registry.register(this);
-    }
-    const { pauseAllBeforePlay = true } = props;
-    this.pauseAllBeforePlay = pauseAllBeforePlay;
-  }
+  // Thumb X position (in pixels) — single Animated.Value used both for dragging and syncing to playback
+  const dotX = useRef(new Animated.Value(0)).current;
+  const isDraggingRef = useRef(false);
+  const xDotOffsetAtStartRef = useRef(0);
 
-  componentDidUpdate() {
-    this._updateProps();
-  }
+  // While dragging, we derive the current time from the thumb position for live display
+  const [dragTimeMs, setDragTimeMs] = useState(0);
 
-  mapAudioToCurrentTime = async () => {
-    if (!this.soundObject) return;
-    await this.soundObject.setPositionAsync(this.state.currentTime);
-  };
+  // Player using new expo-audio hook API
+  const player = useAudioPlayer(audioUrl, { updateInterval: 250 });
+  const status = useAudioPlayerStatus(player) || {};
 
-  onPressPlayPause = async () => {
-    if (this.state.playing) {
-      await this.pause();
-      return;
-    }
-    await this.play();
-  };
-
-  play = async () => {
-    if (!this.soundObject) return;
-    if (this.registry && this.pauseAllBeforePlay) {
-      const players = this.registry.getAll();
-      await Promise.all(
-        players.filter((p) => this !== p).map((p) => p.pause()),
-      );
-    }
-    await this.soundObject.playAsync();
-    this.setState({ playing: true }); // This is for the play-button to go to play
-    this.startMovingDot();
-  };
-
-  pause = async () => {
-    if (!this.soundObject) return;
-    await this.soundObject.pauseAsync();
-    this.setState({ playing: false }); // This is for the play-button to go to pause
-    Animated.timing(this.state.dotOffset, { useNativeDriver: false }).stop(); // Will also call animationPausedOrStopped()
-  };
-
-  startMovingDot = async () => {
-    if (!this.soundObject) return;
-    const status = await this.soundObject.getStatusAsync();
-    const durationLeft = status["durationMillis"] - status["positionMillis"];
-
-    Animated.timing(this.state.dotOffset, {
-      toValue: { x: this.state.trackLayout.width, y: 0 },
-      duration: durationLeft,
-      easing: Easing.linear,
-      useNativeDriver: false,
-    }).start(() => this.animationPausedOrStopped());
-  };
-
-  animationPausedOrStopped = async () => {
-    if (!this.state.playing) {
-      // Audio has been paused
-      return;
-    }
-    if (!this.soundObject) return;
-    // Animation-duration is over (reset Animation and Audio):
-    await sleep(200); // In case animation has finished, but audio has not
-    this.setState({ playing: false });
-    await this.state.dotOffset.setValue({ x: 0, y: 0 });
-    // this.state.dotOffset.setValue(0);
-    await this.soundObject.setPositionAsync(0);
-  };
-
-  handlePlaybackFinished = async () => {
-    // console.log(`[AudioSlider] Playback finished, resetting for replay`);
-    // Reset for replay instead of unloading
-    this.setState({ playing: false });
-    await this.state.dotOffset.setValue({ x: 0, y: 0 });
-    if (this.soundObject) {
-      await this.soundObject.stopAsync();
-    }
-  };
-
-  measureTrack = (event) => {
-    this.setState({ trackLayout: event.nativeEvent.layout }); // {x, y, width, height}
-  };
-
-  async componentDidMount() {
-    // https://github.com/olapiv/expo-audio-player/issues/13
-
-    const audioUrl = this.props.audio;
-
-    const loadAudio = async () => {
-      const tryLoad = async (ext) => {
-        // console.log(`[AudioSlider] Attempting to load with extension: ${ext}`);
-        const { sound } = await Audio.Sound.createAsync({
-          uri: audioUrl,
-          overrideFileExtensionAndroid: ext,
-        });
-        return sound;
-      };
-
-      let lastError = null;
+  const durationSec = status.duration || 0;
+  const currentTimeSec = status.currentTime || 0;
+
+  // Register in an optional registry to allow pausing other players before play
+  const selfRef = useRef({
+    pause: () => {
       try {
-        // First try with m4a (preferred)
-        const sound = await tryLoad("m4a");
-        // console.log(`[AudioSlider] Successfully loaded with m4a extension`);
-        this.soundObject = sound;
-        await this.soundObject.setIsLoopingAsync(false);
-        this.soundObject.setOnPlaybackStatusUpdate((status) => {
-          if (!status.didJustFinish) return;
-          this.handlePlaybackFinished();
-        });
-        return;
-      } catch (err1) {
-        // console.log(`[AudioSlider] Failed to load with m4a:`, err1.message);
-        lastError = err1;
-        try {
-          // Fallback to mp4
-          const sound = await tryLoad("mp4");
-          // console.log(`[AudioSlider] Successfully loaded with mp4 extension`);
-          this.soundObject = sound;
-          await this.soundObject.setIsLoopingAsync(false);
-          this.soundObject.setOnPlaybackStatusUpdate((status) => {
-            if (!status.didJustFinish) return;
-            this.handlePlaybackFinished();
-          });
-          return;
-        } catch (err2) {
-          // console.log(`[AudioSlider] Failed to load with mp4:`, err2.message);
-          lastError = err2;
-          try {
-            // Last fallback to aac
-            const sound = await tryLoad("aac");
-            // console.log(`[AudioSlider] Successfully loaded with aac extension`);
-            this.soundObject = sound;
-            await this.soundObject.setIsLoopingAsync(false);
-            this.soundObject.setOnPlaybackStatusUpdate((status) => {
-              if (!status.didJustFinish) return;
-              this.handlePlaybackFinished();
-            });
-            return;
-          } catch (err3) {
-            // console.log(`[AudioSlider] Failed to load with aac:`, err3.message);
-            lastError = err3;
-          }
-        }
-      }
-
-      // All attempts failed
-      console.error(
-        `[AudioSlider] All load attempts failed for ${audioUrl}. Last error:`,
-        lastError,
-      );
+        player.pause();
+      } catch {}
+    },
+  });
 
-    };
-
-    await loadAudio();
+  useEffect(() => {
+    selfRef.current.pause = () => {
+      try {
+        player.pause();
+      } catch {}
+    };
+  }, [player]);
 
-    if (!this.soundObject) {
-      // Loading failed; avoid further calls and leave UI inert or show error
-      console.log(
-        `[AudioSlider] No sound object created, setting duration to 0`,
-      );
-      this.setState({ duration: 0 });
-      return;
-    }
+  useEffect(() => {
+    if (!registry) return;
+    const self = selfRef.current;
+    registry.register(self);
+    return () => {
+      try {
+        registry.unregister(self);
+      } catch {}
+    };
+  }, [registry]);
 
-    try {
-      const status = await this.soundObject.getStatusAsync();
-      this.setState({ duration: status.durationMillis });
-    } catch (error) {
-      console.log("Error getting audio status:", error);
-      this.setState({ duration: 0 });
-      return;
-    }
+  // Ensure no looping (mimics the previous behavior)
+  useEffect(() => {
+    try {
+      player.loop = false;
+    } catch {}
+  }, [player]);
 
+  // When not dragging, keep the thumb in sync with the playback position
+  useEffect(() => {
+    if (!isDraggingRef.current) {
+      const w = trackLayout.width || 0;
+      const x = durationSec > 0 ? (currentTimeSec / durationSec) * w : 0;
+      dotX.setValue(x);
+    }
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [currentTimeSec, durationSec, trackLayout.width]);
 
+  // When playback finishes, reset to start (seek to 0 and move thumb to start)
+  useEffect(() => {
+    if (status.didJustFinish) {
+      try {
+        player.seekTo(0);
+      } catch {}
+      dotX.setValue(0);
+    }
+  }, [status.didJustFinish, player, dotX]);
 
-    // This requires measureTrack to have been called.
-    this.state.dotOffset.addListener(() => {
-      const animatedCurrentTime = this.state.dotOffset.x
-        .interpolate({
-          inputRange: [0, this.state.trackLayout.width],
-          outputRange: [0, this.state.duration],
-          extrapolate: "clamp",
-        })
-        .__getValue();
-      this.setState({ currentTime: animatedCurrentTime });
-    });
-  }
+  const onPressPlayPause = async () => {
+    if (status.playing) {
+      try {
+        player.pause();
+      } catch {}
+      return;
+    }
+    // Pause others first if asked
+    if (registry && pauseAllBeforePlay) {
+      try {
+        const players = registry.getAll ? registry.getAll() : [];
+        players
+          .filter((p) => p !== selfRef.current && typeof p.pause === "function")
+          .forEach((p) => p.pause());
+      } catch {}
+    }
+
+    try {
+      player.play();
+    } catch {}
+  };
 
-  async componentWillUnmount() {
-    if (this.soundObject) {
-      await this.soundObject.unloadAsync();
-    }
-    this.state.dotOffset.removeAllListeners();
-    if (this.registry) {
-      this.registry.unregister(this);
-    }
-  }
+  // Pan handling for seeking
+  const panResponder = useMemo(
+    () =>
+      PanResponder.create({
+        onMoveShouldSetResponderCapture: () => true,
+        onMoveShouldSetPanResponderCapture: () => true,
 
+        onPanResponderGrant: async () => {
+          // Pause if currently playing (mimic previous behavior)
+          if (status.playing) {
+            try {
+              player.pause();
+            } catch {}
+          }
 
+          isDraggingRef.current = true;
+
+          // Initialize offset for drag
+          const currentX = dotX.__getValue();
+          xDotOffsetAtStartRef.current = currentX;
+          dotX.setOffset(currentX);
+          dotX.setValue(0);
+
+          // While dragging, update displayed time
+          dotX.addListener(({ value }) => {
+            const w = trackLayout.width || 1;
+            const currentOffset =
+              xDotOffsetAtStartRef.current +
+              (typeof value === "number" ? value : 0);
+            const clampedX = clamp(currentOffset, 0, w);
+            const percent = w > 0 ? clampedX / w : 0;
+            const ms = Math.round(percent * durationSec * 1000);
+            setDragTimeMs(ms);
+          });
+        },
+
+        onPanResponderMove: Animated.event([null, { dx: dotX }], {
+          useNativeDriver: false,
+        }),
+
+        onPanResponderTerminationRequest: () => false,
+
+        onPanResponderTerminate: async () => {
+          // Another component took the responder
+          dotX.removeAllListeners();
+
+          const w = trackLayout.width || 1;
+          const value = dotX.__getValue();
+          const currentOffset =
+            xDotOffsetAtStartRef.current +
+            (typeof value === "number" ? value : 0);
+
+          let clampedX = clamp(currentOffset, 0, w);
+          dotX.flattenOffset();
+          dotX.setValue(clampedX);
+
+          if (durationSec > 0) {
+            const targetSec = (clampedX / w) * durationSec;
+            try {
+              await player.seekTo(targetSec);
+            } catch {}
+          }
+
+          isDraggingRef.current = false;
+          setDragTimeMs(0);
+        },
+
+        onPanResponderRelease: async () => {
+          dotX.removeAllListeners();
+
+          const w = trackLayout.width || 1;
+          const value = dotX.__getValue();
+          const currentOffset =
+            xDotOffsetAtStartRef.current +
+            (typeof value === "number" ? value : 0);
+
+          let clampedX = clamp(currentOffset, 0, w);
+          dotX.flattenOffset();
+          dotX.setValue(clampedX);
+
+          if (durationSec > 0) {
+            const targetSec = (clampedX / w) * durationSec;
+            try {
+              await player.seekTo(targetSec);
+            } catch {}
+          }
+
+          isDraggingRef.current = false;
+          setDragTimeMs(0);
+        },
+      }),
+    [dotX, durationSec, player, status.playing, trackLayout.width],
+  );
 
+  const measureTrack = (event) => {
+    setTrackLayout(event.nativeEvent.layout || {});
+  };
 
+  // Times for display (DigitalTimeString expects milliseconds)
+  const durationMs = Math.round(durationSec * 1000);
+  const currentTimeMs = isDraggingRef.current
+    ? dragTimeMs
+    : Math.round(currentTimeSec * 1000);
 
-  render() {
-    return (
-      <View style={this.style}>
-        <TouchableOpacity onPress={this.onPressPlayPause}>
-          {this.state.playing ? (
-            <MaterialCommunityIcons name="pause" size={30} />
-          ) : (
-            <MaterialCommunityIcons name="play" size={30} />
-          )}
-        </TouchableOpacity>
-        <View onLayout={this.measureTrack} style={{ height: TRACK_SIZE }}>
-          <Animated.View
-            {...this._panResponder.panHandlers}
-            style={{
-              width: THUMB_SIZE,
-              height: THUMB_SIZE,
-              transform: [{ translateX: this.state.dotOffset.x }],
-            }}
-          />
-        </View>
-        <DigitalTimeString time={this.state.currentTime} />
-        <DigitalTimeString time={this.state.duration} />
-      </View>
-    );
-  }
+  return (
+    <View style={[styles.container, styleProp]}>
+      <TouchableOpacity onPress={onPressPlayPause}>
+        {status.playing ? (
+          <MaterialCommunityIcons name="pause" size={30} />
+        ) : (
+          <MaterialCommunityIcons name="play" size={30} />
+        )}
+      </TouchableOpacity>
+      <View
+        onLayout={measureTrack}
+        style={[styles.track, { height: TRACK_SIZE }]}
+      >
+        <Animated.View
+          {...panResponder.panHandlers}
+          style={[
+            styles.thumb,
+            {
+              width: THUMB_SIZE,
+              height: THUMB_SIZE,
+              transform: [{ translateX: dotX }],
+            },
+          ]}
+        />
+      </View>
+      <DigitalTimeString time={currentTimeMs} />
+      <DigitalTimeString time={durationMs} />
+    </View>
+  );
 }
 
 export default withHooks(AudioSlider, () => {
diff --git a/yarn.lock b/yarn.lock
index 921a658..1096c78 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -7114,6 +7114,7 @@ __metadata:
     eslint-plugin-unused-imports: "npm:^3.0.0"
     eventemitter3: "npm:^5.0.1"
     expo: "npm:~53.0.23"
+    expo-audio: "npm:~0.4.9"
     expo-av: "npm:~15.1.7"
     expo-build-properties: "npm:~0.14.8"
     expo-constants: "npm:~17.1.7"
@@ -10630,6 +10631,17 @@
     languageName: node
     linkType: hard
 
+"expo-audio@npm:~0.4.9":
+  version: 0.4.9
+  resolution: "expo-audio@npm:0.4.9"
+  peerDependencies:
+    expo: "*"
+    react: "*"
+    react-native: "*"
+  checksum: 10/874527adcf03e044770fc64fa9d58f735528bd40ebe038635ebf5ef34a21cdbce0115c9b2b1406604b9622f665d27a3a7eda417275322f2506a77f74c359724f
+  languageName: node
+  linkType: hard
+
 "expo-av@npm:~15.1.7":
   version: 15.1.7
   resolution: "expo-av@npm:15.1.7"
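For context on the registry prop that the reworked AudioSlider still honors: register, unregister, and getAll are the only methods the component calls, so any object providing them works. A minimal consumer sketch (the audioPlayerRegistry object and audioUrl prop are illustrative, not part of this diff):

import React from "react";
import AudioSlider from "~/lib/expo-audio-player";

// Starting one slider pauses every other registered player
// (pauseAllBeforePlay defaults to true).
const audioPlayerRegistry = {
  players: new Set(),
  register(p) {
    this.players.add(p);
  },
  unregister(p) {
    this.players.delete(p);
  },
  getAll() {
    return [...this.players];
  },
};

export function VoiceMessage({ audioUrl }) {
  return (
    <AudioSlider
      audio={audioUrl}
      registry={audioPlayerRegistry}
      pauseAllBeforePlay
    />
  );
}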