fix(up): expo-audio

devthejo 2025-10-01 16:37:22 +02:00
commit 4d726d3f1a
No known key found for this signature in database
GPG key ID: 00CCA7A92B1D5351
6 changed files with 449 additions and 447 deletions

View file

@@ -197,6 +197,7 @@ let config = {
 },
 },
 ],
+"expo-audio",
 "./plugins/withXcode15Fix",
 "./plugins/withCustomScheme", // Preserve URL schemes during prebuild
 ],
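Note: expo-audio ships an Expo config plugin, so the bare string "expo-audio" in the plugins array should be enough to wire up the native module (including its microphone permission entries) on the next expo prebuild. A minimal sketch of the surrounding config, assuming default plugin options:

let config = {
  // ...
  plugins: [
    // ... other plugins ...
    "expo-audio",
    "./plugins/withXcode15Fix",
    "./plugins/withCustomScheme", // Preserve URL schemes during prebuild
  ],
};

export default config;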

View file

@@ -182,6 +182,9 @@
 59A6E29E61A94EC98E5B50A7 /* Fix Xcode 15 Bug */,
 07BB91F001704B368647D86B /* Fix Xcode 15 Bug */,
 DD1558A5827A42DCA5A17C1F /* Remove signature files (Xcode workaround) */,
+94EB5593B5404563AA1FB51E /* Fix Xcode 15 Bug */,
+976C0290112E4DD89069ADCE /* Fix Xcode 15 Bug */,
+0976B7D5E5CF42E49F388927 /* Remove signature files (Xcode workaround) */,
 );
 buildRules = (
 );
@@ -1175,6 +1178,7 @@ fi";
 ";
 };
-07BB91F001704B368647D86B /* Fix Xcode 15 Bug */ = {
+94EB5593B5404563AA1FB51E /* Fix Xcode 15 Bug */ = {
 isa = PBXShellScriptBuildPhase;
 buildActionMask = 2147483647;
 files = (
@@ -1192,6 +1196,41 @@ fi";
 fi";
 };
-DD1558A5827A42DCA5A17C1F /* Remove signature files (Xcode workaround) */ = {
+CEAB2B1E58724CE999FACC27 /* Remove signature files (Xcode workaround) */ = {
 isa = PBXShellScriptBuildPhase;
 buildActionMask = 2147483647;
 files = (
 );
 runOnlyForDeploymentPostprocessing = 0;
 name = "Remove signature files (Xcode workaround)";
 inputPaths = (
 );
 outputPaths = (
 );
 shellPath = /bin/sh;
 shellScript = "
 echo \"Remove signature files (Xcode workaround)\";
 rm -rf \"$CONFIGURATION_BUILD_DIR/MapLibre.xcframework-ios.signature\";
 ";
 };
+976C0290112E4DD89069ADCE /* Fix Xcode 15 Bug */ = {
+isa = PBXShellScriptBuildPhase;
+buildActionMask = 2147483647;
+files = (
+);
+runOnlyForDeploymentPostprocessing = 0;
+name = "Fix Xcode 15 Bug";
+inputPaths = (
+);
+outputPaths = (
+);
+shellPath = /bin/sh;
+shellScript = "if [ \"$XCODE_VERSION_MAJOR\" = \"1500\" ]; then
+echo \"Remove signature files (Xcode 15 workaround)\"
+find \"$BUILD_DIR/${CONFIGURATION}-iphoneos\" -name \"*.signature\" -type f | xargs -r rm
+fi";
+};
+0976B7D5E5CF42E49F388927 /* Remove signature files (Xcode workaround) */ = {
 isa = PBXShellScriptBuildPhase;
 buildActionMask = 2147483647;
 files = (

View file

@@ -116,6 +116,7 @@
 "delay": "^6.0.0",
 "eventemitter3": "^5.0.1",
 "expo": "~53.0.23",
+"expo-audio": "~0.4.9",
 "expo-av": "~15.1.7",
 "expo-build-properties": "~0.14.8",
 "expo-constants": "~17.1.7",

View file

@@ -1,13 +1,15 @@
 import React, { useState, useCallback, useEffect, useRef } from "react";
 import { View, Text, TouchableOpacity } from "react-native";
 import { MaterialCommunityIcons } from "@expo/vector-icons";
-import { Audio, InterruptionModeIOS, InterruptionModeAndroid } from "expo-av";
 import {
-AndroidOutputFormat,
+useAudioRecorder,
+createAudioPlayer,
+setAudioModeAsync,
+requestRecordingPermissionsAsync,
+RecordingPresets,
 IOSOutputFormat,
-AndroidAudioEncoder,
-IOSAudioQuality,
-} from "expo-av/build/Audio";
+AudioQuality,
+} from "expo-audio";
 import Countdown from "react-countdown";
@@ -32,25 +34,41 @@ const rightButtonIconNames = {
 const RECORDING_TIMEOUT = 59;
-const recordingSettings = {
-android: {
-extension: ".m4a",
-outputFormat: AndroidOutputFormat.MPEG_4,
-audioEncoder: AndroidAudioEncoder.AAC,
-sampleRate: 44100,
+// Speech-optimized profile (smaller files, good voice quality)
+const recordingOptionsSpeech = {
+...RecordingPresets.HIGH_QUALITY,
+// Voice-friendly sample rate & bitrate
+sampleRate: 22050,
 numberOfChannels: 1,
-bitRate: 64000,
-},
+bitRate: 24000,
 ios: {
-extension: ".m4a",
+...RecordingPresets.HIGH_QUALITY.ios,
 outputFormat: IOSOutputFormat.MPEG4AAC,
-audioQuality: IOSAudioQuality.MAX,
+// Medium is enough for voice; final quality driven by bitRate above
+audioQuality: AudioQuality.MEDIUM,
+},
+android: {
+...RecordingPresets.HIGH_QUALITY.android,
+outputFormat: "mpeg4",
+audioEncoder: "aac",
+},
+};
+// Fallback profile (broader device compatibility if speech profile fails)
+const recordingOptionsFallback = {
+...RecordingPresets.HIGH_QUALITY,
 sampleRate: 44100,
 numberOfChannels: 1,
 bitRate: 64000,
 linearPCMBitDepth: 16,
 linearPCMIsBigEndian: false,
 linearPCMIsFloat: false,
+ios: {
+...RecordingPresets.HIGH_QUALITY.ios,
+outputFormat: IOSOutputFormat.MPEG4AAC,
+audioQuality: AudioQuality.MAX,
+},
+android: {
+...RecordingPresets.HIGH_QUALITY.android,
+outputFormat: "mpeg4",
+audioEncoder: "aac",
 },
 };
@@ -79,19 +97,20 @@ export default React.memo(function ChatInput({
 const autoFocus = !hasMessages;
 const [isRecording, setIsRecording] = useState(false);
-const [recording, setRecording] = useState(null);
-const [sound, setSound] = useState(null);
+const recorder = useAudioRecorder(recordingOptionsSpeech);
+const [player, setPlayer] = useState(null);
 const insertMessage = useInsertMessage(alertId);
 useEffect(() => {
-return sound
+return player
 ? () => {
-console.log("Unloading Sound");
-sound.unloadAsync();
+try {
+player.remove();
+} catch (e) {}
 }
 : undefined;
-}, [sound]);
+}, [player]);
 const hasText = text.length > 0;
 const mode = isRecording ? MODE.RECORDING : hasText ? MODE.TEXT : MODE.EMPTY;
@@ -128,38 +147,42 @@ export default React.memo(function ChatInput({
 const startRecording = useCallback(async () => {
 try {
 console.log("Requesting permissions..");
-await Audio.requestPermissionsAsync();
-await Audio.setAudioModeAsync({
-allowsRecordingIOS: true,
-interruptionModeIOS: InterruptionModeIOS.DoNotMix,
-playsInSilentModeIOS: true,
-shouldDuckAndroid: true,
-interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
-playThroughEarpieceAndroid: false,
-staysActiveInBackground: true,
+await requestRecordingPermissionsAsync();
+await setAudioModeAsync({
+allowsRecording: true,
+interruptionMode: "doNotMix",
+playsInSilentMode: true,
+interruptionModeAndroid: "doNotMix",
+shouldRouteThroughEarpiece: false,
+shouldPlayInBackground: true,
 });
 // stop playback
-if (sound !== null) {
-await sound.unloadAsync();
-sound.setOnPlaybackStatusUpdate(null);
-setSound(null);
+if (player !== null) {
+try {
+player.remove();
+} catch (e) {}
+setPlayer(null);
 }
 console.log("Starting recording..");
-await Audio.setAudioModeAsync({
-allowsRecordingIOS: true,
-interruptionModeIOS: InterruptionModeIOS.DoNotMix,
-playsInSilentModeIOS: true,
-shouldDuckAndroid: true,
-interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
-playThroughEarpieceAndroid: false,
-staysActiveInBackground: true,
+await setAudioModeAsync({
+allowsRecording: true,
+interruptionMode: "doNotMix",
+playsInSilentMode: true,
+interruptionModeAndroid: "doNotMix",
+shouldRouteThroughEarpiece: false,
+shouldPlayInBackground: true,
 });
-const _recording = new Audio.Recording();
-try {
-await _recording.prepareToRecordAsync(recordingSettings);
-setRecording(_recording);
-await _recording.startAsync();
+// Try speech-optimized settings first
+try {
+await recorder.prepareToRecordAsync(recordingOptionsSpeech);
+} catch (optErr) {
+console.log("Speech-optimized profile failed, falling back:", optErr);
+await recorder.prepareToRecordAsync(recordingOptionsFallback);
+}
+recorder.record();
 console.log("recording");
 setIsRecording(true);
 } catch (error) {
@@ -169,42 +192,42 @@ export default React.memo(function ChatInput({
 } catch (err) {
 console.log("Failed to start recording", err);
 }
-}, [sound]);
+}, [player, recorder]);
 const stopRecording = useCallback(async () => {
 try {
-await recording.stopAndUnloadAsync();
+await recorder.stop();
 } catch (_error) {
-// Do nothing -- we are already unloaded.
+// Do nothing -- already stopped/unloaded.
 }
 if (isRecording) {
 setIsRecording(false);
 }
-}, [recording, isRecording]);
+}, [recorder, isRecording]);
 const recordedToSound = useCallback(async () => {
-await Audio.setAudioModeAsync({
-allowsRecordingIOS: false,
-interruptionModeIOS: InterruptionModeIOS.DoNotMix,
-playsInSilentModeIOS: true,
-playsInSilentLockedModeIOS: true,
-shouldDuckAndroid: true,
-interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
-playThroughEarpieceAndroid: false,
-staysActiveInBackground: true,
+await setAudioModeAsync({
+allowsRecording: false,
+interruptionMode: "doNotMix",
+playsInSilentMode: true,
+interruptionModeAndroid: "doNotMix",
+shouldRouteThroughEarpiece: false,
+shouldPlayInBackground: true,
 });
-const { sound: _sound } = await recording.createNewLoadedSoundAsync({
-isLooping: false,
-isMuted: false,
-volume: 1.0,
-rate: 1.0,
-shouldCorrectPitch: true,
-});
-setSound(_sound);
-}, [recording]);
+const status = recorder.getStatus();
+const url = status?.url;
+if (url) {
+const _player = createAudioPlayer(url);
+setPlayer(_player);
+}
+}, [recorder]);
 const uploadAudio = useCallback(async () => {
-const uri = recording.getURI();
+const { url } = recorder.getStatus();
+const uri = url;
+if (!uri) {
+throw new Error("No recording URL available");
+}
 const fd = new FormData();
 fd.append("data[alertId]", alertId);
 fd.append("data[file]", {
@@ -215,7 +238,7 @@ export default React.memo(function ChatInput({
 await network.oaFilesKy.post("audio/upload", {
 body: fd,
 });
-}, [alertId, recording]);
+}, [alertId, recorder]);
 const sendRecording = useCallback(async () => {
 await stopRecording();
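For orientation, the new recording flow above condenses to the following sketch. useVoiceRecorder is an illustrative name; the calls (permission request, audio mode setup, prepare with a preset, record, then reading url off the recorder status) are exactly the ones used in this diff, with error handling and component wiring omitted:

import {
  useAudioRecorder,
  setAudioModeAsync,
  requestRecordingPermissionsAsync,
  RecordingPresets,
} from "expo-audio";

// Illustrative hook wrapping the flow used by ChatInput above
function useVoiceRecorder() {
  const recorder = useAudioRecorder(RecordingPresets.HIGH_QUALITY);

  const start = async () => {
    await requestRecordingPermissionsAsync();
    await setAudioModeAsync({ allowsRecording: true, playsInSilentMode: true });
    await recorder.prepareToRecordAsync(RecordingPresets.HIGH_QUALITY);
    recorder.record();
  };

  const stop = async () => {
    await recorder.stop();
    // The recorded file location is read from the recorder status,
    // as in uploadAudio above
    const { url } = recorder.getStatus();
    return url;
  };

  return { start, stop };
}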

View file

@@ -1,14 +1,7 @@
-import React, { PureComponent } from "react";
-import {
-TouchableOpacity,
-Animated,
-PanResponder,
-View,
-Easing,
-} from "react-native";
-import { Audio } from "expo-av";
+import React, { useEffect, useMemo, useRef, useState } from "react";
+import { TouchableOpacity, Animated, PanResponder, View } from "react-native";
+import { useAudioPlayer, useAudioPlayerStatus } from "expo-audio";
 import { MaterialCommunityIcons } from "@expo/vector-icons";
 import sleep from "./sleep";
 import DigitalTimeString from "./DigitalTimeString";
 import useStyles from "./styles";
@@ -17,286 +10,238 @@ import withHooks from "~/hoc/withHooks";
 const TRACK_SIZE = 4;
 const THUMB_SIZE = 20;
-class AudioSlider extends PureComponent {
-constructor(props) {
-super(props);
-this.state = {
-playing: false,
-currentTime: 0, // miliseconds; value interpolated by animation.
-duration: 0,
-trackLayout: {},
-dotOffset: new Animated.ValueXY(),
-xDotOffsetAtAnimationStart: 0,
+function clamp(n, min, max) {
+return Math.max(min, Math.min(n, max));
+}
+function AudioSlider(props) {
+// Props mapping (kept compatible with previous class component)
+const { audio: audioUrl, registry, style: styleProp } = props;
+const pauseAllBeforePlay =
+props.pauseAllBeforePlay === undefined ? true : props.pauseAllBeforePlay;
+// Styles injected by withHooks HOC
+const styles = props.styles;
+// Track layout (for computing pixel & time mappings)
+const [trackLayout, setTrackLayout] = useState({ width: 0, height: 0 });
+// Thumb X position (in pixels) — single Animated.Value used both for dragging and syncing to playback
+const dotX = useRef(new Animated.Value(0)).current;
+const isDraggingRef = useRef(false);
+const xDotOffsetAtStartRef = useRef(0);
+// While dragging, we derive the current time from the thumb position for live display
+const [dragTimeMs, setDragTimeMs] = useState(0);
+// Player using new expo-audio hook API
+const player = useAudioPlayer(audioUrl, 250);
+const status = useAudioPlayerStatus(player) || {};
+const durationSec = status.duration || 0;
+const currentTimeSec = status.currentTime || 0;
+// Register in an optional registry to allow pausing other players before play
+const selfRef = useRef({
+pause: () => {
+try {
+player.pause();
+} catch {}
+},
+});
+useEffect(() => {
+selfRef.current.pause = () => {
+try {
+player.pause();
+} catch {}
+};
+}, [player]);
+useEffect(() => {
+if (!registry) return;
+const self = selfRef.current;
+registry.register(self);
+return () => {
+try {
+registry.unregister(self);
+} catch {}
+};
+}, [registry]);
+// Ensure no looping (mimics the previous behavior)
+useEffect(() => {
+try {
+player.loop = false;
+} catch {}
+}, [player]);
+// When not dragging, keep the thumb in sync with the playback position
+useEffect(() => {
+if (!isDraggingRef.current) {
+const w = trackLayout.width || 0;
+const x = durationSec > 0 ? (currentTimeSec / durationSec) * w : 0;
+dotX.setValue(x);
+}
+// eslint-disable-next-line react-hooks/exhaustive-deps
+}, [currentTimeSec, durationSec, trackLayout.width]);
+// When playback finishes, reset to start (seek to 0 and move thumb to start)
+useEffect(() => {
+if (status.didJustFinish) {
+try {
+player.pause();
+} catch {}
+try {
+player.seekTo(0);
+} catch {}
+dotX.setValue(0);
+}
+}, [status.didJustFinish, player, dotX]);
+// Safety: if we detect playback reached or passed the end, stop and reset
+useEffect(() => {
+if (durationSec > 0 && currentTimeSec >= durationSec - 0.05) {
+try {
+player.pause();
+} catch {}
+try {
+player.seekTo(0);
+} catch {}
+dotX.setValue(0);
+}
+}, [currentTimeSec, durationSec, player, dotX]);
+const onPressPlayPause = async () => {
+if (status.playing) {
+try {
+player.pause();
+} catch {}
+return;
+}
+// Pause others first if asked
+if (registry && pauseAllBeforePlay) {
+try {
+const players = registry.getAll ? registry.getAll() : [];
+players
+.filter((p) => p !== selfRef.current && typeof p.pause === "function")
+.forEach((p) => p.pause());
+} catch {}
+}
+try {
+if (durationSec > 0 && currentTimeSec >= durationSec - 0.05) {
+await player.seekTo(0);
+}
+player.play();
+} catch {}
+};
-this._updateProps();
-// Important:
-// this.state.dotOffset.x is the actual offset
-// this.state.dotOffset.x._value is the offset from the point where the animation started
-// However, since this.state.dotOffset.x is an object and not a value, it is difficult
-// to compare it with other numbers. Therefore, the const currentOffsetX is used.
-// To print all attributes of the object see https://stackoverflow.com/questions/9209747/printing-all-the-hidden-properties-of-an-object
-this._panResponder = PanResponder.create({
+// Pan handling for seeking
+const panResponder = useMemo(
+() =>
+PanResponder.create({
 onMoveShouldSetResponderCapture: () => true,
 onMoveShouldSetPanResponderCapture: () => true,
-onPanResponderGrant: async (e, gestureState) => {
-if (this.state.playing) {
-await this.pause();
+onPanResponderGrant: async () => {
+// Pause if currently playing (mimic previous behavior)
+if (status.playing) {
+try {
+player.pause();
+} catch {}
 }
-await this.setState({
-xDotOffsetAtAnimationStart: this.state.dotOffset.x._value,
+isDraggingRef.current = true;
+// Initialize offset for drag
+const currentX = dotX.__getValue();
+xDotOffsetAtStartRef.current = currentX;
+dotX.setOffset(currentX);
+dotX.setValue(0);
+// While dragging, update displayed time
+dotX.addListener(({ value }) => {
+const w = trackLayout.width || 1;
+const currentOffset =
+xDotOffsetAtStartRef.current +
+(typeof value === "number" ? value : 0);
+const clampedX = clamp(currentOffset, 0, w);
+const percent = w > 0 ? clampedX / w : 0;
+const ms = Math.round(percent * durationSec * 1000);
+setDragTimeMs(ms);
 });
-await this.state.dotOffset.setOffset({
-x: this.state.dotOffset.x._value,
-});
-await this.state.dotOffset.setValue({ x: 0, y: 0 });
 },
-onPanResponderMove: (e, gestureState) => {
-Animated.event([
-null,
-{ dx: this.state.dotOffset.x, dy: this.state.dotOffset.y },
-])(e, gestureState);
-},
-onPanResponderTerminationRequest: () => false,
-onPanResponderTerminate: async (evt, gestureState) => {
-// Another component has become the responder, so this gesture is cancelled.
-const currentOffsetX =
-this.state.xDotOffsetAtAnimationStart + this.state.dotOffset.x._value;
-if (
-currentOffsetX < 0 ||
-currentOffsetX > this.state.trackLayout.width
-) {
-await this.state.dotOffset.setValue({
-x: -this.state.xDotOffsetAtAnimationStart,
-y: 0,
-});
-}
-await this.state.dotOffset.flattenOffset();
-await this.mapAudioToCurrentTime();
-},
-onPanResponderRelease: async (e, { vx }) => {
-const currentOffsetX =
-this.state.xDotOffsetAtAnimationStart + this.state.dotOffset.x._value;
-if (
-currentOffsetX < 0 ||
-currentOffsetX > this.state.trackLayout.width
-) {
-await this.state.dotOffset.setValue({
-x: -this.state.xDotOffsetAtAnimationStart,
-y: 0,
-});
-}
-await this.state.dotOffset.flattenOffset();
-await this.mapAudioToCurrentTime();
-},
-});
-}
-_updateProps() {
-const props = this.props;
-this.registry = props.registry;
-this.style = props.style || {};
-if (this.registry) {
-this.registry.register(this);
-}
-const { pauseAllBeforePlay = true } = props;
-this.pauseAllBeforePlay = pauseAllBeforePlay;
-}
-componentDidUpdate() {
-this._updateProps();
-}
-mapAudioToCurrentTime = async () => {
-if (!this.soundObject) return;
-await this.soundObject.setPositionAsync(this.state.currentTime);
-};
-onPressPlayPause = async () => {
-if (this.state.playing) {
-await this.pause();
-return;
-}
-await this.play();
-};
-play = async () => {
-if (!this.soundObject) return;
-if (this.registry && this.pauseAllBeforePlay) {
-const players = this.registry.getAll();
-await Promise.all(
-players.filter((p) => this !== p).map((p) => p.pause()),
-);
-}
-await this.soundObject.playAsync();
-this.setState({ playing: true }); // This is for the play-button to go to play
-this.startMovingDot();
-};
-pause = async () => {
-if (!this.soundObject) return;
-await this.soundObject.pauseAsync();
-this.setState({ playing: false }); // This is for the play-button to go to pause
-Animated.timing(this.state.dotOffset, { useNativeDriver: false }).stop(); // Will also call animationPausedOrStopped()
-};
-startMovingDot = async () => {
-if (!this.soundObject) return;
-const status = await this.soundObject.getStatusAsync();
-const durationLeft = status["durationMillis"] - status["positionMillis"];
-Animated.timing(this.state.dotOffset, {
-toValue: { x: this.state.trackLayout.width, y: 0 },
-duration: durationLeft,
-easing: Easing.linear,
+onPanResponderMove: Animated.event([null, { dx: dotX }], {
 useNativeDriver: false,
-}).start(() => this.animationPausedOrStopped());
-};
+}),
-animationPausedOrStopped = async () => {
-if (!this.state.playing) {
-// Audio has been paused
-return;
-}
-if (!this.soundObject) return;
-// Animation-duration is over (reset Animation and Audio):
-await sleep(200); // In case animation has finished, but audio has not
-this.setState({ playing: false });
-await this.state.dotOffset.setValue({ x: 0, y: 0 });
-// this.state.dotOffset.setValue(0);
-await this.soundObject.setPositionAsync(0);
-};
+onPanResponderTerminationRequest: () => false,
-handlePlaybackFinished = async () => {
-// console.log(`[AudioSlider] Playback finished, resetting for replay`);
-// Reset for replay instead of unloading
-this.setState({ playing: false });
-await this.state.dotOffset.setValue({ x: 0, y: 0 });
-if (this.soundObject) {
-await this.soundObject.stopAsync();
-}
-};
+onPanResponderTerminate: async () => {
+// Another component took the responder
+dotX.removeAllListeners();
-measureTrack = (event) => {
-this.setState({ trackLayout: event.nativeEvent.layout }); // {x, y, width, height}
-};
+const w = trackLayout.width || 1;
+const value = dotX.__getValue();
+const currentOffset =
+xDotOffsetAtStartRef.current +
+(typeof value === "number" ? value : 0);
-async componentDidMount() {
-// https://github.com/olapiv/expo-audio-player/issues/13
-const audioUrl = this.props.audio;
-const loadAudio = async () => {
-const tryLoad = async (ext) => {
-// console.log(`[AudioSlider] Attempting to load with extension: ${ext}`);
-const { sound } = await Audio.Sound.createAsync({
-uri: audioUrl,
-overrideFileExtensionAndroid: ext,
-});
-return sound;
-};
-let lastError = null;
+let clampedX = clamp(currentOffset, 0, w);
+dotX.flattenOffset();
+dotX.setValue(clampedX);
+if (durationSec > 0) {
+const targetSec = (clampedX / w) * durationSec;
 try {
-// First try with m4a (preferred)
-const sound = await tryLoad("m4a");
-// console.log(`[AudioSlider] Successfully loaded with m4a extension`);
-this.soundObject = sound;
-await this.soundObject.setIsLoopingAsync(false);
-this.soundObject.setOnPlaybackStatusUpdate((status) => {
-if (!status.didJustFinish) return;
-this.handlePlaybackFinished();
-});
-return;
-} catch (err1) {
-// console.log(`[AudioSlider] Failed to load with m4a:`, err1.message);
-lastError = err1;
-try {
-// Fallback to mp4
-const sound = await tryLoad("mp4");
-// console.log(`[AudioSlider] Successfully loaded with mp4 extension`);
-this.soundObject = sound;
-await this.soundObject.setIsLoopingAsync(false);
-this.soundObject.setOnPlaybackStatusUpdate((status) => {
-if (!status.didJustFinish) return;
-this.handlePlaybackFinished();
-});
-return;
-} catch (err2) {
-// console.log(`[AudioSlider] Failed to load with mp4:`, err2.message);
-lastError = err2;
-try {
-// Last fallback to aac
-const sound = await tryLoad("aac");
-// console.log(`[AudioSlider] Successfully loaded with aac extension`);
-this.soundObject = sound;
-await this.soundObject.setIsLoopingAsync(false);
-this.soundObject.setOnPlaybackStatusUpdate((status) => {
-if (!status.didJustFinish) return;
-this.handlePlaybackFinished();
-});
-return;
-} catch (err3) {
-// console.log(`[AudioSlider] Failed to load with aac:`, err3.message);
-lastError = err3;
-}
-}
+await player.seekTo(targetSec);
+} catch {}
 }
-// All attempts failed
-console.error(
-`[AudioSlider] All load attempts failed for ${audioUrl}. Last error:`,
-lastError,
+isDraggingRef.current = false;
+setDragTimeMs(0);
+},
+onPanResponderRelease: async () => {
+dotX.removeAllListeners();
+const w = trackLayout.width || 1;
+const value = dotX.__getValue();
+const currentOffset =
+xDotOffsetAtStartRef.current +
+(typeof value === "number" ? value : 0);
+let clampedX = clamp(currentOffset, 0, w);
+dotX.flattenOffset();
+dotX.setValue(clampedX);
+if (durationSec > 0) {
+const targetSec = (clampedX / w) * durationSec;
+try {
+await player.seekTo(targetSec);
+} catch {}
+}
+isDraggingRef.current = false;
+setDragTimeMs(0);
+},
+}),
+[dotX, durationSec, player, status.playing, trackLayout.width],
 );
+const measureTrack = (event) => {
+setTrackLayout(event.nativeEvent.layout || {});
+};
-await loadAudio();
+// Times for display (DigitalTimeString expects milliseconds)
+const durationMs = Math.round(durationSec * 1000);
+const currentTimeMs = isDraggingRef.current
+? dragTimeMs
+: Math.round(currentTimeSec * 1000);
-if (!this.soundObject) {
-// Loading failed; avoid further calls and leave UI inert or show error
-console.log(
-`[AudioSlider] No sound object created, setting duration to 0`,
-);
-this.setState({ duration: 0 });
-return;
-}
-try {
-const status = await this.soundObject.getStatusAsync();
-this.setState({ duration: status.durationMillis });
-} catch (error) {
-console.log("Error getting audio status:", error);
-this.setState({ duration: 0 });
-return;
-}
-// This requires measureTrack to have been called.
-this.state.dotOffset.addListener(() => {
-const animatedCurrentTime = this.state.dotOffset.x
-.interpolate({
-inputRange: [0, this.state.trackLayout.width],
-outputRange: [0, this.state.duration],
-extrapolate: "clamp",
-})
-.__getValue();
-this.setState({ currentTime: animatedCurrentTime });
-});
-}
-async componentWillUnmount() {
-if (this.soundObject) {
-await this.soundObject.unloadAsync();
-}
-this.state.dotOffset.removeAllListeners();
-if (this.registry) {
-this.registry.unregister(this);
-}
-}
-render() {
 return (
 <View
 style={{
@@ -328,28 +273,28 @@ class AudioSlider extends PureComponent {
 paddingRight: THUMB_SIZE,
 zIndex: 2,
 }}
-onPress={this.onPressPlayPause}
+onPress={onPressPlayPause}
 >
-{this.state.playing ? (
+{status.playing ? (
 <MaterialCommunityIcons
 name="pause-circle-outline"
 size={30}
-style={[this.props.styles.controlIcon, this.style.controlIcon]}
+style={[styles.controlIcon, styleProp?.controlIcon]}
 />
 ) : (
 <MaterialCommunityIcons
 name="play-circle-outline"
 size={30}
-style={[this.props.styles.controlIcon, this.style.controlIcon]}
+style={[styles.controlIcon, styleProp?.controlIcon]}
 />
 )}
 </TouchableOpacity>
 <Animated.View
-onLayout={this.measureTrack}
+onLayout={measureTrack}
 style={[
-this.props.styles.slideBar,
-this.style.slideBar,
+styles.slideBar,
+styleProp?.slideBar,
 {
 height: TRACK_SIZE,
 borderRadius: TRACK_SIZE / 2,
@@ -366,39 +311,21 @@
 left: -((THUMB_SIZE * 4) / 2),
 width: THUMB_SIZE * 4,
 height: THUMB_SIZE * 4,
-transform: [
-{
-translateX: this.state.dotOffset.x.interpolate({
-inputRange: [
-0,
-this.state.trackLayout.width != undefined
-? this.state.trackLayout.width
-: 1,
-],
-outputRange: [
-0,
-this.state.trackLayout.width != undefined
-? this.state.trackLayout.width
-: 1,
-],
-extrapolate: "clamp",
-}),
-},
-],
+transform: [{ translateX: dotX }],
 }}
-{...this._panResponder.panHandlers}
+{...panResponder.panHandlers}
 >
 <View
 style={[
-this.props.styles.slideCursor,
-this.style.slideCursor,
+styles.slideCursor,
+styleProp?.slideCursor,
 {
 width: THUMB_SIZE,
 height: THUMB_SIZE,
 borderRadius: THUMB_SIZE / 2,
 },
 ]}
-></View>
+/>
 </Animated.View>
 </Animated.View>
 </View>
@@ -410,12 +337,11 @@ class AudioSlider extends PureComponent {
 justifyContent: "space-between",
 }}
 >
-<DigitalTimeString time={this.state.currentTime} style={this.style} />
-<DigitalTimeString time={this.state.duration} style={this.style} />
+<DigitalTimeString time={currentTimeMs} style={styleProp} />
+<DigitalTimeString time={durationMs} style={styleProp} />
 </View>
 </View>
 );
 }
-}
 export default withHooks(AudioSlider, () => {
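The playback side of the migration condenses to a similar sketch. MiniAudioPlayer and its UI are illustrative; the hooks, the 250 ms status update interval, and the seek-to-zero-on-replay check are the ones used in the AudioSlider diff above:

import React from "react";
import { Text, TouchableOpacity } from "react-native";
import { useAudioPlayer, useAudioPlayerStatus } from "expo-audio";

function MiniAudioPlayer({ url }) {
  const player = useAudioPlayer(url, 250);
  const status = useAudioPlayerStatus(player) || {};

  const toggle = async () => {
    if (status.playing) {
      player.pause();
      return;
    }
    // Restart from the beginning if playback already reached the end
    if (status.duration > 0 && status.currentTime >= status.duration - 0.05) {
      await player.seekTo(0);
    }
    player.play();
  };

  return (
    <TouchableOpacity onPress={toggle}>
      <Text>{status.playing ? "Pause" : "Play"}</Text>
    </TouchableOpacity>
  );
}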

View file

@@ -7102,6 +7102,7 @@ __metadata:
 eslint-plugin-unused-imports: "npm:^3.0.0"
 eventemitter3: "npm:^5.0.1"
 expo: "npm:~53.0.23"
+expo-audio: "npm:~0.4.9"
 expo-av: "npm:~15.1.7"
 expo-build-properties: "npm:~0.14.8"
 expo-constants: "npm:~17.1.7"
@@ -10618,6 +10619,17 @@
 languageName: node
 linkType: hard
+"expo-audio@npm:~0.4.9":
+version: 0.4.9
+resolution: "expo-audio@npm:0.4.9"
+peerDependencies:
+expo: "*"
+react: "*"
+react-native: "*"
+checksum: 10/874527adcf03e044770fc64fa9d58f735528bd40ebe038635ebf5ef34a21cdbce0115c9b2b1406604b9622f665d27a3a7eda417275322f2506a77f74c359724f
+languageName: node
+linkType: hard
 "expo-av@npm:~15.1.7":
 version: 15.1.7
 resolution: "expo-av@npm:15.1.7"