chore: wip
parent 0d0e694e8d
commit c72708b79f
6 changed files with 387 additions and 405 deletions

@@ -195,6 +195,7 @@ let config = {
       },
     },
   ],
+  "expo-audio",
   "./plugins/withXcode15Fix",
   "./plugins/withCustomScheme", // Preserve URL schemes during prebuild
 ],
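The local plugins referenced above are what generate the "Fix Xcode 15 Bug" shell-script phases that appear in the pbxproj hunks further down. For reference only, a plugin of this kind can be written with @expo/config-plugins roughly as below; this is a hedged sketch of an assumed implementation, not the repository's actual plugins/withXcode15Fix file.

```js
// plugins/withXcode15Fix.js -- illustrative sketch (assumed shape, not the actual plugin)
const { withXcodeProject } = require("@expo/config-plugins");

// Same script the pbxproj hunk below injects into the Xcode project.
const SCRIPT = `if [ "$XCODE_VERSION_MAJOR" = "1500" ]; then
  echo "Remove signature files (Xcode 15 workaround)"
  find "$BUILD_DIR/\${CONFIGURATION}-iphoneos" -name "*.signature" -type f | xargs -r rm
fi`;

module.exports = function withXcode15Fix(config) {
  return withXcodeProject(config, (config) => {
    const project = config.modResults;
    const phases = project.hash.project.objects.PBXShellScriptBuildPhase || {};
    // Skip if a phase with this name already exists (prebuild can run repeatedly).
    const alreadyAdded = Object.values(phases).some(
      (phase) =>
        phase &&
        typeof phase === "object" &&
        String(phase.name || "").includes("Fix Xcode 15 Bug")
    );
    if (!alreadyAdded) {
      project.addBuildPhase([], "PBXShellScriptBuildPhase", "Fix Xcode 15 Bug", null, {
        shellPath: "/bin/sh",
        shellScript: SCRIPT,
      });
    }
    return config;
  });
};
```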
@@ -180,7 +180,9 @@
 				F3F5A8D7A73545D78A4D8467 /* Fix Xcode 15 Bug */,
 				BC7FCBEF8C354C749AB11067 /* Fix Xcode 15 Bug */,
 				59A6E29E61A94EC98E5B50A7 /* Fix Xcode 15 Bug */,
-				822458BA69944A72BCDBEB3B /* Remove signature files (Xcode workaround) */,
+				94EB5593B5404563AA1FB51E /* Fix Xcode 15 Bug */,
+				976C0290112E4DD89069ADCE /* Fix Xcode 15 Bug */,
+				0976B7D5E5CF42E49F388927 /* Remove signature files (Xcode workaround) */,
 			);
 			buildRules = (
 			);
@@ -1171,6 +1173,74 @@ fi";
 			shellScript = "
 echo \"Remove signature files (Xcode workaround)\";
 rm -rf \"$CONFIGURATION_BUILD_DIR/MapLibre.xcframework-ios.signature\";
+";
+		};
+		94EB5593B5404563AA1FB51E /* Fix Xcode 15 Bug */ = {
+			isa = PBXShellScriptBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+			name = "Fix Xcode 15 Bug";
+			inputPaths = (
+			);
+			outputPaths = (
+			);
+			shellPath = /bin/sh;
+			shellScript = "if [ \"$XCODE_VERSION_MAJOR\" = \"1500\" ]; then
+echo \"Remove signature files (Xcode 15 workaround)\"
+find \"$BUILD_DIR/${CONFIGURATION}-iphoneos\" -name \"*.signature\" -type f | xargs -r rm
+fi";
+		};
+		CEAB2B1E58724CE999FACC27 /* Remove signature files (Xcode workaround) */ = {
+			isa = PBXShellScriptBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+			name = "Remove signature files (Xcode workaround)";
+			inputPaths = (
+			);
+			outputPaths = (
+			);
+			shellPath = /bin/sh;
+			shellScript = "
+echo \"Remove signature files (Xcode workaround)\";
+rm -rf \"$CONFIGURATION_BUILD_DIR/MapLibre.xcframework-ios.signature\";
+";
+		};
+		976C0290112E4DD89069ADCE /* Fix Xcode 15 Bug */ = {
+			isa = PBXShellScriptBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+			name = "Fix Xcode 15 Bug";
+			inputPaths = (
+			);
+			outputPaths = (
+			);
+			shellPath = /bin/sh;
+			shellScript = "if [ \"$XCODE_VERSION_MAJOR\" = \"1500\" ]; then
+echo \"Remove signature files (Xcode 15 workaround)\"
+find \"$BUILD_DIR/${CONFIGURATION}-iphoneos\" -name \"*.signature\" -type f | xargs -r rm
+fi";
+		};
+		0976B7D5E5CF42E49F388927 /* Remove signature files (Xcode workaround) */ = {
+			isa = PBXShellScriptBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+			name = "Remove signature files (Xcode workaround)";
+			inputPaths = (
+			);
+			outputPaths = (
+			);
+			shellPath = /bin/sh;
+			shellScript = "
+echo \"Remove signature files (Xcode workaround)\";
+rm -rf \"$CONFIGURATION_BUILD_DIR/MapLibre.xcframework-ios.signature\";
 ";
 		};
 /* End PBXShellScriptBuildPhase section */
@@ -116,6 +116,7 @@
     "delay": "^6.0.0",
     "eventemitter3": "^5.0.1",
     "expo": "~53.0.23",
+    "expo-audio": "~0.4.9",
     "expo-av": "~15.1.7",
     "expo-build-properties": "~0.14.8",
     "expo-constants": "~17.1.7",
@@ -1,13 +1,8 @@
 import React, { useState, useCallback, useEffect, useRef } from "react";
 import { View, Text, TouchableOpacity } from "react-native";
 import { MaterialCommunityIcons } from "@expo/vector-icons";
-import { Audio, InterruptionModeIOS, InterruptionModeAndroid } from "expo-av";
-import {
-  AndroidOutputFormat,
-  IOSOutputFormat,
-  AndroidAudioEncoder,
-  IOSAudioQuality,
-} from "expo-av/build/Audio";
+import * as Audio from "expo-audio";
+import { IOSOutputFormat, AudioQuality } from "expo-audio";

 import Countdown from "react-countdown";

@@ -35,8 +30,8 @@ const RECORDING_TIMEOUT = 59;
 const recordingSettings = {
   android: {
     extension: ".m4a",
-    outputFormat: AndroidOutputFormat.MPEG_4,
-    audioEncoder: AndroidAudioEncoder.AAC,
+    outputFormat: "mpeg4",
+    audioEncoder: "aac",
     sampleRate: 44100,
     numberOfChannels: 1,
     bitRate: 64000,
@@ -44,7 +39,7 @@ const recordingSettings = {
   ios: {
     extension: ".m4a",
     outputFormat: IOSOutputFormat.MPEG4AAC,
-    audioQuality: IOSAudioQuality.MAX,
+    audioQuality: AudioQuality.MAX,
     sampleRate: 44100,
     numberOfChannels: 1,
     bitRate: 64000,
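Read together, the two hunks above leave recordingSettings looking like this. It is reassembled verbatim from the diff; the commit is still marked "wip", so whether expo-audio's recorder accepts this expo-av-style options object is not settled here.

```js
import { IOSOutputFormat, AudioQuality } from "expo-audio";

// Recording settings after the migration, as they appear in this diff.
const recordingSettings = {
  android: {
    extension: ".m4a",
    outputFormat: "mpeg4", // was AndroidOutputFormat.MPEG_4 under expo-av
    audioEncoder: "aac", // was AndroidAudioEncoder.AAC
    sampleRate: 44100,
    numberOfChannels: 1,
    bitRate: 64000,
  },
  ios: {
    extension: ".m4a",
    outputFormat: IOSOutputFormat.MPEG4AAC,
    audioQuality: AudioQuality.MAX, // was IOSAudioQuality.MAX
    sampleRate: 44100,
    numberOfChannels: 1,
    bitRate: 64000,
  },
};

export default recordingSettings;
```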
@@ -128,15 +123,14 @@ export default React.memo(function ChatInput({
   const startRecording = useCallback(async () => {
     try {
       console.log("Requesting permissions..");
-      await Audio.requestPermissionsAsync();
+      await Audio.requestRecordingPermissionsAsync();
       await Audio.setAudioModeAsync({
-        allowsRecordingIOS: true,
-        interruptionModeIOS: InterruptionModeIOS.DoNotMix,
-        playsInSilentModeIOS: true,
-        shouldDuckAndroid: true,
-        interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
-        playThroughEarpieceAndroid: false,
-        staysActiveInBackground: true,
+        allowsRecording: true,
+        interruptionMode: "doNotMix",
+        playsInSilentMode: true,
+        interruptionModeAndroid: "doNotMix",
+        shouldRouteThroughEarpiece: false,
+        shouldPlayInBackground: true,
       });
       // stop playback
       if (sound !== null) {
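The same audio-mode rename repeats in the two later setAudioModeAsync hunks; collected in one place, the mapping used by this commit (taken from the diff itself, not from library documentation) is summarized below.

```js
import * as Audio from "expo-audio";

// expo-av key (removed)          -> expo-audio key used in this commit
// allowsRecordingIOS             -> allowsRecording
// interruptionModeIOS (enum)     -> interruptionMode: "doNotMix"
// playsInSilentModeIOS           -> playsInSilentMode
// interruptionModeAndroid (enum) -> interruptionModeAndroid: "doNotMix"
// playThroughEarpieceAndroid     -> shouldRouteThroughEarpiece
// staysActiveInBackground        -> shouldPlayInBackground
// shouldDuckAndroid              -> dropped (no replacement key is set)
async function applyRecordingAudioMode() {
  await Audio.setAudioModeAsync({
    allowsRecording: true,
    interruptionMode: "doNotMix",
    playsInSilentMode: true,
    interruptionModeAndroid: "doNotMix",
    shouldRouteThroughEarpiece: false,
    shouldPlayInBackground: true,
  });
}
```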
@@ -147,13 +141,12 @@ export default React.memo(function ChatInput({

       console.log("Starting recording..");
       await Audio.setAudioModeAsync({
-        allowsRecordingIOS: true,
-        interruptionModeIOS: InterruptionModeIOS.DoNotMix,
-        playsInSilentModeIOS: true,
-        shouldDuckAndroid: true,
-        interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
-        playThroughEarpieceAndroid: false,
-        staysActiveInBackground: true,
+        allowsRecording: true,
+        interruptionMode: "doNotMix",
+        playsInSilentMode: true,
+        interruptionModeAndroid: "doNotMix",
+        shouldRouteThroughEarpiece: false,
+        shouldPlayInBackground: true,
       });
       const _recording = new Audio.Recording();
       try {
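One caveat worth flagging: the new code still constructs new Audio.Recording(), which is the expo-av class name. expo-audio's documented recording path is hook-based; a minimal sketch of that alternative, under the assumption that the useAudioRecorder, RecordingPresets, and AudioModule exports behave as documented for expo-audio ~0.4, would look like this. The hook name and its use here are hypothetical, not part of this commit.

```js
import {
  useAudioRecorder,
  AudioModule,
  RecordingPresets,
  setAudioModeAsync,
} from "expo-audio";

// Hypothetical helper hook: start/stop a recording and return the file URI.
export function useVoiceNoteRecorder() {
  const recorder = useAudioRecorder(RecordingPresets.HIGH_QUALITY);

  const start = async () => {
    const permission = await AudioModule.requestRecordingPermissionsAsync();
    if (!permission.granted) return;
    await setAudioModeAsync({ allowsRecording: true, playsInSilentMode: true });
    await recorder.prepareToRecordAsync();
    recorder.record();
  };

  const stop = async () => {
    await recorder.stop();
    return recorder.uri; // file:// path of the finished recording
  };

  return { start, stop };
}
```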
@@ -184,14 +177,12 @@ export default React.memo(function ChatInput({

   const recordedToSound = useCallback(async () => {
     await Audio.setAudioModeAsync({
-      allowsRecordingIOS: false,
-      interruptionModeIOS: InterruptionModeIOS.DoNotMix,
-      playsInSilentModeIOS: true,
-      playsInSilentLockedModeIOS: true,
-      shouldDuckAndroid: true,
-      interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
-      playThroughEarpieceAndroid: false,
-      staysActiveInBackground: true,
+      allowsRecording: false,
+      interruptionMode: "doNotMix",
+      playsInSilentMode: true,
+      interruptionModeAndroid: "doNotMix",
+      shouldRouteThroughEarpiece: false,
+      shouldPlayInBackground: true,
     });
     const { sound: _sound } = await recording.createNewLoadedSoundAsync({
       isLooping: false,
@@ -1,14 +1,7 @@
-import React, { PureComponent } from "react";
-import {
-  TouchableOpacity,
-  Animated,
-  PanResponder,
-  View,
-  Easing,
-} from "react-native";
-import { Audio } from "expo-av";
+import React, { useEffect, useMemo, useRef, useState } from "react";
+import { TouchableOpacity, Animated, PanResponder, View } from "react-native";
+import { useAudioPlayer, useAudioPlayerStatus } from "expo-audio";
 import { MaterialCommunityIcons } from "@expo/vector-icons";
-import sleep from "./sleep";
 import DigitalTimeString from "./DigitalTimeString";

 import useStyles from "./styles";
@@ -17,286 +10,219 @@ import withHooks from "~/hoc/withHooks";
 const TRACK_SIZE = 4;
 const THUMB_SIZE = 20;

-class AudioSlider extends PureComponent {
-  constructor(props) {
-    super(props);
-    this.state = {
-      playing: false,
-      currentTime: 0, // miliseconds; value interpolated by animation.
-      duration: 0,
-      trackLayout: {},
-      dotOffset: new Animated.ValueXY(),
-      xDotOffsetAtAnimationStart: 0,
+function clamp(n, min, max) {
+  return Math.max(min, Math.min(n, max));
+}
+
+function AudioSlider(props) {
+  // Props mapping (kept compatible with previous class component)
+  const { audio: audioUrl, registry, style: styleProp } = props;
+  const pauseAllBeforePlay =
+    props.pauseAllBeforePlay === undefined ? true : props.pauseAllBeforePlay;
+
+  // Styles injected by withHooks HOC
+  const styles = props.styles;
+
+  // Track layout (for computing pixel & time mappings)
+  const [trackLayout, setTrackLayout] = useState({ width: 0, height: 0 });
+
+  // Thumb X position (in pixels) — single Animated.Value used both for dragging and syncing to playback
+  const dotX = useRef(new Animated.Value(0)).current;
+  const isDraggingRef = useRef(false);
+  const xDotOffsetAtStartRef = useRef(0);
+
+  // While dragging, we derive the current time from the thumb position for live display
+  const [dragTimeMs, setDragTimeMs] = useState(0);
+
+  // Player using new expo-audio hook API
+  const player = useAudioPlayer(audioUrl, { updateInterval: 250 });
+  const status = useAudioPlayerStatus(player) || {};
+
+  const durationSec = status.duration || 0;
+  const currentTimeSec = status.currentTime || 0;
+
+  // Register in an optional registry to allow pausing other players before play
+  const selfRef = useRef({
+    pause: () => {
+      try {
+        player.pause();
+      } catch {}
+    },
+  });
+
+  useEffect(() => {
+    selfRef.current.pause = () => {
+      try {
+        player.pause();
+      } catch {}
+    };
+  }, [player]);
+
+  useEffect(() => {
+    if (!registry) return;
+    const self = selfRef.current;
+    registry.register(self);
+    return () => {
+      try {
+        registry.unregister(self);
+      } catch {}
+    };
+  }, [registry]);
+
+  // Ensure no looping (mimics the previous behavior)
+  useEffect(() => {
+    try {
+      player.loop = false;
+    } catch {}
+  }, [player]);
+
+  // When not dragging, keep the thumb in sync with the playback position
+  useEffect(() => {
+    if (!isDraggingRef.current) {
+      const w = trackLayout.width || 0;
+      const x = durationSec > 0 ? (currentTimeSec / durationSec) * w : 0;
+      dotX.setValue(x);
+    }
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [currentTimeSec, durationSec, trackLayout.width]);
+
+  // When playback finishes, reset to start (seek to 0 and move thumb to start)
+  useEffect(() => {
+    if (status.didJustFinish) {
+      try {
+        player.seekTo(0);
+      } catch {}
+      dotX.setValue(0);
+    }
+  }, [status.didJustFinish, player, dotX]);
+
+  const onPressPlayPause = async () => {
+    if (status.playing) {
+      try {
+        player.pause();
+      } catch {}
+      return;
+    }
+
+    // Pause others first if asked
+    if (registry && pauseAllBeforePlay) {
+      try {
+        const players = registry.getAll ? registry.getAll() : [];
+        players
+          .filter((p) => p !== selfRef.current && typeof p.pause === "function")
+          .forEach((p) => p.pause());
+      } catch {}
+    }
+
+    try {
+      player.play();
+    } catch {}
   };

-    this._updateProps();
-    // Important:
-    // this.state.dotOffset.x is the actual offset
-    // this.state.dotOffset.x._value is the offset from the point where the animation started
-    // However, since this.state.dotOffset.x is an object and not a value, it is difficult
-    // to compare it with other numbers. Therefore, the const currentOffsetX is used.
-    // To print all attributes of the object see https://stackoverflow.com/questions/9209747/printing-all-the-hidden-properties-of-an-object
-    this._panResponder = PanResponder.create({
+  // Pan handling for seeking
+  const panResponder = useMemo(
+    () =>
+      PanResponder.create({
        onMoveShouldSetResponderCapture: () => true,
        onMoveShouldSetPanResponderCapture: () => true,
-      onPanResponderGrant: async (e, gestureState) => {
-        if (this.state.playing) {
-          await this.pause();
+        onPanResponderGrant: async () => {
+          // Pause if currently playing (mimic previous behavior)
+          if (status.playing) {
+            try {
+              player.pause();
+            } catch {}
          }
-        await this.setState({
-          xDotOffsetAtAnimationStart: this.state.dotOffset.x._value,
+          isDraggingRef.current = true;
+
+          // Initialize offset for drag
+          const currentX = dotX.__getValue();
+          xDotOffsetAtStartRef.current = currentX;
+          dotX.setOffset(currentX);
+          dotX.setValue(0);
+
+          // While dragging, update displayed time
+          dotX.addListener(({ value }) => {
+            const w = trackLayout.width || 1;
+            const currentOffset =
+              xDotOffsetAtStartRef.current +
+              (typeof value === "number" ? value : 0);
+            const clampedX = clamp(currentOffset, 0, w);
+            const percent = w > 0 ? clampedX / w : 0;
+            const ms = Math.round(percent * durationSec * 1000);
+            setDragTimeMs(ms);
          });
-        await this.state.dotOffset.setOffset({
-          x: this.state.dotOffset.x._value,
-        });
-        await this.state.dotOffset.setValue({ x: 0, y: 0 });
        },
-      onPanResponderMove: (e, gestureState) => {
-        Animated.event([
-          null,
-          { dx: this.state.dotOffset.x, dy: this.state.dotOffset.y },
-        ])(e, gestureState);
-      },
-      onPanResponderTerminationRequest: () => false,
-      onPanResponderTerminate: async (evt, gestureState) => {
-        // Another component has become the responder, so this gesture is cancelled.
-
-        const currentOffsetX =
-          this.state.xDotOffsetAtAnimationStart + this.state.dotOffset.x._value;
-        if (
-          currentOffsetX < 0 ||
-          currentOffsetX > this.state.trackLayout.width
-        ) {
-          await this.state.dotOffset.setValue({
-            x: -this.state.xDotOffsetAtAnimationStart,
-            y: 0,
-          });
-        }
-        await this.state.dotOffset.flattenOffset();
-        await this.mapAudioToCurrentTime();
-      },
-      onPanResponderRelease: async (e, { vx }) => {
-        const currentOffsetX =
-          this.state.xDotOffsetAtAnimationStart + this.state.dotOffset.x._value;
-        if (
-          currentOffsetX < 0 ||
-          currentOffsetX > this.state.trackLayout.width
-        ) {
-          await this.state.dotOffset.setValue({
-            x: -this.state.xDotOffsetAtAnimationStart,
-            y: 0,
-          });
-        }
-        await this.state.dotOffset.flattenOffset();
-        await this.mapAudioToCurrentTime();
-      },
-    });
-  }
-
-  _updateProps() {
-    const props = this.props;
-    this.registry = props.registry;
-    this.style = props.style || {};
-    if (this.registry) {
-      this.registry.register(this);
-    }
-    const { pauseAllBeforePlay = true } = props;
-    this.pauseAllBeforePlay = pauseAllBeforePlay;
-  }
-
-  componentDidUpdate() {
-    this._updateProps();
-  }
-
-  mapAudioToCurrentTime = async () => {
-    if (!this.soundObject) return;
-    await this.soundObject.setPositionAsync(this.state.currentTime);
-  };
-
-  onPressPlayPause = async () => {
-    if (this.state.playing) {
-      await this.pause();
-      return;
-    }
-    await this.play();
-  };
-
-  play = async () => {
-    if (!this.soundObject) return;
-    if (this.registry && this.pauseAllBeforePlay) {
-      const players = this.registry.getAll();
-      await Promise.all(
-        players.filter((p) => this !== p).map((p) => p.pause()),
-      );
-    }
-    await this.soundObject.playAsync();
-    this.setState({ playing: true }); // This is for the play-button to go to play
-    this.startMovingDot();
-  };
-
-  pause = async () => {
-    if (!this.soundObject) return;
-    await this.soundObject.pauseAsync();
-    this.setState({ playing: false }); // This is for the play-button to go to pause
-    Animated.timing(this.state.dotOffset, { useNativeDriver: false }).stop(); // Will also call animationPausedOrStopped()
-  };
-
-  startMovingDot = async () => {
-    if (!this.soundObject) return;
-    const status = await this.soundObject.getStatusAsync();
-    const durationLeft = status["durationMillis"] - status["positionMillis"];
-
-    Animated.timing(this.state.dotOffset, {
-      toValue: { x: this.state.trackLayout.width, y: 0 },
-      duration: durationLeft,
-      easing: Easing.linear,
+        onPanResponderMove: Animated.event([null, { dx: dotX }], {
          useNativeDriver: false,
-    }).start(() => this.animationPausedOrStopped());
-  };
-
-  animationPausedOrStopped = async () => {
-    if (!this.state.playing) {
-      // Audio has been paused
-      return;
-    }
-    if (!this.soundObject) return;
-    // Animation-duration is over (reset Animation and Audio):
-    await sleep(200); // In case animation has finished, but audio has not
-    this.setState({ playing: false });
-    await this.state.dotOffset.setValue({ x: 0, y: 0 });
-    // this.state.dotOffset.setValue(0);
-    await this.soundObject.setPositionAsync(0);
-  };
-
-  handlePlaybackFinished = async () => {
-    // console.log(`[AudioSlider] Playback finished, resetting for replay`);
-    // Reset for replay instead of unloading
-    this.setState({ playing: false });
-    await this.state.dotOffset.setValue({ x: 0, y: 0 });
-    if (this.soundObject) {
-      await this.soundObject.stopAsync();
-    }
-  };
-
-  measureTrack = (event) => {
-    this.setState({ trackLayout: event.nativeEvent.layout }); // {x, y, width, height}
-  };
+        }),
+        onPanResponderTerminationRequest: () => false,
+        onPanResponderTerminate: async () => {
+          // Another component took the responder
+          dotX.removeAllListeners();
+          const w = trackLayout.width || 1;
+          const value = dotX.__getValue();
+          const currentOffset =
+            xDotOffsetAtStartRef.current +
+            (typeof value === "number" ? value : 0);

-  async componentDidMount() {
-    // https://github.com/olapiv/expo-audio-player/issues/13
-    const audioUrl = this.props.audio;
-
-    const loadAudio = async () => {
-      const tryLoad = async (ext) => {
-        // console.log(`[AudioSlider] Attempting to load with extension: ${ext}`);
-        const { sound } = await Audio.Sound.createAsync({
-          uri: audioUrl,
-          overrideFileExtensionAndroid: ext,
-        });
-        return sound;
-      };
-
-      let lastError = null;
-
+          let clampedX = clamp(currentOffset, 0, w);
+          dotX.flattenOffset();
+          dotX.setValue(clampedX);
+
+          if (durationSec > 0) {
+            const targetSec = (clampedX / w) * durationSec;
            try {
-        // First try with m4a (preferred)
-        const sound = await tryLoad("m4a");
-        // console.log(`[AudioSlider] Successfully loaded with m4a extension`);
-        this.soundObject = sound;
-        await this.soundObject.setIsLoopingAsync(false);
-        this.soundObject.setOnPlaybackStatusUpdate((status) => {
-          if (!status.didJustFinish) return;
-          this.handlePlaybackFinished();
-        });
-        return;
-      } catch (err1) {
-        // console.log(`[AudioSlider] Failed to load with m4a:`, err1.message);
-        lastError = err1;
-        try {
-          // Fallback to mp4
-          const sound = await tryLoad("mp4");
-          // console.log(`[AudioSlider] Successfully loaded with mp4 extension`);
-          this.soundObject = sound;
-          await this.soundObject.setIsLoopingAsync(false);
-          this.soundObject.setOnPlaybackStatusUpdate((status) => {
-            if (!status.didJustFinish) return;
-            this.handlePlaybackFinished();
-          });
-          return;
-        } catch (err2) {
-          // console.log(`[AudioSlider] Failed to load with mp4:`, err2.message);
-          lastError = err2;
-          try {
-            // Last fallback to aac
-            const sound = await tryLoad("aac");
-            // console.log(`[AudioSlider] Successfully loaded with aac extension`);
-            this.soundObject = sound;
-            await this.soundObject.setIsLoopingAsync(false);
-            this.soundObject.setOnPlaybackStatusUpdate((status) => {
-              if (!status.didJustFinish) return;
-              this.handlePlaybackFinished();
-            });
-            return;
-          } catch (err3) {
-            // console.log(`[AudioSlider] Failed to load with aac:`, err3.message);
-            lastError = err3;
-          }
-        }
+              await player.seekTo(targetSec);
+            } catch {}
            }

-      // All attempts failed
-      console.error(
-        `[AudioSlider] All load attempts failed for ${audioUrl}. Last error:`,
-        lastError,
+          isDraggingRef.current = false;
+          setDragTimeMs(0);
+        },
+        onPanResponderRelease: async () => {
+          dotX.removeAllListeners();
+
+          const w = trackLayout.width || 1;
+          const value = dotX.__getValue();
+          const currentOffset =
+            xDotOffsetAtStartRef.current +
+            (typeof value === "number" ? value : 0);
+
+          let clampedX = clamp(currentOffset, 0, w);
+          dotX.flattenOffset();
+          dotX.setValue(clampedX);
+
+          if (durationSec > 0) {
+            const targetSec = (clampedX / w) * durationSec;
+            try {
+              await player.seekTo(targetSec);
+            } catch {}
+          }
+
+          isDraggingRef.current = false;
+          setDragTimeMs(0);
+        },
+      }),
+    [dotX, durationSec, player, status.playing, trackLayout.width],
  );

+  const measureTrack = (event) => {
+    setTrackLayout(event.nativeEvent.layout || {});
  };

-    await loadAudio();
-    if (!this.soundObject) {
-      // Loading failed; avoid further calls and leave UI inert or show error
-      console.log(
-        `[AudioSlider] No sound object created, setting duration to 0`,
-      );
-      this.setState({ duration: 0 });
-      return;
-    }
-
-    try {
-      const status = await this.soundObject.getStatusAsync();
-      this.setState({ duration: status.durationMillis });
-    } catch (error) {
-      console.log("Error getting audio status:", error);
-      this.setState({ duration: 0 });
-      return;
-    }
-
-    // This requires measureTrack to have been called.
-    this.state.dotOffset.addListener(() => {
-      const animatedCurrentTime = this.state.dotOffset.x
-        .interpolate({
-          inputRange: [0, this.state.trackLayout.width],
-          outputRange: [0, this.state.duration],
-          extrapolate: "clamp",
-        })
-        .__getValue();
-      this.setState({ currentTime: animatedCurrentTime });
-    });
-  }
-
-  async componentWillUnmount() {
-    if (this.soundObject) {
-      await this.soundObject.unloadAsync();
-    }
-    this.state.dotOffset.removeAllListeners();
-    if (this.registry) {
-      this.registry.unregister(this);
-    }
-  }
-
-  render() {
+  // Times for display (DigitalTimeString expects milliseconds)
+  const durationMs = Math.round(durationSec * 1000);
+  const currentTimeMs = isDraggingRef.current
+    ? dragTimeMs
+    : Math.round(currentTimeSec * 1000);
+
   return (
     <View
       style={{
@@ -328,28 +254,28 @@ class AudioSlider extends PureComponent {
           paddingRight: THUMB_SIZE,
           zIndex: 2,
         }}
-        onPress={this.onPressPlayPause}
+        onPress={onPressPlayPause}
       >
-        {this.state.playing ? (
+        {status.playing ? (
           <MaterialCommunityIcons
             name="pause-circle-outline"
             size={30}
-            style={[this.props.styles.controlIcon, this.style.controlIcon]}
+            style={[styles.controlIcon, styleProp?.controlIcon]}
           />
         ) : (
           <MaterialCommunityIcons
             name="play-circle-outline"
             size={30}
-            style={[this.props.styles.controlIcon, this.style.controlIcon]}
+            style={[styles.controlIcon, styleProp?.controlIcon]}
           />
         )}
       </TouchableOpacity>

       <Animated.View
-        onLayout={this.measureTrack}
+        onLayout={measureTrack}
         style={[
-          this.props.styles.slideBar,
-          this.style.slideBar,
+          styles.slideBar,
+          styleProp?.slideBar,
           {
             height: TRACK_SIZE,
             borderRadius: TRACK_SIZE / 2,
@@ -366,39 +292,21 @@
             left: -((THUMB_SIZE * 4) / 2),
             width: THUMB_SIZE * 4,
             height: THUMB_SIZE * 4,
-            transform: [
-              {
-                translateX: this.state.dotOffset.x.interpolate({
-                  inputRange: [
-                    0,
-                    this.state.trackLayout.width != undefined
-                      ? this.state.trackLayout.width
-                      : 1,
-                  ],
-                  outputRange: [
-                    0,
-                    this.state.trackLayout.width != undefined
-                      ? this.state.trackLayout.width
-                      : 1,
-                  ],
-                  extrapolate: "clamp",
-                }),
-              },
-            ],
+            transform: [{ translateX: dotX }],
           }}
-          {...this._panResponder.panHandlers}
+          {...panResponder.panHandlers}
         >
           <View
             style={[
-              this.props.styles.slideCursor,
-              this.style.slideCursor,
+              styles.slideCursor,
+              styleProp?.slideCursor,
               {
                 width: THUMB_SIZE,
                 height: THUMB_SIZE,
                 borderRadius: THUMB_SIZE / 2,
               },
             ]}
-          ></View>
+          />
         </Animated.View>
       </Animated.View>
     </View>
@@ -410,12 +318,11 @@
         justifyContent: "space-between",
       }}
     >
-      <DigitalTimeString time={this.state.currentTime} style={this.style} />
-      <DigitalTimeString time={this.state.duration} style={this.style} />
+      <DigitalTimeString time={currentTimeMs} style={styleProp} />
+      <DigitalTimeString time={durationMs} style={styleProp} />
     </View>
   </View>
   );
-  }
 }

 export default withHooks(AudioSlider, () => {
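For orientation, the playback half of the AudioSlider rewrite reduces to the hook pattern below. This is a minimal sketch assuming expo-audio ~0.4.9 behaves as its hooks suggest in the diff; the real component adds the Animated thumb, the player registry, and the PanResponder seeking shown above.

```js
import React from "react";
import { Text, TouchableOpacity, View } from "react-native";
import { useAudioPlayer, useAudioPlayerStatus } from "expo-audio";

// Minimal sketch: play/pause one remote file and show coarse progress.
export default function MiniPlayer({ uri }) {
  const player = useAudioPlayer(uri);
  const status = useAudioPlayerStatus(player) || {};

  const toggle = () => {
    if (status.playing) player.pause();
    else player.play();
  };

  return (
    <View>
      <TouchableOpacity onPress={toggle}>
        <Text>{status.playing ? "Pause" : "Play"}</Text>
      </TouchableOpacity>
      <Text>
        {Math.round(status.currentTime || 0)}s / {Math.round(status.duration || 0)}s
      </Text>
    </View>
  );
}
```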

yarn.lock
@@ -7114,6 +7114,7 @@ __metadata:
     eslint-plugin-unused-imports: "npm:^3.0.0"
     eventemitter3: "npm:^5.0.1"
     expo: "npm:~53.0.23"
+    expo-audio: "npm:~0.4.9"
     expo-av: "npm:~15.1.7"
     expo-build-properties: "npm:~0.14.8"
     expo-constants: "npm:~17.1.7"
@@ -10630,6 +10631,17 @@ __metadata:
   languageName: node
   linkType: hard

+"expo-audio@npm:~0.4.9":
+  version: 0.4.9
+  resolution: "expo-audio@npm:0.4.9"
+  peerDependencies:
+    expo: "*"
+    react: "*"
+    react-native: "*"
+  checksum: 10/874527adcf03e044770fc64fa9d58f735528bd40ebe038635ebf5ef34a21cdbce0115c9b2b1406604b9622f665d27a3a7eda417275322f2506a77f74c359724f
+  languageName: node
+  linkType: hard
+
 "expo-av@npm:~15.1.7":
   version: 15.1.7
   resolution: "expo-av@npm:15.1.7"