chore: wip

devthejo 2025-09-29 08:49:05 +02:00
parent 0d0e694e8d
commit c72708b79f
6 changed files with 387 additions and 405 deletions

View file

@@ -195,6 +195,7 @@ let config = {
},
},
],
"expo-audio",
"./plugins/withXcode15Fix",
"./plugins/withCustomScheme", // Preserve URL schemes during prebuild
],
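
For reference, a minimal sketch of the same plugins block with the iOS microphone-permission option that expo-audio's config plugin documents (the permission string is an assumption, not part of this commit):

// app.config.js (sketch; option value assumed from the expo-audio docs)
plugins: [
  [
    "expo-audio",
    { microphonePermission: "Allow $(PRODUCT_NAME) to access the microphone." },
  ],
  "./plugins/withXcode15Fix",
  "./plugins/withCustomScheme", // Preserve URL schemes during prebuild
],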

View file

@@ -180,7 +180,9 @@
F3F5A8D7A73545D78A4D8467 /* Fix Xcode 15 Bug */,
BC7FCBEF8C354C749AB11067 /* Fix Xcode 15 Bug */,
59A6E29E61A94EC98E5B50A7 /* Fix Xcode 15 Bug */,
822458BA69944A72BCDBEB3B /* Remove signature files (Xcode workaround) */,
94EB5593B5404563AA1FB51E /* Fix Xcode 15 Bug */,
976C0290112E4DD89069ADCE /* Fix Xcode 15 Bug */,
0976B7D5E5CF42E49F388927 /* Remove signature files (Xcode workaround) */,
);
buildRules = (
);
@@ -1171,6 +1173,74 @@ fi";
shellScript = "
echo \"Remove signature files (Xcode workaround)\";
rm -rf \"$CONFIGURATION_BUILD_DIR/MapLibre.xcframework-ios.signature\";
";
};
94EB5593B5404563AA1FB51E /* Fix Xcode 15 Bug */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
name = "Fix Xcode 15 Bug";
inputPaths = (
);
outputPaths = (
);
shellPath = /bin/sh;
shellScript = "if [ \"$XCODE_VERSION_MAJOR\" = \"1500\" ]; then
echo \"Remove signature files (Xcode 15 workaround)\"
find \"$BUILD_DIR/${CONFIGURATION}-iphoneos\" -name \"*.signature\" -type f | xargs -r rm
fi";
};
CEAB2B1E58724CE999FACC27 /* Remove signature files (Xcode workaround) */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
name = "Remove signature files (Xcode workaround)";
inputPaths = (
);
outputPaths = (
);
shellPath = /bin/sh;
shellScript = "
echo \"Remove signature files (Xcode workaround)\";
rm -rf \"$CONFIGURATION_BUILD_DIR/MapLibre.xcframework-ios.signature\";
";
};
976C0290112E4DD89069ADCE /* Fix Xcode 15 Bug */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
name = "Fix Xcode 15 Bug";
inputPaths = (
);
outputPaths = (
);
shellPath = /bin/sh;
shellScript = "if [ \"$XCODE_VERSION_MAJOR\" = \"1500\" ]; then
echo \"Remove signature files (Xcode 15 workaround)\"
find \"$BUILD_DIR/${CONFIGURATION}-iphoneos\" -name \"*.signature\" -type f | xargs -r rm
fi";
};
0976B7D5E5CF42E49F388927 /* Remove signature files (Xcode workaround) */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
name = "Remove signature files (Xcode workaround)";
inputPaths = (
);
outputPaths = (
);
shellPath = /bin/sh;
shellScript = "
echo \"Remove signature files (Xcode workaround)\";
rm -rf \"$CONFIGURATION_BUILD_DIR/MapLibre.xcframework-ios.signature\";
";
};
/* End PBXShellScriptBuildPhase section */
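
The duplicated "Fix Xcode 15 Bug" phases above are consistent with a prebuild-time config plugin appending a phase on each run. The commit does not include the plugin source; what follows is a hypothetical reconstruction of ./plugins/withXcode15Fix, assuming it uses withXcodeProject from @expo/config-plugins and addBuildPhase from the xcode package:

// plugins/withXcode15Fix.js — hypothetical sketch, not from this commit
const { withXcodeProject } = require("@expo/config-plugins");

// Same script the generated pbxproj carries above.
const SCRIPT = `if [ "$XCODE_VERSION_MAJOR" = "1500" ]; then
  echo "Remove signature files (Xcode 15 workaround)"
  find "$BUILD_DIR/\${CONFIGURATION}-iphoneos" -name "*.signature" -type f | xargs -r rm
fi`;

module.exports = function withXcode15Fix(config) {
  return withXcodeProject(config, (config) => {
    // Appends a PBXShellScriptBuildPhase to the main target; guarding against
    // re-adding on repeated prebuilds would avoid the duplicates in this diff.
    config.modResults.addBuildPhase([], "PBXShellScriptBuildPhase", "Fix Xcode 15 Bug", null, {
      shellPath: "/bin/sh",
      shellScript: SCRIPT,
    });
    return config;
  });
};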

View file

@@ -116,6 +116,7 @@
"delay": "^6.0.0",
"eventemitter3": "^5.0.1",
"expo": "~53.0.23",
"expo-audio": "~0.4.9",
"expo-av": "~15.1.7",
"expo-build-properties": "~0.14.8",
"expo-constants": "~17.1.7",

View file

@@ -1,13 +1,8 @@
import React, { useState, useCallback, useEffect, useRef } from "react";
import { View, Text, TouchableOpacity } from "react-native";
import { MaterialCommunityIcons } from "@expo/vector-icons";
import { Audio, InterruptionModeIOS, InterruptionModeAndroid } from "expo-av";
import {
AndroidOutputFormat,
IOSOutputFormat,
AndroidAudioEncoder,
IOSAudioQuality,
} from "expo-av/build/Audio";
import * as Audio from "expo-audio";
import { IOSOutputFormat, AudioQuality } from "expo-audio";
import Countdown from "react-countdown";
@@ -35,8 +30,8 @@ const RECORDING_TIMEOUT = 59;
const recordingSettings = {
android: {
extension: ".m4a",
outputFormat: AndroidOutputFormat.MPEG_4,
audioEncoder: AndroidAudioEncoder.AAC,
outputFormat: "mpeg4",
audioEncoder: "aac",
sampleRate: 44100,
numberOfChannels: 1,
bitRate: 64000,
@@ -44,7 +39,7 @@ const recordingSettings = {
ios: {
extension: ".m4a",
outputFormat: IOSOutputFormat.MPEG4AAC,
audioQuality: IOSAudioQuality.MAX,
audioQuality: AudioQuality.MAX,
sampleRate: 44100,
numberOfChannels: 1,
bitRate: 64000,
@@ -128,15 +123,14 @@ export default React.memo(function ChatInput({
const startRecording = useCallback(async () => {
try {
console.log("Requesting permissions..");
await Audio.requestPermissionsAsync();
await Audio.requestRecordingPermissionsAsync();
await Audio.setAudioModeAsync({
allowsRecordingIOS: true,
interruptionModeIOS: InterruptionModeIOS.DoNotMix,
playsInSilentModeIOS: true,
shouldDuckAndroid: true,
interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
playThroughEarpieceAndroid: false,
staysActiveInBackground: true,
allowsRecording: true,
interruptionMode: "doNotMix",
playsInSilentMode: true,
interruptionModeAndroid: "doNotMix",
shouldRouteThroughEarpiece: false,
shouldPlayInBackground: true,
});
// stop playback
if (sound !== null) {
@@ -147,13 +141,12 @@ export default React.memo(function ChatInput({
console.log("Starting recording..");
await Audio.setAudioModeAsync({
allowsRecordingIOS: true,
interruptionModeIOS: InterruptionModeIOS.DoNotMix,
playsInSilentModeIOS: true,
shouldDuckAndroid: true,
interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
playThroughEarpieceAndroid: false,
staysActiveInBackground: true,
allowsRecording: true,
interruptionMode: "doNotMix",
playsInSilentMode: true,
interruptionModeAndroid: "doNotMix",
shouldRouteThroughEarpiece: false,
shouldPlayInBackground: true,
});
const _recording = new Audio.Recording();
try {
@@ -184,14 +177,12 @@ export default React.memo(function ChatInput({
const recordedToSound = useCallback(async () => {
await Audio.setAudioModeAsync({
allowsRecordingIOS: false,
interruptionModeIOS: InterruptionModeIOS.DoNotMix,
playsInSilentModeIOS: true,
playsInSilentLockedModeIOS: true,
shouldDuckAndroid: true,
interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
playThroughEarpieceAndroid: false,
staysActiveInBackground: true,
allowsRecording: false,
interruptionMode: "doNotMix",
playsInSilentMode: true,
interruptionModeAndroid: "doNotMix",
shouldRouteThroughEarpiece: false,
shouldPlayInBackground: true,
});
const { sound: _sound } = await recording.createNewLoadedSoundAsync({
isLooping: false,
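
Note that this hunk still constructs new Audio.Recording() from the expo-av era while importing expo-audio. For comparison, a minimal sketch of the hook-based recording flow that expo-audio itself documents (the wrapper name and preset choice are assumptions, not from this commit):

// Hedged sketch of expo-audio's documented recorder API.
import { AudioModule, RecordingPresets, setAudioModeAsync, useAudioRecorder } from "expo-audio";

function useVoiceRecorder() {
  const recorder = useAudioRecorder(RecordingPresets.HIGH_QUALITY);

  const start = async () => {
    await AudioModule.requestRecordingPermissionsAsync();
    // Flags mirror the audio mode set in this commit.
    await setAudioModeAsync({ allowsRecording: true, playsInSilentMode: true });
    await recorder.prepareToRecordAsync();
    recorder.record();
  };

  const stop = async () => {
    await recorder.stop();
    return recorder.uri; // file:// URI of the finished take
  };

  return { start, stop };
}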

View file

@@ -1,14 +1,7 @@
import React, { PureComponent } from "react";
import {
TouchableOpacity,
Animated,
PanResponder,
View,
Easing,
} from "react-native";
import { Audio } from "expo-av";
import React, { useEffect, useMemo, useRef, useState } from "react";
import { TouchableOpacity, Animated, PanResponder, View } from "react-native";
import { useAudioPlayer, useAudioPlayerStatus } from "expo-audio";
import { MaterialCommunityIcons } from "@expo/vector-icons";
import sleep from "./sleep";
import DigitalTimeString from "./DigitalTimeString";
import useStyles from "./styles";
@@ -17,286 +10,219 @@ import withHooks from "~/hoc/withHooks";
const TRACK_SIZE = 4;
const THUMB_SIZE = 20;
class AudioSlider extends PureComponent {
constructor(props) {
super(props);
this.state = {
playing: false,
currentTime: 0, // milliseconds; value interpolated by animation.
duration: 0,
trackLayout: {},
dotOffset: new Animated.ValueXY(),
xDotOffsetAtAnimationStart: 0,
function clamp(n, min, max) {
return Math.max(min, Math.min(n, max));
}
function AudioSlider(props) {
// Props mapping (kept compatible with previous class component)
const { audio: audioUrl, registry, style: styleProp } = props;
const pauseAllBeforePlay =
props.pauseAllBeforePlay === undefined ? true : props.pauseAllBeforePlay;
// Styles injected by withHooks HOC
const styles = props.styles;
// Track layout (for computing pixel & time mappings)
const [trackLayout, setTrackLayout] = useState({ width: 0, height: 0 });
// Thumb X position (in pixels) — single Animated.Value used both for dragging and syncing to playback
const dotX = useRef(new Animated.Value(0)).current;
const isDraggingRef = useRef(false);
const xDotOffsetAtStartRef = useRef(0);
// While dragging, we derive the current time from the thumb position for live display
const [dragTimeMs, setDragTimeMs] = useState(0);
// Player using new expo-audio hook API
const player = useAudioPlayer(audioUrl, { updateInterval: 250 });
const status = useAudioPlayerStatus(player) || {};
const durationSec = status.duration || 0;
const currentTimeSec = status.currentTime || 0;
// Register in an optional registry to allow pausing other players before play
const selfRef = useRef({
pause: () => {
try {
player.pause();
} catch {}
},
});
useEffect(() => {
selfRef.current.pause = () => {
try {
player.pause();
} catch {}
};
}, [player]);
useEffect(() => {
if (!registry) return;
const self = selfRef.current;
registry.register(self);
return () => {
try {
registry.unregister(self);
} catch {}
};
}, [registry]);
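// The registry prop is only assumed to expose register/unregister/getAll;
// a minimal compatible implementation (sketch, not part of this commit):
//
//   function createPlayerRegistry() {
//     const players = new Set();
//     return {
//       register: (p) => players.add(p),
//       unregister: (p) => players.delete(p),
//       getAll: () => [...players],
//     };
//   }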
// Ensure no looping (mimics the previous behavior)
useEffect(() => {
try {
player.loop = false;
} catch {}
}, [player]);
// When not dragging, keep the thumb in sync with the playback position
useEffect(() => {
if (!isDraggingRef.current) {
const w = trackLayout.width || 0;
const x = durationSec > 0 ? (currentTimeSec / durationSec) * w : 0;
dotX.setValue(x);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [currentTimeSec, durationSec, trackLayout.width]);
// When playback finishes, reset to start (seek to 0 and move thumb to start)
useEffect(() => {
if (status.didJustFinish) {
try {
player.seekTo(0);
} catch {}
dotX.setValue(0);
}
}, [status.didJustFinish, player, dotX]);
const onPressPlayPause = async () => {
if (status.playing) {
try {
player.pause();
} catch {}
return;
}
// Pause others first if asked
if (registry && pauseAllBeforePlay) {
try {
const players = registry.getAll ? registry.getAll() : [];
players
.filter((p) => p !== selfRef.current && typeof p.pause === "function")
.forEach((p) => p.pause());
} catch {}
}
try {
player.play();
} catch {}
};
this._updateProps();
// Important:
// this.state.dotOffset.x is the actual offset
// this.state.dotOffset.x._value is the offset from the point where the animation started
// However, since this.state.dotOffset.x is an object and not a value, it is difficult
// to compare it with other numbers. Therefore, the const currentOffsetX is used.
// To print all attributes of the object see https://stackoverflow.com/questions/9209747/printing-all-the-hidden-properties-of-an-object
this._panResponder = PanResponder.create({
// Pan handling for seeking
const panResponder = useMemo(
() =>
PanResponder.create({
onMoveShouldSetResponderCapture: () => true,
onMoveShouldSetPanResponderCapture: () => true,
onPanResponderGrant: async (e, gestureState) => {
if (this.state.playing) {
await this.pause();
onPanResponderGrant: async () => {
// Pause if currently playing (mimic previous behavior)
if (status.playing) {
try {
player.pause();
} catch {}
}
await this.setState({
xDotOffsetAtAnimationStart: this.state.dotOffset.x._value,
isDraggingRef.current = true;
// Initialize offset for drag
const currentX = dotX.__getValue();
xDotOffsetAtStartRef.current = currentX;
dotX.setOffset(currentX);
dotX.setValue(0);
// While dragging, update displayed time
dotX.addListener(({ value }) => {
const w = trackLayout.width || 1;
const currentOffset =
xDotOffsetAtStartRef.current +
(typeof value === "number" ? value : 0);
const clampedX = clamp(currentOffset, 0, w);
const percent = w > 0 ? clampedX / w : 0;
const ms = Math.round(percent * durationSec * 1000);
setDragTimeMs(ms);
});
await this.state.dotOffset.setOffset({
x: this.state.dotOffset.x._value,
});
await this.state.dotOffset.setValue({ x: 0, y: 0 });
},
onPanResponderMove: (e, gestureState) => {
Animated.event([
null,
{ dx: this.state.dotOffset.x, dy: this.state.dotOffset.y },
])(e, gestureState);
},
onPanResponderTerminationRequest: () => false,
onPanResponderTerminate: async (evt, gestureState) => {
// Another component has become the responder, so this gesture is cancelled.
const currentOffsetX =
this.state.xDotOffsetAtAnimationStart + this.state.dotOffset.x._value;
if (
currentOffsetX < 0 ||
currentOffsetX > this.state.trackLayout.width
) {
await this.state.dotOffset.setValue({
x: -this.state.xDotOffsetAtAnimationStart,
y: 0,
});
}
await this.state.dotOffset.flattenOffset();
await this.mapAudioToCurrentTime();
},
onPanResponderRelease: async (e, { vx }) => {
const currentOffsetX =
this.state.xDotOffsetAtAnimationStart + this.state.dotOffset.x._value;
if (
currentOffsetX < 0 ||
currentOffsetX > this.state.trackLayout.width
) {
await this.state.dotOffset.setValue({
x: -this.state.xDotOffsetAtAnimationStart,
y: 0,
});
}
await this.state.dotOffset.flattenOffset();
await this.mapAudioToCurrentTime();
},
});
}
_updateProps() {
const props = this.props;
this.registry = props.registry;
this.style = props.style || {};
if (this.registry) {
this.registry.register(this);
}
const { pauseAllBeforePlay = true } = props;
this.pauseAllBeforePlay = pauseAllBeforePlay;
}
componentDidUpdate() {
this._updateProps();
}
mapAudioToCurrentTime = async () => {
if (!this.soundObject) return;
await this.soundObject.setPositionAsync(this.state.currentTime);
};
onPressPlayPause = async () => {
if (this.state.playing) {
await this.pause();
return;
}
await this.play();
};
play = async () => {
if (!this.soundObject) return;
if (this.registry && this.pauseAllBeforePlay) {
const players = this.registry.getAll();
await Promise.all(
players.filter((p) => this !== p).map((p) => p.pause()),
);
}
await this.soundObject.playAsync();
this.setState({ playing: true }); // This is for the play-button to go to play
this.startMovingDot();
};
pause = async () => {
if (!this.soundObject) return;
await this.soundObject.pauseAsync();
this.setState({ playing: false }); // This is for the play-button to go to pause
Animated.timing(this.state.dotOffset, { useNativeDriver: false }).stop(); // Will also call animationPausedOrStopped()
};
startMovingDot = async () => {
if (!this.soundObject) return;
const status = await this.soundObject.getStatusAsync();
const durationLeft = status["durationMillis"] - status["positionMillis"];
Animated.timing(this.state.dotOffset, {
toValue: { x: this.state.trackLayout.width, y: 0 },
duration: durationLeft,
easing: Easing.linear,
onPanResponderMove: Animated.event([null, { dx: dotX }], {
useNativeDriver: false,
}).start(() => this.animationPausedOrStopped());
};
}),
animationPausedOrStopped = async () => {
if (!this.state.playing) {
// Audio has been paused
return;
}
if (!this.soundObject) return;
// Animation-duration is over (reset Animation and Audio):
await sleep(200); // In case animation has finished, but audio has not
this.setState({ playing: false });
await this.state.dotOffset.setValue({ x: 0, y: 0 });
// this.state.dotOffset.setValue(0);
await this.soundObject.setPositionAsync(0);
};
onPanResponderTerminationRequest: () => false,
handlePlaybackFinished = async () => {
// console.log(`[AudioSlider] Playback finished, resetting for replay`);
// Reset for replay instead of unloading
this.setState({ playing: false });
await this.state.dotOffset.setValue({ x: 0, y: 0 });
if (this.soundObject) {
await this.soundObject.stopAsync();
}
};
onPanResponderTerminate: async () => {
// Another component took the responder
dotX.removeAllListeners();
measureTrack = (event) => {
this.setState({ trackLayout: event.nativeEvent.layout }); // {x, y, width, height}
};
const w = trackLayout.width || 1;
const value = dotX.__getValue();
const currentOffset =
xDotOffsetAtStartRef.current +
(typeof value === "number" ? value : 0);
async componentDidMount() {
// https://github.com/olapiv/expo-audio-player/issues/13
const audioUrl = this.props.audio;
const loadAudio = async () => {
const tryLoad = async (ext) => {
// console.log(`[AudioSlider] Attempting to load with extension: ${ext}`);
const { sound } = await Audio.Sound.createAsync({
uri: audioUrl,
overrideFileExtensionAndroid: ext,
});
return sound;
};
let lastError = null;
let clampedX = clamp(currentOffset, 0, w);
dotX.flattenOffset();
dotX.setValue(clampedX);
if (durationSec > 0) {
const targetSec = (clampedX / w) * durationSec;
try {
// First try with m4a (preferred)
const sound = await tryLoad("m4a");
// console.log(`[AudioSlider] Successfully loaded with m4a extension`);
this.soundObject = sound;
await this.soundObject.setIsLoopingAsync(false);
this.soundObject.setOnPlaybackStatusUpdate((status) => {
if (!status.didJustFinish) return;
this.handlePlaybackFinished();
});
return;
} catch (err1) {
// console.log(`[AudioSlider] Failed to load with m4a:`, err1.message);
lastError = err1;
try {
// Fallback to mp4
const sound = await tryLoad("mp4");
// console.log(`[AudioSlider] Successfully loaded with mp4 extension`);
this.soundObject = sound;
await this.soundObject.setIsLoopingAsync(false);
this.soundObject.setOnPlaybackStatusUpdate((status) => {
if (!status.didJustFinish) return;
this.handlePlaybackFinished();
});
return;
} catch (err2) {
// console.log(`[AudioSlider] Failed to load with mp4:`, err2.message);
lastError = err2;
try {
// Last fallback to aac
const sound = await tryLoad("aac");
// console.log(`[AudioSlider] Successfully loaded with aac extension`);
this.soundObject = sound;
await this.soundObject.setIsLoopingAsync(false);
this.soundObject.setOnPlaybackStatusUpdate((status) => {
if (!status.didJustFinish) return;
this.handlePlaybackFinished();
});
return;
} catch (err3) {
// console.log(`[AudioSlider] Failed to load with aac:`, err3.message);
lastError = err3;
}
}
await player.seekTo(targetSec);
} catch {}
}
// All attempts failed
console.error(
`[AudioSlider] All load attempts failed for ${audioUrl}. Last error:`,
lastError,
isDraggingRef.current = false;
setDragTimeMs(0);
},
onPanResponderRelease: async () => {
dotX.removeAllListeners();
const w = trackLayout.width || 1;
const value = dotX.__getValue();
const currentOffset =
xDotOffsetAtStartRef.current +
(typeof value === "number" ? value : 0);
let clampedX = clamp(currentOffset, 0, w);
dotX.flattenOffset();
dotX.setValue(clampedX);
if (durationSec > 0) {
const targetSec = (clampedX / w) * durationSec;
try {
await player.seekTo(targetSec);
} catch {}
}
isDraggingRef.current = false;
setDragTimeMs(0);
},
}),
[dotX, durationSec, player, status.playing, trackLayout.width],
);
const measureTrack = (event) => {
setTrackLayout(event.nativeEvent.layout || {});
};
await loadAudio();
// Times for display (DigitalTimeString expects milliseconds)
const durationMs = Math.round(durationSec * 1000);
const currentTimeMs = isDraggingRef.current
? dragTimeMs
: Math.round(currentTimeSec * 1000);
if (!this.soundObject) {
// Loading failed; avoid further calls and leave UI inert or show error
console.log(
`[AudioSlider] No sound object created, setting duration to 0`,
);
this.setState({ duration: 0 });
return;
}
try {
const status = await this.soundObject.getStatusAsync();
this.setState({ duration: status.durationMillis });
} catch (error) {
console.log("Error getting audio status:", error);
this.setState({ duration: 0 });
return;
}
// This requires measureTrack to have been called.
this.state.dotOffset.addListener(() => {
const animatedCurrentTime = this.state.dotOffset.x
.interpolate({
inputRange: [0, this.state.trackLayout.width],
outputRange: [0, this.state.duration],
extrapolate: "clamp",
})
.__getValue();
this.setState({ currentTime: animatedCurrentTime });
});
}
async componentWillUnmount() {
if (this.soundObject) {
await this.soundObject.unloadAsync();
}
this.state.dotOffset.removeAllListeners();
if (this.registry) {
this.registry.unregister(this);
}
}
render() {
return (
<View
style={{
@@ -328,28 +254,28 @@ class AudioSlider extends PureComponent {
paddingRight: THUMB_SIZE,
zIndex: 2,
}}
onPress={this.onPressPlayPause}
onPress={onPressPlayPause}
>
{this.state.playing ? (
{status.playing ? (
<MaterialCommunityIcons
name="pause-circle-outline"
size={30}
style={[this.props.styles.controlIcon, this.style.controlIcon]}
style={[styles.controlIcon, styleProp?.controlIcon]}
/>
) : (
<MaterialCommunityIcons
name="play-circle-outline"
size={30}
style={[this.props.styles.controlIcon, this.style.controlIcon]}
style={[styles.controlIcon, styleProp?.controlIcon]}
/>
)}
</TouchableOpacity>
<Animated.View
onLayout={this.measureTrack}
onLayout={measureTrack}
style={[
this.props.styles.slideBar,
this.style.slideBar,
styles.slideBar,
styleProp?.slideBar,
{
height: TRACK_SIZE,
borderRadius: TRACK_SIZE / 2,
@@ -366,39 +292,21 @@ class AudioSlider extends PureComponent {
left: -((THUMB_SIZE * 4) / 2),
width: THUMB_SIZE * 4,
height: THUMB_SIZE * 4,
transform: [
{
translateX: this.state.dotOffset.x.interpolate({
inputRange: [
0,
this.state.trackLayout.width != undefined
? this.state.trackLayout.width
: 1,
],
outputRange: [
0,
this.state.trackLayout.width != undefined
? this.state.trackLayout.width
: 1,
],
extrapolate: "clamp",
}),
},
],
transform: [{ translateX: dotX }],
}}
{...this._panResponder.panHandlers}
{...panResponder.panHandlers}
>
<View
style={[
this.props.styles.slideCursor,
this.style.slideCursor,
styles.slideCursor,
styleProp?.slideCursor,
{
width: THUMB_SIZE,
height: THUMB_SIZE,
borderRadius: THUMB_SIZE / 2,
},
]}
></View>
/>
</Animated.View>
</Animated.View>
</View>
@@ -410,13 +318,12 @@ class AudioSlider extends PureComponent {
justifyContent: "space-between",
}}
>
<DigitalTimeString time={this.state.currentTime} style={this.style} />
<DigitalTimeString time={this.state.duration} style={this.style} />
<DigitalTimeString time={currentTimeMs} style={styleProp} />
<DigitalTimeString time={durationMs} style={styleProp} />
</View>
</View>
);
}
}
export default withHooks(AudioSlider, () => {
const styles = useStyles();
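
The rewrite above hinges on one expo-audio pairing: useAudioPlayer owns the native player for the component's lifetime, and useAudioPlayerStatus re-renders on progress updates. A minimal sketch of that pairing in isolation (component and labels are placeholders, not from this commit):

import React from "react";
import { Button, Text, View } from "react-native";
import { useAudioPlayer, useAudioPlayerStatus } from "expo-audio";

// Minimal play/pause control with a progress readout, mirroring the hooks
// AudioSlider uses above.
function MiniPlayer({ uri }) {
  const player = useAudioPlayer(uri, { updateInterval: 250 });
  const status = useAudioPlayerStatus(player);
  return (
    <View>
      <Button
        title={status.playing ? "Pause" : "Play"}
        onPress={() => (status.playing ? player.pause() : player.play())}
      />
      <Text>
        {Math.round(status.currentTime)}s / {Math.round(status.duration)}s
      </Text>
    </View>
  );
}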

View file

@@ -7114,6 +7114,7 @@ __metadata:
eslint-plugin-unused-imports: "npm:^3.0.0"
eventemitter3: "npm:^5.0.1"
expo: "npm:~53.0.23"
expo-audio: "npm:~0.4.9"
expo-av: "npm:~15.1.7"
expo-build-properties: "npm:~0.14.8"
expo-constants: "npm:~17.1.7"
@@ -10630,6 +10631,17 @@
languageName: node
linkType: hard
"expo-audio@npm:~0.4.9":
version: 0.4.9
resolution: "expo-audio@npm:0.4.9"
peerDependencies:
expo: "*"
react: "*"
react-native: "*"
checksum: 10/874527adcf03e044770fc64fa9d58f735528bd40ebe038635ebf5ef34a21cdbce0115c9b2b1406604b9622f665d27a3a7eda417275322f2506a77f74c359724f
languageName: node
linkType: hard
"expo-av@npm:~15.1.7":
version: 15.1.7
resolution: "expo-av@npm:15.1.7"