// Chat input bar for an alert conversation. Supports two ways to send a
// message: typed text (sent through useInsertMessage) and a voice recording
// (captured with the Audio API, then uploaded as an .m4a file).
// NOTE(review): this file is truncated in the current view — the JSX returned
// at the bottom is incomplete and appears to have lost its element tags; it is
// reproduced verbatim below.
import React, { useState, useCallback, useEffect, useRef } from "react";
import { View, Text, TouchableOpacity } from "react-native";
import { MaterialCommunityIcons } from "@expo/vector-icons";
import * as Audio from "expo-audio";
import { IOSOutputFormat, AudioQuality } from "expo-audio";
import Countdown from "react-countdown";
import { useAlertState, getLocationState, useSessionState } from "~/stores";
import { useTheme, createStyles } from "~/theme";
import network from "~/network";
import TextArea from "./TextArea";
import useInsertMessage from "~/hooks/useInsertMessage";

// Input modes: empty input (mic button shown), an audio recording in
// progress, or typed text present (send button shown).
const MODE = {
  EMPTY: "EMPTY",
  RECORDING: "RECORDING",
  TEXT: "TEXT",
};

// Icon name for the right-hand action button in each mode.
const rightButtonIconNames = {
  [MODE.EMPTY]: "microphone",
  [MODE.RECORDING]: "send-circle",
  [MODE.TEXT]: "send-circle",
};

// Maximum recording duration in seconds; onRecordingCountDownComplete fires
// when the countdown reaches zero.
const RECORDING_TIMEOUT = 59;

// Per-platform recording options: mono AAC in an .m4a container,
// 44.1 kHz sample rate, 64 kbps bit rate.
// NOTE(review): these objects use the expo-av Recording option shape
// (outputFormat/audioEncoder/linearPCM*), yet the import is expo-audio —
// confirm the expo-audio API actually accepts this shape.
const recordingSettings = {
  android: {
    extension: ".m4a",
    outputFormat: "mpeg4",
    audioEncoder: "aac",
    sampleRate: 44100,
    numberOfChannels: 1,
    bitRate: 64000,
  },
  ios: {
    extension: ".m4a",
    outputFormat: IOSOutputFormat.MPEG4AAC,
    audioQuality: AudioQuality.MAX,
    sampleRate: 44100,
    numberOfChannels: 1,
    bitRate: 64000,
    // linearPCM* settings only apply to PCM output; presumably ignored for
    // AAC — harmless but confirm.
    linearPCMBitDepth: 16,
    linearPCMIsBigEndian: false,
    linearPCMIsFloat: false,
  },
};

// Touch feedback opacity used by the buttons rendered below.
const activeOpacity = 0.7;

export default React.memo(function ChatInput({
  style,
  labelStyle,
  inputStyle,
  label,
  data: { alertId },
  scrollViewRef,
  ...props
}) {
  const { colors } = useTheme();
  // NOTE(review): useStyles is not defined in the visible portion of this
  // file — presumably created with createStyles further down; verify.
  const styles = useStyles();
  const [text, setText] = useState("");
  const { userId, username: sessionUsername } = useSessionState([
    "userId",
    "username",
  ]);
  // Fall back to an anonymous display name when the session has none.
  const username = sessionUsername || "anonyme";
  const { hasMessages } = useAlertState(["hasMessages"]);
  // Auto-focus the text input only while the conversation is still empty.
  const autoFocus = !hasMessages;
  const [isRecording, setIsRecording] = useState(false);
  const [recording, setRecording] = useState(null);
  const [sound, setSound] = useState(null);
  const insertMessage = useInsertMessage(alertId);

  // Release the previously loaded sound when it changes or on unmount.
  useEffect(() => {
    return sound
      ? () => {
          console.log("Unloading Sound");
          sound.unloadAsync();
        }
      : undefined;
  }, [sound]);

  const hasText = text.length > 0;
  // Recording takes precedence over typed text when deriving the mode.
  const mode = isRecording ? MODE.RECORDING : hasText ? MODE.TEXT : MODE.EMPTY;

  // Send the typed text (plus the current GeoJSON location, if available)
  // as a chat message. Clears the input optimistically before awaiting.
  const sendTextMessage = useCallback(async () => {
    if (!text) {
      return;
    }
    const coords = getLocationState();
    const { latitude, longitude } = coords || {};
    // GeoJSON Point uses [longitude, latitude] ordering.
    const location =
      latitude && longitude
        ? {
            type: "Point",
            coordinates: [longitude, latitude],
          }
        : null;
    const messageText = text;
    setText("");
    try {
      await insertMessage({
        text: messageText,
        location,
        username,
        userId,
      });
    } catch (error) {
      // NOTE(review): the cleared input text is lost if the insert fails.
      console.error("Failed to send message:", error);
    }
  }, [insertMessage, text, setText, userId, username]);

  // Request mic permission, configure the audio session, stop any playback,
  // then prepare and start a new recording.
  const startRecording = useCallback(async () => {
    try {
      console.log("Requesting permissions..");
      // NOTE(review): the permission result is not checked before recording.
      await Audio.requestRecordingPermissionsAsync();
      await Audio.setAudioModeAsync({
        allowsRecording: true,
        interruptionMode: "doNotMix",
        playsInSilentMode: true,
        interruptionModeAndroid: "doNotMix",
        shouldRouteThroughEarpiece: false,
        shouldPlayInBackground: true,
      });
      // stop playback
      if (sound !== null) {
        // NOTE(review): setOnPlaybackStatusUpdate is called after
        // unloadAsync — likely the intended order is reversed; confirm.
        await sound.unloadAsync();
        sound.setOnPlaybackStatusUpdate(null);
        setSound(null);
      }
      console.log("Starting recording..");
      // NOTE(review): identical to the setAudioModeAsync call above —
      // presumably redundant.
      await Audio.setAudioModeAsync({
        allowsRecording: true,
        interruptionMode: "doNotMix",
        playsInSilentMode: true,
        interruptionModeAndroid: "doNotMix",
        shouldRouteThroughEarpiece: false,
        shouldPlayInBackground: true,
      });
      const _recording = new Audio.Recording();
      try {
        await _recording.prepareToRecordAsync(recordingSettings);
        setRecording(_recording);
        await _recording.startAsync();
        console.log("recording");
        setIsRecording(true);
      } catch (error) {
        console.log("error while recording:", error);
      }
      console.log("Recording started");
    } catch (err) {
      console.log("Failed to start recording", err);
    }
  }, [sound]);

  // Stop and unload the current recording, clearing the recording flag.
  const stopRecording = useCallback(async () => {
    try {
      await recording.stopAndUnloadAsync();
    } catch (_error) {
      // Do nothing -- we are already unloaded.
    }
    if (isRecording) {
      setIsRecording(false);
    }
  }, [recording, isRecording]);

  // Switch the audio session back to playback mode and load the finished
  // recording into a Sound object (kept in state for the unload effect).
  const recordedToSound = useCallback(async () => {
    await Audio.setAudioModeAsync({
      allowsRecording: false,
      interruptionMode: "doNotMix",
      playsInSilentMode: true,
      interruptionModeAndroid: "doNotMix",
      shouldRouteThroughEarpiece: false,
      shouldPlayInBackground: true,
    });
    const { sound: _sound } = await recording.createNewLoadedSoundAsync({
      isLooping: false,
      isMuted: false,
      volume: 1.0,
      rate: 1.0,
      shouldCorrectPitch: true,
    });
    setSound(_sound);
  }, [recording]);

  // Upload the recorded file as multipart form data, tagged with the alert id.
  const uploadAudio = useCallback(async () => {
    const uri = recording.getURI();
    const fd = new FormData();
    fd.append("data[alertId]", alertId);
    fd.append("data[file]", {
      uri,
      type: "audio/mp4",
      name: "audioRecord.m4a",
    });
    await network.oaFilesKy.post("audio/upload", {
      body: fd,
    });
  }, [alertId, recording]);

  // Finish a recording: stop it, load it for playback, upload it.
  const sendRecording = useCallback(async () => {
    await stopRecording();
    await recordedToSound();
    await uploadAudio();
  }, [stopRecording, recordedToSound, uploadAudio]);

  // Discard the in-progress recording without uploading.
  const deleteRecording = useCallback(async () => {
    await stopRecording();
  }, [stopRecording]);

  // Mic button toggles between starting a recording and sending it.
  const triggerMicrophoneClick = useCallback(async () => {
    if (isRecording) {
      await sendRecording();
    } else {
      await startRecording();
    }
  }, [isRecording, startRecording, sendRecording]);

  // Auto-send when the RECORDING_TIMEOUT countdown elapses.
  // NOTE(review): stopRecording and recordedToSound are invoked here AND
  // again inside sendRecording — the first two calls look redundant (double
  // stop/load); confirm and simplify.
  const onRecordingCountDownComplete = useCallback(async () => {
    await stopRecording();
    await recordedToSound();
    await sendRecording();
  }, [sendRecording, stopRecording, recordedToSound]);

  // reset on alert change
  // NOTE(review): this mutates state during render (setText/deleteRecording
  // called outside an effect) — React docs recommend keying the component on
  // alertId or doing this in an effect; confirm intent.
  const dataRef = useRef(null);
  if (!dataRef.current) {
    dataRef.current = alertId;
  }
  if (dataRef.current !== alertId) {
    dataRef.current = alertId;
    if (hasText) {
      setText("");
    }
    if (mode === MODE.RECORDING) {
      deleteRecording();
    }
  }

  // NOTE(review): the JSX below is garbled/truncated in this copy of the
  // file — element tags are missing and the expression is cut off mid-stream.
  // Reproduced verbatim; do not treat as authoritative markup.
  return ( {mode === MODE.RECORDING && ( Enregistrement audio en cours )} {(mode === MODE.TEXT || mode === MODE.EMPTY) && (