I'm building a voice-recording app with Expo. This is the function I use to start recording, and it works fine.
const [recording, setRecording] = React.useState();

async function startRecording() {
  try {
    console.log('Requesting permissions..');
    await Audio.requestPermissionsAsync();
    await Audio.setAudioModeAsync({
      allowsRecordingIOS: true,
      playsInSilentModeIOS: true,
    });
    console.log('Starting recording..');
    const recording = new Audio.Recording();
    await recording.prepareToRecordAsync(Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY);
    await recording.startAsync();
    setRecording(recording);
    console.log('Recording started');
  } catch (err) {
    console.error('Failed to start recording', err);
  }
}
And this is my "stop" function; the recording is stopped and stored...
async function stopRecording() {
  console.log('Stopping recording..');
  setRecording(undefined);
  await recording.stopAndUnloadAsync();
  const uri = recording.getURI();
  console.log('Recording stopped and stored at', uri);
}
Now I want to play this saved sound with a play button. How can I get at the saved sound?
return (
  <View style={styles.container}>
    <Button
      title={recording ? 'Stop Recording' : 'Start Recording'}
      onPress={recording ? stopRecording : startRecording}
    />
  </View>
);
I want to pass the stored URI as the file location. Is that okay?
async function playSound() {
  console.log('Loading Sound');
  const { sound } = await Audio.Sound.createAsync(
    require(RecordedURI)
  );
  setSound(sound);
  console.log('Playing Sound');
  await sound.playAsync();
}
Posted on 2021-04-22 15:06:57
First, you are assigning your state to undefined right after you get the recording, so the recorded URI is no longer reachable from state when you want to play it.
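If you only want a minimal fix to your own stopRecording, a sketch like the following would work. Note that the recordedUri state variable is my own addition for illustration, not something from your code:

const [recordedUri, setRecordedUri] = React.useState<string | null>(null); // hypothetical extra state to keep the URI

async function stopRecording() {
  console.log('Stopping recording..');
  // Stop and unload while `recording` still points at the Audio.Recording instance
  await recording.stopAndUnloadAsync();
  // Keep the file URI around so a play button can use it later
  setRecordedUri(recording.getURI());
  // Clear the recording state only after the URI has been saved
  setRecording(undefined);
}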
That said, I suggest you do something like this. Create two refs, one for Recording and one for Playing the audio:
const AudioRecorder = useRef(new Audio.Recording());
const AudioPlayer = useRef(new Audio.Sound());
plus some state for recordingStatus, permission, and so on:
const [RecordedURI, SetRecordedURI] = useState<string>("");
const [AudioPermission, SetAudioPermission] = useState<boolean>(false);
const [IsRecording, SetIsRecording] = useState<boolean>(false);
const [IsPLaying, SetIsPLaying] = useState<boolean>(false);
A Snack with the recording implementation is here, and here is a GitHub repo with the implementation. A screenshot of the result is also linked. I have added the rest of the implementation below.
import React, { useState, useRef, useEffect } from "react";
import { View, StyleSheet, Button, Text } from "react-native";
import { Audio } from "expo-av";

export default function App() {
  // Refs for the audio
  const AudioRecorder = useRef(new Audio.Recording());
  const AudioPlayer = useRef(new Audio.Sound());

  // States for UI
  const [RecordedURI, SetRecordedURI] = useState<string>("");
  const [AudioPermission, SetAudioPermission] = useState<boolean>(false);
  const [IsRecording, SetIsRecording] = useState<boolean>(false);
  const [IsPLaying, SetIsPLaying] = useState<boolean>(false);

  // Initial load to get the audio permission
  useEffect(() => {
    GetPermission();
  }, []);

  // Function to get the audio permission
  const GetPermission = async () => {
    const getAudioPerm = await Audio.requestPermissionsAsync();
    SetAudioPermission(getAudioPerm.granted);
  };

  // Function to start recording
  const StartRecording = async () => {
    try {
      // Check if the user has given permission to record
      if (AudioPermission === true) {
        try {
          // Prepare the Audio Recorder
          await AudioRecorder.current.prepareToRecordAsync(
            Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY
          );

          // Start recording
          await AudioRecorder.current.startAsync();
          SetIsRecording(true);
        } catch (error) {
          console.log(error);
        }
      } else {
        // If the user has not given permission to record, ask for it
        GetPermission();
      }
    } catch (error) {}
  };

  // Function to stop recording
  const StopRecording = async () => {
    try {
      // Stop recording
      await AudioRecorder.current.stopAndUnloadAsync();

      // Get the recorded URI here
      const result = AudioRecorder.current.getURI();
      if (result) SetRecordedURI(result);

      // Reset the Audio Recorder
      AudioRecorder.current = new Audio.Recording();
      SetIsRecording(false);
    } catch (error) {}
  };

  // Function to play the recorded audio
  const PlayRecordedAudio = async () => {
    try {
      // Load the recorded URI
      await AudioPlayer.current.loadAsync({ uri: RecordedURI }, {}, true);

      // Get player status
      const playerStatus = await AudioPlayer.current.getStatusAsync();

      // Play if the sound is loaded successfully
      if (playerStatus.isLoaded) {
        if (playerStatus.isPlaying === false) {
          AudioPlayer.current.playAsync();
          SetIsPLaying(true);
        }
      }
    } catch (error) {}
  };

  // Function to stop the playing audio
  const StopPlaying = async () => {
    try {
      // Get player status
      const playerStatus = await AudioPlayer.current.getStatusAsync();

      // If the sound is loaded, unload it
      if (playerStatus.isLoaded === true)
        await AudioPlayer.current.unloadAsync();

      SetIsPLaying(false);
    } catch (error) {}
  };

  return (
    <View style={styles.container}>
      <Button
        title={IsRecording ? "Stop Recording" : "Start Recording"}
        color={IsRecording ? "red" : "green"}
        onPress={IsRecording ? StopRecording : StartRecording}
      />
      <Button
        title={IsPLaying ? "Stop Sound" : "Play Sound"}
        color={IsPLaying ? "red" : "orange"}
        onPress={IsPLaying ? StopPlaying : PlayRecordedAudio}
      />
      <Text>{RecordedURI}</Text>
    </View>
  );
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: "center",
    backgroundColor: "#ecf0f1",
    padding: 8,
  },
});
Posted on 2021-05-16 22:56:44
This is an interesting question, and I ran into the same problem: I couldn't play back from the recorded URI. I also went through the official docs, but they only provide limited examples.
But I came up with a solution that works for me: Audio.Sound.createAsync() also supports a URI source. You just pass {uri: recording.getURI() || URIFROMFileSystem} and it works perfectly.
In my case:
const { sound } = await Audio.Sound.createAsync({
  uri: "file:///Users/xyz/Library/Developer/CoreSimulator/Devices/1BBRFGFCBC414-6685-4818-B625-01038771B105/data/Containers/Data/Application/18E8D28E-EA03-4733-A0CF-F3E21A23427D/Library/Caches/ExponentExperienceData/%2540anonymous%252Fchat-app-c88a6b2e-ad36-45b8-9e5e-1fb6001826eb/AV/recording-87738E73-A38E-49E8-89D1-9689DC5F316B.caf",
});
setSound(sound);
console.log("Playing Sound");
await sound.playAsync();
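The path above is hard-coded for my simulator, so it only works on my machine. A more portable sketch, assuming the URI returned by recording.getURI() has been kept in a recordedUri variable (a name I'm introducing here, not from the original code), would be:

// Minimal sketch: play whatever URI the recorder reported, instead of a hard-coded path
async function playRecording(recordedUri: string) {
  const { sound } = await Audio.Sound.createAsync({ uri: recordedUri });
  console.log("Playing Sound");
  await sound.playAsync();
}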
https://stackoverflow.com/questions/67207450