Flutter音频录制插件flutter_sound_record的使用
简介
flutter_sound_record
是一个用于从麦克风录制音频到指定文件路径的Flutter插件,它基于 record 插件。该插件在Android上使用MediaRecorder,在iOS上使用AVAudioRecorder,且不需要额外的依赖。
选项
- bit rate (注意:在iOS上要小心设置这个参数)
- sampling rate
- encoder
平台支持
Android
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<!-- 可选,你需要自行检查此权限 -->
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
- 最低SDK版本:16(如果使用OPUS,则为29)
iOS
<key>NSMicrophoneUsageDescription</key>
<string>We need access to the microphone to record audio files</string>
- 最低SDK版本:8.0(如果使用OPUS,则为11)
支持的编码器
/// Audio encoders supported by the plugin (availability varies per platform).
enum AudioEncoder {
/// Will output to MPEG_4 format container
AAC,
/// Will output to MPEG_4 format container
AAC_LD,
/// Will output to MPEG_4 format container
AAC_HE,
/// sampling rate should be set to 8kHz
/// Will output to 3GP format container on Android
AMR_NB,
/// sampling rate should be set to 16kHz
/// Will output to 3GP format container on Android
AMR_WB,
/// Will output to MPEG_4 format container
/// /!\ SDK 29 on Android /!\
/// /!\ SDK 11 on iOS /!\
OPUS,
}
使用方法
// Import the package.
// NOTE: the original snippet imported 'package:record/record.dart' (the
// upstream plugin this one is based on) and used static `Record.*` calls.
// flutter_sound_record exposes an instance-based API via FlutterSoundRecord,
// as the full demo later in this article shows.
import 'package:flutter_sound_record/flutter_sound_record.dart';

// Create a recorder instance.
final FlutterSoundRecord record = FlutterSoundRecord();

// Check and request permission.
bool result = await record.hasPermission();

// Start recording.
await record.start(
  path: 'aFullPath/myFile.m4a', // required
  encoder: AudioEncoder.AAC, // default
  bitRate: 128000, // default
  sampleRate: 44100, // default
);

// Stop recording.
await record.stop();

// Query recording state.
bool isRecording = await record.isRecording();
注意事项
确保检查支持的值,详情请参阅提供的链接。
已知问题
无
完整示例Demo
以下是一个完整的示例代码,展示了如何使用 flutter_sound_record
插件进行音频录制,并结合 just_audio
插件播放录制的音频。
import 'dart:async';
import 'package:flutter/material.dart';
import 'package:flutter_sound_record/flutter_sound_record.dart';
import 'package:just_audio/just_audio.dart' as ap;
/// Application entry point for the recorder/player demo.
void main() => runApp(const MyApp());
/// Root widget; switches between the recorder and the player screens.
class MyApp extends StatefulWidget {
  const MyApp({Key? key}) : super(key: key);

  @override
  State<MyApp> createState() => _MyAppState();
}
class _MyAppState extends State<MyApp> {
  // Whether to show the playback UI instead of the recorder UI.
  // Initialized here, so the original initState override that re-assigned
  // `showPlayer = false` was redundant and has been removed.
  bool showPlayer = false;

  // Source of the last finished recording; non-null once a recording stops.
  ap.AudioSource? audioSource;

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: Scaffold(
        body: Center(
          child: showPlayer
              ? Padding(
                  padding: const EdgeInsets.symmetric(horizontal: 25),
                  child: AudioPlayer(
                    // Safe: showPlayer is only set true together with a
                    // non-null audioSource in the onStop callback below.
                    source: audioSource!,
                    onDelete: () => setState(() => showPlayer = false),
                  ),
                )
              : AudioRecorder(
                  onStop: (String path) {
                    setState(() {
                      audioSource = ap.AudioSource.uri(Uri.parse(path));
                      showPlayer = true;
                    });
                  },
                ),
        ),
      ),
    );
  }
}
/// Recording UI; reports the finished file's path through [onStop].
class AudioRecorder extends StatefulWidget {
  const AudioRecorder({required this.onStop, Key? key}) : super(key: key);

  /// Called with the output file path when recording stops.
  final void Function(String path) onStop;

  @override
  State<AudioRecorder> createState() => _AudioRecorderState();
}
class _AudioRecorderState extends State<AudioRecorder> {
  bool _isRecording = false;
  bool _isPaused = false;

  // Elapsed recording time in whole seconds, driven by _timer.
  int _recordDuration = 0;

  Timer? _timer; // ticks every second to advance _recordDuration
  Timer? _ampTimer; // polls the microphone amplitude 5x per second
  final FlutterSoundRecord _audioRecorder = FlutterSoundRecord();
  Amplitude? _amplitude;

  @override
  void dispose() {
    _timer?.cancel();
    _ampTimer?.cancel();
    _audioRecorder.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    // FIX: the original returned a nested MaterialApp/Scaffold here, even
    // though this widget is embedded inside MyApp's Scaffold body. A widget
    // used as a screen fragment must not create its own app shell, so we
    // return the content Column directly.
    return Column(
      mainAxisAlignment: MainAxisAlignment.center,
      children: <Widget>[
        Row(
          mainAxisAlignment: MainAxisAlignment.center,
          children: <Widget>[
            _buildRecordStopControl(),
            const SizedBox(width: 20),
            _buildPauseResumeControl(),
            const SizedBox(width: 20),
            _buildText(),
          ],
        ),
        if (_amplitude != null) ...<Widget>[
          const SizedBox(height: 40),
          Text('Current: ${_amplitude?.current ?? 0.0}'),
          Text('Max: ${_amplitude?.max ?? 0.0}'),
        ],
      ],
    );
  }

  /// Round button that starts recording, or stops it while active/paused.
  Widget _buildRecordStopControl() {
    late Icon icon;
    late Color color;
    if (_isRecording || _isPaused) {
      icon = const Icon(Icons.stop, color: Colors.red, size: 30);
      color = Colors.red.withOpacity(0.1);
    } else {
      final ThemeData theme = Theme.of(context);
      icon = Icon(Icons.mic, color: theme.primaryColor, size: 30);
      color = theme.primaryColor.withOpacity(0.1);
    }
    return ClipOval(
      child: Material(
        color: color,
        child: InkWell(
          child: SizedBox(width: 56, height: 56, child: icon),
          onTap: () {
            _isRecording ? _stop() : _start();
          },
        ),
      ),
    );
  }

  /// Round button that toggles pause/resume; hidden when not recording.
  Widget _buildPauseResumeControl() {
    if (!_isRecording && !_isPaused) {
      return const SizedBox.shrink();
    }
    late Icon icon;
    late Color color;
    if (!_isPaused) {
      icon = const Icon(Icons.pause, color: Colors.red, size: 30);
      color = Colors.red.withOpacity(0.1);
    } else {
      final ThemeData theme = Theme.of(context);
      icon = const Icon(Icons.play_arrow, color: Colors.red, size: 30);
      color = theme.primaryColor.withOpacity(0.1);
    }
    return ClipOval(
      child: Material(
        color: color,
        child: InkWell(
          child: SizedBox(width: 56, height: 56, child: icon),
          onTap: () {
            _isPaused ? _resume() : _pause();
          },
        ),
      ),
    );
  }

  /// Shows the running timer while recording/paused, a hint otherwise.
  Widget _buildText() {
    if (_isRecording || _isPaused) {
      return _buildTimer();
    }
    return const Text('Waiting to record');
  }

  /// Renders the elapsed duration as "MM : SS".
  Widget _buildTimer() {
    final String minutes = _formatNumber(_recordDuration ~/ 60);
    final String seconds = _formatNumber(_recordDuration % 60);
    return Text(
      '$minutes : $seconds',
      style: const TextStyle(color: Colors.red),
    );
  }

  /// Zero-pads a number to two digits (e.g. 7 -> "07").
  String _formatNumber(int number) {
    String numberStr = number.toString();
    if (number < 10) {
      numberStr = '0$numberStr';
    }
    return numberStr;
  }

  /// Starts recording (if permission is granted) and kicks off the timers.
  Future<void> _start() async {
    try {
      if (await _audioRecorder.hasPermission()) {
        await _audioRecorder.start();
        final bool isRecording = await _audioRecorder.isRecording();
        if (!mounted) return; // widget may have been disposed while awaiting
        setState(() {
          _isRecording = isRecording;
          _recordDuration = 0;
        });
        _startTimer();
      }
    } catch (e) {
      if (kDebugMode) {
        print(e);
      }
    }
  }

  /// Stops recording and hands the file path to the parent via onStop.
  Future<void> _stop() async {
    _timer?.cancel();
    _ampTimer?.cancel();
    final String? path = await _audioRecorder.stop();
    // FIX: the original used `path!`, which throws if the recorder returns
    // no path; guard explicitly instead.
    if (path != null) {
      widget.onStop(path);
    }
    if (!mounted) return;
    setState(() => _isRecording = false);
  }

  /// Pauses recording and freezes the timers.
  Future<void> _pause() async {
    _timer?.cancel();
    _ampTimer?.cancel();
    await _audioRecorder.pause();
    if (!mounted) return;
    setState(() => _isPaused = true);
  }

  /// Resumes a paused recording and restarts the timers.
  Future<void> _resume() async {
    _startTimer();
    await _audioRecorder.resume();
    if (!mounted) return;
    setState(() => _isPaused = false);
  }

  /// (Re)starts the duration and amplitude polling timers.
  void _startTimer() {
    _timer?.cancel();
    _ampTimer?.cancel();
    _timer = Timer.periodic(const Duration(seconds: 1), (Timer t) {
      if (!mounted) return; // FIX: avoid setState after dispose
      setState(() => _recordDuration++);
    });
    _ampTimer = Timer.periodic(const Duration(milliseconds: 200), (Timer t) async {
      _amplitude = await _audioRecorder.getAmplitude();
      if (!mounted) return; // FIX: avoid setState after dispose
      setState(() {});
    });
  }
}
/// Plays back a finished recording and offers a delete button.
///
/// NOTE(review): the original version called `ap.AudioPlayerWidget`,
/// `ap.AudioPlayerControls`, `ap.PlayButton`, `ap.PauseButton`, and
/// `ap.StopButton`, none of which exist in the just_audio package, and it
/// never used [source]. This rewrite keeps the same constructor interface
/// but uses just_audio's real API (setAudioSource / playerStateStream /
/// play / pause / stop), which requires owning an [ap.AudioPlayer] in a
/// State object so it can be disposed.
class AudioPlayer extends StatefulWidget {
  const AudioPlayer({
    required this.source,
    required this.onDelete,
    Key? key,
  }) : super(key: key);

  /// Audio to play, built from the recorded file's URI.
  final ap.AudioSource source;

  /// Invoked when the user taps the delete button.
  final VoidCallback onDelete;

  @override
  State<AudioPlayer> createState() => _AudioPlayerState();
}

class _AudioPlayerState extends State<AudioPlayer> {
  final ap.AudioPlayer _player = ap.AudioPlayer();

  @override
  void initState() {
    super.initState();
    // Load the recorded file into the player.
    _player.setAudioSource(widget.source);
  }

  @override
  void dispose() {
    // Release the platform player resources.
    _player.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return Column(
      mainAxisAlignment: MainAxisAlignment.center,
      children: <Widget>[
        // Rebuild the controls whenever the playing state changes.
        StreamBuilder<ap.PlayerState>(
          stream: _player.playerStateStream,
          builder: (BuildContext context, AsyncSnapshot<ap.PlayerState> snapshot) {
            final bool playing = snapshot.data?.playing ?? false;
            return Row(
              mainAxisAlignment: MainAxisAlignment.center,
              children: <Widget>[
                IconButton(
                  icon: Icon(playing ? Icons.pause : Icons.play_arrow),
                  onPressed: playing ? _player.pause : _player.play,
                ),
                IconButton(
                  icon: const Icon(Icons.stop),
                  onPressed: () {
                    _player.stop();
                    _player.seek(Duration.zero); // rewind for the next play
                  },
                ),
              ],
            );
          },
        ),
        ElevatedButton(
          onPressed: widget.onDelete,
          child: const Text('Delete'),
        ),
      ],
    );
  }
}
说明
- AudioRecorder: 负责录音功能,包含开始、暂停、继续和停止录音的操作。
- MyApp: 主应用程序入口,根据是否显示播放器来切换显示录音界面或播放器界面。
- AudioPlayer: 用于播放录制的音频文件,并提供删除按钮以清除播放器。
通过上述代码,你可以实现一个简单的录音应用,并能够播放录制的音频。希望这对你有所帮助!
更多关于Flutter音频录制插件flutter_sound_record的使用的实战系列教程也可以访问 https://www.itying.com/category-92-b0.html
当然,以下是一个关于如何使用Flutter音频录制插件flutter_sound_record
的示例代码。这个插件允许你在Flutter应用中录制音频。
首先,你需要在pubspec.yaml
文件中添加flutter_sound_record
依赖:
dependencies:
flutter:
sdk: flutter
flutter_sound_record: ^x.y.z # 请替换为最新版本号
然后运行flutter pub get
来获取依赖。
接下来,你可以在你的Flutter应用中实现音频录制功能。以下是一个简单的示例:
import 'package:flutter/material.dart';
import 'package:flutter_sound_record/flutter_sound_record.dart';
import 'dart:io';
/// Entry point for the standalone recorder example.
void main() => runApp(MyApp());
/// Application root that shows the recorder screen.
class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) =>
      MaterialApp(home: AudioRecorderScreen());
}
/// Screen hosting a start/stop button and the last recorded file's path.
class AudioRecorderScreen extends StatefulWidget {
  @override
  State<AudioRecorderScreen> createState() => _AudioRecorderScreenState();
}
class _AudioRecorderScreenState extends State<AudioRecorderScreen> {
  final FlutterSoundRecord _flutterSoundRecord = FlutterSoundRecord();

  // Path of the last finished recording, shown in the UI once available.
  // FIX: the original declared this as a non-nullable `String` with no
  // initializer, which does not compile under null safety.
  String? _recordedFilePath;
  bool _isRecording = false;

  @override
  void initState() {
    super.initState();
    // Check permission up front (plugins typically also prompt the user
    // the first time the microphone is accessed).
    _requestPermissions();
  }

  /// Checks microphone permission.
  ///
  /// FIX: the original called `hasPermissions` / `requestPermissions`,
  /// which are not part of this plugin's API; `hasPermission()` is the
  /// method shown in the usage section and the first example.
  Future<void> _requestPermissions() async {
    final bool hasPermission = await _flutterSoundRecord.hasPermission();
    if (!hasPermission) {
      // Handle a denied permission here (e.g. show an explanatory dialog).
    }
  }

  /// Starts a new recording.
  ///
  /// FIX: the original used `startRecorder(toFile: ...)` (the flutter_sound
  /// API, not this plugin's) and flipped `_isRecording` before the recorder
  /// had actually started; we now use `start(path:)` and confirm the state
  /// with `isRecording()` afterwards.
  Future<void> _startRecording() async {
    // NOTE(review): in a real app, build an absolute path (e.g. via
    // path_provider); `.m4a` matches the default AAC encoder's container.
    await _flutterSoundRecord.start(
      path: 'audio_record_${DateTime.now().millisecondsSinceEpoch}.m4a',
    );
    final bool isRecording = await _flutterSoundRecord.isRecording();
    setState(() {
      _isRecording = isRecording;
    });
  }

  /// Stops the recording and remembers the produced file's path.
  ///
  /// FIX: the original called `stopRecorder()`; this plugin's `stop()`
  /// returns the recorded file's path (see the first example's `_stop`).
  Future<void> _stopRecording() async {
    final String? path = await _flutterSoundRecord.stop();
    setState(() {
      _isRecording = false;
      _recordedFilePath = path;
    });
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Text('Audio Recorder'),
      ),
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: <Widget>[
            ElevatedButton(
              onPressed: _isRecording ? _stopRecording : _startRecording,
              child: Text(_isRecording ? 'Stop Recording' : 'Start Recording'),
            ),
            if (_recordedFilePath != null)
              Text('Recorded File: $_recordedFilePath'),
          ],
        ),
      ),
    );
  }

  @override
  void dispose() {
    // Release the recorder's platform resources.
    _flutterSoundRecord.dispose();
    super.dispose();
  }
}
注意事项:
-
权限处理:在实际应用中,你需要根据平台(Android和iOS)分别处理权限请求。上面的代码仅包含了一个简化的权限请求示例。
-
文件路径:
startRecorder
方法中的toFile
参数指定了录音文件的保存路径和名称。你可以根据需要调整这个路径和文件名。
-
错误处理:在实际应用中,你应该添加错误处理逻辑来处理可能发生的异常,例如权限请求失败、录音失败等。
-
资源释放:在
dispose
方法中释放FlutterSoundRecord
资源,以避免内存泄漏。
这个示例代码展示了如何使用flutter_sound_record
插件进行音频录制,并提供了基本的UI控件来启动和停止录音。你可以根据实际需求进一步扩展和定制这个示例。