Using the Flutter audio capture plugin better_audio_capture

Posted 1 week ago by eggper in Flutter

Installation

  1. Add the dependency to your pubspec.yaml file:
dependencies:
  better_audio_capture: ^0.1.1
  2. Run the install command:
$ flutter packages get

Basic usage

Use the better_audio_capture plugin in a Flutter app to capture audio. The steps and code samples are shown below:

CupertinoButton(
  child: Text("开始录制"),
  onPressed: () async {
    bytesBuilder.clear();

    // Request and configure the audio session
    final session = await AudioSession.instance;
    await session.configure(AudioSessionConfiguration(
      avAudioSessionCategory: AVAudioSessionCategory.record,
      avAudioSessionMode: AVAudioSessionMode.measurement,
      avAudioSessionRouteSharingPolicy: AVAudioSessionRouteSharingPolicy.defaultPolicy,
      avAudioSessionSetActiveOptions: AVAudioSessionSetActiveOptions.none,
    ));

    if (await session.setActive(true)) {
      subscription = betterAudioCapture?.pcmStream.listen((event) {
        bytesBuilder.add(event);
        print("正在录制");
      });

      betterAudioCapture?.init(sampleRate: 16000, channelCount: 1);
      betterAudioCapture?.startCapture();
    }
  },
),

CupertinoButton(
  child: Text("停止录制"),
  onPressed: () async {
    subscription?.cancel();
    betterAudioCapture?.stopCapture();
    betterAudioCapture?.dispose();

    Directory tempDir = await getTemporaryDirectory();
    File waveFile = File(tempDir.path + "/record.wav");
    if (waveFile.existsSync()) {
      waveFile.deleteSync();
    }

    IOSink waveFileSink = waveFile.openWrite();
    waveFileSink.add(BetterAudioCapture.waveHeader(bytesBuilder.length));
    waveFileSink.add(bytesBuilder.takeBytes());
    await waveFileSink.close();
  },
),

Features

  • ✅ Capture audio from the microphone.
  • ✅ Set the sample rate of the audio stream.
  • ✅ Set the channel count of the audio stream (see the short sketch after this list).
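
A minimal sketch tying these options together (the 16 kHz / mono values are arbitrary choices for speech-style capture; the init, pcmStream, and startCapture calls follow the snippets above):

// Configure the capture stream, then start it.
final capture = BetterAudioCapture();
final subscription = capture.pcmStream.listen((chunk) {
  // Each event is a chunk of raw PCM bytes in the configured format.
});
capture.init(sampleRate: 16000, channelCount: 1); // sample rate and channel count
capture.startCapture();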

Complete example code

Below is a complete example showing how to record and play back audio with the better_audio_capture plugin.

import 'dart:async';
import 'dart:io';
import 'dart:typed_data';

import 'package:audio_session/audio_session.dart';
import 'package:better_audio_capture/better_audio_capture.dart';
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
import 'package:just_audio/just_audio.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';

void main() {
  runApp(MyApp());
}

class MyApp extends StatefulWidget {
  @override
  _MyAppState createState() => _MyAppState();
}

class _MyAppState extends State<MyApp> {
  PermissionStatus? permissionStatus;

  BetterAudioCapture? betterAudioCapture;

  StreamSubscription? subscription;

  BytesBuilder bytesBuilder = BytesBuilder();

  AudioPlayer audioPlayer = AudioPlayer();

  @override
  void initState() {
    super.initState();

    // Read the current microphone permission status when the app starts.
    Future.microtask(() async {
      PermissionStatus value = await Permission.microphone.status;
      setState(() {
        this.permissionStatus = value;
      });
    });
  }

  @override
  void dispose() {
    subscription?.cancel();
    betterAudioCapture?.dispose();
    audioPlayer.dispose();

    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: Scaffold(
        appBar: AppBar(
          title: const Text('Plugin example app'),
        ),
        body: Center(
          child: Column(
            mainAxisSize: MainAxisSize.min,
            children: [
              Offstage(
                offstage: permissionStatus == PermissionStatus.granted,
                child: CupertinoButton(
                  child: Text("请求麦克风权限"),
                  onPressed: () async {
                    final value = await Permission.microphone.request();
                    if (value == PermissionStatus.permanentlyDenied || value == PermissionStatus.restricted) {
                      openAppSettings();
                    }

                    setState(() {
                      permissionStatus = value;
                    });
                  },
                ),
              ),
              Offstage(
                offstage: permissionStatus != PermissionStatus.granted,
                child: CupertinoButton(
                  child: Text("开始录制"),
                  onPressed: () async {
                    bytesBuilder.clear();

                    if (betterAudioCapture != null) {
                      subscription?.cancel();
                      betterAudioCapture?.stopCapture();
                      betterAudioCapture?.dispose();
                    }

                    betterAudioCapture = BetterAudioCapture();
                    subscription = betterAudioCapture?.pcmStream.listen((event) {
                      bytesBuilder.add(event);
                      print("正在录制");
                    });

                    betterAudioCapture?.init();
                    betterAudioCapture?.startCapture();
                  },
                ),
              ),
              Offstage(
                offstage: permissionStatus != PermissionStatus.granted,
                child: CupertinoButton(
                  child: Text("停止录制"),
                  onPressed: () async {
                    if (betterAudioCapture == null) {
                      return;
                    }

                    subscription?.cancel();
                    betterAudioCapture?.stopCapture();
                    betterAudioCapture?.dispose();
                    betterAudioCapture = null;

                    Directory tempDir = await getTemporaryDirectory();
                    File waveFile = File(tempDir.path + "/record.wav");
                    if (waveFile.existsSync()) {
                      waveFile.deleteSync();
                    }

                    // Write the WAV file: prepend a header to the captured PCM bytes
                    if (bytesBuilder.length > 0) {
                      IOSink waveFileSink = waveFile.openWrite();
                      waveFileSink.add(BetterAudioCapture.waveHeader(bytesBuilder.length));
                      waveFileSink.add(bytesBuilder.takeBytes());
                      await waveFileSink.close();
                    }
                  },
                ),
              ),
              Offstage(
                offstage: permissionStatus != PermissionStatus.granted,
                child: CupertinoButton(
                  child: Text("播放录音"),
                  onPressed: () async {
                    Directory tempDir = await getTemporaryDirectory();
                    File waveFile = File(tempDir.path + "/record.wav");

                    if (waveFile.existsSync() && waveFile.lengthSync() > 0) {

                      // Request and configure the audio session for playback
                      final session = await AudioSession.instance;
                      await session.configure(AudioSessionConfiguration.speech());

                      if (await session.setActive(true)) {
                        audioPlayer = AudioPlayer();
                        await audioPlayer.setFilePath(waveFile.path);
                        audioPlayer.play();
                      }
                    }
                  },
                ),
              ),
            ],
          ),
        ),
      ),
    );
  }
}

More hands-on tutorials on using the Flutter audio capture plugin better_audio_capture are also available at https://www.itying.com/category-92-b0.html

1 Reply

Sure. Below is example code showing how to capture audio with the better_audio_capture plugin in a Flutter project. Note that, for real use, you need to make sure the better_audio_capture plugin is installed and configured correctly.

1. Install the better_audio_capture plugin

First, add the better_audio_capture dependency to your pubspec.yaml file:

dependencies:
  flutter:
    sdk: flutter
  better_audio_capture: ^latest_version # replace with the latest version number

Then run flutter pub get to fetch the dependencies.

2. Configure Android and iOS permissions

Make sure the required permissions are configured in AndroidManifest.xml and Info.plist. For example, in AndroidManifest.xml:

<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>

In Info.plist, you may need to add:

<key>NSMicrophoneUsageDescription</key>
<string>The app needs access to the microphone</string>

3. Capture audio with better_audio_capture

Below is a simple Flutter app that demonstrates how to capture audio with the better_audio_capture plugin:

import 'dart:async';
import 'dart:io';
import 'dart:typed_data';

import 'package:flutter/material.dart';
import 'package:better_audio_capture/better_audio_capture.dart';
import 'package:path_provider/path_provider.dart';

void main() {
  runApp(MyApp());
}

class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      title: 'Audio Capture Demo',
      theme: ThemeData(
        primarySwatch: Colors.blue,
      ),
      home: AudioCapturePage(),
    );
  }
}

class AudioCapturePage extends StatefulWidget {
  @override
  _AudioCapturePageState createState() => _AudioCapturePageState();
}

class _AudioCapturePageState extends State<AudioCapturePage> {
  BetterAudioCapture? _audioCapture;
  StreamSubscription? _subscription;
  final BytesBuilder _bytesBuilder = BytesBuilder();
  String? _audioPath;

  @override
  void initState() {
    super.initState();
    _initAudioCapture();
  }

  @override
  void dispose() {
    _subscription?.cancel();
    _audioCapture?.dispose();
    super.dispose();
  }

  void _initAudioCapture() {
    // Create a capture instance and configure the stream format
    // (16 kHz, mono PCM: the same settings used in the example above).
    _audioCapture = BetterAudioCapture();
    _audioCapture!.init(sampleRate: 16000, channelCount: 1);
  }

  Future<void> _startRecording() async {
    _bytesBuilder.clear();

    try {
      // Collect raw PCM chunks from the capture stream while recording.
      _subscription = _audioCapture!.pcmStream.listen((event) {
        _bytesBuilder.add(event);
      });
      _audioCapture!.startCapture();
      ScaffoldMessenger.of(context)
          .showSnackBar(SnackBar(content: Text('Recording started')));
    } catch (e) {
      print('Error starting capture: $e');
    }
  }

  Future<void> _stopRecording() async {
    try {
      await _subscription?.cancel();
      _audioCapture?.stopCapture();
      _audioCapture?.dispose();

      // Prepend a WAV header to the buffered PCM bytes and write the file.
      final Directory appDocDir = await getApplicationDocumentsDirectory();
      final String filePath = '${appDocDir.path}/audio_record.wav';
      final IOSink sink = File(filePath).openWrite();
      sink.add(BetterAudioCapture.waveHeader(_bytesBuilder.length));
      sink.add(_bytesBuilder.takeBytes());
      await sink.close();

      // Re-create the capture instance so another recording can start,
      // mirroring the re-create pattern used in the example above.
      _initAudioCapture();

      setState(() {
        _audioPath = filePath;
      });
      ScaffoldMessenger.of(context)
          .showSnackBar(SnackBar(content: Text('Recording stopped')));
    } catch (e) {
      print('Error stopping capture: $e');
    }
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Text('Audio Capture Example'),
      ),
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: <Widget>[
            ElevatedButton(
              onPressed: _startRecording,
              child: Text('Start Recording'),
            ),
            SizedBox(height: 20),
            ElevatedButton(
              onPressed: _stopRecording,
              child: Text('Stop Recording'),
            ),
            SizedBox(height: 20),
            if (_audioPath != null)
              Text('Recording file path: $_audioPath'),
          ],
        ),
      ),
    );
  }
}

Notes

  1. Initialize audio capture: create the BetterAudioCapture instance in initState and configure the sample rate and channel count via init.
  2. Start recording: tapping the "Start Recording" button calls _startRecording, which subscribes to the PCM stream and starts capturing.
  3. Stop recording: tapping the "Stop Recording" button calls _stopRecording, which stops capturing and writes the buffered PCM data to a WAV file.
  4. Show the recording path: the path of the recorded WAV file is displayed in the UI.

Make sure to handle permission requests in a real app, and add error handling and UI feedback where necessary for a better user experience.
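
For the permission handling mentioned above, a minimal sketch using the permission_handler package (the same package the full example at the top of this post uses) could look like the following; _startRecordingWithPermission is a hypothetical wrapper around the _startRecording method from the example above:

// Requires: import 'package:permission_handler/permission_handler.dart';
// Request the microphone permission before starting a recording.
Future<void> _startRecordingWithPermission() async {
  final status = await Permission.microphone.request();
  if (status == PermissionStatus.granted) {
    await _startRecording(); // permission granted: start capturing
  } else if (status == PermissionStatus.permanentlyDenied ||
      status == PermissionStatus.restricted) {
    // Access must be enabled manually in the system settings.
    await openAppSettings();
  }
}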
