Flutter音频处理插件ultravox_client的使用
Flutter客户端SDK用于Ultravox。
开始使用
在你的项目中添加插件:
flutter pub add ultravox_client
或者直接在pubspec.yaml文件中添加依赖:
dependencies:
ultravox_client: <version>
使用方法
以下是一个简单的示例代码:
final session = UltravoxSession.create();
await session.joinCall(joinUrl);
session.statusNotifier.addListener(myListener);
await session.leaveCall();
完整的示例可以查看示例应用。为了获取joinUrl,你需要将你的服务器与Ultravox REST API集成。
支持的平台
Ultravox客户端SDK支持所有Flutter平台:Android、iOS、Web、Linux、Windows和macOS。
示例应用
你可以查看示例应用的演示,地址为: https://fixie-ai.github.io/ultravox-client-sdk-flutter/
你也可以使用Ultravox REST API来获取joinUrl。
完整示例代码
以下是在示例应用中的完整代码:
import 'dart:async';
import 'dart:collection';
import 'dart:convert';
import 'package:flutter/material.dart';
import 'package:ultravox_client/ultravox_client.dart';
/// Application entry point: mounts the root widget.
void main() => runApp(const MyApp());
/// Root widget of the example app: configures theming and the home page.
class MyApp extends StatelessWidget {
  const MyApp({super.key});

  // Fixed: the annotation was garbled as a markdown link in the article.
  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      title: 'Ultravox Flutter Example',
      theme: ThemeData(
        // Material 3 color scheme seeded from a coral accent color.
        colorScheme: ColorScheme.fromSeed(
          seedColor: const Color.fromARGB(255, 255, 95, 109),
        ),
        useMaterial3: true,
      ),
      home: const MyHomePage(title: 'Ultravox Flutter Example Home Page'),
    );
  }
}
/// Stateful home screen hosting the Ultravox call UI.
class MyHomePage extends StatefulWidget {
  const MyHomePage({super.key, required this.title});

  /// Title shown in the app bar.
  final String title;

  // Fixed: the annotation was garbled as a markdown link in the article.
  @override
  State<MyHomePage> createState() => _MyHomePageState();
}
/// State for [MyHomePage]: owns the Ultravox session lifecycle and
/// switches the UI between "enter URL", "connecting", and "in call".
class _MyHomePageState extends State<MyHomePage> {
  /// Active call session, or null when no call is in progress.
  UltravoxSession? _session;

  /// Whether debug ("experimental") messages are requested for new sessions.
  bool _debug = false;

  /// Ordinals of transcript data messages, in the order they arrived
  /// (debug aid; shown only when [_debug] is on).
  final LinkedHashSet<int> _transcriptArrivalOrder = LinkedHashSet<int>();

  /// Mirrors `_session.status.live`; drives the connecting/connected UI.
  bool _connected = false;

  @override
  void dispose() {
    if (_session != null) {
      _session!.statusNotifier.removeListener(_onStatusChange);
      _session!.dataMessageNotifier.removeListener(_onDataMessage);
      // Fire-and-forget: the widget is going away, so there is nothing to
      // await; unawaited() documents that this is intentional.
      unawaited(_session!.leaveCall());
    }
    super.dispose();
  }

  /// Refreshes the UI when the session connects or disconnects.
  void _onStatusChange() {
    if (_session?.status.live != _connected) {
      setState(() {
        _connected = _session?.status.live ?? false;
      });
    }
  }

  /// Records the arrival order of transcript data messages (debug aid).
  void _onDataMessage() {
    final message = _session!.lastDataMessage;
    if (message["type"] == "transcript" && message.containsKey("ordinal")) {
      final ordinal = message["ordinal"] as int;
      if (!_transcriptArrivalOrder.contains(ordinal)) {
        setState(() {
          _transcriptArrivalOrder.add(ordinal);
        });
      }
    }
  }

  /// Creates a session, wires up listeners and the client tool, then joins
  /// the call at [joinUrl]. No-op if a session already exists.
  Future<void> _startCall(String joinUrl) async {
    if (_session != null) {
      return; // Already in a call.
    }
    setState(() {
      _session =
          UltravoxSession.create(experimentalMessages: _debug ? {"debug"} : {});
    });
    _session!.statusNotifier.addListener(_onStatusChange);
    _session!.dataMessageNotifier.addListener(_onDataMessage);
    _session!.registerToolImplementation("getSecretMenu", _getSecretMenu);
    await _session!.joinCall(joinUrl, clientVersion: "UltravoxExampleApp");
  }

  /// Client tool invoked by the agent; returns a canned "secret menu"
  /// serialized as JSON.
  ClientToolResult _getSecretMenu(Object params) {
    return ClientToolResult(json.encode({
      "date": DateTime.now().toIso8601String(),
      "specialItems": [
        {
          "name": "Banana smoothie",
          "price": 3.99,
        },
        {
          "name": "Butter pecan ice cream (one scoop)",
          "price": 1.99,
        }
      ],
    }));
  }

  /// Tears down listeners, leaves the call, and resets the UI state.
  Future<void> _endCall() async {
    _transcriptArrivalOrder.clear();
    if (_session == null) {
      return;
    }
    _session!.statusNotifier.removeListener(_onStatusChange);
    _session!.dataMessageNotifier.removeListener(_onDataMessage);
    await _session!.leaveCall();
    setState(() {
      _session = null;
    });
  }

  @override
  Widget build(BuildContext context) {
    final mainBodyChildren = <Widget>[];
    if (_session == null) {
      // No call yet: join-URL field, debug toggle, and a start button.
      // NOTE(review): the controller is recreated on every build; fine for a
      // demo, but a real app would keep it in state and dispose it.
      final textController = TextEditingController();
      final textInput = TextField(
        decoration: const InputDecoration(
          border: OutlineInputBorder(),
          labelText: 'Join URL',
        ),
        controller: textController,
      );
      mainBodyChildren.add(Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            textInput,
            const SizedBox(height: 20, width: 20),
            Row(
              mainAxisAlignment: MainAxisAlignment.start,
              children: [
                const Text.rich(TextSpan(
                    text: 'Debug',
                    style: TextStyle(fontWeight: FontWeight.bold))),
                Switch(
                  value: _debug,
                  onChanged: (value) => setState(() => _debug = value),
                ),
                const Spacer(),
                ElevatedButton.icon(
                  icon: const Icon(Icons.call),
                  onPressed: () => _startCall(textController.text),
                  label: const Text('Start Call'),
                ),
              ],
            )
          ],
        ),
      ));
    } else if (!_connected) {
      // Session created but not yet live: show a spinner.
      mainBodyChildren.add(const Center(
          child: Column(
        mainAxisAlignment: MainAxisAlignment.center,
        children: <Widget>[
          CircularProgressIndicator(),
          Text('Connecting...'),
        ],
      )));
    } else {
      // Live call: transcript list, text input, and mute/end controls.
      mainBodyChildren.add(
        Container(
            constraints: const BoxConstraints(maxHeight: 200),
            child: ListenableBuilder(
                listenable: _session!.transcriptsNotifier,
                builder: (BuildContext context, Widget? child) {
                  return ListView(
                      reverse: true, // Fill from the bottom; clip at the top.
                      children: [
                        for (final transcript in _session!.transcripts.reversed)
                          TranscriptWidget(transcript: transcript),
                      ]);
                })),
      );
      final textController = TextEditingController();
      final textInput = TextField(
        decoration: const InputDecoration(
          border: OutlineInputBorder(),
        ),
        controller: textController,
      );
      mainBodyChildren.add(Row(
        mainAxisAlignment: MainAxisAlignment.start,
        children: [
          Expanded(child: textInput),
          ElevatedButton.icon(
            icon: const Icon(Icons.send),
            onPressed: () {
              _session!.sendText(textController.text);
              textController.clear();
            },
            label: const Text('Send'),
          ),
        ],
      ));
      mainBodyChildren.add(const SizedBox(height: 20));
      mainBodyChildren.add(Row(
        mainAxisAlignment: MainAxisAlignment.start,
        children: [
          // Mic mute toggle: rebuilds only when the mute state changes.
          ListenableBuilder(
              listenable: _session!.micMutedNotifier,
              builder: (BuildContext context, Widget? child) {
                return ElevatedButton.icon(
                  icon: _session!.micMuted
                      ? const Icon(Icons.mic_off)
                      : const Icon(Icons.mic),
                  onPressed: () {
                    _session!.micMuted = !_session!.micMuted;
                  },
                  label: _session!.micMuted
                      ? const Text('Unmute')
                      : const Text('Mute'),
                );
              }),
          // Speaker (agent audio) mute toggle.
          ListenableBuilder(
              listenable: _session!.speakerMutedNotifier,
              builder: (BuildContext context, Widget? child) {
                return ElevatedButton.icon(
                  icon: _session!.speakerMuted
                      ? const Icon(Icons.volume_off)
                      : const Icon(Icons.volume_up),
                  onPressed: () {
                    _session!.speakerMuted = !_session!.speakerMuted;
                  },
                  label: _session!.speakerMuted
                      ? const Text('Unmute Agent')
                      : const Text('Mute Agent'),
                );
              }),
          ElevatedButton.icon(
            icon: const Icon(Icons.call_end),
            onPressed: _endCall,
            label: const Text('End Call'),
          ),
        ],
      ));
      if (_debug) {
        // Debug panel: last experimental message plus transcript ordinals.
        mainBodyChildren.add(const SizedBox(height: 20));
        mainBodyChildren.add(const Text.rich(TextSpan(
            text: 'Last Debug Message:',
            style: TextStyle(fontWeight: FontWeight.w700))));
        mainBodyChildren.add(ListenableBuilder(
          listenable: _session!.experimentalMessageNotifier,
          builder: (BuildContext context, Widget? child) {
            final message = _session!.lastExperimentalMessage;
            if (message.containsKey("type") && message["type"] == "debug") {
              return DebugMessageWidget(message: message);
            } else {
              return const SizedBox(height: 20);
            }
          },
        ));
        mainBodyChildren.add(const SizedBox(height: 10));
        mainBodyChildren.add(const Text.rich(TextSpan(
            text: 'Transcript Arrival Order:',
            style: TextStyle(fontWeight: FontWeight.w700))));
        mainBodyChildren.add(Text(_transcriptArrivalOrder.join(", ")));
      }
    }
    return Scaffold(
      appBar: AppBar(
        backgroundColor: Theme.of(context).colorScheme.inversePrimary,
        title: Text(widget.title),
      ),
      body: Center(
        child: Row(
          children: [
            // Empty side columns give the 2:6:2 centered layout.
            const Expanded(flex: 2, child: Column()),
            Expanded(
              flex: 6,
              child: Column(
                mainAxisAlignment: MainAxisAlignment.center,
                children: mainBodyChildren,
              ),
            ),
            const Expanded(flex: 2, child: Column()),
          ],
        ),
      ),
    );
  }
}
/// Renders a single transcript line, prefixed with the speaker name in bold.
class TranscriptWidget extends StatelessWidget {
  const TranscriptWidget({super.key, required this.transcript});

  /// The transcript entry to display.
  final Transcript transcript;

  // Fixed: the annotation was garbled as a markdown link in the article.
  @override
  Widget build(BuildContext context) {
    return RichText(
        text:
            TextSpan(style: Theme.of(context).textTheme.bodyMedium, children: [
      TextSpan(
        text: transcript.speaker == Role.user ? 'You: ' : 'Agent: ',
        style: const TextStyle(fontWeight: FontWeight.bold),
      ),
      TextSpan(text: transcript.text),
    ]));
  }
}
/// Renders a debug message map as bolded "key: value" lines.
class DebugMessageWidget extends StatelessWidget {
  const DebugMessageWidget({super.key, required this.message});

  /// The raw debug message to display, one line per entry.
  final Map<String, dynamic> message;

  // Fixed: the annotation was garbled as a markdown link in the article.
  @override
  Widget build(BuildContext context) {
    List<InlineSpan> children = [];
    for (final entry in message.entries) {
      children.add(TextSpan(
        text: '${entry.key}: ',
        style: const TextStyle(fontWeight: FontWeight.bold),
      ));
      children.add(TextSpan(text: '${entry.value}\n'));
    }
    return RichText(
      text: TextSpan(
        style: Theme.of(context).textTheme.bodySmall,
        children: children,
      ),
    );
  }
}
更多关于Flutter音频处理插件ultravox_client的使用的实战系列教程也可以访问 https://www.itying.com/category-92-b0.html
当然,以下是如何在Flutter应用中使用ultravox_client插件进行音频处理的示例代码。请注意,你需要先确保已经在pubspec.yaml文件中添加了ultravox_client依赖,并运行flutter pub get来安装它。
pubspec.yaml
dependencies:
flutter:
sdk: flutter
ultravox_client: ^最新版本号 # 请替换为实际的最新版本号
main.dart
import 'package:flutter/material.dart';
import 'package:ultravox_client/ultravox_client.dart';
/// Application entry point: mounts the root widget.
void main() => runApp(MyApp());
/// Root widget of the demo app.
class MyApp extends StatefulWidget {
  // Added a const constructor with a key — idiomatic for Flutter widgets and
  // backward compatible with existing `MyApp()` call sites.
  const MyApp({super.key});

  @override
  _MyAppState createState() => _MyAppState();
}
/// State for [MyApp]: owns the audio client and drives the status line.
class _MyAppState extends State<MyApp> {
  // Client instance; created in initState and released in dispose.
  late UltravoxClient _ultravoxClient;

  // Human-readable status string shown in the UI.
  String _status = "未初始化";

  @override
  void initState() {
    super.initState();
    // Initialize the client and reflect the outcome in the status line.
    _ultravoxClient = UltravoxClient();
    _ultravoxClient.initialize().then((value) {
      setState(() {
        _status = "已初始化";
      });
    }).catchError((error) {
      setState(() {
        _status = "初始化失败: $error";
      });
    });
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: Scaffold(
        appBar: AppBar(
          title: const Text('UltravoxClient Demo'),
        ),
        body: Padding(
          padding: const EdgeInsets.all(16.0),
          child: Column(
            mainAxisAlignment: MainAxisAlignment.center,
            children: <Widget>[
              Text('状态: $_status'),
              const SizedBox(height: 20),
              ElevatedButton(
                onPressed: _startRecording,
                child: const Text('开始录音'),
              ),
              const SizedBox(height: 10),
              ElevatedButton(
                onPressed: _stopRecording,
                child: const Text('停止录音'),
              ),
              const SizedBox(height: 20),
              ElevatedButton(
                onPressed: _playAudio,
                child: const Text('播放音频'),
              ),
            ],
          ),
        ),
      ),
    );
  }

  /// Starts recording to a fixed path and updates the status line.
  ///
  /// Fixed: returns `Future<void>` instead of `async void` so errors
  /// propagate to awaiting callers rather than vanishing.
  Future<void> _startRecording() async {
    try {
      await _ultravoxClient.startRecording(localPath: "path/to/save/audio.wav");
      setState(() {
        _status = "录音中...";
      });
    } catch (error) {
      setState(() {
        _status = "录音失败: $error";
      });
    }
  }

  /// Stops the active recording and updates the status line.
  Future<void> _stopRecording() async {
    try {
      await _ultravoxClient.stopRecording();
      setState(() {
        _status = "录音已停止";
      });
    } catch (error) {
      setState(() {
        _status = "停止录音失败: $error";
      });
    }
  }

  /// Plays back the previously recorded file and updates the status line.
  Future<void> _playAudio() async {
    try {
      await _ultravoxClient.playAudio(filePath: "path/to/save/audio.wav");
      setState(() {
        _status = "播放中...";
      });
    } catch (error) {
      setState(() {
        _status = "播放失败: $error";
      });
    }
  }

  @override
  void dispose() {
    // Release native resources held by the client.
    _ultravoxClient.dispose();
    super.dispose();
  }
}
注意事项
- 路径处理:在调用startRecording和playAudio方法时,确保提供正确的文件路径。路径可以是应用沙盒内的路径,也可以是设备存储上的路径,具体取决于你的需求和权限设置。
- 权限处理:在实际应用中,录音和播放音频通常需要访问设备的麦克风和存储权限。你需要在AndroidManifest.xml和Info.plist中添加相应的权限声明,并在运行时请求这些权限。
- 错误处理:上述代码已经包含了一些基本的错误处理,但在实际应用中,你可能需要更详细的错误处理逻辑,以提供更好的用户体验。
- 插件版本:确保你使用的ultravox_client插件版本是最新的,并且与你的Flutter SDK版本兼容。
这段代码提供了一个基本的框架,展示了如何使用ultravox_client插件进行音频录制和播放。你可以根据自己的需求进一步扩展和定制这些功能。