Flutter WebRTC通信插件flutter_webrtc_plus的使用
概述
flutter_webrtc_plus
是一个用于 Flutter 应用程序的 WebRTC 插件。它支持移动设备、桌面和 Web 平台,并且实现了诸如美颜滤镜和虚拟背景等高级功能。
功能
特性 | Android | iOS | Web | macOS | Windows | Linux | 嵌入式 | Fuchsia |
---|---|---|---|---|---|---|---|---|
音频/视频 | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | |
数据通道 | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | |
屏幕共享 | ✔️ | ✔️(*) | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | |
统一计划 | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | |
多流传输 | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | |
媒体录制 | ⚠️ | ⚠️ | ✔️ | | | | | |
端到端加密 | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | |
插入流 | | | | | | | | |
添加依赖
在你的 pubspec.yaml
文件中添加 flutter_webrtc_plus
作为依赖:
dependencies:
flutter_webrtc_plus: ^版本号
iOS 设置
在 Info.plist
文件中添加以下条目以允许访问摄像头和麦克风:
<key>NSCameraUsageDescription</key>
<string>$(PRODUCT_NAME) Camera Usage!</string>
<key>NSMicrophoneUsageDescription</key>
<string>$(PRODUCT_NAME) Microphone Usage!</string>
iOS 注意事项
在搭载 Apple Silicon(M1)芯片的 Mac 上为 iOS 构建时,需要在 ios/Podfile
中添加以下配置:
post_install do |installer|
installer.pods_project.targets.each do |target|
flutter_additional_ios_build_settings(target)
target.build_configurations.each do |config|
config.build_settings['ONLY_ACTIVE_ARCH'] = 'YES'
end
end
end
Android 设置
确保在 AndroidManifest.xml
文件中包含以下权限:
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
如果需要使用蓝牙设备,还需要添加以下权限:
<uses-permission android:name="android.permission.BLUETOOTH" android:maxSdkVersion="30" />
<uses-permission android:name="android.permission.BLUETOOTH_ADMIN" android:maxSdkVersion="30" />
为了兼容性,将 build.gradle
的 compileOptions
设置为 Java 8:
android {
//...
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
示例代码
以下是使用 flutter_webrtc_plus
实现的完整示例代码。
主要文件结构
main.dart
activity_main.xml
MainActivity.kt
FlutterViewEngine.kt
main.dart
import 'dart:core';
import 'package:flutter/foundation.dart' show debugDefaultTargetPlatformOverride;
import 'package:flutter/material.dart';
import 'package:flutter_background/flutter_background.dart';
import 'package:flutter_webrtc_example/src/capture_frame_sample.dart';
import 'package:flutter_webrtc_plus/flutter_webrtc_plus.dart';
import 'src/device_enumeration_sample.dart';
import 'src/get_display_media_sample.dart';
import 'src/get_user_media_sample.dart'
if (dart.library.html) 'src/get_user_media_sample_web.dart';
import 'src/loopback_data_channel_sample.dart';
import 'src/loopback_sample_unified_tracks.dart';
import 'src/route_item.dart';
void main() {
WidgetsFlutterBinding.ensureInitialized();
if (WebRTC.platformIsDesktop) {
debugDefaultTargetPlatformOverride = TargetPlatform.fuchsia;
} else if (WebRTC.platformIsAndroid) {
//startForegroundService();
}
runApp(MyApp());
}
Future<bool> startForegroundService() async {
final androidConfig = FlutterBackgroundAndroidConfig(
notificationTitle: 'Title of the notification',
notificationText: 'Text of the notification',
notificationImportance: AndroidNotificationImportance.normal,
notificationIcon: AndroidResource(
name: 'background_icon',
defType: 'drawable'), // Default is ic_launcher from folder mipmap
);
await FlutterBackground.initialize(androidConfig: androidConfig);
return FlutterBackground.enableBackgroundExecution();
}
class MyApp extends StatefulWidget {
@override
_MyAppState createState() => _MyAppState();
}
class _MyAppState extends State<MyApp> {
late List<RouteItem> items;
@override
void initState() {
super.initState();
_initItems();
}
ListBody _buildRow(context, item) {
return ListBody(children: <Widget>[
ListTile(
title: Text(item.title),
onTap: () => item.push(context),
trailing: Icon(Icons.arrow_right),
),
Divider()
]);
}
@override
Widget build(BuildContext context) {
return MaterialApp(
debugShowCheckedModeBanner: false,
home: Scaffold(
appBar: AppBar(
title: Text('Flutter-WebRTC example'),
),
body: ListView.builder(
shrinkWrap: true,
padding: const EdgeInsets.all(0.0),
itemCount: items.length,
itemBuilder: (context, i) {
return _buildRow(context, items[i]);
})),
);
}
void _initItems() {
items = <RouteItem>[
RouteItem(
title: 'GetUserMedia',
push: (BuildContext context) {
Navigator.push(
context,
MaterialPageRoute(
builder: (BuildContext context) => GetUserMediaSample()));
}),
RouteItem(
title: 'Device Enumeration',
push: (BuildContext context) {
Navigator.push(
context,
MaterialPageRoute(
builder: (BuildContext context) => DeviceEnumerationSample()));
}),
RouteItem(
title: 'GetDisplayMedia',
push: (BuildContext context) {
Navigator.push(
context,
MaterialPageRoute(
builder: (BuildContext context) => GetDisplayMediaSample()));
}),
RouteItem(
title: 'LoopBack Sample (Unified Tracks)',
push: (BuildContext context) {
Navigator.push(
context,
MaterialPageRoute(
builder: (BuildContext context) => LoopBackSampleUnifiedTracks()));
}),
RouteItem(
title: 'DataChannelLoopBackSample',
push: (BuildContext context) {
Navigator.push(
context,
MaterialPageRoute(
builder: (BuildContext context) => DataChannelLoopBackSample()));
}),
RouteItem(
title: 'Capture Frame',
push: (BuildContext context) {
Navigator.push(
context,
MaterialPageRoute(
builder: (BuildContext context) => CaptureFrameSample()));
}),
];
}
}
activity_main.xml
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<io.flutter.embedding.android.FlutterView
android:id="@+id/flutterView"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:focusable="true"
android:focusableInTouchMode="true"/>
<!-- GPUPixelView -->
<com.pixpark.gpupixel.GPUPixelView
android:id="@+id/surfaceView"
android:layout_width="match_parent"
android:layout_height="40dp"
tools:layout_editor_absoluteX="-183dp"
tools:layout_editor_absoluteY="0dp" />
</RelativeLayout>
MainActivity.kt
package com.waterbus.wanted
import android.content.Intent
import android.os.Bundle
import androidx.appcompat.app.AppCompatActivity
import com.example.waterbus.FlutterViewEngine
import com.pixpark.gpupixel.GPUPixel
import com.waterbus.wanted.databinding.ActivityMainBinding
import io.flutter.embedding.engine.FlutterEngine
import io.flutter.embedding.engine.dart.DartExecutor
class MainActivity: AppCompatActivity() {
private lateinit var binding: ActivityMainBinding
private lateinit var flutterViewEngine: FlutterViewEngine
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
binding = ActivityMainBinding.inflate(layoutInflater)
setContentView(binding.root)
// TODO: create a multi-engine version after
// https://github.com/flutter/flutter/issues/72009 is built.
val engine = FlutterEngine(applicationContext)
engine.dartExecutor.executeDartEntrypoint(
DartExecutor.DartEntrypoint.createDefault()
);
flutterViewEngine = FlutterViewEngine(engine)
// The activity and FlutterView have different lifecycles.
// Attach the activity right away but only start rendering when the
// view is also scrolled into the screen.
flutterViewEngine.attachToActivity(this)
val flutterView = binding.flutterView
// Attach FlutterEngine to FlutterView
flutterView.attachToFlutterEngine(engine)
flutterViewEngine.attachFlutterView(flutterView)
// Config for Beauty Filters feature
GPUPixel.setContext(applicationContext)
FlutterRTCBeautyFilters.initialize()
}
override fun onDestroy() {
super.onDestroy()
flutterViewEngine.detachActivity()
}
override fun onRequestPermissionsResult(
requestCode: Int,
permissions: Array<out String>,
grantResults: IntArray
) {
flutterViewEngine.onRequestPermissionsResult(requestCode, permissions, grantResults)
super.onRequestPermissionsResult(requestCode, permissions, grantResults)
}
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
flutterViewEngine.onActivityResult(requestCode, resultCode, data)
super.onActivityResult(requestCode, resultCode, data)
}
override fun onUserLeaveHint() {
flutterViewEngine.onUserLeaveHint()
super.onUserLeaveHint()
}
}
FlutterViewEngine.kt
package com.example.waterbus
import android.app.Activity
import android.content.Intent
import androidx.activity.ComponentActivity
import androidx.lifecycle.Lifecycle
import androidx.lifecycle.LifecycleObserver
import androidx.lifecycle.OnLifecycleEvent
import cl.puntito.simple_pip_mode.PipCallbackHelper
import io.flutter.embedding.android.ExclusiveAppComponent
import io.flutter.embedding.android.FlutterView
import io.flutter.embedding.engine.FlutterEngine
import io.flutter.plugin.platform.PlatformPlugin
/**
* This is an application-specific wrapper class that exists to expose the intersection of an
* application's active activity and an application's visible view to a [FlutterEngine] for
* rendering.
*
* Omitted features from the [io.flutter.embedding.android.FlutterActivity] include:
* * **State restoration**. If you're integrating at the view level, you should handle activity
* state restoration yourself.
* * **Engine creations**. At this level of granularity, you must make an engine and attach.
* and all engine features like initial route etc must be configured on the engine yourself.
* * **Splash screens**. You must implement it yourself. Read from
* `addOnFirstFrameRenderedListener` as needed.
* * **Transparency, surface/texture**. These are just [FlutterView] level APIs. Set them on the
* [FlutterView] directly.
* * **Intents**. This doesn't do any translation of intents into actions in the [FlutterEngine].
* you must do them yourself.
* * **Back buttons**. You must decide whether to send it to Flutter via
* [FlutterEngine.getNavigationChannel.popRoute()], or consume it natively. Though that
* decision may be difficult due to https://github.com/flutter/flutter/issues/67011.
* * **Low memory signals**. You're strongly encouraged to pass the low memory signals (such
* as from the host `Activity`'s `onTrimMemory` callbacks) to the [FlutterEngine] to let
* Flutter and the Dart VM cull its own memory usage.
*
* Your own [FlutterView] integrating application may need a similar wrapper but you must decide on
* what the appropriate intersection between the [FlutterView], the [FlutterEngine] and your
* `Activity` should be for your own application.
*/
class FlutterViewEngine(val engine: FlutterEngine) : LifecycleObserver, ExclusiveAppComponent<Activity>{
private var callbackHelper = PipCallbackHelper()
private var flutterView: FlutterView? = null
private var activity: ComponentActivity? = null
private var platformPlugin: PlatformPlugin? = null
init {
callbackHelper.configureFlutterEngine(engine)
}
/**
* This is the intersection of an available activity and of a visible [FlutterView]. This is
* where Flutter would start rendering.
*/
private fun hookActivityAndView() {
// Assert state.
activity!!.let { activity ->
flutterView!!.let { flutterView ->
platformPlugin = PlatformPlugin(activity, engine.platformChannel)
engine.activityControlSurface.attachToActivity(this, activity.lifecycle)
flutterView.attachToFlutterEngine(engine)
activity.lifecycle.addObserver(this)
activity.addOnPictureInPictureModeChangedListener {
callbackHelper.onPictureInPictureModeChanged(it.isInPictureInPictureMode)
}
}
}
}
/**
* Lost the intersection of either an available activity or a visible
* [FlutterView].
*/
private fun unhookActivityAndView() {
// Stop reacting to activity events.
activity!!.lifecycle.removeObserver(this)
// Plugins are no longer attached to an activity.
engine.activityControlSurface.detachFromActivity()
// Release Flutter's control of UI such as system chrome.
platformPlugin!!.destroy()
platformPlugin = null
// Set Flutter's application state to detached.
engine.lifecycleChannel.appIsDetached();
// Detach rendering pipeline.
flutterView!!.detachFromFlutterEngine()
}
/**
* Signal that a host `Activity` is now ready. If there is no [FlutterView] instance currently
* attached to the view hierarchy and visible, Flutter is not yet rendering.
*
* You can also choose at this point whether to notify the plugins that an `Activity` is
* attached or not. You can also choose at this point whether to connect a Flutter
* [PlatformPlugin] at this point which allows your Dart program to trigger things like
* haptic feedback and read the clipboard. This sample arbitrarily chooses no for both.
*/
fun attachToActivity(activity: ComponentActivity) {
this.activity = activity
if (flutterView != null) {
hookActivityAndView()
}
}
/**
* Signal that a host `Activity` now no longer connected. If there were a [FlutterView] in
* the view hierarchy and visible at this moment, that [FlutterView] will stop rendering.
*
* You can also choose at this point whether to notify the plugins that an `Activity` is
* no longer attached or not. You can also choose at this point whether to disconnect Flutter's
* [PlatformPlugin] at this point which stops your Dart program being able to trigger things
* like haptic feedback and read the clipboard. This sample arbitrarily chooses yes for both.
*/
fun detachActivity() {
if (flutterView != null) {
unhookActivityAndView()
}
activity = null
}
/**
* Signal that a [FlutterView] instance is created and attached to a visible Android view
* hierarchy.
*
* If an `Activity` was also previously provided, this puts Flutter into the rendering state
* for this [FlutterView]. This also connects this wrapper class to listen to the `Activity`'s
* lifecycle to pause rendering when the activity is put into the background while the
* view is still attached to the view hierarchy.
*/
fun attachFlutterView(flutterView: FlutterView) {
this.flutterView = flutterView
if (activity != null) {
hookActivityAndView()
}
}
/**
* Signal that the attached [FlutterView] instance destroyed or no longer attached to a visible
* Android view hierarchy.
*
* If an `Activity` was attached, this stops Flutter from rendering. It also makes this wrapper
* class stop listening to the `Activity`'s lifecycle since it's no longer rendering.
*/
fun detachFlutterView() {
unhookActivityAndView()
flutterView = null
}
/**
* Callback to let Flutter respond to the `Activity`'s resumed lifecycle event while both an
* `Activity` and a [FlutterView] are attached.
*/
@OnLifecycleEvent(Lifecycle.Event.ON_RESUME)
private fun resumeActivity() {
if (activity != null) {
engine.lifecycleChannel.appIsResumed()
}
platformPlugin?.updateSystemUiOverlays()
}
/**
* Callback to let Flutter respond to the `Activity`'s paused lifecycle event while both an
* `Activity` and a [FlutterView] are attached.
*/
@OnLifecycleEvent(Lifecycle.Event.ON_PAUSE)
private fun pauseActivity() {
if (activity != null) {
engine.lifecycleChannel.appIsInactive()
}
}
/**
* Callback to let Flutter respond to the `Activity`'s stopped lifecycle event while both an
* `Activity` and a [FlutterView] are attached.
*/
@OnLifecycleEvent(Lifecycle.Event.ON_STOP)
private fun stopActivity() {
if (activity != null) {
engine.lifecycleChannel.appIsPaused()
}
}
// These events aren't used but would be needed for Flutter plugins consuming
// these events to function.
/**
* Pass through the `Activity`'s `onRequestPermissionsResult` signal to plugins that may be
* listening to it while the `Activity` and the [FlutterView] are connected.
*/
fun onRequestPermissionsResult(
requestCode: Int,
permissions: Array<out String>,
grantResults: IntArray
) {
if (activity != null && flutterView != null) {
engine
.activityControlSurface
.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
/**
* Pass through the `Activity`'s `onActivityResult` signal to plugins that may be
* listening to it while the `Activity` and the [FlutterView] are connected.
*/
fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
if (activity != null && flutterView != null) {
engine.activityControlSurface.onActivityResult(requestCode, resultCode, data);
}
}
/**
* Pass through the `Activity`'s `onUserLeaveHint` signal to plugins that may be
* listening to it while the `Activity` and the [FlutterView] are connected.
*/
fun onUserLeaveHint() {
if (activity != null && flutterView != null) {
engine.activityControlSurface.onUserLeaveHint();
}
}
/**
* Called when another App Component is about to become attached to the [ ] this App Component
* is currently attached to.
*
*
* This App Component's connections to the [io.flutter.embedding.engine.FlutterEngine]
* are still valid at the moment of this call.
*/
override fun detachFromFlutterEngine() {
// Do nothing here
}
/**
* Retrieve the App Component behind this exclusive App Component.
*
* @return The app component.
*/
override fun getAppComponent(): Activity {
return activity!!;
}
}
编译发布 APK
当你编译发布 APK 时,需要设置 ProGuard 规则:
# Setup Proguard Rules
更多关于Flutter WebRTC通信插件flutter_webrtc_plus的使用的实战系列教程也可以访问 https://www.itying.com/category-92-b0.html
flutter_webrtc_plus
是一个基于 flutter_webrtc
的扩展插件,用于在 Flutter 应用中实现 WebRTC 通信。WebRTC(Web Real-Time Communication)是一种支持浏览器之间实时音视频通信的开放标准。使用 flutter_webrtc_plus
,你可以在 Flutter 应用中轻松实现音视频通话、屏幕共享等功能。
以下是使用 flutter_webrtc_plus
实现 WebRTC 通信的基本步骤:
1. 添加依赖
首先,在 pubspec.yaml
文件中添加 flutter_webrtc_plus
依赖:
dependencies:
flutter:
sdk: flutter
flutter_webrtc_plus: ^0.9.0
然后运行 flutter pub get
来安装依赖。
2. 初始化 WebRTC
在使用 WebRTC 之前,需要初始化相关的配置。通常,你需要在应用的启动阶段初始化 WebRTC:
import 'package:flutter_webrtc_plus/flutter_webrtc_plus.dart';
void main() async {
WidgetsFlutterBinding.ensureInitialized();
await WebRTC.initialize();
runApp(MyApp());
}
3. 创建本地和远程视频视图
在应用中显示本地和远程视频流,你需要使用 RTCVideoView
组件:
import 'package:flutter/material.dart';
import 'package:flutter_webrtc_plus/flutter_webrtc_plus.dart';
class VideoCallScreen extends StatefulWidget {
@override
_VideoCallScreenState createState() => _VideoCallScreenState();
}
class _VideoCallScreenState extends State<VideoCallScreen> {
final _localRenderer = RTCVideoRenderer();
final _remoteRenderer = RTCVideoRenderer();
@override
void initState() {
super.initState();
initRenderers();
}
Future<void> initRenderers() async {
await _localRenderer.initialize();
await _remoteRenderer.initialize();
}
@override
void dispose() {
_localRenderer.dispose();
_remoteRenderer.dispose();
super.dispose();
}
@override
Widget build(BuildContext context) {
return Scaffold(
body: Column(
children: [
Expanded(
child: RTCVideoView(_localRenderer),
),
Expanded(
child: RTCVideoView(_remoteRenderer),
),
],
),
);
}
}
4. 创建并配置 RTCPeerConnection
RTCPeerConnection
是 WebRTC 的核心,用于管理本端和对端的连接。你需要创建一个 RTCPeerConnection
并配置相应的音视频流:
final Map<String, dynamic> configuration = {
"iceServers": [
{"url": "stun:stun.l.google.com:19302"},
]
};
final RTCPeerConnection peerConnection = await createPeerConnection(configuration);
final MediaStream localStream = await navigator.mediaDevices.getUserMedia(
{"audio": true, "video": true},
);
_localRenderer.srcObject = localStream;
peerConnection.onTrack = (RTCTrackEvent event) {
if (event.track.kind == 'video') {
_remoteRenderer.srcObject = event.streams[0];
}
};
localStream.getTracks().forEach((track) {
peerConnection.addTrack(track, localStream);
});
5. 处理信令
WebRTC 需要一个信令服务器来交换 SDP 和 ICE 候选者。你可以使用 WebSocket 或者其他方式来实现信令:
void sendSignal(dynamic data) {
// 通过WebSocket或其他方式发送信令数据
}
void onSignalReceived(dynamic data) {
// 处理接收到的信令数据
if (data['type'] == 'offer') {
handleOffer(data);
} else if (data['type'] == 'answer') {
handleAnswer(data);
} else if (data['type'] == 'candidate') {
handleCandidate(data);
}
}
6. 创建和应答 Offer/Answer
你需要根据需要创建 Offer 或 Answer,并通过信令服务器发送给对端:
void createOffer() async {
final RTCSessionDescription offer = await peerConnection.createOffer(
{'offerToReceiveVideo': 1});
await peerConnection.setLocalDescription(offer);
sendSignal({'type': 'offer', 'sdp': offer.sdp});
}
void handleOffer(dynamic data) async {
final RTCSessionDescription description = RTCSessionDescription(
data['sdp'],
data['type'],
);
await peerConnection.setRemoteDescription(description);
final RTCSessionDescription answer = await peerConnection.createAnswer();
await peerConnection.setLocalDescription(answer);
sendSignal({'type': 'answer', 'sdp': answer.sdp});
}
void handleAnswer(dynamic data) async {
final RTCSessionDescription description = RTCSessionDescription(
data['sdp'],
data['type'],
);
await peerConnection.setRemoteDescription(description);
}
7. 处理 ICE 候选者
处理 ICE 候选者并添加到 RTCPeerConnection
:
peerConnection.onIceCandidate = (RTCIceCandidate candidate) {
sendSignal({'type': 'candidate', 'candidate': candidate.candidate});
};
void handleCandidate(dynamic data) {
final RTCIceCandidate candidate = RTCIceCandidate(
data['candidate'],
data['sdpMid'],
data['sdpMLineIndex'],
);
peerConnection.addCandidate(candidate);
}
8. 结束通话
结束通话时,释放资源:
void endCall() {
peerConnection.close();
localStream.getTracks().forEach((track) => track.stop());
_localRenderer.srcObject = null;
_remoteRenderer.srcObject = null;
}