Using the Flutter WebRTC Video Streaming Plugin flutter_kinesis_video_webrtc

Posted 1 week ago by bupafengyu in Flutter

Usage

flutter_kinesis_video_webrtc is a plugin for streaming WebRTC video in Flutter apps, with support for AWS Kinesis Video Streams integration. The basic usage and a complete example demo are shown below.

Initializing the PeerConnection

Future<void> peerConnection() async {
  // Create a SignalingClient instance for signaling with Kinesis Video Streams
  SignalingClient _signalingClient = SignalingClient(
    accessKey: 'YOUR_AWS_ACCESS_KEY',  // AWS access key
    secretKey: 'YOUR_AWS_SECRET_KEY',  // AWS secret key
    region: 'YOUR_REGION',             // AWS region
    channelName: 'YOUR_CHANNEL_NAME',  // Kinesis Video Streams signaling channel name
  );

  // Initialize the SignalingClient
  await _signalingClient.init();
}
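
Once init() completes, the client exposes the resolved ICE servers and the signed signaling endpoint that the rest of the flow relies on. Continuing inside peerConnection(), here is a minimal sketch of how they feed into a peer connection, assuming flutter_webrtc's createPeerConnection and the SignalingClient.iceServers field used in the full example below:

  // Sketch only: create the peer connection with the ICE servers returned by
  // the signaling client (this mirrors the full example further down).
  RTCPeerConnection peerConn = await createPeerConnection({
    'iceServers': _signalingClient.iceServers,
    'iceTransportPolicy': 'all',
  });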

iOS Configuration

To allow the app to access the camera and microphone, add the following permissions to the Info.plist file:

<key>NSCameraUsageDescription</key>
<string>$(PRODUCT_NAME) Camera Usage!</string>
<key>NSMicrophoneUsageDescription</key>
<string>$(PRODUCT_NAME) Microphone Usage!</string>

Android Configuration

Make sure the following permissions are added to the AndroidManifest.xml file:

<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />

If you need to use Bluetooth devices, also add the following permissions:

<uses-permission android:name="android.permission.BLUETOOTH" android:maxSdkVersion="30" />
<uses-permission android:name="android.permission.BLUETOOTH_ADMIN" android:maxSdkVersion="30" />

In addition, it is recommended to raise minSdkVersion to 23 or higher to ensure compatibility.
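
A minimal sketch of that change, assuming the default Flutter project layout (android/app/build.gradle; newer Kotlin DSL templates use minSdk instead of minSdkVersion):

android {
    defaultConfig {
        // Raise the minimum supported SDK so the WebRTC native libraries load
        minSdkVersion 23
    }
}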

Complete Example Code

The following complete example shows how to use the flutter_kinesis_video_webrtc plugin for WebRTC video streaming in a Flutter app.

import 'dart:convert';

import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_kinesis_video_webrtc/flutter_kinesis_video_webrtc.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';

void main() {
  runApp(const MyApp());
}

class MyApp extends StatelessWidget {
  const MyApp({super.key});

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      title: 'KVSWebrtcExample',
      theme: ThemeData(
        primarySwatch: Colors.blue,
      ),
      home: const FlutterKinesisVideoWebrtcExample(),
    );
  }
}

class FlutterKinesisVideoWebrtcExample extends StatefulWidget {
  const FlutterKinesisVideoWebrtcExample({super.key});

  @override
  State<FlutterKinesisVideoWebrtcExample> createState() => _FlutterKinesisVideoWebrtcExampleState();
}

class _FlutterKinesisVideoWebrtcExampleState extends State<FlutterKinesisVideoWebrtcExample> {
  final TextEditingController _accessKeyController = TextEditingController();
  final TextEditingController _secretKeyController = TextEditingController();
  final TextEditingController _regionController = TextEditingController();
  final TextEditingController _channelNameController = TextEditingController();
  final RTCVideoRenderer _rtcVideoRenderer = RTCVideoRenderer();
  RTCPeerConnection? _rtcPeerConnection;
  late SignalingClient _signalingClient;
  bool sendAudio = false;
  bool sendVideo = false;
  MediaStream? _localStream;

  @override
  void initState() {
    // Initialize the RTCVideoRenderer
    _rtcVideoRenderer.initialize();
    super.initState();
  }

  @override
  void dispose() {
    // Release resources
    _accessKeyController.dispose();
    _secretKeyController.dispose();
    _regionController.dispose();
    _channelNameController.dispose();
    _rtcVideoRenderer.dispose();
    _rtcPeerConnection?.dispose();
    super.dispose();
  }

  Future<void> peerConnection() async {
    // Create the SignalingClient instance
    _signalingClient = SignalingClient(
      channelName: _channelNameController.text.trim(),
      accessKey: _accessKeyController.text.trim(),
      secretKey: _secretKeyController.text.trim(),
      region: _regionController.text.trim(),
    );

    // Initialize the SignalingClient
    await _signalingClient.init();

    // Create the RTCPeerConnection
    _rtcPeerConnection = await createPeerConnection({
      'iceServers': _signalingClient.iceServers,
      'iceTransportPolicy': 'all'
    });

    // Render the received remote video stream
    _rtcPeerConnection!.onTrack = (event) {
      _rtcVideoRenderer.srcObject = event.streams[0];
      setState(() {});
    };

    // If local audio and/or video should be sent
    if (sendAudio || sendVideo) {
      _localStream = await navigator.mediaDevices.getUserMedia({
        'audio': sendAudio,
        'video': sendVideo,
      });

      _localStream!.getTracks().forEach((track) {
        _rtcPeerConnection!.addTrack(track, _localStream!);
        setState(() {});
      });
    }

    // Create the WebSocket connection to the signaling channel
    var webSocket = SimpleWebSocket(_signalingClient.domain ?? '',
        _signalingClient.signedQueryParams ?? <String, dynamic>{});

    // Handle incoming signaling messages
    webSocket.onMessage = (data) async {
      if (data != '') {
        var objectOfData = jsonDecode(data);
        print("-------------------- receiving ${objectOfData['messageType']} --------------------");
        if (objectOfData['messageType'] == "SDP_ANSWER") {
          var decodedAns = jsonDecode(utf8.decode(base64.decode(objectOfData['messagePayload'])));
          await _rtcPeerConnection?.setRemoteDescription(RTCSessionDescription(
            decodedAns["sdp"],
            decodedAns["type"],
          ));
        } else if (objectOfData['messageType'] == "ICE_CANDIDATE") {
          var decodedCandidate = jsonDecode(utf8.decode(base64.decode(objectOfData['messagePayload'])));
          await _rtcPeerConnection?.addCandidate(
            RTCIceCandidate(decodedCandidate["candidate"], decodedCandidate["sdpMid"], decodedCandidate["sdpMLineIndex"]),
          );
        }
      }
    };

    // Send the SDP_OFFER once the WebSocket connection opens
    webSocket.onOpen = () async {
      if (kDebugMode) {
        print("-------------------- socket opened --------------------");
        print("-------------------- sending 'SDP_OFFER' --------------------");
      }
      RTCSessionDescription offer = await _rtcPeerConnection!.createOffer({
        'mandatory': {
          'OfferToReceiveAudio': true,
          'OfferToReceiveVideo': true,
        },
        'optional': [],
      });
      await _rtcPeerConnection!.setLocalDescription(offer);
      RTCSessionDescription? localDescription = await _rtcPeerConnection?.getLocalDescription();
      var request = {};
      request["action"] = "SDP_OFFER";
      request["messagePayload"] = base64.encode(jsonEncode(localDescription?.toMap()).codeUnits);
      webSocket.send(jsonEncode(request));
    };

    // Forward local ICE candidates over the signaling channel
    _rtcPeerConnection!.onIceCandidate = (RTCIceCandidate candidate) {
      if (kDebugMode) {
        print("-------------------- sending 'ICE_CANDIDATE' --------------------");
      }
      var request = {};
      request["action"] = "ICE_CANDIDATE";
      request["messagePayload"] = base64.encode(jsonEncode(candidate.toMap()).codeUnits);
      webSocket.send(jsonEncode(request));
    };

    // Connect the WebSocket
    await webSocket.connect();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: const Text('Example'),
      ),
      body: SafeArea(
        child: SingleChildScrollView(
          child: SizedBox(
            height: MediaQuery.of(context).size.height * 1.4,
            child: Column(
              children: [
                Expanded(
                  flex: 4,
                  child: Padding(
                    padding: const EdgeInsets.all(20),
                    child: Column(
                      crossAxisAlignment: CrossAxisAlignment.start,
                      children: [
                        const Text("ACCESS-KEY", textScaleFactor: 1.0),
                        TextFormField(
                          keyboardType: TextInputType.text,
                          maxLength: 128,
                          controller: _accessKeyController,
                          decoration: const InputDecoration(
                            hintText: "Enter access key",
                            counterText: "",
                          ),
                        ),
                        const SizedBox(height: 20),
                        const Text("SECRET-KEY", textScaleFactor: 1.0),
                        TextFormField(
                          keyboardType: TextInputType.visiblePassword,
                          obscureText: true,
                          controller: _secretKeyController,
                          decoration: const InputDecoration(
                            hintText: "Enter secret key",
                            counterText: "",
                          ),
                        ),
                        const SizedBox(height: 20),
                        const Text("REGION", textScaleFactor: 1.0),
                        TextFormField(
                          keyboardType: TextInputType.text,
                          controller: _regionController,
                          decoration: const InputDecoration(
                            hintText: "Enter region ex: us-east-1",
                          ),
                        ),
                        const SizedBox(height: 20),
                        const Text("CHANNEL NAME", textScaleFactor: 1.0),
                        TextFormField(
                          keyboardType: TextInputType.text,
                          controller: _channelNameController,
                          decoration: const InputDecoration(
                            hintText: "Enter channel name",
                          ),
                        ),
                        const SizedBox(height: 20),
                        Center(
                          child: ElevatedButton(
                            onPressed: () {
                              peerConnection();
                              setState(() {});
                            },
                            child: Container(
                              padding: const EdgeInsets.symmetric(vertical: 20),
                              width: MediaQuery.of(context).size.width - 80,
                              child: const Text(
                                "START WEBRTC",
                                textAlign: TextAlign.center,
                                textScaleFactor: 1.0,
                              ),
                            ),
                          ),
                        ),
                      ],
                    ),
                  ),
                ),
                Expanded(
                  flex: 6,
                  child: _rtcVideoRenderer.renderVideo
                      ? Center(
                          child: AspectRatio(
                            aspectRatio: _rtcVideoRenderer.value.aspectRatio,
                            child: RTCVideoView(_rtcVideoRenderer),
                          ),
                        )
                      : const Center(
                          child: CircularProgressIndicator(),
                        ),
                ),
              ],
            ),
          ),
        ),
      ),
    );
  }
}
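
Both directions of the signaling exchange in the example above use the same framing: a JSON envelope whose messagePayload field is base64-encoded JSON (outgoing messages carry an action field, incoming ones a messageType field). The helpers below are a hypothetical sketch of that framing, extracted from the example; the function names are illustrative only.

import 'dart:convert';

// Hypothetical helpers mirroring the message framing used in the example above.
String encodeSignalingMessage(String action, Map<String, dynamic> payload) {
  return jsonEncode({
    'action': action,
    'messagePayload': base64.encode(utf8.encode(jsonEncode(payload))),
  });
}

Map<String, dynamic> decodeSignalingPayload(String message) {
  final envelope = jsonDecode(message);
  return jsonDecode(utf8.decode(base64.decode(envelope['messagePayload'])));
}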

More hands-on tutorials in this series on using the Flutter WebRTC video streaming plugin flutter_kinesis_video_webrtc are available at https://www.itying.com/category-92-b0.html

1 Reply

Here is a simple code example of how to use the flutter_kinesis_video_webrtc plugin in a Flutter project to implement a WebRTC video stream. It shows how to set up and connect a WebRTC client. Note that flutter_kinesis_video_webrtc is mainly intended for WebRTC communication with Amazon Kinesis Video Streams, so you will likely need a Kinesis Video Stream to test this example.

1. Add the dependency

First, add the flutter_kinesis_video_webrtc dependency to your pubspec.yaml file:

dependencies:
  flutter:
    sdk: flutter
  flutter_kinesis_video_webrtc: ^x.y.z  # replace with the latest version number

Then run flutter pub get to install the dependency.

2. Configure permissions (Android)

Add the necessary permissions in android/app/src/main/AndroidManifest.xml:

<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>

3. Implement the WebRTC connection

The following simple Flutter app shows how to establish a WebRTC connection with flutter_kinesis_video_webrtc:

import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart'; // for RTCSessionDescription and MediaStream
import 'package:flutter_kinesis_video_webrtc/flutter_kinesis_video_webrtc.dart';

void main() {
  runApp(MyApp());
}

class MyApp extends StatefulWidget {
  @override
  _MyAppState createState() => _MyAppState();
}

class _MyAppState extends State<MyApp> {
  KinesisVideoWebRTCClient? _client;
  String _signalingUrl = 'YOUR_SIGNALING_SERVER_URL'; // replace with your signaling server URL

  @override
  void initState() {
    super.initState();
    initWebRTC();
  }

  Future<void> initWebRTC() async {
    _client = KinesisVideoWebRTCClient(
      signalingUrl: _signalingUrl,
      iceServers: [
        // Add your ICE server configuration here
        IceServer('stun:stun.l.google.com:19302'),
      ],
    );

    _client!.onSignalingChange = (SignalingState state) {
      print('Signaling state changed: $state');
      if (state == SignalingState.CONNECTED) {
        // Once connected, an Offer or Answer can be created
        createOffer();
      }
    };

    _client!.onRemoteStreamAdded = (MediaStream stream) {
      print('Remote stream added');
      // Handle the remote video stream here, e.g. render it in a video view
    };

    await _client!.connect();
  }

  Future<void> createOffer() async {
    if (_client!.signalingState == SignalingState.CONNECTED) {
      RTCSessionDescription? offer = await _client!.createOffer();
      await _client!.setLocalDescription(offer!);
      // Send the Offer to your signaling server
      // await signalingServer.sendOffer(offer.sdp);
    }
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: Scaffold(
        appBar: AppBar(
          title: Text('Flutter Kinesis Video WebRTC Example'),
        ),
        body: Center(
          child: Text('Connecting to WebRTC...'),
        ),
      ),
    );
  }

  @override
  void dispose() {
    _client?.close();
    super.dispose();
  }
}

Notes

  1. Signaling server: this example assumes you have a signaling server that handles the WebRTC signaling exchange. Replace _signalingUrl with your signaling server URL and implement the communication with that server (for example, sending the Offer and Answer).

  2. ICE servers: the example uses Google's STUN server. If you need a TURN server for NAT traversal, add the corresponding entry to the iceServers list (see the sketch after this list).

  3. Error handling: for brevity, the example contains no error-handling logic. In a real application, add appropriate error handling to keep the app robust.

  4. UI updates: the example UI is deliberately minimal and only shows a "connecting" text. In a real application, you will typically render the local and remote video streams and react to WebRTC events (connection-state changes, streams being added or removed, and so on).

This example provides a basic skeleton that you can extend and adapt to your needs.
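
As mentioned in note 2, a TURN entry can be added alongside the STUN server. Below is a minimal sketch using the createPeerConnection configuration map from the first example in this thread; the TURN URL and credentials are placeholders, not real endpoints.

RTCPeerConnection pc = await createPeerConnection({
  'iceServers': [
    {'urls': 'stun:stun.l.google.com:19302'},
    {
      // Placeholder TURN relay; replace with your own server and credentials
      'urls': 'turn:turn.example.com:3478',
      'username': 'YOUR_TURN_USERNAME',
      'credential': 'YOUR_TURN_PASSWORD',
    },
  ],
  'iceTransportPolicy': 'all',
});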
